/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2021 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
37 #include "stor-layout.h"
39 #include "gimple-fold.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
44 #include "tree-object-size.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
63 #include "diagnostic-core.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
70 enum strlen_range_kind
{
71 /* Compute the exact constant string length. */
73 /* Compute the maximum constant string length. */
75 /* Compute a range of string lengths bounded by object sizes. When
76 the length of a string cannot be determined, consider as the upper
77 bound the size of the enclosing object the string may be a member
78 or element of. Also determine the size of the largest character
79 array the string may refer to. */
81 /* Determine the integer value of the argument (not string length). */
86 get_range_strlen (tree
, bitmap
*, strlen_range_kind
, c_strlen_data
*, unsigned);
88 /* Return true when DECL can be referenced from current unit.
89 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
90 We can get declarations that are not possible to reference for various
93 1) When analyzing C++ virtual tables.
94 C++ virtual tables do have known constructors even
95 when they are keyed to other compilation unit.
96 Those tables can contain pointers to methods and vars
97 in other units. Those methods have both STATIC and EXTERNAL
99 2) In WHOPR mode devirtualization might lead to reference
100 to method that was partitioned elsehwere.
101 In this case we have static VAR_DECL or FUNCTION_DECL
102 that has no corresponding callgraph/varpool node
104 3) COMDAT functions referred by external vtables that
105 we devirtualize only during final compilation stage.
106 At this time we already decided that we will not output
107 the function body and thus we can't reference the symbol
111 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
114 struct cgraph_node
*node
;
117 if (DECL_ABSTRACT_P (decl
))
120 /* We are concerned only about static/external vars and functions. */
121 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
122 || !VAR_OR_FUNCTION_DECL_P (decl
))
125 /* Static objects can be referred only if they are defined and not optimized
127 if (!TREE_PUBLIC (decl
))
129 if (DECL_EXTERNAL (decl
))
131 /* Before we start optimizing unreachable code we can be sure all
132 static objects are defined. */
133 if (symtab
->function_flags_ready
)
135 snode
= symtab_node::get (decl
);
136 if (!snode
|| !snode
->definition
)
138 node
= dyn_cast
<cgraph_node
*> (snode
);
139 return !node
|| !node
->inlined_to
;
142 /* We will later output the initializer, so we can refer to it.
143 So we are concerned only when DECL comes from initializer of
144 external var or var that has been optimized out. */
146 || !VAR_P (from_decl
)
147 || (!DECL_EXTERNAL (from_decl
)
148 && (vnode
= varpool_node::get (from_decl
)) != NULL
149 && vnode
->definition
)
151 && (vnode
= varpool_node::get (from_decl
)) != NULL
152 && vnode
->in_other_partition
))
154 /* We are folding reference from external vtable. The vtable may reffer
155 to a symbol keyed to other compilation unit. The other compilation
156 unit may be in separate DSO and the symbol may be hidden. */
157 if (DECL_VISIBILITY_SPECIFIED (decl
)
158 && DECL_EXTERNAL (decl
)
159 && DECL_VISIBILITY (decl
) != VISIBILITY_DEFAULT
160 && (!(snode
= symtab_node::get (decl
)) || !snode
->in_other_partition
))
162 /* When function is public, we always can introduce new reference.
163 Exception are the COMDAT functions where introducing a direct
164 reference imply need to include function body in the curren tunit. */
165 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
167 /* We have COMDAT. We are going to check if we still have definition
168 or if the definition is going to be output in other partition.
169 Bypass this when gimplifying; all needed functions will be produced.
171 As observed in PR20991 for already optimized out comdat virtual functions
172 it may be tempting to not necessarily give up because the copy will be
173 output elsewhere when corresponding vtable is output.
174 This is however not possible - ABI specify that COMDATs are output in
175 units where they are used and when the other unit was compiled with LTO
176 it is possible that vtable was kept public while the function itself
178 if (!symtab
->function_flags_ready
)
181 snode
= symtab_node::get (decl
);
183 || ((!snode
->definition
|| DECL_EXTERNAL (decl
))
184 && (!snode
->in_other_partition
185 || (!snode
->forced_by_abi
&& !snode
->force_output
))))
187 node
= dyn_cast
<cgraph_node
*> (snode
);
188 return !node
|| !node
->inlined_to
;
191 /* Create a temporary for TYPE for a statement STMT. If the current function
192 is in SSA form, a SSA name is created. Otherwise a temporary register
196 create_tmp_reg_or_ssa_name (tree type
, gimple
*stmt
)
198 if (gimple_in_ssa_p (cfun
))
199 return make_ssa_name (type
, stmt
);
201 return create_tmp_reg (type
);
204 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
205 acceptable form for is_gimple_min_invariant.
206 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
209 canonicalize_constructor_val (tree cval
, tree from_decl
)
211 if (CONSTANT_CLASS_P (cval
))
214 tree orig_cval
= cval
;
216 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
217 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
219 tree ptr
= TREE_OPERAND (cval
, 0);
220 if (is_gimple_min_invariant (ptr
))
221 cval
= build1_loc (EXPR_LOCATION (cval
),
222 ADDR_EXPR
, TREE_TYPE (ptr
),
223 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
225 fold_convert (ptr_type_node
,
226 TREE_OPERAND (cval
, 1))));
228 if (TREE_CODE (cval
) == ADDR_EXPR
)
230 tree base
= NULL_TREE
;
231 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
233 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
235 TREE_OPERAND (cval
, 0) = base
;
238 base
= get_base_address (TREE_OPERAND (cval
, 0));
242 if (VAR_OR_FUNCTION_DECL_P (base
)
243 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
245 if (TREE_TYPE (base
) == error_mark_node
)
248 TREE_ADDRESSABLE (base
) = 1;
249 else if (TREE_CODE (base
) == FUNCTION_DECL
)
251 /* Make sure we create a cgraph node for functions we'll reference.
252 They can be non-existent if the reference comes from an entry
253 of an external vtable for example. */
254 cgraph_node::get_create (base
);
256 /* Fixup types in global initializers. */
257 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
258 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
260 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
261 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
264 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
265 if (TREE_CODE (cval
) == INTEGER_CST
)
267 if (TREE_OVERFLOW_P (cval
))
268 cval
= drop_tree_overflow (cval
);
269 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
270 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
276 /* If SYM is a constant variable with known value, return the value.
277 NULL_TREE is returned otherwise. */
280 get_symbol_constant_value (tree sym
)
282 tree val
= ctor_for_folding (sym
);
283 if (val
!= error_mark_node
)
287 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
288 if (val
&& is_gimple_min_invariant (val
))
293 /* Variables declared 'const' without an initializer
294 have zero as the initializer if they may not be
295 overridden at link or run time. */
297 && is_gimple_reg_type (TREE_TYPE (sym
)))
298 return build_zero_cst (TREE_TYPE (sym
));
306 /* Subroutine of fold_stmt. We perform several simplifications of the
307 memory reference tree EXPR and make sure to re-gimplify them properly
308 after propagation of constant addresses. IS_LHS is true if the
309 reference is supposed to be an lvalue. */
312 maybe_fold_reference (tree expr
, bool is_lhs
)
316 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
317 || TREE_CODE (expr
) == REALPART_EXPR
318 || TREE_CODE (expr
) == IMAGPART_EXPR
)
319 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
320 return fold_unary_loc (EXPR_LOCATION (expr
),
323 TREE_OPERAND (expr
, 0));
324 else if (TREE_CODE (expr
) == BIT_FIELD_REF
325 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
326 return fold_ternary_loc (EXPR_LOCATION (expr
),
329 TREE_OPERAND (expr
, 0),
330 TREE_OPERAND (expr
, 1),
331 TREE_OPERAND (expr
, 2));
334 && (result
= fold_const_aggregate_ref (expr
))
335 && is_gimple_min_invariant (result
))
342 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
343 replacement rhs for the statement or NULL_TREE if no simplification
344 could be made. It is assumed that the operands have been previously
348 fold_gimple_assign (gimple_stmt_iterator
*si
)
350 gimple
*stmt
= gsi_stmt (*si
);
351 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
352 location_t loc
= gimple_location (stmt
);
354 tree result
= NULL_TREE
;
356 switch (get_gimple_rhs_class (subcode
))
358 case GIMPLE_SINGLE_RHS
:
360 tree rhs
= gimple_assign_rhs1 (stmt
);
362 if (TREE_CLOBBER_P (rhs
))
365 if (REFERENCE_CLASS_P (rhs
))
366 return maybe_fold_reference (rhs
, false);
368 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
)
370 tree val
= OBJ_TYPE_REF_EXPR (rhs
);
371 if (is_gimple_min_invariant (val
))
373 else if (flag_devirtualize
&& virtual_method_call_p (rhs
))
376 vec
<cgraph_node
*>targets
377 = possible_polymorphic_call_targets (rhs
, stmt
, &final
);
378 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
380 if (dump_enabled_p ())
382 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, stmt
,
383 "resolving virtual function address "
384 "reference to function %s\n",
385 targets
.length () == 1
386 ? targets
[0]->name ()
389 if (targets
.length () == 1)
391 val
= fold_convert (TREE_TYPE (val
),
392 build_fold_addr_expr_loc
393 (loc
, targets
[0]->decl
));
394 STRIP_USELESS_TYPE_CONVERSION (val
);
397 /* We cannot use __builtin_unreachable here because it
398 cannot have address taken. */
399 val
= build_int_cst (TREE_TYPE (val
), 0);
405 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
407 tree ref
= TREE_OPERAND (rhs
, 0);
408 tree tem
= maybe_fold_reference (ref
, true);
410 && TREE_CODE (tem
) == MEM_REF
411 && integer_zerop (TREE_OPERAND (tem
, 1)))
412 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
414 result
= fold_convert (TREE_TYPE (rhs
),
415 build_fold_addr_expr_loc (loc
, tem
));
416 else if (TREE_CODE (ref
) == MEM_REF
417 && integer_zerop (TREE_OPERAND (ref
, 1)))
418 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
422 /* Strip away useless type conversions. Both the
423 NON_LVALUE_EXPR that may have been added by fold, and
424 "useless" type conversions that might now be apparent
425 due to propagation. */
426 STRIP_USELESS_TYPE_CONVERSION (result
);
428 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
433 else if (TREE_CODE (rhs
) == CONSTRUCTOR
434 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
)
436 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
440 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
441 if (! CONSTANT_CLASS_P (val
))
444 return build_vector_from_ctor (TREE_TYPE (rhs
),
445 CONSTRUCTOR_ELTS (rhs
));
448 else if (DECL_P (rhs
))
449 return get_symbol_constant_value (rhs
);
453 case GIMPLE_UNARY_RHS
:
456 case GIMPLE_BINARY_RHS
:
459 case GIMPLE_TERNARY_RHS
:
460 result
= fold_ternary_loc (loc
, subcode
,
461 TREE_TYPE (gimple_assign_lhs (stmt
)),
462 gimple_assign_rhs1 (stmt
),
463 gimple_assign_rhs2 (stmt
),
464 gimple_assign_rhs3 (stmt
));
468 STRIP_USELESS_TYPE_CONVERSION (result
);
469 if (valid_gimple_rhs_p (result
))
474 case GIMPLE_INVALID_RHS
:
482 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
483 adjusting the replacement stmts location and virtual operands.
484 If the statement has a lhs the last stmt in the sequence is expected
485 to assign to that lhs. */
488 gsi_replace_with_seq_vops (gimple_stmt_iterator
*si_p
, gimple_seq stmts
)
490 gimple
*stmt
= gsi_stmt (*si_p
);
492 if (gimple_has_location (stmt
))
493 annotate_all_with_location (stmts
, gimple_location (stmt
));
495 /* First iterate over the replacement statements backward, assigning
496 virtual operands to their defining statements. */
497 gimple
*laststore
= NULL
;
498 for (gimple_stmt_iterator i
= gsi_last (stmts
);
499 !gsi_end_p (i
); gsi_prev (&i
))
501 gimple
*new_stmt
= gsi_stmt (i
);
502 if ((gimple_assign_single_p (new_stmt
)
503 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
504 || (is_gimple_call (new_stmt
)
505 && (gimple_call_flags (new_stmt
)
506 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
510 vdef
= gimple_vdef (stmt
);
512 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
513 gimple_set_vdef (new_stmt
, vdef
);
514 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
515 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
516 laststore
= new_stmt
;
520 /* Second iterate over the statements forward, assigning virtual
521 operands to their uses. */
522 tree reaching_vuse
= gimple_vuse (stmt
);
523 for (gimple_stmt_iterator i
= gsi_start (stmts
);
524 !gsi_end_p (i
); gsi_next (&i
))
526 gimple
*new_stmt
= gsi_stmt (i
);
527 /* If the new statement possibly has a VUSE, update it with exact SSA
528 name we know will reach this one. */
529 if (gimple_has_mem_ops (new_stmt
))
530 gimple_set_vuse (new_stmt
, reaching_vuse
);
531 gimple_set_modified (new_stmt
, true);
532 if (gimple_vdef (new_stmt
))
533 reaching_vuse
= gimple_vdef (new_stmt
);
536 /* If the new sequence does not do a store release the virtual
537 definition of the original statement. */
539 && reaching_vuse
== gimple_vuse (stmt
))
541 tree vdef
= gimple_vdef (stmt
);
543 && TREE_CODE (vdef
) == SSA_NAME
)
545 unlink_stmt_vdef (stmt
);
546 release_ssa_name (vdef
);
550 /* Finally replace the original statement with the sequence. */
551 gsi_replace_with_seq (si_p
, stmts
, false);
554 /* Convert EXPR into a GIMPLE value suitable for substitution on the
555 RHS of an assignment. Insert the necessary statements before
556 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
557 is replaced. If the call is expected to produces a result, then it
558 is replaced by an assignment of the new RHS to the result variable.
559 If the result is to be ignored, then the call is replaced by a
560 GIMPLE_NOP. A proper VDEF chain is retained by making the first
561 VUSE and the last VDEF of the whole sequence be the same as the replaced
562 statement and using new SSA names for stores in between. */
565 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
568 gimple
*stmt
, *new_stmt
;
569 gimple_stmt_iterator i
;
570 gimple_seq stmts
= NULL
;
572 stmt
= gsi_stmt (*si_p
);
574 gcc_assert (is_gimple_call (stmt
));
576 push_gimplify_context (gimple_in_ssa_p (cfun
));
578 lhs
= gimple_call_lhs (stmt
);
579 if (lhs
== NULL_TREE
)
581 gimplify_and_add (expr
, &stmts
);
582 /* We can end up with folding a memcpy of an empty class assignment
583 which gets optimized away by C++ gimplification. */
584 if (gimple_seq_empty_p (stmts
))
586 pop_gimplify_context (NULL
);
587 if (gimple_in_ssa_p (cfun
))
589 unlink_stmt_vdef (stmt
);
592 gsi_replace (si_p
, gimple_build_nop (), false);
598 tree tmp
= force_gimple_operand (expr
, &stmts
, false, NULL_TREE
);
599 new_stmt
= gimple_build_assign (lhs
, tmp
);
600 i
= gsi_last (stmts
);
601 gsi_insert_after_without_update (&i
, new_stmt
,
602 GSI_CONTINUE_LINKING
);
605 pop_gimplify_context (NULL
);
607 gsi_replace_with_seq_vops (si_p
, stmts
);
611 /* Replace the call at *GSI with the gimple value VAL. */
614 replace_call_with_value (gimple_stmt_iterator
*gsi
, tree val
)
616 gimple
*stmt
= gsi_stmt (*gsi
);
617 tree lhs
= gimple_call_lhs (stmt
);
621 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (val
)))
622 val
= fold_convert (TREE_TYPE (lhs
), val
);
623 repl
= gimple_build_assign (lhs
, val
);
626 repl
= gimple_build_nop ();
627 tree vdef
= gimple_vdef (stmt
);
628 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
630 unlink_stmt_vdef (stmt
);
631 release_ssa_name (vdef
);
633 gsi_replace (gsi
, repl
, false);
636 /* Replace the call at *GSI with the new call REPL and fold that
640 replace_call_with_call_and_fold (gimple_stmt_iterator
*gsi
, gimple
*repl
)
642 gimple
*stmt
= gsi_stmt (*gsi
);
643 gimple_call_set_lhs (repl
, gimple_call_lhs (stmt
));
644 gimple_set_location (repl
, gimple_location (stmt
));
645 gimple_move_vops (repl
, stmt
);
646 gsi_replace (gsi
, repl
, false);
650 /* Return true if VAR is a VAR_DECL or a component thereof. */
653 var_decl_component_p (tree var
)
656 while (handled_component_p (inner
))
657 inner
= TREE_OPERAND (inner
, 0);
658 return (DECL_P (inner
)
659 || (TREE_CODE (inner
) == MEM_REF
660 && TREE_CODE (TREE_OPERAND (inner
, 0)) == ADDR_EXPR
));
663 /* Return TRUE if the SIZE argument, representing the size of an
664 object, is in a range of values of which exactly zero is valid. */
667 size_must_be_zero_p (tree size
)
669 if (integer_zerop (size
))
672 if (TREE_CODE (size
) != SSA_NAME
|| !INTEGRAL_TYPE_P (TREE_TYPE (size
)))
675 tree type
= TREE_TYPE (size
);
676 int prec
= TYPE_PRECISION (type
);
678 /* Compute the value of SSIZE_MAX, the largest positive value that
679 can be stored in ssize_t, the signed counterpart of size_t. */
680 wide_int ssize_max
= wi::lshift (wi::one (prec
), prec
- 1) - 1;
681 value_range
valid_range (build_int_cst (type
, 0),
682 wide_int_to_tree (type
, ssize_max
));
684 get_range_info (size
, vr
);
685 vr
.intersect (&valid_range
);
689 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
690 diagnose (otherwise undefined) overlapping copies without preventing
691 folding. When folded, GCC guarantees that overlapping memcpy has
692 the same semantics as memmove. Call to the library memcpy need not
693 provide the same guarantee. Return false if no simplification can
697 gimple_fold_builtin_memory_op (gimple_stmt_iterator
*gsi
,
698 tree dest
, tree src
, enum built_in_function code
)
700 gimple
*stmt
= gsi_stmt (*gsi
);
701 tree lhs
= gimple_call_lhs (stmt
);
702 tree len
= gimple_call_arg (stmt
, 2);
703 location_t loc
= gimple_location (stmt
);
705 /* If the LEN parameter is a constant zero or in range where
706 the only valid value is zero, return DEST. */
707 if (size_must_be_zero_p (len
))
710 if (gimple_call_lhs (stmt
))
711 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
713 repl
= gimple_build_nop ();
714 tree vdef
= gimple_vdef (stmt
);
715 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
717 unlink_stmt_vdef (stmt
);
718 release_ssa_name (vdef
);
720 gsi_replace (gsi
, repl
, false);
724 /* If SRC and DEST are the same (and not volatile), return
725 DEST{,+LEN,+LEN-1}. */
726 if (operand_equal_p (src
, dest
, 0))
728 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
729 It's safe and may even be emitted by GCC itself (see bug
731 unlink_stmt_vdef (stmt
);
732 if (gimple_vdef (stmt
) && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
733 release_ssa_name (gimple_vdef (stmt
));
736 gsi_replace (gsi
, gimple_build_nop (), false);
743 /* We cannot (easily) change the type of the copy if it is a storage
744 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
745 modify the storage order of objects (see storage_order_barrier_p). */
747 = POINTER_TYPE_P (TREE_TYPE (src
))
748 ? TREE_TYPE (TREE_TYPE (src
)) : NULL_TREE
;
750 = POINTER_TYPE_P (TREE_TYPE (dest
))
751 ? TREE_TYPE (TREE_TYPE (dest
)) : NULL_TREE
;
752 tree destvar
, srcvar
, srcoff
;
753 unsigned int src_align
, dest_align
;
754 unsigned HOST_WIDE_INT tmp_len
;
757 /* Build accesses at offset zero with a ref-all character type. */
759 = build_int_cst (build_pointer_type_for_mode (char_type_node
,
762 /* If we can perform the copy efficiently with first doing all loads
763 and then all stores inline it that way. Currently efficiently
764 means that we can load all the memory into a single integer
765 register which is what MOVE_MAX gives us. */
766 src_align
= get_pointer_alignment (src
);
767 dest_align
= get_pointer_alignment (dest
);
768 if (tree_fits_uhwi_p (len
)
769 && compare_tree_int (len
, MOVE_MAX
) <= 0
770 /* FIXME: Don't transform copies from strings with known length.
771 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
772 from being handled, and the case was XFAILed for that reason.
773 Now that it is handled and the XFAIL removed, as soon as other
774 strlenopt tests that rely on it for passing are adjusted, this
775 hack can be removed. */
776 && !c_strlen (src
, 1)
777 && !((tmp_str
= getbyterep (src
, &tmp_len
)) != NULL
778 && memchr (tmp_str
, 0, tmp_len
) == NULL
)
780 && AGGREGATE_TYPE_P (srctype
)
781 && TYPE_REVERSE_STORAGE_ORDER (srctype
))
783 && AGGREGATE_TYPE_P (desttype
)
784 && TYPE_REVERSE_STORAGE_ORDER (desttype
)))
786 unsigned ilen
= tree_to_uhwi (len
);
787 if (pow2p_hwi (ilen
))
789 /* Detect out-of-bounds accesses without issuing warnings.
790 Avoid folding out-of-bounds copies but to avoid false
791 positives for unreachable code defer warning until after
792 DCE has worked its magic.
793 -Wrestrict is still diagnosed. */
794 if (int warning
= check_bounds_or_overlap (as_a
<gcall
*>(stmt
),
797 if (warning
!= OPT_Wrestrict
)
800 scalar_int_mode mode
;
801 tree type
= lang_hooks
.types
.type_for_size (ilen
* 8, 1);
803 && is_a
<scalar_int_mode
> (TYPE_MODE (type
), &mode
)
804 && GET_MODE_SIZE (mode
) * BITS_PER_UNIT
== ilen
* 8
805 /* If the destination pointer is not aligned we must be able
806 to emit an unaligned store. */
807 && (dest_align
>= GET_MODE_ALIGNMENT (mode
)
808 || !targetm
.slow_unaligned_access (mode
, dest_align
)
809 || (optab_handler (movmisalign_optab
, mode
)
810 != CODE_FOR_nothing
)))
813 tree desttype
= type
;
814 if (src_align
< GET_MODE_ALIGNMENT (mode
))
815 srctype
= build_aligned_type (type
, src_align
);
816 tree srcmem
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
817 tree tem
= fold_const_aggregate_ref (srcmem
);
820 else if (src_align
< GET_MODE_ALIGNMENT (mode
)
821 && targetm
.slow_unaligned_access (mode
, src_align
)
822 && (optab_handler (movmisalign_optab
, mode
)
823 == CODE_FOR_nothing
))
828 if (is_gimple_reg_type (TREE_TYPE (srcmem
)))
830 new_stmt
= gimple_build_assign (NULL_TREE
, srcmem
);
832 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem
),
834 gimple_assign_set_lhs (new_stmt
, srcmem
);
835 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
836 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
838 if (dest_align
< GET_MODE_ALIGNMENT (mode
))
839 desttype
= build_aligned_type (type
, dest_align
);
841 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
,
844 gimple_move_vops (new_stmt
, stmt
);
847 gsi_replace (gsi
, new_stmt
, false);
850 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
857 if (code
== BUILT_IN_MEMMOVE
)
859 /* Both DEST and SRC must be pointer types.
860 ??? This is what old code did. Is the testing for pointer types
863 If either SRC is readonly or length is 1, we can use memcpy. */
864 if (!dest_align
|| !src_align
)
866 if (readonly_data_expr (src
)
867 || (tree_fits_uhwi_p (len
)
868 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
869 >= tree_to_uhwi (len
))))
871 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
874 gimple_call_set_fndecl (stmt
, fn
);
875 gimple_call_set_arg (stmt
, 0, dest
);
876 gimple_call_set_arg (stmt
, 1, src
);
881 /* If *src and *dest can't overlap, optimize into memcpy as well. */
882 if (TREE_CODE (src
) == ADDR_EXPR
883 && TREE_CODE (dest
) == ADDR_EXPR
)
885 tree src_base
, dest_base
, fn
;
886 poly_int64 src_offset
= 0, dest_offset
= 0;
889 srcvar
= TREE_OPERAND (src
, 0);
890 src_base
= get_addr_base_and_unit_offset (srcvar
, &src_offset
);
891 if (src_base
== NULL
)
893 destvar
= TREE_OPERAND (dest
, 0);
894 dest_base
= get_addr_base_and_unit_offset (destvar
,
896 if (dest_base
== NULL
)
898 if (!poly_int_tree_p (len
, &maxsize
))
900 if (SSA_VAR_P (src_base
)
901 && SSA_VAR_P (dest_base
))
903 if (operand_equal_p (src_base
, dest_base
, 0)
904 && ranges_maybe_overlap_p (src_offset
, maxsize
,
905 dest_offset
, maxsize
))
908 else if (TREE_CODE (src_base
) == MEM_REF
909 && TREE_CODE (dest_base
) == MEM_REF
)
911 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
912 TREE_OPERAND (dest_base
, 0), 0))
914 poly_offset_int full_src_offset
915 = mem_ref_offset (src_base
) + src_offset
;
916 poly_offset_int full_dest_offset
917 = mem_ref_offset (dest_base
) + dest_offset
;
918 if (ranges_maybe_overlap_p (full_src_offset
, maxsize
,
919 full_dest_offset
, maxsize
))
925 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
928 gimple_call_set_fndecl (stmt
, fn
);
929 gimple_call_set_arg (stmt
, 0, dest
);
930 gimple_call_set_arg (stmt
, 1, src
);
935 /* If the destination and source do not alias optimize into
937 if ((is_gimple_min_invariant (dest
)
938 || TREE_CODE (dest
) == SSA_NAME
)
939 && (is_gimple_min_invariant (src
)
940 || TREE_CODE (src
) == SSA_NAME
))
943 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
944 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
945 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
948 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
951 gimple_call_set_fndecl (stmt
, fn
);
952 gimple_call_set_arg (stmt
, 0, dest
);
953 gimple_call_set_arg (stmt
, 1, src
);
962 if (!tree_fits_shwi_p (len
))
965 || (AGGREGATE_TYPE_P (srctype
)
966 && TYPE_REVERSE_STORAGE_ORDER (srctype
)))
969 || (AGGREGATE_TYPE_P (desttype
)
970 && TYPE_REVERSE_STORAGE_ORDER (desttype
)))
972 /* In the following try to find a type that is most natural to be
973 used for the memcpy source and destination and that allows
974 the most optimization when memcpy is turned into a plain assignment
975 using that type. In theory we could always use a char[len] type
976 but that only gains us that the destination and source possibly
977 no longer will have their address taken. */
978 if (TREE_CODE (srctype
) == ARRAY_TYPE
979 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
980 srctype
= TREE_TYPE (srctype
);
981 if (TREE_CODE (desttype
) == ARRAY_TYPE
982 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
983 desttype
= TREE_TYPE (desttype
);
984 if (TREE_ADDRESSABLE (srctype
)
985 || TREE_ADDRESSABLE (desttype
))
988 /* Make sure we are not copying using a floating-point mode or
989 a type whose size possibly does not match its precision. */
990 if (FLOAT_MODE_P (TYPE_MODE (desttype
))
991 || TREE_CODE (desttype
) == BOOLEAN_TYPE
992 || TREE_CODE (desttype
) == ENUMERAL_TYPE
)
993 desttype
= bitwise_type_for_mode (TYPE_MODE (desttype
));
994 if (FLOAT_MODE_P (TYPE_MODE (srctype
))
995 || TREE_CODE (srctype
) == BOOLEAN_TYPE
996 || TREE_CODE (srctype
) == ENUMERAL_TYPE
)
997 srctype
= bitwise_type_for_mode (TYPE_MODE (srctype
));
1005 src_align
= get_pointer_alignment (src
);
1006 dest_align
= get_pointer_alignment (dest
);
1008 /* Choose between src and destination type for the access based
1009 on alignment, whether the access constitutes a register access
1010 and whether it may actually expose a declaration for SSA rewrite
1011 or SRA decomposition. Also try to expose a string constant, we
1012 might be able to concatenate several of them later into a single
1014 destvar
= NULL_TREE
;
1016 if (TREE_CODE (dest
) == ADDR_EXPR
1017 && var_decl_component_p (TREE_OPERAND (dest
, 0))
1018 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
)
1019 && dest_align
>= TYPE_ALIGN (desttype
)
1020 && (is_gimple_reg_type (desttype
)
1021 || src_align
>= TYPE_ALIGN (desttype
)))
1022 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1023 else if (TREE_CODE (src
) == ADDR_EXPR
1024 && var_decl_component_p (TREE_OPERAND (src
, 0))
1025 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
)
1026 && src_align
>= TYPE_ALIGN (srctype
)
1027 && (is_gimple_reg_type (srctype
)
1028 || dest_align
>= TYPE_ALIGN (srctype
)))
1029 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1030 /* FIXME: Don't transform copies from strings with known original length.
1031 As soon as strlenopt tests that rely on it for passing are adjusted,
1032 this hack can be removed. */
1033 else if (gimple_call_alloca_for_var_p (stmt
)
1034 && (srcvar
= string_constant (src
, &srcoff
, NULL
, NULL
))
1035 && integer_zerop (srcoff
)
1036 && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar
)), len
)
1037 && dest_align
>= TYPE_ALIGN (TREE_TYPE (srcvar
)))
1038 srctype
= TREE_TYPE (srcvar
);
1042 /* Now that we chose an access type express the other side in
1043 terms of it if the target allows that with respect to alignment
1045 if (srcvar
== NULL_TREE
)
1047 if (src_align
>= TYPE_ALIGN (desttype
))
1048 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
1051 if (STRICT_ALIGNMENT
)
1053 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1055 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1058 else if (destvar
== NULL_TREE
)
1060 if (dest_align
>= TYPE_ALIGN (srctype
))
1061 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
1064 if (STRICT_ALIGNMENT
)
1066 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
1068 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1072 /* Same as above, detect out-of-bounds accesses without issuing
1073 warnings. Avoid folding out-of-bounds copies but to avoid
1074 false positives for unreachable code defer warning until
1075 after DCE has worked its magic.
1076 -Wrestrict is still diagnosed. */
1077 if (int warning
= check_bounds_or_overlap (as_a
<gcall
*>(stmt
),
1078 dest
, src
, len
, len
,
1080 if (warning
!= OPT_Wrestrict
)
1084 if (is_gimple_reg_type (TREE_TYPE (srcvar
)))
1086 tree tem
= fold_const_aggregate_ref (srcvar
);
1089 if (! is_gimple_min_invariant (srcvar
))
1091 new_stmt
= gimple_build_assign (NULL_TREE
, srcvar
);
1092 srcvar
= create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar
),
1094 gimple_assign_set_lhs (new_stmt
, srcvar
);
1095 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1096 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1098 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1099 goto set_vop_and_replace
;
1102 /* We get an aggregate copy. If the source is a STRING_CST, then
1103 directly use its type to perform the copy. */
1104 if (TREE_CODE (srcvar
) == STRING_CST
)
1107 /* Or else, use an unsigned char[] type to perform the copy in order
1108 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
1109 types or float modes behavior on copying. */
1112 desttype
= build_array_type_nelts (unsigned_char_type_node
,
1113 tree_to_uhwi (len
));
1115 if (src_align
> TYPE_ALIGN (srctype
))
1116 srctype
= build_aligned_type (srctype
, src_align
);
1117 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1120 if (dest_align
> TYPE_ALIGN (desttype
))
1121 desttype
= build_aligned_type (desttype
, dest_align
);
1122 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1123 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1125 set_vop_and_replace
:
1126 gimple_move_vops (new_stmt
, stmt
);
1129 gsi_replace (gsi
, new_stmt
, false);
1132 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1136 gimple_seq stmts
= NULL
;
1137 if (code
== BUILT_IN_MEMCPY
|| code
== BUILT_IN_MEMMOVE
)
1139 else if (code
== BUILT_IN_MEMPCPY
)
1141 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
1142 dest
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
1143 TREE_TYPE (dest
), dest
, len
);
1148 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1149 gimple
*repl
= gimple_build_assign (lhs
, dest
);
1150 gsi_replace (gsi
, repl
, false);
1154 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1155 to built-in memcmp (a, b, len). */
1158 gimple_fold_builtin_bcmp (gimple_stmt_iterator
*gsi
)
1160 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
1165 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1167 gimple
*stmt
= gsi_stmt (*gsi
);
1168 tree a
= gimple_call_arg (stmt
, 0);
1169 tree b
= gimple_call_arg (stmt
, 1);
1170 tree len
= gimple_call_arg (stmt
, 2);
1172 gimple
*repl
= gimple_build_call (fn
, 3, a
, b
, len
);
1173 replace_call_with_call_and_fold (gsi
, repl
);
1178 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1179 to built-in memmove (dest, src, len). */
1182 gimple_fold_builtin_bcopy (gimple_stmt_iterator
*gsi
)
1184 tree fn
= builtin_decl_implicit (BUILT_IN_MEMMOVE
);
1189 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1190 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1191 len) into memmove (dest, src, len). */
1193 gimple
*stmt
= gsi_stmt (*gsi
);
1194 tree src
= gimple_call_arg (stmt
, 0);
1195 tree dest
= gimple_call_arg (stmt
, 1);
1196 tree len
= gimple_call_arg (stmt
, 2);
1198 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1199 gimple_call_set_fntype (as_a
<gcall
*> (stmt
), TREE_TYPE (fn
));
1200 replace_call_with_call_and_fold (gsi
, repl
);
1205 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1206 to built-in memset (dest, 0, len). */
1209 gimple_fold_builtin_bzero (gimple_stmt_iterator
*gsi
)
1211 tree fn
= builtin_decl_implicit (BUILT_IN_MEMSET
);
1216 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1218 gimple
*stmt
= gsi_stmt (*gsi
);
1219 tree dest
= gimple_call_arg (stmt
, 0);
1220 tree len
= gimple_call_arg (stmt
, 1);
1222 gimple_seq seq
= NULL
;
1223 gimple
*repl
= gimple_build_call (fn
, 3, dest
, integer_zero_node
, len
);
1224 gimple_seq_add_stmt_without_update (&seq
, repl
);
1225 gsi_replace_with_seq_vops (gsi
, seq
);
1231 /* Fold function call to builtin memset or bzero at *GSI setting the
1232 memory of size LEN to VAL. Return whether a simplification was made. */
1235 gimple_fold_builtin_memset (gimple_stmt_iterator
*gsi
, tree c
, tree len
)
1237 gimple
*stmt
= gsi_stmt (*gsi
);
1239 unsigned HOST_WIDE_INT length
, cval
;
1241 /* If the LEN parameter is zero, return DEST. */
1242 if (integer_zerop (len
))
1244 replace_call_with_value (gsi
, gimple_call_arg (stmt
, 0));
1248 if (! tree_fits_uhwi_p (len
))
1251 if (TREE_CODE (c
) != INTEGER_CST
)
1254 tree dest
= gimple_call_arg (stmt
, 0);
1256 if (TREE_CODE (var
) != ADDR_EXPR
)
1259 var
= TREE_OPERAND (var
, 0);
1260 if (TREE_THIS_VOLATILE (var
))
1263 etype
= TREE_TYPE (var
);
1264 if (TREE_CODE (etype
) == ARRAY_TYPE
)
1265 etype
= TREE_TYPE (etype
);
1267 if (!INTEGRAL_TYPE_P (etype
)
1268 && !POINTER_TYPE_P (etype
))
1271 if (! var_decl_component_p (var
))
1274 length
= tree_to_uhwi (len
);
1275 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype
)) != length
1276 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype
))
1277 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype
)))
1278 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
1281 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
1284 if (!type_has_mode_precision_p (etype
))
1285 etype
= lang_hooks
.types
.type_for_mode (SCALAR_INT_TYPE_MODE (etype
),
1286 TYPE_UNSIGNED (etype
));
1288 if (integer_zerop (c
))
1292 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
1295 cval
= TREE_INT_CST_LOW (c
);
1299 cval
|= (cval
<< 31) << 1;
1302 var
= fold_build2 (MEM_REF
, etype
, dest
, build_int_cst (ptr_type_node
, 0));
1303 gimple
*store
= gimple_build_assign (var
, build_int_cst_type (etype
, cval
));
1304 gimple_move_vops (store
, stmt
);
1305 gsi_insert_before (gsi
, store
, GSI_SAME_STMT
);
1306 if (gimple_call_lhs (stmt
))
1308 gimple
*asgn
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
1309 gsi_replace (gsi
, asgn
, false);
1313 gimple_stmt_iterator gsi2
= *gsi
;
1315 gsi_remove (&gsi2
, true);
1321 /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
1324 get_range_strlen_tree (tree arg
, bitmap
*visited
, strlen_range_kind rkind
,
1325 c_strlen_data
*pdata
, unsigned eltsize
)
1327 gcc_assert (TREE_CODE (arg
) != SSA_NAME
);
1329 /* The length computed by this invocation of the function. */
1330 tree val
= NULL_TREE
;
1332 /* True if VAL is an optimistic (tight) bound determined from
1333 the size of the character array in which the string may be
1334 stored. In that case, the computed VAL is used to set
1336 bool tight_bound
= false;
1338 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1339 if (TREE_CODE (arg
) == ADDR_EXPR
1340 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
)
1342 tree op
= TREE_OPERAND (arg
, 0);
1343 if (integer_zerop (TREE_OPERAND (op
, 1)))
1345 tree aop0
= TREE_OPERAND (op
, 0);
1346 if (TREE_CODE (aop0
) == INDIRECT_REF
1347 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
1348 return get_range_strlen (TREE_OPERAND (aop0
, 0), visited
, rkind
,
1351 else if (TREE_CODE (TREE_OPERAND (op
, 0)) == COMPONENT_REF
1352 && rkind
== SRK_LENRANGE
)
1354 /* Fail if an array is the last member of a struct object
1355 since it could be treated as a (fake) flexible array
1357 tree idx
= TREE_OPERAND (op
, 1);
1359 arg
= TREE_OPERAND (op
, 0);
1360 tree optype
= TREE_TYPE (arg
);
1361 if (tree dom
= TYPE_DOMAIN (optype
))
1362 if (tree bound
= TYPE_MAX_VALUE (dom
))
1363 if (TREE_CODE (bound
) == INTEGER_CST
1364 && TREE_CODE (idx
) == INTEGER_CST
1365 && tree_int_cst_lt (bound
, idx
))
1370 if (rkind
== SRK_INT_VALUE
)
1372 /* We are computing the maximum value (not string length). */
1374 if (TREE_CODE (val
) != INTEGER_CST
1375 || tree_int_cst_sgn (val
) < 0)
1380 c_strlen_data lendata
= { };
1381 val
= c_strlen (arg
, 1, &lendata
, eltsize
);
1383 if (!val
&& lendata
.decl
)
1385 /* ARG refers to an unterminated const character array.
1386 DATA.DECL with size DATA.LEN. */
1387 val
= lendata
.minlen
;
1388 pdata
->decl
= lendata
.decl
;
1392 /* Set if VAL represents the maximum length based on array size (set
1393 when exact length cannot be determined). */
1394 bool maxbound
= false;
1396 if (!val
&& rkind
== SRK_LENRANGE
)
1398 if (TREE_CODE (arg
) == ADDR_EXPR
)
1399 return get_range_strlen (TREE_OPERAND (arg
, 0), visited
, rkind
,
1402 if (TREE_CODE (arg
) == ARRAY_REF
)
1404 tree optype
= TREE_TYPE (TREE_OPERAND (arg
, 0));
1406 /* Determine the "innermost" array type. */
1407 while (TREE_CODE (optype
) == ARRAY_TYPE
1408 && TREE_CODE (TREE_TYPE (optype
)) == ARRAY_TYPE
)
1409 optype
= TREE_TYPE (optype
);
1411 /* Avoid arrays of pointers. */
1412 tree eltype
= TREE_TYPE (optype
);
1413 if (TREE_CODE (optype
) != ARRAY_TYPE
1414 || !INTEGRAL_TYPE_P (eltype
))
1417 /* Fail when the array bound is unknown or zero. */
1418 val
= TYPE_SIZE_UNIT (optype
);
1420 || TREE_CODE (val
) != INTEGER_CST
1421 || integer_zerop (val
))
1424 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1427 /* Set the minimum size to zero since the string in
1428 the array could have zero length. */
1429 pdata
->minlen
= ssize_int (0);
1433 else if (TREE_CODE (arg
) == COMPONENT_REF
1434 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg
, 1)))
1437 /* Use the type of the member array to determine the upper
1438 bound on the length of the array. This may be overly
1439 optimistic if the array itself isn't NUL-terminated and
1440 the caller relies on the subsequent member to contain
1441 the NUL but that would only be considered valid if
1442 the array were the last member of a struct. */
1444 tree fld
= TREE_OPERAND (arg
, 1);
1446 tree optype
= TREE_TYPE (fld
);
1448 /* Determine the "innermost" array type. */
1449 while (TREE_CODE (optype
) == ARRAY_TYPE
1450 && TREE_CODE (TREE_TYPE (optype
)) == ARRAY_TYPE
)
1451 optype
= TREE_TYPE (optype
);
1453 /* Fail when the array bound is unknown or zero. */
1454 val
= TYPE_SIZE_UNIT (optype
);
1456 || TREE_CODE (val
) != INTEGER_CST
1457 || integer_zerop (val
))
1459 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1462 /* Set the minimum size to zero since the string in
1463 the array could have zero length. */
1464 pdata
->minlen
= ssize_int (0);
1466 /* The array size determined above is an optimistic bound
1467 on the length. If the array isn't nul-terminated the
1468 length computed by the library function would be greater.
1469 Even though using strlen to cross the subobject boundary
1470 is undefined, avoid drawing conclusions from the member
1471 type about the length here. */
1474 else if (TREE_CODE (arg
) == MEM_REF
1475 && TREE_CODE (TREE_TYPE (arg
)) == ARRAY_TYPE
1476 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == INTEGER_TYPE
1477 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ADDR_EXPR
)
1479 /* Handle a MEM_REF into a DECL accessing an array of integers,
1480 being conservative about references to extern structures with
1481 flexible array members that can be initialized to arbitrary
1482 numbers of elements as an extension (static structs are okay).
1483 FIXME: Make this less conservative -- see
1484 component_ref_size in tree.c. */
1485 tree ref
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
1486 if ((TREE_CODE (ref
) == PARM_DECL
|| VAR_P (ref
))
1487 && (decl_binds_to_current_def_p (ref
)
1488 || !array_at_struct_end_p (arg
)))
1490 /* Fail if the offset is out of bounds. Such accesses
1491 should be diagnosed at some point. */
1492 val
= DECL_SIZE_UNIT (ref
);
1494 || TREE_CODE (val
) != INTEGER_CST
1495 || integer_zerop (val
))
1498 poly_offset_int psiz
= wi::to_offset (val
);
1499 poly_offset_int poff
= mem_ref_offset (arg
);
1500 if (known_le (psiz
, poff
))
1503 pdata
->minlen
= ssize_int (0);
1505 /* Subtract the offset and one for the terminating nul. */
1508 val
= wide_int_to_tree (TREE_TYPE (val
), psiz
);
1509 /* Since VAL reflects the size of a declared object
1510 rather the type of the access it is not a tight bound. */
1513 else if (TREE_CODE (arg
) == PARM_DECL
|| VAR_P (arg
))
1515 /* Avoid handling pointers to arrays. GCC might misuse
1516 a pointer to an array of one bound to point to an array
1517 object of a greater bound. */
1518 tree argtype
= TREE_TYPE (arg
);
1519 if (TREE_CODE (argtype
) == ARRAY_TYPE
)
1521 val
= TYPE_SIZE_UNIT (argtype
);
1523 || TREE_CODE (val
) != INTEGER_CST
1524 || integer_zerop (val
))
1526 val
= wide_int_to_tree (TREE_TYPE (val
),
1527 wi::sub (wi::to_wide (val
), 1));
1529 /* Set the minimum size to zero since the string in
1530 the array could have zero length. */
1531 pdata
->minlen
= ssize_int (0);
1540 /* Adjust the lower bound on the string length as necessary. */
1542 || (rkind
!= SRK_STRLEN
1543 && TREE_CODE (pdata
->minlen
) == INTEGER_CST
1544 && TREE_CODE (val
) == INTEGER_CST
1545 && tree_int_cst_lt (val
, pdata
->minlen
)))
1546 pdata
->minlen
= val
;
1548 if (pdata
->maxbound
&& TREE_CODE (pdata
->maxbound
) == INTEGER_CST
)
1550 /* Adjust the tighter (more optimistic) string length bound
1551 if necessary and proceed to adjust the more conservative
1553 if (TREE_CODE (val
) == INTEGER_CST
)
1555 if (tree_int_cst_lt (pdata
->maxbound
, val
))
1556 pdata
->maxbound
= val
;
1559 pdata
->maxbound
= val
;
1561 else if (pdata
->maxbound
|| maxbound
)
1562 /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
1563 if VAL corresponds to the maximum length determined based
1564 on the type of the object. */
1565 pdata
->maxbound
= val
;
1569 /* VAL computed above represents an optimistically tight bound
1570 on the length of the string based on the referenced object's
1571 or subobject's type. Determine the conservative upper bound
1572 based on the enclosing object's size if possible. */
1573 if (rkind
== SRK_LENRANGE
)
1576 tree base
= get_addr_base_and_unit_offset (arg
, &offset
);
1579 /* When the call above fails due to a non-constant offset
1580 assume the offset is zero and use the size of the whole
1581 enclosing object instead. */
1582 base
= get_base_address (arg
);
1585 /* If the base object is a pointer no upper bound on the length
1586 can be determined. Otherwise the maximum length is equal to
1587 the size of the enclosing object minus the offset of
1588 the referenced subobject minus 1 (for the terminating nul). */
1589 tree type
= TREE_TYPE (base
);
1590 if (TREE_CODE (type
) == POINTER_TYPE
1591 || (TREE_CODE (base
) != PARM_DECL
&& !VAR_P (base
))
1592 || !(val
= DECL_SIZE_UNIT (base
)))
1593 val
= build_all_ones_cst (size_type_node
);
1596 val
= DECL_SIZE_UNIT (base
);
1597 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1598 size_int (offset
+ 1));
1607 /* Adjust the more conservative bound if possible/necessary
1608 and fail otherwise. */
1609 if (rkind
!= SRK_STRLEN
)
1611 if (TREE_CODE (pdata
->maxlen
) != INTEGER_CST
1612 || TREE_CODE (val
) != INTEGER_CST
)
1615 if (tree_int_cst_lt (pdata
->maxlen
, val
))
1616 pdata
->maxlen
= val
;
1619 else if (simple_cst_equal (val
, pdata
->maxlen
) != 1)
1621 /* Fail if the length of this ARG is different from that
1622 previously determined from another ARG. */
1627 pdata
->maxlen
= val
;
1628 return rkind
== SRK_LENRANGE
|| !integer_all_onesp (val
);
1631 /* For an ARG referencing one or more strings, try to obtain the range
1632 of their lengths, or the size of the largest array ARG referes to if
1633 the range of lengths cannot be determined, and store all in *PDATA.
1634 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1635 the maximum constant value.
1636 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1637 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1638 length or if we are unable to determine the length, return false.
1639 VISITED is a bitmap of visited variables.
1640 RKIND determines the kind of value or range to obtain (see
1642 Set PDATA->DECL if ARG refers to an unterminated constant array.
1643 On input, set ELTSIZE to 1 for normal single byte character strings,
1644 and either 2 or 4 for wide characer strings (the size of wchar_t).
1645 Return true if *PDATA was successfully populated and false otherwise. */
1648 get_range_strlen (tree arg
, bitmap
*visited
,
1649 strlen_range_kind rkind
,
1650 c_strlen_data
*pdata
, unsigned eltsize
)
1653 if (TREE_CODE (arg
) != SSA_NAME
)
1654 return get_range_strlen_tree (arg
, visited
, rkind
, pdata
, eltsize
);
1656 /* If ARG is registered for SSA update we cannot look at its defining
1658 if (name_registered_for_update_p (arg
))
1661 /* If we were already here, break the infinite cycle. */
1663 *visited
= BITMAP_ALLOC (NULL
);
1664 if (!bitmap_set_bit (*visited
, SSA_NAME_VERSION (arg
)))
1668 gimple
*def_stmt
= SSA_NAME_DEF_STMT (var
);
1670 switch (gimple_code (def_stmt
))
1673 /* The RHS of the statement defining VAR must either have a
1674 constant length or come from another SSA_NAME with a constant
1676 if (gimple_assign_single_p (def_stmt
)
1677 || gimple_assign_unary_nop_p (def_stmt
))
1679 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1680 return get_range_strlen (rhs
, visited
, rkind
, pdata
, eltsize
);
1682 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
1684 tree ops
[2] = { gimple_assign_rhs2 (def_stmt
),
1685 gimple_assign_rhs3 (def_stmt
) };
1687 for (unsigned int i
= 0; i
< 2; i
++)
1688 if (!get_range_strlen (ops
[i
], visited
, rkind
, pdata
, eltsize
))
1690 if (rkind
!= SRK_LENRANGE
)
1692 /* Set the upper bound to the maximum to prevent
1693 it from being adjusted in the next iteration but
1694 leave MINLEN and the more conservative MAXBOUND
1695 determined so far alone (or leave them null if
1696 they haven't been set yet). That the MINLEN is
1697 in fact zero can be determined from MAXLEN being
1698 unbounded but the discovered minimum is used for
1700 pdata
->maxlen
= build_all_ones_cst (size_type_node
);
1707 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1708 must have a constant length. */
1709 for (unsigned i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1711 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
1713 /* If this PHI has itself as an argument, we cannot
1714 determine the string length of this argument. However,
1715 if we can find a constant string length for the other
1716 PHI args then we can still be sure that this is a
1717 constant string length. So be optimistic and just
1718 continue with the next argument. */
1719 if (arg
== gimple_phi_result (def_stmt
))
1722 if (!get_range_strlen (arg
, visited
, rkind
, pdata
, eltsize
))
1724 if (rkind
!= SRK_LENRANGE
)
1726 /* Set the upper bound to the maximum to prevent
1727 it from being adjusted in the next iteration but
1728 leave MINLEN and the more conservative MAXBOUND
1729 determined so far alone (or leave them null if
1730 they haven't been set yet). That the MINLEN is
1731 in fact zero can be determined from MAXLEN being
1732 unbounded but the discovered minimum is used for
1734 pdata
->maxlen
= build_all_ones_cst (size_type_node
);
1744 /* Try to obtain the range of the lengths of the string(s) referenced
1745 by ARG, or the size of the largest array ARG refers to if the range
1746 of lengths cannot be determined, and store all in *PDATA which must
1747 be zero-initialized on input except PDATA->MAXBOUND may be set to
1748 a non-null tree node other than INTEGER_CST to request to have it
1749 set to the length of the longest string in a PHI. ELTSIZE is
1750 the expected size of the string element in bytes: 1 for char and
1751 some power of 2 for wide characters.
1752 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1753 for optimization. Returning false means that a nonzero PDATA->MINLEN
1754 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1755 is -1 (in that case, the actual range is indeterminate, i.e.,
1756 [0, PTRDIFF_MAX - 2]. */
1759 get_range_strlen (tree arg
, c_strlen_data
*pdata
, unsigned eltsize
)
1761 bitmap visited
= NULL
;
1762 tree maxbound
= pdata
->maxbound
;
1764 if (!get_range_strlen (arg
, &visited
, SRK_LENRANGE
, pdata
, eltsize
))
1766 /* On failure extend the length range to an impossible maximum
1767 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1768 members can stay unchanged regardless. */
1769 pdata
->minlen
= ssize_int (0);
1770 pdata
->maxlen
= build_all_ones_cst (size_type_node
);
1772 else if (!pdata
->minlen
)
1773 pdata
->minlen
= ssize_int (0);
1775 /* If it's unchanged from it initial non-null value, set the conservative
1776 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1777 if (maxbound
&& pdata
->maxbound
== maxbound
)
1778 pdata
->maxbound
= build_all_ones_cst (size_type_node
);
1781 BITMAP_FREE (visited
);
1783 return !integer_all_onesp (pdata
->maxlen
);
1786 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1787 For ARG of pointer types, NONSTR indicates if the caller is prepared
1788 to handle unterminated strings. For integer ARG and when RKIND ==
1789 SRK_INT_VALUE, NONSTR must be null.
1791 If an unterminated array is discovered and our caller handles
1792 unterminated arrays, then bubble up the offending DECL and
1793 return the maximum size. Otherwise return NULL. */
1796 get_maxval_strlen (tree arg
, strlen_range_kind rkind
, tree
*nonstr
= NULL
)
1798 /* A non-null NONSTR is meaningless when determining the maximum
1799 value of an integer ARG. */
1800 gcc_assert (rkind
!= SRK_INT_VALUE
|| nonstr
== NULL
);
1801 /* ARG must have an integral type when RKIND says so. */
1802 gcc_assert (rkind
!= SRK_INT_VALUE
|| INTEGRAL_TYPE_P (TREE_TYPE (arg
)));
1804 bitmap visited
= NULL
;
1806 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1808 c_strlen_data lendata
= { };
1809 if (!get_range_strlen (arg
, &visited
, rkind
, &lendata
, /* eltsize = */1))
1810 lendata
.maxlen
= NULL_TREE
;
1811 else if (lendata
.maxlen
&& integer_all_onesp (lendata
.maxlen
))
1812 lendata
.maxlen
= NULL_TREE
;
1815 BITMAP_FREE (visited
);
1819 /* For callers prepared to handle unterminated arrays set
1820 *NONSTR to point to the declaration of the array and return
1821 the maximum length/size. */
1822 *nonstr
= lendata
.decl
;
1823 return lendata
.maxlen
;
1826 /* Fail if the constant array isn't nul-terminated. */
1827 return lendata
.decl
? NULL_TREE
: lendata
.maxlen
;
1831 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1832 If LEN is not NULL, it represents the length of the string to be
1833 copied. Return NULL_TREE if no simplification can be made. */
1836 gimple_fold_builtin_strcpy (gimple_stmt_iterator
*gsi
,
1837 tree dest
, tree src
)
1839 gimple
*stmt
= gsi_stmt (*gsi
);
1840 location_t loc
= gimple_location (stmt
);
1843 /* If SRC and DEST are the same (and not volatile), return DEST. */
1844 if (operand_equal_p (src
, dest
, 0))
1846 /* Issue -Wrestrict unless the pointers are null (those do
1847 not point to objects and so do not indicate an overlap;
1848 such calls could be the result of sanitization and jump
1850 if (!integer_zerop (dest
) && !gimple_no_warning_p (stmt
))
1852 tree func
= gimple_call_fndecl (stmt
);
1854 warning_at (loc
, OPT_Wrestrict
,
1855 "%qD source argument is the same as destination",
1859 replace_call_with_value (gsi
, dest
);
1863 if (optimize_function_for_size_p (cfun
))
1866 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1870 /* Set to non-null if ARG refers to an unterminated array. */
1872 tree len
= get_maxval_strlen (src
, SRK_STRLEN
, &nonstr
);
1876 /* Avoid folding calls with unterminated arrays. */
1877 if (!gimple_no_warning_p (stmt
))
1878 warn_string_no_nul (loc
, NULL_TREE
, "strcpy", src
, nonstr
);
1879 gimple_set_no_warning (stmt
, true);
1886 len
= fold_convert_loc (loc
, size_type_node
, len
);
1887 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
1888 len
= force_gimple_operand_gsi (gsi
, len
, true,
1889 NULL_TREE
, true, GSI_SAME_STMT
);
1890 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1891 replace_call_with_call_and_fold (gsi
, repl
);
1895 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1896 If SLEN is not NULL, it represents the length of the source string.
1897 Return NULL_TREE if no simplification can be made. */
1900 gimple_fold_builtin_strncpy (gimple_stmt_iterator
*gsi
,
1901 tree dest
, tree src
, tree len
)
1903 gimple
*stmt
= gsi_stmt (*gsi
);
1904 location_t loc
= gimple_location (stmt
);
1905 bool nonstring
= get_attr_nonstring_decl (dest
) != NULL_TREE
;
1907 /* If the LEN parameter is zero, return DEST. */
1908 if (integer_zerop (len
))
1910 /* Avoid warning if the destination refers to an array/pointer
1911 decorate with attribute nonstring. */
1914 tree fndecl
= gimple_call_fndecl (stmt
);
1916 /* Warn about the lack of nul termination: the result is not
1917 a (nul-terminated) string. */
1918 tree slen
= get_maxval_strlen (src
, SRK_STRLEN
);
1919 if (slen
&& !integer_zerop (slen
))
1920 warning_at (loc
, OPT_Wstringop_truncation
,
1921 "%G%qD destination unchanged after copying no bytes "
1922 "from a string of length %E",
1923 stmt
, fndecl
, slen
);
1925 warning_at (loc
, OPT_Wstringop_truncation
,
1926 "%G%qD destination unchanged after copying no bytes",
1930 replace_call_with_value (gsi
, dest
);
1934 /* We can't compare slen with len as constants below if len is not a
1936 if (TREE_CODE (len
) != INTEGER_CST
)
1939 /* Now, we must be passed a constant src ptr parameter. */
1940 tree slen
= get_maxval_strlen (src
, SRK_STRLEN
);
1941 if (!slen
|| TREE_CODE (slen
) != INTEGER_CST
)
1944 /* The size of the source string including the terminating nul. */
1945 tree ssize
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
1947 /* We do not support simplification of this case, though we do
1948 support it when expanding trees into RTL. */
1949 /* FIXME: generate a call to __builtin_memset. */
1950 if (tree_int_cst_lt (ssize
, len
))
1953 /* Diagnose truncation that leaves the copy unterminated. */
1954 maybe_diag_stxncpy_trunc (*gsi
, src
, len
);
1956 /* OK transform into builtin memcpy. */
1957 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1961 len
= fold_convert_loc (loc
, size_type_node
, len
);
1962 len
= force_gimple_operand_gsi (gsi
, len
, true,
1963 NULL_TREE
, true, GSI_SAME_STMT
);
1964 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1965 replace_call_with_call_and_fold (gsi
, repl
);
1970 /* Fold function call to builtin strchr or strrchr.
1971 If both arguments are constant, evaluate and fold the result,
1972 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1973 In general strlen is significantly faster than strchr
1974 due to being a simpler operation. */
1976 gimple_fold_builtin_strchr (gimple_stmt_iterator
*gsi
, bool is_strrchr
)
1978 gimple
*stmt
= gsi_stmt (*gsi
);
1979 tree str
= gimple_call_arg (stmt
, 0);
1980 tree c
= gimple_call_arg (stmt
, 1);
1981 location_t loc
= gimple_location (stmt
);
1985 if (!gimple_call_lhs (stmt
))
1988 /* Avoid folding if the first argument is not a nul-terminated array.
1989 Defer warning until later. */
1990 if (!check_nul_terminated_array (NULL_TREE
, str
))
1993 if ((p
= c_getstr (str
)) && target_char_cst_p (c
, &ch
))
1995 const char *p1
= is_strrchr
? strrchr (p
, ch
) : strchr (p
, ch
);
1999 replace_call_with_value (gsi
, integer_zero_node
);
2003 tree len
= build_int_cst (size_type_node
, p1
- p
);
2004 gimple_seq stmts
= NULL
;
2005 gimple
*new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
2006 POINTER_PLUS_EXPR
, str
, len
);
2007 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2008 gsi_replace_with_seq_vops (gsi
, stmts
);
2012 if (!integer_zerop (c
))
2015 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
2016 if (is_strrchr
&& optimize_function_for_size_p (cfun
))
2018 tree strchr_fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
2022 gimple
*repl
= gimple_build_call (strchr_fn
, 2, str
, c
);
2023 replace_call_with_call_and_fold (gsi
, repl
);
2031 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
2036 /* Create newstr = strlen (str). */
2037 gimple_seq stmts
= NULL
;
2038 gimple
*new_stmt
= gimple_build_call (strlen_fn
, 1, str
);
2039 gimple_set_location (new_stmt
, loc
);
2040 len
= create_tmp_reg_or_ssa_name (size_type_node
);
2041 gimple_call_set_lhs (new_stmt
, len
);
2042 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2044 /* Create (str p+ strlen (str)). */
2045 new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
2046 POINTER_PLUS_EXPR
, str
, len
);
2047 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2048 gsi_replace_with_seq_vops (gsi
, stmts
);
2049 /* gsi now points at the assignment to the lhs, get a
2050 stmt iterator to the strlen.
2051 ??? We can't use gsi_for_stmt as that doesn't work when the
2052 CFG isn't built yet. */
2053 gimple_stmt_iterator gsi2
= *gsi
;
2059 /* Fold function call to builtin strstr.
2060 If both arguments are constant, evaluate and fold the result,
2061 additionally fold strstr (x, "") into x and strstr (x, "c")
2062 into strchr (x, 'c'). */
2064 gimple_fold_builtin_strstr (gimple_stmt_iterator
*gsi
)
2066 gimple
*stmt
= gsi_stmt (*gsi
);
2067 if (!gimple_call_lhs (stmt
))
2070 tree haystack
= gimple_call_arg (stmt
, 0);
2071 tree needle
= gimple_call_arg (stmt
, 1);
2073 /* Avoid folding if either argument is not a nul-terminated array.
2074 Defer warning until later. */
2075 if (!check_nul_terminated_array (NULL_TREE
, haystack
)
2076 || !check_nul_terminated_array (NULL_TREE
, needle
))
2079 const char *q
= c_getstr (needle
);
2083 if (const char *p
= c_getstr (haystack
))
2085 const char *r
= strstr (p
, q
);
2089 replace_call_with_value (gsi
, integer_zero_node
);
2093 tree len
= build_int_cst (size_type_node
, r
- p
);
2094 gimple_seq stmts
= NULL
;
2096 = gimple_build_assign (gimple_call_lhs (stmt
), POINTER_PLUS_EXPR
,
2098 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2099 gsi_replace_with_seq_vops (gsi
, stmts
);
2103 /* For strstr (x, "") return x. */
2106 replace_call_with_value (gsi
, haystack
);
2110 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2113 tree strchr_fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
2116 tree c
= build_int_cst (integer_type_node
, q
[0]);
2117 gimple
*repl
= gimple_build_call (strchr_fn
, 2, haystack
, c
);
2118 replace_call_with_call_and_fold (gsi
, repl
);
2126 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2129 Return NULL_TREE if no simplification was possible, otherwise return the
2130 simplified form of the call as a tree.
2132 The simplified form may be a constant or other expression which
2133 computes the same value, but in a more efficient manner (including
2134 calls to other builtin functions).
2136 The call may contain arguments which need to be evaluated, but
2137 which are not useful to determine the result of the call. In
2138 this case we return a chain of COMPOUND_EXPRs. The LHS of each
2139 COMPOUND_EXPR will be an argument which must be evaluated.
2140 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
2141 COMPOUND_EXPR in the chain will contain the tree for the simplified
2142 form of the builtin function call. */
2145 gimple_fold_builtin_strcat (gimple_stmt_iterator
*gsi
, tree dst
, tree src
)
2147 gimple
*stmt
= gsi_stmt (*gsi
);
2148 location_t loc
= gimple_location (stmt
);
2150 const char *p
= c_getstr (src
);
2152 /* If the string length is zero, return the dst parameter. */
2153 if (p
&& *p
== '\0')
2155 replace_call_with_value (gsi
, dst
);
2159 if (!optimize_bb_for_speed_p (gimple_bb (stmt
)))
2162 /* See if we can store by pieces into (dst + strlen(dst)). */
2164 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
2165 tree memcpy_fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2167 if (!strlen_fn
|| !memcpy_fn
)
2170 /* If the length of the source string isn't computable don't
2171 split strcat into strlen and memcpy. */
2172 tree len
= get_maxval_strlen (src
, SRK_STRLEN
);
2176 /* Create strlen (dst). */
2177 gimple_seq stmts
= NULL
, stmts2
;
2178 gimple
*repl
= gimple_build_call (strlen_fn
, 1, dst
);
2179 gimple_set_location (repl
, loc
);
2180 newdst
= create_tmp_reg_or_ssa_name (size_type_node
);
2181 gimple_call_set_lhs (repl
, newdst
);
2182 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2184 /* Create (dst p+ strlen (dst)). */
2185 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
2186 newdst
= force_gimple_operand (newdst
, &stmts2
, true, NULL_TREE
);
2187 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
2189 len
= fold_convert_loc (loc
, size_type_node
, len
);
2190 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
2191 build_int_cst (size_type_node
, 1));
2192 len
= force_gimple_operand (len
, &stmts2
, true, NULL_TREE
);
2193 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
2195 repl
= gimple_build_call (memcpy_fn
, 3, newdst
, src
, len
);
2196 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2197 if (gimple_call_lhs (stmt
))
2199 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dst
);
2200 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2201 gsi_replace_with_seq_vops (gsi
, stmts
);
2202 /* gsi now points at the assignment to the lhs, get a
2203 stmt iterator to the memcpy call.
2204 ??? We can't use gsi_for_stmt as that doesn't work when the
2205 CFG isn't built yet. */
2206 gimple_stmt_iterator gsi2
= *gsi
;
2212 gsi_replace_with_seq_vops (gsi
, stmts
);
2218 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2219 are the arguments to the call. */
2222 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator
*gsi
)
2224 gimple
*stmt
= gsi_stmt (*gsi
);
2225 tree dest
= gimple_call_arg (stmt
, 0);
2226 tree src
= gimple_call_arg (stmt
, 1);
2227 tree size
= gimple_call_arg (stmt
, 2);
2233 /* If the SRC parameter is "", return DEST. */
2234 if (p
&& *p
== '\0')
2236 replace_call_with_value (gsi
, dest
);
2240 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
2243 /* If __builtin_strcat_chk is used, assume strcat is available. */
2244 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
2248 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
2249 replace_call_with_call_and_fold (gsi
, repl
);
2253 /* Simplify a call to the strncat builtin. */
2256 gimple_fold_builtin_strncat (gimple_stmt_iterator
*gsi
)
2258 gimple
*stmt
= gsi_stmt (*gsi
);
2259 tree dst
= gimple_call_arg (stmt
, 0);
2260 tree src
= gimple_call_arg (stmt
, 1);
2261 tree len
= gimple_call_arg (stmt
, 2);
2263 const char *p
= c_getstr (src
);
2265 /* If the requested length is zero, or the src parameter string
2266 length is zero, return the dst parameter. */
2267 if (integer_zerop (len
) || (p
&& *p
== '\0'))
2269 replace_call_with_value (gsi
, dst
);
2273 if (TREE_CODE (len
) != INTEGER_CST
|| !p
)
2276 unsigned srclen
= strlen (p
);
2278 int cmpsrc
= compare_tree_int (len
, srclen
);
2280 /* Return early if the requested len is less than the string length.
2281 Warnings will be issued elsewhere later. */
2285 unsigned HOST_WIDE_INT dstsize
;
2287 bool nowarn
= gimple_no_warning_p (stmt
);
2289 if (!nowarn
&& compute_builtin_object_size (dst
, 1, &dstsize
))
2291 int cmpdst
= compare_tree_int (len
, dstsize
);
2295 tree fndecl
= gimple_call_fndecl (stmt
);
2297 /* Strncat copies (at most) LEN bytes and always appends
2298 the terminating NUL so the specified bound should never
2299 be equal to (or greater than) the size of the destination.
2300 If it is, the copy could overflow. */
2301 location_t loc
= gimple_location (stmt
);
2302 nowarn
= warning_at (loc
, OPT_Wstringop_overflow_
,
2304 ? G_("%G%qD specified bound %E equals "
2306 : G_("%G%qD specified bound %E exceeds "
2307 "destination size %wu"),
2308 stmt
, fndecl
, len
, dstsize
);
2310 gimple_set_no_warning (stmt
, true);
2314 if (!nowarn
&& cmpsrc
== 0)
2316 tree fndecl
= gimple_call_fndecl (stmt
);
2317 location_t loc
= gimple_location (stmt
);
2319 /* To avoid possible overflow the specified bound should also
2320 not be equal to the length of the source, even when the size
2321 of the destination is unknown (it's not an uncommon mistake
2322 to specify as the bound to strncpy the length of the source). */
2323 if (warning_at (loc
, OPT_Wstringop_overflow_
,
2324 "%G%qD specified bound %E equals source length",
2326 gimple_set_no_warning (stmt
, true);
2329 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
2331 /* If the replacement _DECL isn't initialized, don't do the
2336 /* Otherwise, emit a call to strcat. */
2337 gcall
*repl
= gimple_build_call (fn
, 2, dst
, src
);
2338 replace_call_with_call_and_fold (gsi
, repl
);
2342 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2346 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator
*gsi
)
2348 gimple
*stmt
= gsi_stmt (*gsi
);
2349 tree dest
= gimple_call_arg (stmt
, 0);
2350 tree src
= gimple_call_arg (stmt
, 1);
2351 tree len
= gimple_call_arg (stmt
, 2);
2352 tree size
= gimple_call_arg (stmt
, 3);
2357 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2358 if ((p
&& *p
== '\0')
2359 || integer_zerop (len
))
2361 replace_call_with_value (gsi
, dest
);
2365 if (! tree_fits_uhwi_p (size
))
2368 if (! integer_all_onesp (size
))
2370 tree src_len
= c_strlen (src
, 1);
2372 && tree_fits_uhwi_p (src_len
)
2373 && tree_fits_uhwi_p (len
)
2374 && ! tree_int_cst_lt (len
, src_len
))
2376 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2377 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
2381 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
2382 replace_call_with_call_and_fold (gsi
, repl
);
2388 /* If __builtin_strncat_chk is used, assume strncat is available. */
2389 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
2393 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2394 replace_call_with_call_and_fold (gsi
, repl
);
2398 /* Build and append gimple statements to STMTS that would load a first
2399 character of a memory location identified by STR. LOC is location
2400 of the statement. */
2403 gimple_load_first_char (location_t loc
, tree str
, gimple_seq
*stmts
)
2407 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
2408 tree cst_uchar_ptr_node
2409 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
2410 tree off0
= build_int_cst (cst_uchar_ptr_node
, 0);
2412 tree temp
= fold_build2_loc (loc
, MEM_REF
, cst_uchar_node
, str
, off0
);
2413 gassign
*stmt
= gimple_build_assign (NULL_TREE
, temp
);
2414 var
= create_tmp_reg_or_ssa_name (cst_uchar_node
, stmt
);
2416 gimple_assign_set_lhs (stmt
, var
);
2417 gimple_seq_add_stmt_without_update (stmts
, stmt
);
2422 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
2425 gimple_fold_builtin_string_compare (gimple_stmt_iterator
*gsi
)
2427 gimple
*stmt
= gsi_stmt (*gsi
);
2428 tree callee
= gimple_call_fndecl (stmt
);
2429 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
2431 tree type
= integer_type_node
;
2432 tree str1
= gimple_call_arg (stmt
, 0);
2433 tree str2
= gimple_call_arg (stmt
, 1);
2434 tree lhs
= gimple_call_lhs (stmt
);
2436 tree bound_node
= NULL_TREE
;
2437 unsigned HOST_WIDE_INT bound
= HOST_WIDE_INT_M1U
;
2439 /* Handle strncmp and strncasecmp functions. */
2440 if (gimple_call_num_args (stmt
) == 3)
2442 bound_node
= gimple_call_arg (stmt
, 2);
2443 if (tree_fits_uhwi_p (bound_node
))
2444 bound
= tree_to_uhwi (bound_node
);
2447 /* If the BOUND parameter is zero, return zero. */
2450 replace_call_with_value (gsi
, integer_zero_node
);
2454 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2455 if (operand_equal_p (str1
, str2
, 0))
2457 replace_call_with_value (gsi
, integer_zero_node
);
2461 /* Initially set to the number of characters, including the terminating
2462 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2463 the array Sx is not terminated by a nul.
2464 For nul-terminated strings then adjusted to their length so that
2465 LENx == NULPOSx holds. */
2466 unsigned HOST_WIDE_INT len1
= HOST_WIDE_INT_MAX
, len2
= len1
;
2467 const char *p1
= getbyterep (str1
, &len1
);
2468 const char *p2
= getbyterep (str2
, &len2
);
2470 /* The position of the terminating nul character if one exists, otherwise
2471 a value greater than LENx. */
2472 unsigned HOST_WIDE_INT nulpos1
= HOST_WIDE_INT_MAX
, nulpos2
= nulpos1
;
2476 size_t n
= strnlen (p1
, len1
);
2483 size_t n
= strnlen (p2
, len2
);
2488 /* For known strings, return an immediate value. */
2492 bool known_result
= false;
2496 case BUILT_IN_STRCMP
:
2497 case BUILT_IN_STRCMP_EQ
:
2498 if (len1
!= nulpos1
|| len2
!= nulpos2
)
2501 r
= strcmp (p1
, p2
);
2502 known_result
= true;
2505 case BUILT_IN_STRNCMP
:
2506 case BUILT_IN_STRNCMP_EQ
:
2508 if (bound
== HOST_WIDE_INT_M1U
)
2511 /* Reduce the bound to be no more than the length
2512 of the shorter of the two strings, or the sizes
2513 of the unterminated arrays. */
2514 unsigned HOST_WIDE_INT n
= bound
;
2516 if (len1
== nulpos1
&& len1
< n
)
2518 if (len2
== nulpos2
&& len2
< n
)
2521 if (MIN (nulpos1
, nulpos2
) + 1 < n
)
2524 r
= strncmp (p1
, p2
, n
);
2525 known_result
= true;
2528 /* Only handleable situation is where the string are equal (result 0),
2529 which is already handled by operand_equal_p case. */
2530 case BUILT_IN_STRCASECMP
:
2532 case BUILT_IN_STRNCASECMP
:
2534 if (bound
== HOST_WIDE_INT_M1U
)
2536 r
= strncmp (p1
, p2
, bound
);
2538 known_result
= true;
2547 replace_call_with_value (gsi
, build_cmp_result (type
, r
));
2552 bool nonzero_bound
= (bound
>= 1 && bound
< HOST_WIDE_INT_M1U
)
2553 || fcode
== BUILT_IN_STRCMP
2554 || fcode
== BUILT_IN_STRCMP_EQ
2555 || fcode
== BUILT_IN_STRCASECMP
;
2557 location_t loc
= gimple_location (stmt
);
2559 /* If the second arg is "", return *(const unsigned char*)arg1. */
2560 if (p2
&& *p2
== '\0' && nonzero_bound
)
2562 gimple_seq stmts
= NULL
;
2563 tree var
= gimple_load_first_char (loc
, str1
, &stmts
);
2566 stmt
= gimple_build_assign (lhs
, NOP_EXPR
, var
);
2567 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2570 gsi_replace_with_seq_vops (gsi
, stmts
);
2574 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2575 if (p1
&& *p1
== '\0' && nonzero_bound
)
2577 gimple_seq stmts
= NULL
;
2578 tree var
= gimple_load_first_char (loc
, str2
, &stmts
);
2582 tree c
= create_tmp_reg_or_ssa_name (integer_type_node
);
2583 stmt
= gimple_build_assign (c
, NOP_EXPR
, var
);
2584 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2586 stmt
= gimple_build_assign (lhs
, NEGATE_EXPR
, c
);
2587 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2590 gsi_replace_with_seq_vops (gsi
, stmts
);
2594 /* If BOUND is one, return an expression corresponding to
2595 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2596 if (fcode
== BUILT_IN_STRNCMP
&& bound
== 1)
2598 gimple_seq stmts
= NULL
;
2599 tree temp1
= gimple_load_first_char (loc
, str1
, &stmts
);
2600 tree temp2
= gimple_load_first_char (loc
, str2
, &stmts
);
2604 tree c1
= create_tmp_reg_or_ssa_name (integer_type_node
);
2605 gassign
*convert1
= gimple_build_assign (c1
, NOP_EXPR
, temp1
);
2606 gimple_seq_add_stmt_without_update (&stmts
, convert1
);
2608 tree c2
= create_tmp_reg_or_ssa_name (integer_type_node
);
2609 gassign
*convert2
= gimple_build_assign (c2
, NOP_EXPR
, temp2
);
2610 gimple_seq_add_stmt_without_update (&stmts
, convert2
);
2612 stmt
= gimple_build_assign (lhs
, MINUS_EXPR
, c1
, c2
);
2613 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2616 gsi_replace_with_seq_vops (gsi
, stmts
);
2620 /* If BOUND is greater than the length of one constant string,
2621 and the other argument is also a nul-terminated string, replace
2622 strncmp with strcmp. */
2623 if (fcode
== BUILT_IN_STRNCMP
2624 && bound
> 0 && bound
< HOST_WIDE_INT_M1U
2625 && ((p2
&& len2
< bound
&& len2
== nulpos2
)
2626 || (p1
&& len1
< bound
&& len1
== nulpos1
)))
2628 tree fn
= builtin_decl_implicit (BUILT_IN_STRCMP
);
2631 gimple
*repl
= gimple_build_call (fn
, 2, str1
, str2
);
2632 replace_call_with_call_and_fold (gsi
, repl
);
2639 /* Fold a call to the memchr pointed by GSI iterator. */
2642 gimple_fold_builtin_memchr (gimple_stmt_iterator
*gsi
)
2644 gimple
*stmt
= gsi_stmt (*gsi
);
2645 tree lhs
= gimple_call_lhs (stmt
);
2646 tree arg1
= gimple_call_arg (stmt
, 0);
2647 tree arg2
= gimple_call_arg (stmt
, 1);
2648 tree len
= gimple_call_arg (stmt
, 2);
2650 /* If the LEN parameter is zero, return zero. */
2651 if (integer_zerop (len
))
2653 replace_call_with_value (gsi
, build_int_cst (ptr_type_node
, 0));
2658 if (TREE_CODE (arg2
) != INTEGER_CST
2659 || !tree_fits_uhwi_p (len
)
2660 || !target_char_cst_p (arg2
, &c
))
2663 unsigned HOST_WIDE_INT length
= tree_to_uhwi (len
);
2664 unsigned HOST_WIDE_INT string_length
;
2665 const char *p1
= getbyterep (arg1
, &string_length
);
2669 const char *r
= (const char *)memchr (p1
, c
, MIN (length
, string_length
));
2672 tree mem_size
, offset_node
;
2673 byte_representation (arg1
, &offset_node
, &mem_size
, NULL
);
2674 unsigned HOST_WIDE_INT offset
= (offset_node
== NULL_TREE
)
2675 ? 0 : tree_to_uhwi (offset_node
);
2676 /* MEM_SIZE is the size of the array the string literal
2678 unsigned HOST_WIDE_INT string_size
= tree_to_uhwi (mem_size
) - offset
;
2679 gcc_checking_assert (string_length
<= string_size
);
2680 if (length
<= string_size
)
2682 replace_call_with_value (gsi
, build_int_cst (ptr_type_node
, 0));
2688 unsigned HOST_WIDE_INT offset
= r
- p1
;
2689 gimple_seq stmts
= NULL
;
2690 if (lhs
!= NULL_TREE
)
2692 tree offset_cst
= build_int_cst (sizetype
, offset
);
2693 gassign
*stmt
= gimple_build_assign (lhs
, POINTER_PLUS_EXPR
,
2695 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2698 gimple_seq_add_stmt_without_update (&stmts
,
2699 gimple_build_nop ());
2701 gsi_replace_with_seq_vops (gsi
, stmts
);
2709 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2710 to the call. IGNORE is true if the value returned
2711 by the builtin will be ignored. UNLOCKED is true is true if this
2712 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2713 the known length of the string. Return NULL_TREE if no simplification
2717 gimple_fold_builtin_fputs (gimple_stmt_iterator
*gsi
,
2718 tree arg0
, tree arg1
,
2721 gimple
*stmt
= gsi_stmt (*gsi
);
2723 /* If we're using an unlocked function, assume the other unlocked
2724 functions exist explicitly. */
2725 tree
const fn_fputc
= (unlocked
2726 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
2727 : builtin_decl_implicit (BUILT_IN_FPUTC
));
2728 tree
const fn_fwrite
= (unlocked
2729 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
2730 : builtin_decl_implicit (BUILT_IN_FWRITE
));
2732 /* If the return value is used, don't do the transformation. */
2733 if (gimple_call_lhs (stmt
))
2736 /* Get the length of the string passed to fputs. If the length
2737 can't be determined, punt. */
2738 tree len
= get_maxval_strlen (arg0
, SRK_STRLEN
);
2740 || TREE_CODE (len
) != INTEGER_CST
)
2743 switch (compare_tree_int (len
, 1))
2745 case -1: /* length is 0, delete the call entirely . */
2746 replace_call_with_value (gsi
, integer_zero_node
);
2749 case 0: /* length is 1, call fputc. */
2751 const char *p
= c_getstr (arg0
);
2757 gimple
*repl
= gimple_build_call (fn_fputc
, 2,
2759 (integer_type_node
, p
[0]), arg1
);
2760 replace_call_with_call_and_fold (gsi
, repl
);
2765 case 1: /* length is greater than 1, call fwrite. */
2767 /* If optimizing for size keep fputs. */
2768 if (optimize_function_for_size_p (cfun
))
2770 /* New argument list transforming fputs(string, stream) to
2771 fwrite(string, 1, len, stream). */
2775 gimple
*repl
= gimple_build_call (fn_fwrite
, 4, arg0
,
2776 size_one_node
, len
, arg1
);
2777 replace_call_with_call_and_fold (gsi
, repl
);
2786 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2787 DEST, SRC, LEN, and SIZE are the arguments to the call.
2788 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2789 code of the builtin. If MAXLEN is not NULL, it is maximum length
2790 passed as third argument. */
2793 gimple_fold_builtin_memory_chk (gimple_stmt_iterator
*gsi
,
2794 tree dest
, tree src
, tree len
, tree size
,
2795 enum built_in_function fcode
)
2797 gimple
*stmt
= gsi_stmt (*gsi
);
2798 location_t loc
= gimple_location (stmt
);
2799 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
2802 /* If SRC and DEST are the same (and not volatile), return DEST
2803 (resp. DEST+LEN for __mempcpy_chk). */
2804 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
2806 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
2808 replace_call_with_value (gsi
, dest
);
2813 gimple_seq stmts
= NULL
;
2814 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
2815 tree temp
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
2816 TREE_TYPE (dest
), dest
, len
);
2817 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2818 replace_call_with_value (gsi
, temp
);
2823 if (! tree_fits_uhwi_p (size
))
2826 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
2827 if (! integer_all_onesp (size
))
2829 if (! tree_fits_uhwi_p (len
))
2831 /* If LEN is not constant, try MAXLEN too.
2832 For MAXLEN only allow optimizing into non-_ocs function
2833 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2834 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2836 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
2838 /* (void) __mempcpy_chk () can be optimized into
2839 (void) __memcpy_chk (). */
2840 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
2844 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
2845 replace_call_with_call_and_fold (gsi
, repl
);
2854 if (tree_int_cst_lt (size
, maxlen
))
2859 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2860 mem{cpy,pcpy,move,set} is available. */
2863 case BUILT_IN_MEMCPY_CHK
:
2864 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
2866 case BUILT_IN_MEMPCPY_CHK
:
2867 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
2869 case BUILT_IN_MEMMOVE_CHK
:
2870 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
2872 case BUILT_IN_MEMSET_CHK
:
2873 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
2882 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2883 replace_call_with_call_and_fold (gsi
, repl
);
2887 /* Fold a call to the __st[rp]cpy_chk builtin.
2888 DEST, SRC, and SIZE are the arguments to the call.
2889 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2890 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2891 strings passed as second argument. */
2894 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator
*gsi
,
2896 tree src
, tree size
,
2897 enum built_in_function fcode
)
2899 gimple
*stmt
= gsi_stmt (*gsi
);
2900 location_t loc
= gimple_location (stmt
);
2901 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
2904 /* If SRC and DEST are the same (and not volatile), return DEST. */
2905 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
2907 /* Issue -Wrestrict unless the pointers are null (those do
2908 not point to objects and so do not indicate an overlap;
2909 such calls could be the result of sanitization and jump
2911 if (!integer_zerop (dest
) && !gimple_no_warning_p (stmt
))
2913 tree func
= gimple_call_fndecl (stmt
);
2915 warning_at (loc
, OPT_Wrestrict
,
2916 "%qD source argument is the same as destination",
2920 replace_call_with_value (gsi
, dest
);
2924 if (! tree_fits_uhwi_p (size
))
2927 tree maxlen
= get_maxval_strlen (src
, SRK_STRLENMAX
);
2928 if (! integer_all_onesp (size
))
2930 len
= c_strlen (src
, 1);
2931 if (! len
|| ! tree_fits_uhwi_p (len
))
2933 /* If LEN is not constant, try MAXLEN too.
2934 For MAXLEN only allow optimizing into non-_ocs function
2935 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2936 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2938 if (fcode
== BUILT_IN_STPCPY_CHK
)
2943 /* If return value of __stpcpy_chk is ignored,
2944 optimize into __strcpy_chk. */
2945 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
2949 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
2950 replace_call_with_call_and_fold (gsi
, repl
);
2954 if (! len
|| TREE_SIDE_EFFECTS (len
))
2957 /* If c_strlen returned something, but not a constant,
2958 transform __strcpy_chk into __memcpy_chk. */
2959 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
2963 gimple_seq stmts
= NULL
;
2964 len
= force_gimple_operand (len
, &stmts
, true, NULL_TREE
);
2965 len
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
2966 len
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
, len
,
2967 build_int_cst (size_type_node
, 1));
2968 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2969 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
2970 replace_call_with_call_and_fold (gsi
, repl
);
2977 if (! tree_int_cst_lt (maxlen
, size
))
2981 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2982 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
2983 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
2987 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
2988 replace_call_with_call_and_fold (gsi
, repl
);
2992 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2993 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2994 length passed as third argument. IGNORE is true if return value can be
2995 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2998 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator
*gsi
,
2999 tree dest
, tree src
,
3000 tree len
, tree size
,
3001 enum built_in_function fcode
)
3003 gimple
*stmt
= gsi_stmt (*gsi
);
3004 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
3007 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
3009 /* If return value of __stpncpy_chk is ignored,
3010 optimize into __strncpy_chk. */
3011 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
3014 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
3015 replace_call_with_call_and_fold (gsi
, repl
);
3020 if (! tree_fits_uhwi_p (size
))
3023 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
3024 if (! integer_all_onesp (size
))
3026 if (! tree_fits_uhwi_p (len
))
3028 /* If LEN is not constant, try MAXLEN too.
3029 For MAXLEN only allow optimizing into non-_ocs function
3030 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3031 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
3037 if (tree_int_cst_lt (size
, maxlen
))
3041 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3042 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
3043 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
3047 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
3048 replace_call_with_call_and_fold (gsi
, repl
);
3052 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3053 Return NULL_TREE if no simplification can be made. */
3056 gimple_fold_builtin_stpcpy (gimple_stmt_iterator
*gsi
)
3058 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3059 location_t loc
= gimple_location (stmt
);
3060 tree dest
= gimple_call_arg (stmt
, 0);
3061 tree src
= gimple_call_arg (stmt
, 1);
3064 /* If the result is unused, replace stpcpy with strcpy. */
3065 if (gimple_call_lhs (stmt
) == NULL_TREE
)
3067 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3070 gimple_call_set_fndecl (stmt
, fn
);
3075 /* Set to non-null if ARG refers to an unterminated array. */
3076 c_strlen_data data
= { };
3077 /* The size of the unterminated array if SRC referes to one. */
3079 /* True if the size is exact/constant, false if it's the lower bound
3082 tree len
= c_strlen (src
, 1, &data
, 1);
3084 || TREE_CODE (len
) != INTEGER_CST
)
3086 data
.decl
= unterminated_array (src
, &size
, &exact
);
3093 /* Avoid folding calls with unterminated arrays. */
3094 if (!gimple_no_warning_p (stmt
))
3095 warn_string_no_nul (loc
, NULL_TREE
, "stpcpy", src
, data
.decl
, size
,
3097 gimple_set_no_warning (stmt
, true);
3101 if (optimize_function_for_size_p (cfun
)
3102 /* If length is zero it's small enough. */
3103 && !integer_zerop (len
))
3106 /* If the source has a known length replace stpcpy with memcpy. */
3107 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3111 gimple_seq stmts
= NULL
;
3112 tree tem
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
3113 lenp1
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
,
3114 tem
, build_int_cst (size_type_node
, 1));
3115 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3116 gcall
*repl
= gimple_build_call (fn
, 3, dest
, src
, lenp1
);
3117 gimple_move_vops (repl
, stmt
);
3118 gsi_insert_before (gsi
, repl
, GSI_SAME_STMT
);
3119 /* Replace the result with dest + len. */
3121 tem
= gimple_convert (&stmts
, loc
, sizetype
, len
);
3122 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3123 gassign
*ret
= gimple_build_assign (gimple_call_lhs (stmt
),
3124 POINTER_PLUS_EXPR
, dest
, tem
);
3125 gsi_replace (gsi
, ret
, false);
3126 /* Finally fold the memcpy call. */
3127 gimple_stmt_iterator gsi2
= *gsi
;
3133 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
3134 NULL_TREE if a normal call should be emitted rather than expanding
3135 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
3136 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
3137 passed as second argument. */
3140 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator
*gsi
,
3141 enum built_in_function fcode
)
3143 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3144 tree dest
, size
, len
, fn
, fmt
, flag
;
3145 const char *fmt_str
;
3147 /* Verify the required arguments in the original call. */
3148 if (gimple_call_num_args (stmt
) < 5)
3151 dest
= gimple_call_arg (stmt
, 0);
3152 len
= gimple_call_arg (stmt
, 1);
3153 flag
= gimple_call_arg (stmt
, 2);
3154 size
= gimple_call_arg (stmt
, 3);
3155 fmt
= gimple_call_arg (stmt
, 4);
3157 if (! tree_fits_uhwi_p (size
))
3160 if (! integer_all_onesp (size
))
3162 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
3163 if (! tree_fits_uhwi_p (len
))
3165 /* If LEN is not constant, try MAXLEN too.
3166 For MAXLEN only allow optimizing into non-_ocs function
3167 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3168 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
3174 if (tree_int_cst_lt (size
, maxlen
))
3178 if (!init_target_chars ())
3181 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3182 or if format doesn't contain % chars or is "%s". */
3183 if (! integer_zerop (flag
))
3185 fmt_str
= c_getstr (fmt
);
3186 if (fmt_str
== NULL
)
3188 if (strchr (fmt_str
, target_percent
) != NULL
3189 && strcmp (fmt_str
, target_percent_s
))
3193 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3195 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
3196 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
3200 /* Replace the called function and the first 5 argument by 3 retaining
3201 trailing varargs. */
3202 gimple_call_set_fndecl (stmt
, fn
);
3203 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
3204 gimple_call_set_arg (stmt
, 0, dest
);
3205 gimple_call_set_arg (stmt
, 1, len
);
3206 gimple_call_set_arg (stmt
, 2, fmt
);
3207 for (unsigned i
= 3; i
< gimple_call_num_args (stmt
) - 2; ++i
)
3208 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
3209 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
3214 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
3215 Return NULL_TREE if a normal call should be emitted rather than
3216 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
3217 or BUILT_IN_VSPRINTF_CHK. */
3220 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator
*gsi
,
3221 enum built_in_function fcode
)
3223 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3224 tree dest
, size
, len
, fn
, fmt
, flag
;
3225 const char *fmt_str
;
3226 unsigned nargs
= gimple_call_num_args (stmt
);
3228 /* Verify the required arguments in the original call. */
3231 dest
= gimple_call_arg (stmt
, 0);
3232 flag
= gimple_call_arg (stmt
, 1);
3233 size
= gimple_call_arg (stmt
, 2);
3234 fmt
= gimple_call_arg (stmt
, 3);
3236 if (! tree_fits_uhwi_p (size
))
3241 if (!init_target_chars ())
3244 /* Check whether the format is a literal string constant. */
3245 fmt_str
= c_getstr (fmt
);
3246 if (fmt_str
!= NULL
)
3248 /* If the format doesn't contain % args or %%, we know the size. */
3249 if (strchr (fmt_str
, target_percent
) == 0)
3251 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
3252 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
3254 /* If the format is "%s" and first ... argument is a string literal,
3255 we know the size too. */
3256 else if (fcode
== BUILT_IN_SPRINTF_CHK
3257 && strcmp (fmt_str
, target_percent_s
) == 0)
3263 arg
= gimple_call_arg (stmt
, 4);
3264 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
3266 len
= c_strlen (arg
, 1);
3267 if (! len
|| ! tree_fits_uhwi_p (len
))
3274 if (! integer_all_onesp (size
))
3276 if (! len
|| ! tree_int_cst_lt (len
, size
))
3280 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3281 or if format doesn't contain % chars or is "%s". */
3282 if (! integer_zerop (flag
))
3284 if (fmt_str
== NULL
)
3286 if (strchr (fmt_str
, target_percent
) != NULL
3287 && strcmp (fmt_str
, target_percent_s
))
3291 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3292 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
3293 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
3297 /* Replace the called function and the first 4 argument by 2 retaining
3298 trailing varargs. */
3299 gimple_call_set_fndecl (stmt
, fn
);
3300 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
3301 gimple_call_set_arg (stmt
, 0, dest
);
3302 gimple_call_set_arg (stmt
, 1, fmt
);
3303 for (unsigned i
= 2; i
< gimple_call_num_args (stmt
) - 2; ++i
)
3304 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
3305 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
3310 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3311 ORIG may be null if this is a 2-argument call. We don't attempt to
3312 simplify calls with more than 3 arguments.
3314 Return true if simplification was possible, otherwise false. */
3317 gimple_fold_builtin_sprintf (gimple_stmt_iterator
*gsi
)
3319 gimple
*stmt
= gsi_stmt (*gsi
);
3320 tree dest
= gimple_call_arg (stmt
, 0);
3321 tree fmt
= gimple_call_arg (stmt
, 1);
3322 tree orig
= NULL_TREE
;
3323 const char *fmt_str
= NULL
;
3325 /* Verify the required arguments in the original call. We deal with two
3326 types of sprintf() calls: 'sprintf (str, fmt)' and
3327 'sprintf (dest, "%s", orig)'. */
3328 if (gimple_call_num_args (stmt
) > 3)
3331 if (gimple_call_num_args (stmt
) == 3)
3332 orig
= gimple_call_arg (stmt
, 2);
3334 /* Check whether the format is a literal string constant. */
3335 fmt_str
= c_getstr (fmt
);
3336 if (fmt_str
== NULL
)
3339 if (!init_target_chars ())
3342 /* If the format doesn't contain % args or %%, use strcpy. */
3343 if (strchr (fmt_str
, target_percent
) == NULL
)
3345 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3350 /* Don't optimize sprintf (buf, "abc", ptr++). */
3354 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3355 'format' is known to contain no % formats. */
3356 gimple_seq stmts
= NULL
;
3357 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
3359 /* Propagate the NO_WARNING bit to avoid issuing the same
3360 warning more than once. */
3361 if (gimple_no_warning_p (stmt
))
3362 gimple_set_no_warning (repl
, true);
3364 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3365 if (tree lhs
= gimple_call_lhs (stmt
))
3367 repl
= gimple_build_assign (lhs
, build_int_cst (TREE_TYPE (lhs
),
3369 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3370 gsi_replace_with_seq_vops (gsi
, stmts
);
3371 /* gsi now points at the assignment to the lhs, get a
3372 stmt iterator to the memcpy call.
3373 ??? We can't use gsi_for_stmt as that doesn't work when the
3374 CFG isn't built yet. */
3375 gimple_stmt_iterator gsi2
= *gsi
;
3381 gsi_replace_with_seq_vops (gsi
, stmts
);
3387 /* If the format is "%s", use strcpy if the result isn't used. */
3388 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
3391 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3396 /* Don't crash on sprintf (str1, "%s"). */
3400 tree orig_len
= NULL_TREE
;
3401 if (gimple_call_lhs (stmt
))
3403 orig_len
= get_maxval_strlen (orig
, SRK_STRLEN
);
3408 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3409 gimple_seq stmts
= NULL
;
3410 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
3412 /* Propagate the NO_WARNING bit to avoid issuing the same
3413 warning more than once. */
3414 if (gimple_no_warning_p (stmt
))
3415 gimple_set_no_warning (repl
, true);
3417 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3418 if (tree lhs
= gimple_call_lhs (stmt
))
3420 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
3421 TREE_TYPE (orig_len
)))
3422 orig_len
= fold_convert (TREE_TYPE (lhs
), orig_len
);
3423 repl
= gimple_build_assign (lhs
, orig_len
);
3424 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3425 gsi_replace_with_seq_vops (gsi
, stmts
);
3426 /* gsi now points at the assignment to the lhs, get a
3427 stmt iterator to the memcpy call.
3428 ??? We can't use gsi_for_stmt as that doesn't work when the
3429 CFG isn't built yet. */
3430 gimple_stmt_iterator gsi2
= *gsi
;
3436 gsi_replace_with_seq_vops (gsi
, stmts
);
3444 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3445 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3446 attempt to simplify calls with more than 4 arguments.
3448 Return true if simplification was possible, otherwise false. */
/* NOTE(review): the extraction below is damaged -- statements are split
   across lines and several interior lines (early returns, braces) are
   missing; all code tokens are preserved verbatim.  */
3451 gimple_fold_builtin_snprintf (gimple_stmt_iterator
*gsi
)
3453 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3454 tree dest
= gimple_call_arg (stmt
, 0);
3455 tree destsize
= gimple_call_arg (stmt
, 1);
3456 tree fmt
= gimple_call_arg (stmt
, 2);
3457 tree orig
= NULL_TREE
;
3458 const char *fmt_str
= NULL
;
/* Bail out on more than 4 arguments; a 4th argument, if any, is the
   single "%s" source string.  */
3460 if (gimple_call_num_args (stmt
) > 4)
3463 if (gimple_call_num_args (stmt
) == 4)
3464 orig
= gimple_call_arg (stmt
, 3);
/* destlen below is the destination buffer size, which must be a
   compile-time constant for these folds.  */
3466 if (!tree_fits_uhwi_p (destsize
))
3468 unsigned HOST_WIDE_INT destlen
= tree_to_uhwi (destsize
);
3470 /* Check whether the format is a literal string constant. */
3471 fmt_str
= c_getstr (fmt
);
3472 if (fmt_str
== NULL
)
3475 if (!init_target_chars ())
3478 /* If the format doesn't contain % args or %%, use strcpy. */
3479 if (strchr (fmt_str
, target_percent
) == NULL
)
3481 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3485 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3489 /* We could expand this as
3490 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3492 memcpy (str, fmt_with_nul_at_cstm1, cst);
3493 but in the former case that might increase code size
3494 and in the latter case grow .rodata section too much.
3496 size_t len
= strlen (fmt_str
);
3500 gimple_seq stmts
= NULL
;
3501 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
3502 gimple_seq_add_stmt_without_update (&stmts
, repl
);
/* If snprintf's return value is used, materialize it as the constant
   format length assigned to the lhs.  */
3503 if (tree lhs
= gimple_call_lhs (stmt
))
3505 repl
= gimple_build_assign (lhs
,
3506 build_int_cst (TREE_TYPE (lhs
), len
));
3507 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3508 gsi_replace_with_seq_vops (gsi
, stmts
);
3509 /* gsi now points at the assignment to the lhs, get a
3510 stmt iterator to the memcpy call.
3511 ??? We can't use gsi_for_stmt as that doesn't work when the
3512 CFG isn't built yet. */
3513 gimple_stmt_iterator gsi2
= *gsi
;
3519 gsi_replace_with_seq_vops (gsi
, stmts
);
3525 /* If the format is "%s", use strcpy if the result isn't used. */
3526 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
3528 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3532 /* Don't crash on snprintf (str1, cst, "%s"). */
/* The fold needs the source string's constant length to prove it fits
   in the destination.  */
3536 tree orig_len
= get_maxval_strlen (orig
, SRK_STRLEN
);
3537 if (!orig_len
|| TREE_CODE (orig_len
) != INTEGER_CST
)
3540 /* We could expand this as
3541 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3543 memcpy (str1, str2_with_nul_at_cstm1, cst);
3544 but in the former case that might increase code size
3545 and in the latter case grow .rodata section too much.
3547 if (compare_tree_int (orig_len
, destlen
) >= 0)
3550 /* Convert snprintf (str1, cst, "%s", str2) into
3551 strcpy (str1, str2) if strlen (str2) < cst. */
3552 gimple_seq stmts
= NULL
;
3553 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
3554 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3555 if (tree lhs
= gimple_call_lhs (stmt
))
3557 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
3558 TREE_TYPE (orig_len
)))
3559 orig_len
= fold_convert (TREE_TYPE (lhs
), orig_len
);
3560 repl
= gimple_build_assign (lhs
, orig_len
);
3561 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3562 gsi_replace_with_seq_vops (gsi
, stmts
);
3563 /* gsi now points at the assignment to the lhs, get a
3564 stmt iterator to the memcpy call.
3565 ??? We can't use gsi_for_stmt as that doesn't work when the
3566 CFG isn't built yet. */
3567 gimple_stmt_iterator gsi2
= *gsi
;
3573 gsi_replace_with_seq_vops (gsi
, stmts
);
3581 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3582 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3583 more than 3 arguments, and ARG may be null in the 2-argument case.
3585 Return NULL_TREE if no simplification was possible, otherwise return the
3586 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3587 code of the function to be simplified. */
/* NOTE(review): damaged extraction -- lines are fragmented and some
   returns/braces are missing; code tokens kept verbatim.  */
3590 gimple_fold_builtin_fprintf (gimple_stmt_iterator
*gsi
,
3591 tree fp
, tree fmt
, tree arg
,
3592 enum built_in_function fcode
)
3594 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3595 tree fn_fputc
, fn_fputs
;
3596 const char *fmt_str
= NULL
;
3598 /* If the return value is used, don't do the transformation. */
3599 if (gimple_call_lhs (stmt
) != NULL_TREE
)
3602 /* Check whether the format is a literal string constant. */
3603 fmt_str
= c_getstr (fmt
)
3604 if (fmt_str
== NULL
)
3607 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
3609 /* If we're using an unlocked function, assume the other
3610 unlocked functions exist explicitly. */
3611 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
3612 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
3616 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
3617 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
3620 if (!init_target_chars ())
3623 /* If the format doesn't contain % args or %%, use strcpy. */
3624 if (strchr (fmt_str
, target_percent
) == NULL
)
3626 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
3630 /* If the format specifier was "", fprintf does nothing. */
3631 if (fmt_str
[0] == '\0')
3633 replace_call_with_value (gsi
, NULL_TREE
);
3637 /* When "string" doesn't contain %, replace all cases of
3638 fprintf (fp, string) with fputs (string, fp). The fputs
3639 builtin will take care of special cases like length == 1. */
3642 gcall
*repl
= gimple_build_call (fn_fputs
, 2, fmt
, fp
);
3643 replace_call_with_call_and_fold (gsi
, repl
);
3648 /* The other optimizations can be done only on the non-va_list variants. */
3649 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
3652 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3653 else if (strcmp (fmt_str
, target_percent_s
) == 0)
3655 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3659 gcall
*repl
= gimple_build_call (fn_fputs
, 2, arg
, fp
);
3660 replace_call_with_call_and_fold (gsi
, repl
);
3665 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3666 else if (strcmp (fmt_str
, target_percent_c
) == 0)
3669 || ! useless_type_conversion_p (integer_type_node
, TREE_TYPE (arg
)))
3673 gcall
*repl
= gimple_build_call (fn_fputc
, 2, arg
, fp
);
3674 replace_call_with_call_and_fold (gsi
, repl
);
3682 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3683 FMT and ARG are the arguments to the call; we don't fold cases with
3684 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3686 Return NULL_TREE if no simplification was possible, otherwise return the
3687 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3688 code of the function to be simplified. */
/* NOTE(review): damaged extraction -- lines are fragmented and some
   returns/braces are missing; code tokens kept verbatim.  */
3691 gimple_fold_builtin_printf (gimple_stmt_iterator
*gsi
, tree fmt
,
3692 tree arg
, enum built_in_function fcode
)
3694 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3695 tree fn_putchar
, fn_puts
, newarg
;
3696 const char *fmt_str
= NULL
;
3698 /* If the return value is used, don't do the transformation. */
3699 if (gimple_call_lhs (stmt
) != NULL_TREE
)
3702 /* Check whether the format is a literal string constant. */
3703 fmt_str
= c_getstr (fmt
);
3704 if (fmt_str
== NULL
)
3707 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
3709 /* If we're using an unlocked function, assume the other
3710 unlocked functions exist explicitly. */
3711 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
3712 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
3716 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
3717 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
3720 if (!init_target_chars ())
3723 if (strcmp (fmt_str
, target_percent_s
) == 0
3724 || strchr (fmt_str
, target_percent
) == NULL
)
3728 if (strcmp (fmt_str
, target_percent_s
) == 0)
3730 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
3733 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3736 str
= c_getstr (arg
);
3742 /* The format specifier doesn't contain any '%' characters. */
3743 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
3749 /* If the string was "", printf does nothing. */
3752 replace_call_with_value (gsi
, NULL_TREE
);
3756 /* If the string has length of 1, call putchar. */
3759 /* Given printf("c"), (where c is any one character,)
3760 convert "c"[0] to an int and pass that to the replacement
3762 newarg
= build_int_cst (integer_type_node
, str
[0]);
3765 gcall
*repl
= gimple_build_call (fn_putchar
, 1, newarg
);
3766 replace_call_with_call_and_fold (gsi
, repl
);
3772 /* If the string was "string\n", call puts("string"). */
3773 size_t len
= strlen (str
);
3774 if ((unsigned char)str
[len
- 1] == target_newline
3775 && (size_t) (int) len
== len
3780 /* Create a NUL-terminated string that's one char shorter
3781 than the original, stripping off the trailing '\n'. */
3782 newstr
= xstrdup (str
);
3783 newstr
[len
- 1] = '\0';
3784 newarg
= build_string_literal (len
, newstr
);
3788 gcall
*repl
= gimple_build_call (fn_puts
, 1, newarg
);
3789 replace_call_with_call_and_fold (gsi
, repl
);
3794 /* We'd like to arrange to call fputs(string,stdout) here,
3795 but we need stdout and don't have a way to get it yet. */
3800 /* The other optimizations can be done only on the non-va_list variants. */
3801 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
3804 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3805 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
3807 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3811 gcall
*repl
= gimple_build_call (fn_puts
, 1, arg
);
3812 replace_call_with_call_and_fold (gsi
, repl
);
3817 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3818 else if (strcmp (fmt_str
, target_percent_c
) == 0)
3820 if (!arg
|| ! useless_type_conversion_p (integer_type_node
,
3825 gcall
*repl
= gimple_build_call (fn_putchar
, 1, arg
);
3826 replace_call_with_call_and_fold (gsi
, repl
);
3836 /* Fold a call to __builtin_strlen with known length LEN. */
/* NOTE(review): damaged extraction -- fragmented lines, some interior
   lines missing; code tokens kept verbatim.  Folds strlen to a constant
   when min and max computed lengths coincide, otherwise records a
   [0, maxlen] range on the call's lhs.  */
3839 gimple_fold_builtin_strlen (gimple_stmt_iterator
*gsi
)
3841 gimple
*stmt
= gsi_stmt (*gsi
);
3842 tree arg
= gimple_call_arg (stmt
, 0);
3847 c_strlen_data lendata
= { };
3848 if (get_range_strlen (arg
, &lendata
, /* eltsize = */ 1)
3850 && lendata
.minlen
&& TREE_CODE (lendata
.minlen
) == INTEGER_CST
3851 && lendata
.maxlen
&& TREE_CODE (lendata
.maxlen
) == INTEGER_CST
)
3853 /* The range of lengths refers to either a single constant
3854 string or to the longest and shortest constant string
3855 referenced by the argument of the strlen() call, or to
3856 the strings that can possibly be stored in the arrays
3857 the argument refers to. */
3858 minlen
= wi::to_wide (lendata
.minlen
);
3859 maxlen
= wi::to_wide (lendata
.maxlen
);
/* Fallback range when no constant bounds were found: [0,
   max_object_size () - 2] in sizetype precision.  */
3863 unsigned prec
= TYPE_PRECISION (sizetype
);
3865 minlen
= wi::shwi (0, prec
);
3866 maxlen
= wi::to_wide (max_object_size (), prec
) - 2;
3869 if (minlen
== maxlen
)
3871 /* Fold the strlen call to a constant. */
3872 tree type
= TREE_TYPE (lendata
.minlen
);
3873 tree len
= force_gimple_operand_gsi (gsi
,
3874 wide_int_to_tree (type
, minlen
),
3875 true, NULL
, true, GSI_SAME_STMT
);
3876 replace_call_with_value (gsi
, len
);
3880 /* Set the strlen() range to [0, MAXLEN]. */
3881 if (tree lhs
= gimple_call_lhs (stmt
))
3882 set_strlen_range (lhs
, minlen
, maxlen
);
3887 /* Fold a call to __builtin_acc_on_device. */
/* NOTE(review): damaged extraction -- fragmented lines, some interior
   lines (e.g. the #endif for ACCEL_COMPILER) missing; code tokens kept
   verbatim.  Lowers the builtin to (arg0 == val_host) | (arg0 == val_dev)
   built as three inserted assignments.  */
3890 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator
*gsi
, tree arg0
)
3892 /* Defer folding until we know which compiler we're in. */
3893 if (symtab
->state
!= EXPANSION
)
3896 unsigned val_host
= GOMP_DEVICE_HOST
;
3897 unsigned val_dev
= GOMP_DEVICE_NONE
;
3899 #ifdef ACCEL_COMPILER
3900 val_host
= GOMP_DEVICE_NOT_HOST
;
3901 val_dev
= ACCEL_COMPILER_acc_device
;
3904 location_t loc
= gimple_location (gsi_stmt (*gsi
));
3906 tree host_eq
= make_ssa_name (boolean_type_node
);
3907 gimple
*host_ass
= gimple_build_assign
3908 (host_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_host
));
3909 gimple_set_location (host_ass
, loc
);
3910 gsi_insert_before (gsi
, host_ass
, GSI_SAME_STMT
);
3912 tree dev_eq
= make_ssa_name (boolean_type_node
);
3913 gimple
*dev_ass
= gimple_build_assign
3914 (dev_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_dev
));
3915 gimple_set_location (dev_ass
, loc
);
3916 gsi_insert_before (gsi
, dev_ass
, GSI_SAME_STMT
);
3918 tree result
= make_ssa_name (boolean_type_node
);
3919 gimple
*result_ass
= gimple_build_assign
3920 (result
, BIT_IOR_EXPR
, host_eq
, dev_eq
);
3921 gimple_set_location (result_ass
, loc
);
3922 gsi_insert_before (gsi
, result_ass
, GSI_SAME_STMT
);
3924 replace_call_with_value (gsi
, result
);
3929 /* Fold realloc (0, n) -> malloc (n). */
/* NOTE(review): damaged extraction -- fragmented lines, the return
   statements/braces are missing; code tokens kept verbatim.  The fold
   applies only when the first argument is a literal null pointer.  */
3932 gimple_fold_builtin_realloc (gimple_stmt_iterator
*gsi
)
3934 gimple
*stmt
= gsi_stmt (*gsi
);
3935 tree arg
= gimple_call_arg (stmt
, 0);
3936 tree size
= gimple_call_arg (stmt
, 1);
3938 if (operand_equal_p (arg
, null_pointer_node
, 0))
3940 tree fn_malloc
= builtin_decl_implicit (BUILT_IN_MALLOC
);
3943 gcall
*repl
= gimple_build_call (fn_malloc
, 1, size
);
3944 replace_call_with_call_and_fold (gsi
, repl
);
3951 /* Number of bytes into which any type but aggregate or vector types
/* NOTE(review): damaged extraction -- several member declarations of
   this struct (loc, base, alias_type, align, off, sz, size, and the
   closing brace) are missing from this view, though later code in the
   file references them; tokens below kept verbatim.  */
3953 static constexpr size_t clear_padding_unit
3954 = MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
;
3955 /* Buffer size on which __builtin_clear_padding folding code works. */
3956 static const size_t clear_padding_buf_size
= 32 * clear_padding_unit
;
3958 /* Data passed through __builtin_clear_padding folding. */
3959 struct clear_padding_struct
{
3961 /* 0 during __builtin_clear_padding folding, nonzero during
3962 clear_type_padding_in_mask. In that case, instead of clearing the
3963 non-padding bits in union_ptr array clear the padding bits in there. */
3967 gimple_stmt_iterator
*gsi
;
3968 /* Alignment of buf->base + 0. */
3970 /* Offset from buf->base. Should be always a multiple of UNITS_PER_WORD. */
3972 /* Number of padding bytes before buf->off that don't have padding clear
3973 code emitted yet. */
3974 HOST_WIDE_INT padding_bytes
;
3975 /* The size of the whole object. Never emit code to touch
3976 buf->base + buf->sz or following bytes. */
3978 /* Number of bytes recorded in buf->buf. */
3980 /* When inside union, instead of emitting code we and bits inside of
3981 the union_ptr array. */
3982 unsigned char *union_ptr
;
3983 /* Set bits mean padding bits that need to be cleared by the builtin. */
3984 unsigned char buf
[clear_padding_buf_size
+ clear_padding_unit
];
3987 /* Emit code to clear padding requested in BUF->buf - set bits
3988 in there stand for padding that should be cleared. FULL is true
3989 if everything from the buffer should be flushed, otherwise
3990 it can leave up to 2 * clear_padding_unit bytes for further
/* NOTE(review): damaged extraction -- this function's lines are
   fragmented, and many interior lines (returns, braces, else arms, some
   memset arguments) are missing; all code tokens below kept verbatim.
   Behavior claims in added comments are limited to what the visible
   tokens show.  */
3994 clear_padding_flush (clear_padding_struct
*buf
, bool full
)
3996 gcc_assert ((clear_padding_unit
% UNITS_PER_WORD
) == 0);
3997 if (!full
&& buf
->size
< 2 * clear_padding_unit
)
3999 gcc_assert ((buf
->off
% UNITS_PER_WORD
) == 0);
4000 size_t end
= buf
->size
;
4002 end
= ((end
- clear_padding_unit
- 1) / clear_padding_unit
4003 * clear_padding_unit
);
4004 size_t padding_bytes
= buf
->padding_bytes
;
4007 if (buf
->clear_in_mask
)
4009 /* During clear_type_padding_in_mask, clear the padding
4010 bits set in buf->buf in the buf->union_ptr mask. */
4011 for (size_t i
= 0; i
< end
; i
++)
4013 if (buf
->buf
[i
] == (unsigned char) ~0)
4017 memset (&buf
->union_ptr
[buf
->off
+ i
- padding_bytes
],
4020 buf
->union_ptr
[buf
->off
+ i
] &= ~buf
->buf
[i
];
4025 memset (&buf
->union_ptr
[buf
->off
+ end
- padding_bytes
],
4029 buf
->padding_bytes
= 0;
4033 memmove (buf
->buf
, buf
->buf
+ end
, buf
->size
- end
);
4036 buf
->padding_bytes
= padding_bytes
;
4040 /* Inside of a union, instead of emitting any code, instead
4041 clear all bits in the union_ptr buffer that are clear
4042 in buf. Whole padding bytes don't clear anything. */
4043 for (size_t i
= 0; i
< end
; i
++)
4045 if (buf
->buf
[i
] == (unsigned char) ~0)
4050 buf
->union_ptr
[buf
->off
+ i
] &= buf
->buf
[i
];
4057 buf
->padding_bytes
= 0;
4061 memmove (buf
->buf
, buf
->buf
+ end
, buf
->size
- end
);
4064 buf
->padding_bytes
= padding_bytes
;
/* Code-emitting path: walk the recorded bytes one word at a time and
   classify each word's zero / all-ones / mixed bytes.  */
4068 size_t wordsize
= UNITS_PER_WORD
;
4069 for (size_t i
= 0; i
< end
; i
+= wordsize
)
4071 size_t nonzero_first
= wordsize
;
4072 size_t nonzero_last
= 0;
4073 size_t zero_first
= wordsize
;
4074 size_t zero_last
= 0;
4075 bool all_ones
= true, bytes_only
= true;
4076 if ((unsigned HOST_WIDE_INT
) (buf
->off
+ i
+ wordsize
)
4077 > (unsigned HOST_WIDE_INT
) buf
->sz
)
4079 gcc_assert (wordsize
> 1);
4084 for (size_t j
= i
; j
< i
+ wordsize
&& j
< end
; j
++)
4088 if (nonzero_first
== wordsize
)
4090 nonzero_first
= j
- i
;
4091 nonzero_last
= j
- i
;
4093 if (nonzero_last
!= j
- i
)
4095 nonzero_last
= j
+ 1 - i
;
4099 if (zero_first
== wordsize
)
4101 zero_last
= j
+ 1 - i
;
4103 if (buf
->buf
[j
] != 0 && buf
->buf
[j
] != (unsigned char) ~0)
4109 size_t padding_end
= i
;
4112 if (nonzero_first
== 0
4113 && nonzero_last
== wordsize
4116 /* All bits are padding and we had some padding
4117 before too. Just extend it. */
4118 padding_bytes
+= wordsize
;
4121 if (all_ones
&& nonzero_first
== 0)
4123 padding_bytes
+= nonzero_last
;
4124 padding_end
+= nonzero_last
;
4125 nonzero_first
= wordsize
;
4128 else if (bytes_only
&& nonzero_first
== 0)
4130 gcc_assert (zero_first
&& zero_first
!= wordsize
);
4131 padding_bytes
+= zero_first
;
4132 padding_end
+= zero_first
;
/* Emit a store of zeros covering the accumulated padding run: a single
   char for one byte, else a zero CONSTRUCTOR of a char array.  */
4135 if (padding_bytes
== 1)
4137 atype
= char_type_node
;
4138 src
= build_zero_cst (char_type_node
);
4142 atype
= build_array_type_nelts (char_type_node
, padding_bytes
);
4143 src
= build_constructor (atype
, NULL
);
4145 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
, buf
->base
,
4146 build_int_cst (buf
->alias_type
,
4147 buf
->off
+ padding_end
4149 gimple
*g
= gimple_build_assign (dst
, src
);
4150 gimple_set_location (g
, buf
->loc
);
4151 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4153 buf
->padding_bytes
= 0;
4155 if (nonzero_first
== wordsize
)
4156 /* All bits in a word are 0, there are no padding bits. */
4158 if (all_ones
&& nonzero_last
== wordsize
)
4160 /* All bits between nonzero_first and end of word are padding
4161 bits, start counting padding_bytes. */
4162 padding_bytes
= nonzero_last
- nonzero_first
;
4167 /* If bitfields aren't involved in this word, prefer storing
4168 individual bytes or groups of them over performing a RMW
4169 operation on the whole word. */
4170 gcc_assert (i
+ zero_last
<= end
);
4171 for (size_t j
= padding_end
; j
< i
+ zero_last
; j
++)
4176 for (k
= j
; k
< i
+ zero_last
; k
++)
4177 if (buf
->buf
[k
] == 0)
4179 HOST_WIDE_INT off
= buf
->off
+ j
;
4183 atype
= char_type_node
;
4184 src
= build_zero_cst (char_type_node
);
4188 atype
= build_array_type_nelts (char_type_node
, k
- j
);
4189 src
= build_constructor (atype
, NULL
);
4191 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
,
4193 build_int_cst (buf
->alias_type
, off
));
4194 gimple
*g
= gimple_build_assign (dst
, src
);
4195 gimple_set_location (g
, buf
->loc
);
4196 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4200 if (nonzero_last
== wordsize
)
4201 padding_bytes
= nonzero_last
- zero_last
;
/* Bitfield path: pick the smallest power-of-two element size whose
   aligned span covers all the padding bits in this word, then emit a
   read-modify-write (load, AND with the inverted padding mask, store).  */
4204 for (size_t eltsz
= 1; eltsz
<= wordsize
; eltsz
<<= 1)
4206 if (nonzero_last
- nonzero_first
<= eltsz
4207 && ((nonzero_first
& ~(eltsz
- 1))
4208 == ((nonzero_last
- 1) & ~(eltsz
- 1))))
4212 type
= char_type_node
;
4214 type
= lang_hooks
.types
.type_for_size (eltsz
* BITS_PER_UNIT
,
4216 size_t start
= nonzero_first
& ~(eltsz
- 1);
4217 HOST_WIDE_INT off
= buf
->off
+ i
+ start
;
4219 if (eltsz
> 1 && buf
->align
< TYPE_ALIGN (type
))
4220 atype
= build_aligned_type (type
, buf
->align
);
4221 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
, buf
->base
,
4222 build_int_cst (buf
->alias_type
, off
));
4226 && nonzero_first
== start
4227 && nonzero_last
== start
+ eltsz
)
4228 src
= build_zero_cst (type
);
4231 src
= make_ssa_name (type
);
4232 g
= gimple_build_assign (src
, unshare_expr (dst
));
4233 gimple_set_location (g
, buf
->loc
);
4234 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4235 tree mask
= native_interpret_expr (type
,
4236 buf
->buf
+ i
+ start
,
4238 gcc_assert (mask
&& TREE_CODE (mask
) == INTEGER_CST
);
4239 mask
= fold_build1 (BIT_NOT_EXPR
, type
, mask
);
4240 tree src_masked
= make_ssa_name (type
);
4241 g
= gimple_build_assign (src_masked
, BIT_AND_EXPR
,
4243 gimple_set_location (g
, buf
->loc
);
4244 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4247 g
= gimple_build_assign (dst
, src
);
4248 gimple_set_location (g
, buf
->loc
);
4249 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
/* Full flush: emit the store for any remaining padding run, then reset
   the buffer bookkeeping.  */
4259 if (padding_bytes
== 1)
4261 atype
= char_type_node
;
4262 src
= build_zero_cst (char_type_node
);
4266 atype
= build_array_type_nelts (char_type_node
, padding_bytes
);
4267 src
= build_constructor (atype
, NULL
);
4269 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
, buf
->base
,
4270 build_int_cst (buf
->alias_type
,
4273 gimple
*g
= gimple_build_assign (dst
, src
);
4274 gimple_set_location (g
, buf
->loc
);
4275 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4277 size_t end_rem
= end
% UNITS_PER_WORD
;
4278 buf
->off
+= end
- end_rem
;
4279 buf
->size
= end_rem
;
4280 memset (buf
->buf
, 0, buf
->size
);
4281 buf
->padding_bytes
= 0;
4285 memmove (buf
->buf
, buf
->buf
+ end
, buf
->size
- end
);
4288 buf
->padding_bytes
= padding_bytes
;
4292 /* Append PADDING_BYTES padding bytes. */
/* NOTE(review): damaged extraction -- fragmented lines, some braces and
   else arms missing; code tokens kept verbatim.  Records PADDING_BYTES
   all-ones bytes into buf->buf, flushing when the fixed-size buffer
   would overflow.  */
4295 clear_padding_add_padding (clear_padding_struct
*buf
,
4296 HOST_WIDE_INT padding_bytes
)
4298 if (padding_bytes
== 0)
4300 if ((unsigned HOST_WIDE_INT
) padding_bytes
+ buf
->size
4301 > (unsigned HOST_WIDE_INT
) clear_padding_buf_size
)
4302 clear_padding_flush (buf
, false);
4303 if ((unsigned HOST_WIDE_INT
) padding_bytes
+ buf
->size
4304 > (unsigned HOST_WIDE_INT
) clear_padding_buf_size
)
4306 memset (buf
->buf
+ buf
->size
, ~0, clear_padding_buf_size
- buf
->size
);
4307 padding_bytes
-= clear_padding_buf_size
- buf
->size
;
4308 buf
->size
= clear_padding_buf_size
;
4309 clear_padding_flush (buf
, false);
4310 gcc_assert (buf
->padding_bytes
);
4311 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4312 is guaranteed to be all ones. */
4313 padding_bytes
+= buf
->size
;
4314 buf
->size
= padding_bytes
% UNITS_PER_WORD
;
4315 memset (buf
->buf
, ~0, buf
->size
);
4316 buf
->off
+= padding_bytes
- buf
->size
;
4317 buf
->padding_bytes
+= padding_bytes
- buf
->size
;
4321 memset (buf
->buf
+ buf
->size
, ~0, padding_bytes
);
4322 buf
->size
+= padding_bytes
;
4326 static void clear_padding_type (clear_padding_struct
*, tree
, HOST_WIDE_INT
);
4328 /* Clear padding bits of union type TYPE. */
/* NOTE(review): damaged extraction -- fragmented lines and missing
   interior lines (returns, braces, loop around the copy-out); code
   tokens kept verbatim.  The function processes each union member into
   a scratch clear_padding_struct whose union_ptr mask accumulates the
   intersection of every member's padding.  */
4331 clear_padding_union (clear_padding_struct
*buf
, tree type
, HOST_WIDE_INT sz
)
4333 clear_padding_struct
*union_buf
;
4334 HOST_WIDE_INT start_off
= 0, next_off
= 0;
4335 size_t start_size
= 0;
4338 start_off
= buf
->off
+ buf
->size
;
4339 next_off
= start_off
+ sz
;
4340 start_size
= start_off
% UNITS_PER_WORD
;
4341 start_off
-= start_size
;
4342 clear_padding_flush (buf
, true);
4347 if (sz
+ buf
->size
> clear_padding_buf_size
)
4348 clear_padding_flush (buf
, false);
4349 union_buf
= XALLOCA (clear_padding_struct
);
4350 union_buf
->loc
= buf
->loc
;
4351 union_buf
->clear_in_mask
= buf
->clear_in_mask
;
4352 union_buf
->base
= NULL_TREE
;
4353 union_buf
->alias_type
= NULL_TREE
;
4354 union_buf
->gsi
= NULL
;
4355 union_buf
->align
= 0;
4357 union_buf
->padding_bytes
= 0;
4359 union_buf
->size
= 0;
/* Small unions reuse the tail of buf->buf as the mask; larger ones get
   a heap-allocated mask (freed via XDELETE below).  */
4360 if (sz
+ buf
->size
<= clear_padding_buf_size
)
4361 union_buf
->union_ptr
= buf
->buf
+ buf
->size
;
4363 union_buf
->union_ptr
= XNEWVEC (unsigned char, sz
);
4364 memset (union_buf
->union_ptr
, ~0, sz
);
4367 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
4368 if (TREE_CODE (field
) == FIELD_DECL
&& !DECL_PADDING_P (field
))
4370 if (DECL_SIZE_UNIT (field
) == NULL_TREE
)
4372 if (TREE_TYPE (field
) == error_mark_node
)
4374 gcc_assert (TREE_CODE (TREE_TYPE (field
)) == ARRAY_TYPE
4375 && !COMPLETE_TYPE_P (TREE_TYPE (field
)));
4376 if (!buf
->clear_in_mask
)
4377 error_at (buf
->loc
, "flexible array member %qD does not have "
4378 "well defined padding bits for %qs",
4379 field
, "__builtin_clear_padding");
4382 HOST_WIDE_INT fldsz
= tree_to_shwi (DECL_SIZE_UNIT (field
));
4383 gcc_assert (union_buf
->size
== 0);
4384 union_buf
->off
= start_off
;
4385 union_buf
->size
= start_size
;
4386 memset (union_buf
->buf
, ~0, start_size
);
4387 clear_padding_type (union_buf
, TREE_TYPE (field
), fldsz
);
4388 clear_padding_add_padding (union_buf
, sz
- fldsz
);
4389 clear_padding_flush (union_buf
, true);
4392 if (buf
== union_buf
)
4394 buf
->off
= next_off
;
4395 buf
->size
= next_off
% UNITS_PER_WORD
;
4396 buf
->off
-= buf
->size
;
4397 memset (buf
->buf
, ~0, buf
->size
);
4399 else if (sz
+ buf
->size
<= clear_padding_buf_size
)
4403 unsigned char *union_ptr
= union_buf
->union_ptr
;
4406 clear_padding_flush (buf
, false);
4407 HOST_WIDE_INT this_sz
4408 = MIN ((unsigned HOST_WIDE_INT
) sz
,
4409 clear_padding_buf_size
- buf
->size
);
4410 memcpy (buf
->buf
+ buf
->size
, union_ptr
, this_sz
);
4411 buf
->size
+= this_sz
;
4412 union_ptr
+= this_sz
;
4415 XDELETE (union_buf
->union_ptr
);
4419 /* The only known floating point formats with padding bits are the
4420 IEEE extended ones. */
/* NOTE(review): damaged extraction -- the return's first conjunct(s)
   are missing here; the visible predicate tests the format's sign-bit
   position (79 for x87 80-bit, 95 for the padded 96-bit layout).  */
4423 clear_padding_real_needs_padding_p (tree type
)
4425 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
4427 && fmt
->signbit_ro
== fmt
->signbit_rw
4428 && (fmt
->signbit_ro
== 79 || fmt
->signbit_ro
== 95));
4431 /* Return true if TYPE might contain any padding bits. */
/* NOTE(review): damaged extraction -- most case labels of the switch
   are missing; visibly, element-typed cases recurse on TREE_TYPE and
   REAL_TYPE defers to clear_padding_real_needs_padding_p.  */
4434 clear_padding_type_may_have_padding_p (tree type
)
4436 switch (TREE_CODE (type
))
4444 return clear_padding_type_may_have_padding_p (TREE_TYPE (type
));
4446 return clear_padding_real_needs_padding_p (type
);
4452 /* Emit a runtime loop:
4453 for (; buf.base != end; buf.base += sz)
4454 __builtin_clear_padding (buf.base); */
/* NOTE(review): damaged extraction -- fragmented lines; code tokens
   kept verbatim.  The loop is built from explicit labels: goto l2,
   l1: body; base += sz, l2: cond (base != end) -> l1/l3, l3: exit.  */
4457 clear_padding_emit_loop (clear_padding_struct
*buf
, tree type
, tree end
)
4459 tree l1
= create_artificial_label (buf
->loc
);
4460 tree l2
= create_artificial_label (buf
->loc
);
4461 tree l3
= create_artificial_label (buf
->loc
);
4462 gimple
*g
= gimple_build_goto (l2
);
4463 gimple_set_location (g
, buf
->loc
);
4464 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4465 g
= gimple_build_label (l1
);
4466 gimple_set_location (g
, buf
->loc
);
4467 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4468 clear_padding_type (buf
, type
, buf
->sz
);
4469 clear_padding_flush (buf
, true);
4470 g
= gimple_build_assign (buf
->base
, POINTER_PLUS_EXPR
, buf
->base
,
4471 size_int (buf
->sz
));
4472 gimple_set_location (g
, buf
->loc
);
4473 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4474 g
= gimple_build_label (l2
);
4475 gimple_set_location (g
, buf
->loc
);
4476 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4477 g
= gimple_build_cond (NE_EXPR
, buf
->base
, end
, l1
, l3
);
4478 gimple_set_location (g
, buf
->loc
);
4479 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4480 g
= gimple_build_label (l3
);
4481 gimple_set_location (g
, buf
->loc
);
4482 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4485 /* Clear padding bits for TYPE. Called recursively from
4486 gimple_fold_builtin_clear_padding. */
4489 clear_padding_type (clear_padding_struct
*buf
, tree type
, HOST_WIDE_INT sz
)
4491 switch (TREE_CODE (type
))
4494 HOST_WIDE_INT cur_pos
;
4496 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
4497 if (TREE_CODE (field
) == FIELD_DECL
&& !DECL_PADDING_P (field
))
4499 tree ftype
= TREE_TYPE (field
);
4500 if (DECL_BIT_FIELD (field
))
4502 HOST_WIDE_INT fldsz
= TYPE_PRECISION (ftype
);
4505 HOST_WIDE_INT pos
= int_byte_position (field
);
4507 = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
));
4508 bpos
%= BITS_PER_UNIT
;
4510 = ROUND_UP (bpos
+ fldsz
, BITS_PER_UNIT
) / BITS_PER_UNIT
;
4511 if (pos
+ end
> cur_pos
)
4513 clear_padding_add_padding (buf
, pos
+ end
- cur_pos
);
4514 cur_pos
= pos
+ end
;
4516 gcc_assert (cur_pos
> pos
4517 && ((unsigned HOST_WIDE_INT
) buf
->size
4518 >= (unsigned HOST_WIDE_INT
) cur_pos
- pos
));
4519 unsigned char *p
= buf
->buf
+ buf
->size
- (cur_pos
- pos
);
4520 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
)
4521 sorry_at (buf
->loc
, "PDP11 bit-field handling unsupported"
4522 " in %qs", "__builtin_clear_padding");
4523 else if (BYTES_BIG_ENDIAN
)
4526 if (bpos
+ fldsz
<= BITS_PER_UNIT
)
4527 *p
&= ~(((1 << fldsz
) - 1)
4528 << (BITS_PER_UNIT
- bpos
- fldsz
));
4533 *p
&= ~(((1U << BITS_PER_UNIT
) - 1) >> bpos
);
4535 fldsz
-= BITS_PER_UNIT
- bpos
;
4537 memset (p
, 0, fldsz
/ BITS_PER_UNIT
);
4538 p
+= fldsz
/ BITS_PER_UNIT
;
4539 fldsz
%= BITS_PER_UNIT
;
4541 *p
&= ((1U << BITS_PER_UNIT
) - 1) >> fldsz
;
4546 /* Little endian. */
4547 if (bpos
+ fldsz
<= BITS_PER_UNIT
)
4548 *p
&= ~(((1 << fldsz
) - 1) << bpos
);
4553 *p
&= ~(((1 << BITS_PER_UNIT
) - 1) << bpos
);
4555 fldsz
-= BITS_PER_UNIT
- bpos
;
4557 memset (p
, 0, fldsz
/ BITS_PER_UNIT
);
4558 p
+= fldsz
/ BITS_PER_UNIT
;
4559 fldsz
%= BITS_PER_UNIT
;
4561 *p
&= ~((1 << fldsz
) - 1);
4565 else if (DECL_SIZE_UNIT (field
) == NULL_TREE
)
4567 if (ftype
== error_mark_node
)
4569 gcc_assert (TREE_CODE (ftype
) == ARRAY_TYPE
4570 && !COMPLETE_TYPE_P (ftype
));
4571 if (!buf
->clear_in_mask
)
4572 error_at (buf
->loc
, "flexible array member %qD does not "
4573 "have well defined padding bits for %qs",
4574 field
, "__builtin_clear_padding");
4576 else if (is_empty_type (TREE_TYPE (field
)))
4580 HOST_WIDE_INT pos
= int_byte_position (field
);
4581 HOST_WIDE_INT fldsz
= tree_to_shwi (DECL_SIZE_UNIT (field
));
4582 gcc_assert (pos
>= 0 && fldsz
>= 0 && pos
>= cur_pos
);
4583 clear_padding_add_padding (buf
, pos
- cur_pos
);
4585 clear_padding_type (buf
, TREE_TYPE (field
), fldsz
);
4589 gcc_assert (sz
>= cur_pos
);
4590 clear_padding_add_padding (buf
, sz
- cur_pos
);
4593 HOST_WIDE_INT nelts
, fldsz
;
4594 fldsz
= int_size_in_bytes (TREE_TYPE (type
));
4599 && sz
> 8 * UNITS_PER_WORD
4600 && buf
->union_ptr
== NULL
4601 && clear_padding_type_may_have_padding_p (TREE_TYPE (type
)))
4603 /* For sufficiently large array of more than one elements,
4604 emit a runtime loop to keep code size manageable. */
4605 tree base
= buf
->base
;
4606 unsigned int prev_align
= buf
->align
;
4607 HOST_WIDE_INT off
= buf
->off
+ buf
->size
;
4608 HOST_WIDE_INT prev_sz
= buf
->sz
;
4609 clear_padding_flush (buf
, true);
4610 tree elttype
= TREE_TYPE (type
);
4611 buf
->base
= create_tmp_var (build_pointer_type (elttype
));
4612 tree end
= make_ssa_name (TREE_TYPE (buf
->base
));
4613 gimple
*g
= gimple_build_assign (buf
->base
, POINTER_PLUS_EXPR
,
4614 base
, size_int (off
));
4615 gimple_set_location (g
, buf
->loc
);
4616 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4617 g
= gimple_build_assign (end
, POINTER_PLUS_EXPR
, buf
->base
,
4619 gimple_set_location (g
, buf
->loc
);
4620 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4622 buf
->align
= TYPE_ALIGN (elttype
);
4625 clear_padding_emit_loop (buf
, elttype
, end
);
4628 buf
->align
= prev_align
;
4629 buf
->size
= off
% UNITS_PER_WORD
;
4630 buf
->off
= off
- buf
->size
;
4631 memset (buf
->buf
, 0, buf
->size
);
4634 for (HOST_WIDE_INT i
= 0; i
< nelts
; i
++)
4635 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
);
4638 clear_padding_union (buf
, type
, sz
);
4641 gcc_assert ((size_t) sz
<= clear_padding_unit
);
4642 if ((unsigned HOST_WIDE_INT
) sz
+ buf
->size
> clear_padding_buf_size
)
4643 clear_padding_flush (buf
, false);
4644 if (clear_padding_real_needs_padding_p (type
))
4646 /* Use native_interpret_expr + native_encode_expr to figure out
4647 which bits are padding. */
4648 memset (buf
->buf
+ buf
->size
, ~0, sz
);
4649 tree cst
= native_interpret_expr (type
, buf
->buf
+ buf
->size
, sz
);
4650 gcc_assert (cst
&& TREE_CODE (cst
) == REAL_CST
);
4651 int len
= native_encode_expr (cst
, buf
->buf
+ buf
->size
, sz
);
4652 gcc_assert (len
> 0 && (size_t) len
== (size_t) sz
);
4653 for (size_t i
= 0; i
< (size_t) sz
; i
++)
4654 buf
->buf
[buf
->size
+ i
] ^= ~0;
4657 memset (buf
->buf
+ buf
->size
, 0, sz
);
4661 fldsz
= int_size_in_bytes (TREE_TYPE (type
));
4662 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
);
4663 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
);
4666 nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
4667 fldsz
= int_size_in_bytes (TREE_TYPE (type
));
4668 for (HOST_WIDE_INT i
= 0; i
< nelts
; i
++)
4669 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
);
4672 gcc_assert ((size_t) sz
<= clear_padding_unit
);
4673 if ((unsigned HOST_WIDE_INT
) sz
+ buf
->size
> clear_padding_buf_size
)
4674 clear_padding_flush (buf
, false);
4675 memset (buf
->buf
+ buf
->size
, ~0, sz
);
4679 gcc_assert ((size_t) sz
<= clear_padding_unit
);
4680 if ((unsigned HOST_WIDE_INT
) sz
+ buf
->size
> clear_padding_buf_size
)
4681 clear_padding_flush (buf
, false);
4682 memset (buf
->buf
+ buf
->size
, 0, sz
);
4688 /* Clear padding bits of TYPE in MASK. */
4691 clear_type_padding_in_mask (tree type
, unsigned char *mask
)
4693 clear_padding_struct buf
;
4694 buf
.loc
= UNKNOWN_LOCATION
;
4695 buf
.clear_in_mask
= true;
4696 buf
.base
= NULL_TREE
;
4697 buf
.alias_type
= NULL_TREE
;
4701 buf
.padding_bytes
= 0;
4702 buf
.sz
= int_size_in_bytes (type
);
4704 buf
.union_ptr
= mask
;
4705 clear_padding_type (&buf
, type
, buf
.sz
);
4706 clear_padding_flush (&buf
, true);
4709 /* Fold __builtin_clear_padding builtin. */
4712 gimple_fold_builtin_clear_padding (gimple_stmt_iterator
*gsi
)
4714 gimple
*stmt
= gsi_stmt (*gsi
);
4715 gcc_assert (gimple_call_num_args (stmt
) == 2);
4716 tree ptr
= gimple_call_arg (stmt
, 0);
4717 tree typearg
= gimple_call_arg (stmt
, 1);
4718 tree type
= TREE_TYPE (TREE_TYPE (typearg
));
4719 location_t loc
= gimple_location (stmt
);
4720 clear_padding_struct buf
;
4721 gimple_stmt_iterator gsiprev
= *gsi
;
4722 /* This should be folded during the lower pass. */
4723 gcc_assert (!gimple_in_ssa_p (cfun
) && cfun
->cfg
== NULL
);
4724 gcc_assert (COMPLETE_TYPE_P (type
));
4725 gsi_prev (&gsiprev
);
4728 buf
.clear_in_mask
= false;
4730 buf
.alias_type
= NULL_TREE
;
4732 buf
.align
= get_pointer_alignment (ptr
);
4733 unsigned int talign
= min_align_of_type (type
) * BITS_PER_UNIT
;
4734 buf
.align
= MAX (buf
.align
, talign
);
4736 buf
.padding_bytes
= 0;
4738 buf
.sz
= int_size_in_bytes (type
);
4739 buf
.union_ptr
= NULL
;
4740 if (buf
.sz
< 0 && int_size_in_bytes (strip_array_types (type
)) < 0)
4741 sorry_at (loc
, "%s not supported for variable length aggregates",
4742 "__builtin_clear_padding");
4743 /* The implementation currently assumes 8-bit host and target
4744 chars which is the case for all currently supported targets
4745 and hosts and is required e.g. for native_{encode,interpret}* APIs. */
4746 else if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
4747 sorry_at (loc
, "%s not supported on this target",
4748 "__builtin_clear_padding");
4749 else if (!clear_padding_type_may_have_padding_p (type
))
4751 else if (TREE_CODE (type
) == ARRAY_TYPE
&& buf
.sz
< 0)
4753 tree sz
= TYPE_SIZE_UNIT (type
);
4754 tree elttype
= type
;
4755 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
4756 while (TREE_CODE (elttype
) == ARRAY_TYPE
4757 && int_size_in_bytes (elttype
) < 0)
4758 elttype
= TREE_TYPE (elttype
);
4759 HOST_WIDE_INT eltsz
= int_size_in_bytes (elttype
);
4760 gcc_assert (eltsz
>= 0);
4763 buf
.base
= create_tmp_var (build_pointer_type (elttype
));
4764 tree end
= make_ssa_name (TREE_TYPE (buf
.base
));
4765 gimple
*g
= gimple_build_assign (buf
.base
, ptr
);
4766 gimple_set_location (g
, loc
);
4767 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4768 g
= gimple_build_assign (end
, POINTER_PLUS_EXPR
, buf
.base
, sz
);
4769 gimple_set_location (g
, loc
);
4770 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4772 buf
.align
= TYPE_ALIGN (elttype
);
4773 buf
.alias_type
= build_pointer_type (elttype
);
4774 clear_padding_emit_loop (&buf
, elttype
, end
);
4779 if (!is_gimple_mem_ref_addr (buf
.base
))
4781 buf
.base
= make_ssa_name (TREE_TYPE (ptr
));
4782 gimple
*g
= gimple_build_assign (buf
.base
, ptr
);
4783 gimple_set_location (g
, loc
);
4784 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4786 buf
.alias_type
= build_pointer_type (type
);
4787 clear_padding_type (&buf
, type
, buf
.sz
);
4788 clear_padding_flush (&buf
, true);
4791 gimple_stmt_iterator gsiprev2
= *gsi
;
4792 gsi_prev (&gsiprev2
);
4793 if (gsi_stmt (gsiprev
) == gsi_stmt (gsiprev2
))
4794 gsi_replace (gsi
, gimple_build_nop (), true);
4797 gsi_remove (gsi
, true);
4803 /* Fold the non-target builtin at *GSI and return whether any simplification
4807 gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
4809 gcall
*stmt
= as_a
<gcall
*>(gsi_stmt (*gsi
));
4810 tree callee
= gimple_call_fndecl (stmt
);
4812 /* Give up for always_inline inline builtins until they are
4814 if (avoid_folding_inline_builtin (callee
))
4817 unsigned n
= gimple_call_num_args (stmt
);
4818 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
4822 return gimple_fold_builtin_bcmp (gsi
);
4823 case BUILT_IN_BCOPY
:
4824 return gimple_fold_builtin_bcopy (gsi
);
4825 case BUILT_IN_BZERO
:
4826 return gimple_fold_builtin_bzero (gsi
);
4828 case BUILT_IN_MEMSET
:
4829 return gimple_fold_builtin_memset (gsi
,
4830 gimple_call_arg (stmt
, 1),
4831 gimple_call_arg (stmt
, 2));
4832 case BUILT_IN_MEMCPY
:
4833 case BUILT_IN_MEMPCPY
:
4834 case BUILT_IN_MEMMOVE
:
4835 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
4836 gimple_call_arg (stmt
, 1), fcode
);
4837 case BUILT_IN_SPRINTF_CHK
:
4838 case BUILT_IN_VSPRINTF_CHK
:
4839 return gimple_fold_builtin_sprintf_chk (gsi
, fcode
);
4840 case BUILT_IN_STRCAT_CHK
:
4841 return gimple_fold_builtin_strcat_chk (gsi
);
4842 case BUILT_IN_STRNCAT_CHK
:
4843 return gimple_fold_builtin_strncat_chk (gsi
);
4844 case BUILT_IN_STRLEN
:
4845 return gimple_fold_builtin_strlen (gsi
);
4846 case BUILT_IN_STRCPY
:
4847 return gimple_fold_builtin_strcpy (gsi
,
4848 gimple_call_arg (stmt
, 0),
4849 gimple_call_arg (stmt
, 1));
4850 case BUILT_IN_STRNCPY
:
4851 return gimple_fold_builtin_strncpy (gsi
,
4852 gimple_call_arg (stmt
, 0),
4853 gimple_call_arg (stmt
, 1),
4854 gimple_call_arg (stmt
, 2));
4855 case BUILT_IN_STRCAT
:
4856 return gimple_fold_builtin_strcat (gsi
, gimple_call_arg (stmt
, 0),
4857 gimple_call_arg (stmt
, 1));
4858 case BUILT_IN_STRNCAT
:
4859 return gimple_fold_builtin_strncat (gsi
);
4860 case BUILT_IN_INDEX
:
4861 case BUILT_IN_STRCHR
:
4862 return gimple_fold_builtin_strchr (gsi
, false);
4863 case BUILT_IN_RINDEX
:
4864 case BUILT_IN_STRRCHR
:
4865 return gimple_fold_builtin_strchr (gsi
, true);
4866 case BUILT_IN_STRSTR
:
4867 return gimple_fold_builtin_strstr (gsi
);
4868 case BUILT_IN_STRCMP
:
4869 case BUILT_IN_STRCMP_EQ
:
4870 case BUILT_IN_STRCASECMP
:
4871 case BUILT_IN_STRNCMP
:
4872 case BUILT_IN_STRNCMP_EQ
:
4873 case BUILT_IN_STRNCASECMP
:
4874 return gimple_fold_builtin_string_compare (gsi
);
4875 case BUILT_IN_MEMCHR
:
4876 return gimple_fold_builtin_memchr (gsi
);
4877 case BUILT_IN_FPUTS
:
4878 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
4879 gimple_call_arg (stmt
, 1), false);
4880 case BUILT_IN_FPUTS_UNLOCKED
:
4881 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
4882 gimple_call_arg (stmt
, 1), true);
4883 case BUILT_IN_MEMCPY_CHK
:
4884 case BUILT_IN_MEMPCPY_CHK
:
4885 case BUILT_IN_MEMMOVE_CHK
:
4886 case BUILT_IN_MEMSET_CHK
:
4887 return gimple_fold_builtin_memory_chk (gsi
,
4888 gimple_call_arg (stmt
, 0),
4889 gimple_call_arg (stmt
, 1),
4890 gimple_call_arg (stmt
, 2),
4891 gimple_call_arg (stmt
, 3),
4893 case BUILT_IN_STPCPY
:
4894 return gimple_fold_builtin_stpcpy (gsi
);
4895 case BUILT_IN_STRCPY_CHK
:
4896 case BUILT_IN_STPCPY_CHK
:
4897 return gimple_fold_builtin_stxcpy_chk (gsi
,
4898 gimple_call_arg (stmt
, 0),
4899 gimple_call_arg (stmt
, 1),
4900 gimple_call_arg (stmt
, 2),
4902 case BUILT_IN_STRNCPY_CHK
:
4903 case BUILT_IN_STPNCPY_CHK
:
4904 return gimple_fold_builtin_stxncpy_chk (gsi
,
4905 gimple_call_arg (stmt
, 0),
4906 gimple_call_arg (stmt
, 1),
4907 gimple_call_arg (stmt
, 2),
4908 gimple_call_arg (stmt
, 3),
4910 case BUILT_IN_SNPRINTF_CHK
:
4911 case BUILT_IN_VSNPRINTF_CHK
:
4912 return gimple_fold_builtin_snprintf_chk (gsi
, fcode
);
4914 case BUILT_IN_FPRINTF
:
4915 case BUILT_IN_FPRINTF_UNLOCKED
:
4916 case BUILT_IN_VFPRINTF
:
4917 if (n
== 2 || n
== 3)
4918 return gimple_fold_builtin_fprintf (gsi
,
4919 gimple_call_arg (stmt
, 0),
4920 gimple_call_arg (stmt
, 1),
4922 ? gimple_call_arg (stmt
, 2)
4926 case BUILT_IN_FPRINTF_CHK
:
4927 case BUILT_IN_VFPRINTF_CHK
:
4928 if (n
== 3 || n
== 4)
4929 return gimple_fold_builtin_fprintf (gsi
,
4930 gimple_call_arg (stmt
, 0),
4931 gimple_call_arg (stmt
, 2),
4933 ? gimple_call_arg (stmt
, 3)
4937 case BUILT_IN_PRINTF
:
4938 case BUILT_IN_PRINTF_UNLOCKED
:
4939 case BUILT_IN_VPRINTF
:
4940 if (n
== 1 || n
== 2)
4941 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 0),
4943 ? gimple_call_arg (stmt
, 1)
4944 : NULL_TREE
, fcode
);
4946 case BUILT_IN_PRINTF_CHK
:
4947 case BUILT_IN_VPRINTF_CHK
:
4948 if (n
== 2 || n
== 3)
4949 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 1),
4951 ? gimple_call_arg (stmt
, 2)
4952 : NULL_TREE
, fcode
);
4954 case BUILT_IN_ACC_ON_DEVICE
:
4955 return gimple_fold_builtin_acc_on_device (gsi
,
4956 gimple_call_arg (stmt
, 0));
4957 case BUILT_IN_REALLOC
:
4958 return gimple_fold_builtin_realloc (gsi
);
4960 case BUILT_IN_CLEAR_PADDING
:
4961 return gimple_fold_builtin_clear_padding (gsi
);
4966 /* Try the generic builtin folder. */
4967 bool ignore
= (gimple_call_lhs (stmt
) == NULL
);
4968 tree result
= fold_call_stmt (stmt
, ignore
);
4972 STRIP_NOPS (result
);
4974 result
= fold_convert (gimple_call_return_type (stmt
), result
);
4975 if (!update_call_from_tree (gsi
, result
))
4976 gimplify_and_update_call_from_tree (gsi
, result
);
4983 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4984 function calls to constants, where possible. */
4987 fold_internal_goacc_dim (const gimple
*call
)
4989 int axis
= oacc_get_ifn_dim_arg (call
);
4990 int size
= oacc_get_fn_dim_size (current_function_decl
, axis
);
4991 tree result
= NULL_TREE
;
4992 tree type
= TREE_TYPE (gimple_call_lhs (call
));
4994 switch (gimple_call_internal_fn (call
))
4996 case IFN_GOACC_DIM_POS
:
4997 /* If the size is 1, we know the answer. */
4999 result
= build_int_cst (type
, 0);
5001 case IFN_GOACC_DIM_SIZE
:
5002 /* If the size is not dynamic, we know the answer. */
5004 result
= build_int_cst (type
, size
);
5013 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
5014 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
5015 &var where var is only addressable because of such calls. */
5018 optimize_atomic_compare_exchange_p (gimple
*stmt
)
5020 if (gimple_call_num_args (stmt
) != 6
5021 || !flag_inline_atomics
5023 || sanitize_flags_p (SANITIZE_THREAD
| SANITIZE_ADDRESS
)
5024 || !gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
)
5025 || !gimple_vdef (stmt
)
5026 || !gimple_vuse (stmt
))
5029 tree fndecl
= gimple_call_fndecl (stmt
);
5030 switch (DECL_FUNCTION_CODE (fndecl
))
5032 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
5033 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
5034 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
5035 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
5036 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
5042 tree expected
= gimple_call_arg (stmt
, 1);
5043 if (TREE_CODE (expected
) != ADDR_EXPR
5044 || !SSA_VAR_P (TREE_OPERAND (expected
, 0)))
5047 tree etype
= TREE_TYPE (TREE_OPERAND (expected
, 0));
5048 if (!is_gimple_reg_type (etype
)
5049 || !auto_var_in_fn_p (TREE_OPERAND (expected
, 0), current_function_decl
)
5050 || TREE_THIS_VOLATILE (etype
)
5051 || VECTOR_TYPE_P (etype
)
5052 || TREE_CODE (etype
) == COMPLEX_TYPE
5053 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
5054 might not preserve all the bits. See PR71716. */
5055 || SCALAR_FLOAT_TYPE_P (etype
)
5056 || maybe_ne (TYPE_PRECISION (etype
),
5057 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
5060 tree weak
= gimple_call_arg (stmt
, 3);
5061 if (!integer_zerop (weak
) && !integer_onep (weak
))
5064 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
5065 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
5066 machine_mode mode
= TYPE_MODE (itype
);
5068 if (direct_optab_handler (atomic_compare_and_swap_optab
, mode
)
5070 && optab_handler (sync_compare_and_swap_optab
, mode
) == CODE_FOR_nothing
)
5073 if (maybe_ne (int_size_in_bytes (etype
), GET_MODE_SIZE (mode
)))
5080 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5082 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5083 i = IMAGPART_EXPR <t>;
5085 e = REALPART_EXPR <t>; */
5088 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator
*gsi
)
5090 gimple
*stmt
= gsi_stmt (*gsi
);
5091 tree fndecl
= gimple_call_fndecl (stmt
);
5092 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
5093 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
5094 tree ctype
= build_complex_type (itype
);
5095 tree expected
= TREE_OPERAND (gimple_call_arg (stmt
, 1), 0);
5096 bool throws
= false;
5098 gimple
*g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
5100 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
5101 gimple_stmt_iterator gsiret
= gsi_for_stmt (g
);
5102 if (!useless_type_conversion_p (itype
, TREE_TYPE (expected
)))
5104 g
= gimple_build_assign (make_ssa_name (itype
), VIEW_CONVERT_EXPR
,
5105 build1 (VIEW_CONVERT_EXPR
, itype
,
5106 gimple_assign_lhs (g
)));
5107 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
5109 int flag
= (integer_onep (gimple_call_arg (stmt
, 3)) ? 256 : 0)
5110 + int_size_in_bytes (itype
);
5111 g
= gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE
, 6,
5112 gimple_call_arg (stmt
, 0),
5113 gimple_assign_lhs (g
),
5114 gimple_call_arg (stmt
, 2),
5115 build_int_cst (integer_type_node
, flag
),
5116 gimple_call_arg (stmt
, 4),
5117 gimple_call_arg (stmt
, 5));
5118 tree lhs
= make_ssa_name (ctype
);
5119 gimple_call_set_lhs (g
, lhs
);
5120 gimple_move_vops (g
, stmt
);
5121 tree oldlhs
= gimple_call_lhs (stmt
);
5122 if (stmt_can_throw_internal (cfun
, stmt
))
5125 e
= find_fallthru_edge (gsi_bb (*gsi
)->succs
);
5127 gimple_call_set_nothrow (as_a
<gcall
*> (g
),
5128 gimple_call_nothrow_p (as_a
<gcall
*> (stmt
)));
5129 gimple_call_set_lhs (stmt
, NULL_TREE
);
5130 gsi_replace (gsi
, g
, true);
5133 g
= gimple_build_assign (make_ssa_name (itype
), IMAGPART_EXPR
,
5134 build1 (IMAGPART_EXPR
, itype
, lhs
));
5137 gsi_insert_on_edge_immediate (e
, g
);
5138 *gsi
= gsi_for_stmt (g
);
5141 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5142 g
= gimple_build_assign (oldlhs
, NOP_EXPR
, gimple_assign_lhs (g
));
5143 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5145 g
= gimple_build_assign (make_ssa_name (itype
), REALPART_EXPR
,
5146 build1 (REALPART_EXPR
, itype
, lhs
));
5147 if (throws
&& oldlhs
== NULL_TREE
)
5149 gsi_insert_on_edge_immediate (e
, g
);
5150 *gsi
= gsi_for_stmt (g
);
5153 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5154 if (!useless_type_conversion_p (TREE_TYPE (expected
), itype
))
5156 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
5158 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (expected
),
5159 gimple_assign_lhs (g
)));
5160 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5162 g
= gimple_build_assign (expected
, SSA_NAME
, gimple_assign_lhs (g
));
5163 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5167 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
5168 doesn't fit into TYPE. The test for overflow should be regardless of
5169 -fwrapv, and even for unsigned types. */
5172 arith_overflowed_p (enum tree_code code
, const_tree type
,
5173 const_tree arg0
, const_tree arg1
)
5175 widest2_int warg0
= widest2_int_cst (arg0
);
5176 widest2_int warg1
= widest2_int_cst (arg1
);
5180 case PLUS_EXPR
: wres
= wi::add (warg0
, warg1
); break;
5181 case MINUS_EXPR
: wres
= wi::sub (warg0
, warg1
); break;
5182 case MULT_EXPR
: wres
= wi::mul (warg0
, warg1
); break;
5183 default: gcc_unreachable ();
5185 signop sign
= TYPE_SIGN (type
);
5186 if (sign
== UNSIGNED
&& wi::neg_p (wres
))
5188 return wi::min_precision (wres
, sign
) > TYPE_PRECISION (type
);
5191 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5192 for the memory it references, otherwise return null. VECTYPE is the
5193 type of the memory vector. */
5196 gimple_fold_mask_load_store_mem_ref (gcall
*call
, tree vectype
)
5198 tree ptr
= gimple_call_arg (call
, 0);
5199 tree alias_align
= gimple_call_arg (call
, 1);
5200 tree mask
= gimple_call_arg (call
, 2);
5201 if (!tree_fits_uhwi_p (alias_align
) || !integer_all_onesp (mask
))
5204 unsigned HOST_WIDE_INT align
= tree_to_uhwi (alias_align
);
5205 if (TYPE_ALIGN (vectype
) != align
)
5206 vectype
= build_aligned_type (vectype
, align
);
5207 tree offset
= build_zero_cst (TREE_TYPE (alias_align
));
5208 return fold_build2 (MEM_REF
, vectype
, ptr
, offset
);
5211 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5214 gimple_fold_mask_load (gimple_stmt_iterator
*gsi
, gcall
*call
)
5216 tree lhs
= gimple_call_lhs (call
);
5220 if (tree rhs
= gimple_fold_mask_load_store_mem_ref (call
, TREE_TYPE (lhs
)))
5222 gassign
*new_stmt
= gimple_build_assign (lhs
, rhs
);
5223 gimple_set_location (new_stmt
, gimple_location (call
));
5224 gimple_move_vops (new_stmt
, call
);
5225 gsi_replace (gsi
, new_stmt
, false);
5231 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5234 gimple_fold_mask_store (gimple_stmt_iterator
*gsi
, gcall
*call
)
5236 tree rhs
= gimple_call_arg (call
, 3);
5237 if (tree lhs
= gimple_fold_mask_load_store_mem_ref (call
, TREE_TYPE (rhs
)))
5239 gassign
*new_stmt
= gimple_build_assign (lhs
, rhs
);
5240 gimple_set_location (new_stmt
, gimple_location (call
));
5241 gimple_move_vops (new_stmt
, call
);
5242 gsi_replace (gsi
, new_stmt
, false);
5248 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5249 The statement may be replaced by another statement, e.g., if the call
5250 simplifies to a constant value. Return true if any changes were made.
5251 It is assumed that the operands have been previously folded. */
5254 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
5256 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
5258 bool changed
= false;
5261 /* Fold *& in call arguments. */
5262 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
5263 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
5265 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
5268 gimple_call_set_arg (stmt
, i
, tmp
);
5273 /* Check for virtual calls that became direct calls. */
5274 callee
= gimple_call_fn (stmt
);
5275 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
5277 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
5279 if (dump_file
&& virtual_method_call_p (callee
)
5280 && !possible_polymorphic_call_target_p
5281 (callee
, stmt
, cgraph_node::get (gimple_call_addr_fndecl
5282 (OBJ_TYPE_REF_EXPR (callee
)))))
5285 "Type inheritance inconsistent devirtualization of ");
5286 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
5287 fprintf (dump_file
, " to ");
5288 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
5289 fprintf (dump_file
, "\n");
5292 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
5295 else if (flag_devirtualize
&& !inplace
&& virtual_method_call_p (callee
))
5298 vec
<cgraph_node
*>targets
5299 = possible_polymorphic_call_targets (callee
, stmt
, &final
);
5300 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
5302 tree lhs
= gimple_call_lhs (stmt
);
5303 if (dump_enabled_p ())
5305 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, stmt
,
5306 "folding virtual function call to %s\n",
5307 targets
.length () == 1
5308 ? targets
[0]->name ()
5309 : "__builtin_unreachable");
5311 if (targets
.length () == 1)
5313 tree fndecl
= targets
[0]->decl
;
5314 gimple_call_set_fndecl (stmt
, fndecl
);
5316 /* If changing the call to __cxa_pure_virtual
5317 or similar noreturn function, adjust gimple_call_fntype
5319 if (gimple_call_noreturn_p (stmt
)
5320 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
5321 && TYPE_ARG_TYPES (TREE_TYPE (fndecl
))
5322 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
5324 gimple_call_set_fntype (stmt
, TREE_TYPE (fndecl
));
5325 /* If the call becomes noreturn, remove the lhs. */
5327 && gimple_call_noreturn_p (stmt
)
5328 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt
)))
5329 || should_remove_lhs_p (lhs
)))
5331 if (TREE_CODE (lhs
) == SSA_NAME
)
5333 tree var
= create_tmp_var (TREE_TYPE (lhs
));
5334 tree def
= get_or_create_ssa_default_def (cfun
, var
);
5335 gimple
*new_stmt
= gimple_build_assign (lhs
, def
);
5336 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
5338 gimple_call_set_lhs (stmt
, NULL_TREE
);
5340 maybe_remove_unused_call_args (cfun
, stmt
);
5344 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
5345 gimple
*new_stmt
= gimple_build_call (fndecl
, 0);
5346 gimple_set_location (new_stmt
, gimple_location (stmt
));
5347 /* If the call had a SSA name as lhs morph that into
5348 an uninitialized value. */
5349 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
5351 tree var
= create_tmp_var (TREE_TYPE (lhs
));
5352 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs
, var
);
5353 SSA_NAME_DEF_STMT (lhs
) = gimple_build_nop ();
5354 set_ssa_default_def (cfun
, var
, lhs
);
5356 gimple_move_vops (new_stmt
, stmt
);
5357 gsi_replace (gsi
, new_stmt
, false);
5364 /* Check for indirect calls that became direct calls, and then
5365 no longer require a static chain. */
5366 if (gimple_call_chain (stmt
))
5368 tree fn
= gimple_call_fndecl (stmt
);
5369 if (fn
&& !DECL_STATIC_CHAIN (fn
))
5371 gimple_call_set_chain (stmt
, NULL
);
5376 tree tmp
= maybe_fold_reference (gimple_call_chain (stmt
), false);
5379 gimple_call_set_chain (stmt
, tmp
);
5388 /* Check for builtins that CCP can handle using information not
5389 available in the generic fold routines. */
5390 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
5392 if (gimple_fold_builtin (gsi
))
5395 else if (gimple_call_builtin_p (stmt
, BUILT_IN_MD
))
5397 changed
|= targetm
.gimple_fold_builtin (gsi
);
5399 else if (gimple_call_internal_p (stmt
))
5401 enum tree_code subcode
= ERROR_MARK
;
5402 tree result
= NULL_TREE
;
5403 bool cplx_result
= false;
5404 tree overflow
= NULL_TREE
;
5405 switch (gimple_call_internal_fn (stmt
))
5407 case IFN_BUILTIN_EXPECT
:
5408 result
= fold_builtin_expect (gimple_location (stmt
),
5409 gimple_call_arg (stmt
, 0),
5410 gimple_call_arg (stmt
, 1),
5411 gimple_call_arg (stmt
, 2),
5414 case IFN_UBSAN_OBJECT_SIZE
:
5416 tree offset
= gimple_call_arg (stmt
, 1);
5417 tree objsize
= gimple_call_arg (stmt
, 2);
5418 if (integer_all_onesp (objsize
)
5419 || (TREE_CODE (offset
) == INTEGER_CST
5420 && TREE_CODE (objsize
) == INTEGER_CST
5421 && tree_int_cst_le (offset
, objsize
)))
5423 replace_call_with_value (gsi
, NULL_TREE
);
5429 if (integer_zerop (gimple_call_arg (stmt
, 1)))
5431 replace_call_with_value (gsi
, NULL_TREE
);
5435 case IFN_UBSAN_BOUNDS
:
5437 tree index
= gimple_call_arg (stmt
, 1);
5438 tree bound
= gimple_call_arg (stmt
, 2);
5439 if (TREE_CODE (index
) == INTEGER_CST
5440 && TREE_CODE (bound
) == INTEGER_CST
)
5442 index
= fold_convert (TREE_TYPE (bound
), index
);
5443 if (TREE_CODE (index
) == INTEGER_CST
5444 && tree_int_cst_le (index
, bound
))
5446 replace_call_with_value (gsi
, NULL_TREE
);
5452 case IFN_GOACC_DIM_SIZE
:
5453 case IFN_GOACC_DIM_POS
:
5454 result
= fold_internal_goacc_dim (stmt
);
5456 case IFN_UBSAN_CHECK_ADD
:
5457 subcode
= PLUS_EXPR
;
5459 case IFN_UBSAN_CHECK_SUB
:
5460 subcode
= MINUS_EXPR
;
5462 case IFN_UBSAN_CHECK_MUL
:
5463 subcode
= MULT_EXPR
;
5465 case IFN_ADD_OVERFLOW
:
5466 subcode
= PLUS_EXPR
;
5469 case IFN_SUB_OVERFLOW
:
5470 subcode
= MINUS_EXPR
;
5473 case IFN_MUL_OVERFLOW
:
5474 subcode
= MULT_EXPR
;
5478 changed
|= gimple_fold_mask_load (gsi
, stmt
);
5480 case IFN_MASK_STORE
:
5481 changed
|= gimple_fold_mask_store (gsi
, stmt
);
5486 if (subcode
!= ERROR_MARK
)
5488 tree arg0
= gimple_call_arg (stmt
, 0);
5489 tree arg1
= gimple_call_arg (stmt
, 1);
5490 tree type
= TREE_TYPE (arg0
);
5493 tree lhs
= gimple_call_lhs (stmt
);
5494 if (lhs
== NULL_TREE
)
5497 type
= TREE_TYPE (TREE_TYPE (lhs
));
5499 if (type
== NULL_TREE
)
5501 /* x = y + 0; x = y - 0; x = y * 0; */
5502 else if (integer_zerop (arg1
))
5503 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg0
;
5504 /* x = 0 + y; x = 0 * y; */
5505 else if (subcode
!= MINUS_EXPR
&& integer_zerop (arg0
))
5506 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg1
;
5508 else if (subcode
== MINUS_EXPR
&& operand_equal_p (arg0
, arg1
, 0))
5509 result
= integer_zero_node
;
5510 /* x = y * 1; x = 1 * y; */
5511 else if (subcode
== MULT_EXPR
&& integer_onep (arg1
))
5513 else if (subcode
== MULT_EXPR
&& integer_onep (arg0
))
5515 else if (TREE_CODE (arg0
) == INTEGER_CST
5516 && TREE_CODE (arg1
) == INTEGER_CST
)
5519 result
= int_const_binop (subcode
, fold_convert (type
, arg0
),
5520 fold_convert (type
, arg1
));
5522 result
= int_const_binop (subcode
, arg0
, arg1
);
5523 if (result
&& arith_overflowed_p (subcode
, type
, arg0
, arg1
))
5526 overflow
= build_one_cst (type
);
5533 if (result
== integer_zero_node
)
5534 result
= build_zero_cst (type
);
5535 else if (cplx_result
&& TREE_TYPE (result
) != type
)
5537 if (TREE_CODE (result
) == INTEGER_CST
)
5539 if (arith_overflowed_p (PLUS_EXPR
, type
, result
,
5541 overflow
= build_one_cst (type
);
5543 else if ((!TYPE_UNSIGNED (TREE_TYPE (result
))
5544 && TYPE_UNSIGNED (type
))
5545 || (TYPE_PRECISION (type
)
5546 < (TYPE_PRECISION (TREE_TYPE (result
))
5547 + (TYPE_UNSIGNED (TREE_TYPE (result
))
5548 && !TYPE_UNSIGNED (type
)))))
5551 result
= fold_convert (type
, result
);
5558 if (TREE_CODE (result
) == INTEGER_CST
&& TREE_OVERFLOW (result
))
5559 result
= drop_tree_overflow (result
);
5562 if (overflow
== NULL_TREE
)
5563 overflow
= build_zero_cst (TREE_TYPE (result
));
5564 tree ctype
= build_complex_type (TREE_TYPE (result
));
5565 if (TREE_CODE (result
) == INTEGER_CST
5566 && TREE_CODE (overflow
) == INTEGER_CST
)
5567 result
= build_complex (ctype
, result
, overflow
);
5569 result
= build2_loc (gimple_location (stmt
), COMPLEX_EXPR
,
5570 ctype
, result
, overflow
);
5572 if (!update_call_from_tree (gsi
, result
))
5573 gimplify_and_update_call_from_tree (gsi
, result
);
5582 /* Return true whether NAME has a use on STMT. */
5585 has_use_on_stmt (tree name
, gimple
*stmt
)
5587 imm_use_iterator iter
;
5588 use_operand_p use_p
;
5589 FOR_EACH_IMM_USE_FAST (use_p
, iter
, name
)
5590 if (USE_STMT (use_p
) == stmt
)
5595 /* Worker for fold_stmt_1 dispatch to pattern based folding with
5598 Replaces *GSI with the simplification result in RCODE and OPS
5599 and the associated statements in *SEQ. Does the replacement
5600 according to INPLACE and returns true if the operation succeeded. */
5603 replace_stmt_with_simplification (gimple_stmt_iterator
*gsi
,
5604 gimple_match_op
*res_op
,
5605 gimple_seq
*seq
, bool inplace
)
5607 gimple
*stmt
= gsi_stmt (*gsi
);
5608 tree
*ops
= res_op
->ops
;
5609 unsigned int num_ops
= res_op
->num_ops
;
5611 /* Play safe and do not allow abnormals to be mentioned in
5612 newly created statements. See also maybe_push_res_to_seq.
5613 As an exception allow such uses if there was a use of the
5614 same SSA name on the old stmt. */
5615 for (unsigned int i
= 0; i
< num_ops
; ++i
)
5616 if (TREE_CODE (ops
[i
]) == SSA_NAME
5617 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[i
])
5618 && !has_use_on_stmt (ops
[i
], stmt
))
5621 if (num_ops
> 0 && COMPARISON_CLASS_P (ops
[0]))
5622 for (unsigned int i
= 0; i
< 2; ++i
)
5623 if (TREE_CODE (TREE_OPERAND (ops
[0], i
)) == SSA_NAME
5624 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops
[0], i
))
5625 && !has_use_on_stmt (TREE_OPERAND (ops
[0], i
), stmt
))
5628 /* Don't insert new statements when INPLACE is true, even if we could
5629 reuse STMT for the final statement. */
5630 if (inplace
&& !gimple_seq_empty_p (*seq
))
5633 if (gcond
*cond_stmt
= dyn_cast
<gcond
*> (stmt
))
5635 gcc_assert (res_op
->code
.is_tree_code ());
5636 if (TREE_CODE_CLASS ((enum tree_code
) res_op
->code
) == tcc_comparison
5637 /* GIMPLE_CONDs condition may not throw. */
5638 && (!flag_exceptions
5639 || !cfun
->can_throw_non_call_exceptions
5640 || !operation_could_trap_p (res_op
->code
,
5641 FLOAT_TYPE_P (TREE_TYPE (ops
[0])),
5643 gimple_cond_set_condition (cond_stmt
, res_op
->code
, ops
[0], ops
[1]);
5644 else if (res_op
->code
== SSA_NAME
)
5645 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, ops
[0],
5646 build_zero_cst (TREE_TYPE (ops
[0])));
5647 else if (res_op
->code
== INTEGER_CST
)
5649 if (integer_zerop (ops
[0]))
5650 gimple_cond_make_false (cond_stmt
);
5652 gimple_cond_make_true (cond_stmt
);
5656 tree res
= maybe_push_res_to_seq (res_op
, seq
);
5659 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, res
,
5660 build_zero_cst (TREE_TYPE (res
)));
5664 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5666 fprintf (dump_file
, "gimple_simplified to ");
5667 if (!gimple_seq_empty_p (*seq
))
5668 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5669 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
5672 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
5675 else if (is_gimple_assign (stmt
)
5676 && res_op
->code
.is_tree_code ())
5679 || gimple_num_ops (stmt
) > get_gimple_rhs_num_ops (res_op
->code
))
5681 maybe_build_generic_op (res_op
);
5682 gimple_assign_set_rhs_with_ops (gsi
, res_op
->code
,
5683 res_op
->op_or_null (0),
5684 res_op
->op_or_null (1),
5685 res_op
->op_or_null (2));
5686 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5688 fprintf (dump_file
, "gimple_simplified to ");
5689 if (!gimple_seq_empty_p (*seq
))
5690 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5691 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
5694 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
5698 else if (res_op
->code
.is_fn_code ()
5699 && gimple_call_combined_fn (stmt
) == res_op
->code
)
5701 gcc_assert (num_ops
== gimple_call_num_args (stmt
));
5702 for (unsigned int i
= 0; i
< num_ops
; ++i
)
5703 gimple_call_set_arg (stmt
, i
, ops
[i
]);
5704 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5706 fprintf (dump_file
, "gimple_simplified to ");
5707 if (!gimple_seq_empty_p (*seq
))
5708 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5709 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
), 0, TDF_SLIM
);
5711 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
5716 if (gimple_has_lhs (stmt
))
5718 tree lhs
= gimple_get_lhs (stmt
);
5719 if (!maybe_push_res_to_seq (res_op
, seq
, lhs
))
5721 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5723 fprintf (dump_file
, "gimple_simplified to ");
5724 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5726 gsi_replace_with_seq_vops (gsi
, *seq
);
5736 /* Canonicalize MEM_REFs invariant address operand after propagation. */
/* NOTE(review): this region is a damaged extraction -- statements are split
   across physical lines and some original lines (braces, local declarations
   such as the `ext' declaration used below, and return statements) are
   missing.  Code is preserved byte-for-byte; only comments were added.
   Parameters: T is a pointer into the tree being canonicalized so it can be
   rewritten in place; IS_DEBUG presumably relaxes checking for debug binds
   -- TODO confirm against the unmangled source.  */
5739 maybe_canonicalize_mem_ref_addr (tree
*t
, bool is_debug
= false)
/* Strip an outer ADDR_EXPR so the canonicalizations below see the object.  */
5744 if (TREE_CODE (*t
) == ADDR_EXPR
)
5745 t
= &TREE_OPERAND (*t
, 0);
5747 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5748 generic vector extension. The actual vector referenced is
5749 view-converted to an array type for this purpose. If the index
5750 is constant the canonical representation in the middle-end is a
5751 BIT_FIELD_REF so re-write the former to the latter here. */
5752 if (TREE_CODE (*t
) == ARRAY_REF
5753 && TREE_CODE (TREE_OPERAND (*t
, 0)) == VIEW_CONVERT_EXPR
5754 && TREE_CODE (TREE_OPERAND (*t
, 1)) == INTEGER_CST
5755 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0))))
5757 tree vtype
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0));
5758 if (VECTOR_TYPE_P (vtype
))
5760 tree low
= array_ref_low_bound (*t
);
5761 if (TREE_CODE (low
) == INTEGER_CST
)
5763 if (tree_int_cst_le (low
, TREE_OPERAND (*t
, 1)))
/* Bias the index by the array's lower bound and scale it to bits.  */
5765 widest_int idx
= wi::sub (wi::to_widest (TREE_OPERAND (*t
, 1)),
5766 wi::to_widest (low
));
5767 idx
= wi::mul (idx
, wi::to_widest
5768 (TYPE_SIZE (TREE_TYPE (*t
))));
/* NOTE(review): the declaration of `ext' (original line 5769) was lost in
   extraction; `ext' is the bit position one past the accessed element.  */
5770 = wi::add (idx
, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t
))));
/* Only rewrite when the access stays within the vector.  */
5771 if (wi::les_p (ext
, wi::to_widest (TYPE_SIZE (vtype
))))
5773 *t
= build3_loc (EXPR_LOCATION (*t
), BIT_FIELD_REF
,
5775 TREE_OPERAND (TREE_OPERAND (*t
, 0), 0),
5776 TYPE_SIZE (TREE_TYPE (*t
)),
5777 wide_int_to_tree (bitsizetype
, idx
));
/* Walk down to the base object of a component reference chain.  */
5785 while (handled_component_p (*t
))
5786 t
= &TREE_OPERAND (*t
, 0);
5788 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
5789 of invariant addresses into a SSA name MEM_REF address. */
5790 if (TREE_CODE (*t
) == MEM_REF
5791 || TREE_CODE (*t
) == TARGET_MEM_REF
)
5793 tree addr
= TREE_OPERAND (*t
, 0);
5794 if (TREE_CODE (addr
) == ADDR_EXPR
5795 && (TREE_CODE (TREE_OPERAND (addr
, 0)) == MEM_REF
5796 || handled_component_p (TREE_OPERAND (addr
, 0))))
/* NOTE(review): declarations of `base'/`coffset' and the surrounding
   control flow (original lines 5797-5808) are missing here.  */
5800 base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
/* Fold the constant offset of the stripped component into the
   MEM_REF's offset operand.  */
5809 TREE_OPERAND (*t
, 0) = build_fold_addr_expr (base
);
5810 TREE_OPERAND (*t
, 1) = int_const_binop (PLUS_EXPR
,
5811 TREE_OPERAND (*t
, 1),
5812 size_int (coffset
));
5815 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t
, 0)) == DEBUG_EXPR_DECL
5816 || is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)));
5819 /* Canonicalize back MEM_REFs to plain reference trees if the object
5820 accessed is a decl that has the same access semantics as the MEM_REF. */
5821 if (TREE_CODE (*t
) == MEM_REF
5822 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
5823 && integer_zerop (TREE_OPERAND (*t
, 1))
5824 && MR_DEPENDENCE_CLIQUE (*t
) == 0)
5826 tree decl
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
5827 tree alias_type
= TREE_TYPE (TREE_OPERAND (*t
, 1));
5828 if (/* Same volatile qualification. */
5829 TREE_THIS_VOLATILE (*t
) == TREE_THIS_VOLATILE (decl
)
5830 /* Same TBAA behavior with -fstrict-aliasing. */
5831 && !TYPE_REF_CAN_ALIAS_ALL (alias_type
)
5832 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl
))
5833 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type
)))
5834 /* Same alignment. */
5835 && TYPE_ALIGN (TREE_TYPE (decl
)) == TYPE_ALIGN (TREE_TYPE (*t
))
5836 /* We have to look out here to not drop a required conversion
5837 from the rhs to the lhs if *t appears on the lhs or vice-versa
5838 if it appears on the rhs. Thus require strict type
5840 && types_compatible_p (TREE_TYPE (*t
), TREE_TYPE (decl
)))
5842 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
/* Fold &MEM[cst, off] appearing in a debug bind to a plain constant;
   `orig_t' is declared on a line lost to extraction.  */
5847 else if (TREE_CODE (*orig_t
) == ADDR_EXPR
5848 && TREE_CODE (*t
) == MEM_REF
5849 && TREE_CODE (TREE_OPERAND (*t
, 0)) == INTEGER_CST
)
5853 base
= get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t
, 0),
5857 gcc_assert (TREE_CODE (base
) == MEM_REF
)
;
5859 if (mem_ref_offset (base
).to_shwi (&moffset
))
5862 if (wi::to_poly_wide (TREE_OPERAND (base
, 0)).to_shwi (&moffset
))
5865 *orig_t
= build_int_cst (TREE_TYPE (*orig_t
), coffset
);
5872 /* Canonicalize TARGET_MEM_REF in particular with respect to
5873 the indexes becoming constant. */
5874 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
5876 tree tem
= maybe_fold_tmr (*t
);
/* An ADDR_EXPR may have lost/gained invariant-ness after rewriting;
   recompute the flag.  */
5880 if (TREE_CODE (*orig_t
) == ADDR_EXPR
)
5881 recompute_tree_invariant_for_addr_expr (*orig_t
);
5889 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
5890 distinguishes both cases. */
/* NOTE(review): damaged extraction -- statements are split across lines and
   many original lines (braces, `case GIMPLE_...:' labels, several `if'
   heads, `changed = true;' updates and the final `return changed;') are
   missing.  Code kept byte-for-byte; only comments added.
   GSI points at the statement to fold; INPLACE forbids replacing the
   statement with a new one; VALUEIZE maps SSA names to values during
   pattern-based simplification.  */
5893 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
, tree (*valueize
) (tree
))
5895 bool changed
= false;
5896 gimple
*stmt
= gsi_stmt (*gsi
);
/* Remember the no-warning flag so deferred overflow warnings can be
   suppressed for this statement below.  */
5897 bool nowarning
= gimple_no_warning_p (stmt
);
5899 fold_defer_overflow_warnings ();
5901 /* First do required canonicalization of [TARGET_]MEM_REF addresses
5903 ??? This shouldn't be done in generic folding but in the
5904 propagation helpers which also know whether an address was
5906 Also canonicalize operand order. */
/* Dispatch on statement kind; the `case' labels for GIMPLE_ASSIGN,
   GIMPLE_CALL, GIMPLE_ASM, GIMPLE_DEBUG and GIMPLE_COND were lost in
   extraction but the per-kind bodies below are intact.  */
5907 switch (gimple_code (stmt
))
5910 if (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
5912 tree
*rhs
= gimple_assign_rhs1_ptr (stmt
);
5913 if ((REFERENCE_CLASS_P (*rhs
)
5914 || TREE_CODE (*rhs
) == ADDR_EXPR
)
5915 && maybe_canonicalize_mem_ref_addr (rhs
))
5917 tree
*lhs
= gimple_assign_lhs_ptr (stmt
);
5918 if (REFERENCE_CLASS_P (*lhs
)
5919 && maybe_canonicalize_mem_ref_addr (lhs
))
5924 /* Canonicalize operand order. */
5925 enum tree_code code
= gimple_assign_rhs_code (stmt
);
5926 if (TREE_CODE_CLASS (code
) == tcc_comparison
5927 || commutative_tree_code (code
)
5928 || commutative_ternary_tree_code (code
))
5930 tree rhs1
= gimple_assign_rhs1 (stmt
);
5931 tree rhs2
= gimple_assign_rhs2 (stmt
);
5932 if (tree_swap_operands_p (rhs1
, rhs2
))
5934 gimple_assign_set_rhs1 (stmt
, rhs2
);
5935 gimple_assign_set_rhs2 (stmt
, rhs1
);
/* Swapping comparison operands requires swapping the comparison code
   too to preserve semantics.  */
5936 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
5937 gimple_assign_set_rhs_code (stmt
,
5938 swap_tree_comparison (code
));
/* GIMPLE_CALL: canonicalize memory references in arguments and lhs.  */
5946 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
5948 tree
*arg
= gimple_call_arg_ptr (stmt
, i
);
5949 if (REFERENCE_CLASS_P (*arg
)
5950 && maybe_canonicalize_mem_ref_addr (arg
))
5953 tree
*lhs
= gimple_call_lhs_ptr (stmt
);
5955 && REFERENCE_CLASS_P (*lhs
)
5956 && maybe_canonicalize_mem_ref_addr (lhs
))
/* GIMPLE_ASM: canonicalize memory references in output and input
   operands.  */
5962 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
5963 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
5965 tree link
= gimple_asm_output_op (asm_stmt
, i
);
5966 tree op
= TREE_VALUE (link
);
5967 if (REFERENCE_CLASS_P (op
)
5968 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
5971 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
5973 tree link
= gimple_asm_input_op (asm_stmt
, i
);
5974 tree op
= TREE_VALUE (link
);
5975 if ((REFERENCE_CLASS_P (op
)
5976 || TREE_CODE (op
) == ADDR_EXPR
)
5977 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
/* GIMPLE_DEBUG: canonicalize the bound value (is_debug=true relaxes
   checking inside the helper).  */
5983 if (gimple_debug_bind_p (stmt
))
5985 tree
*val
= gimple_debug_bind_get_value_ptr (stmt
);
5987 && (REFERENCE_CLASS_P (*val
)
5988 || TREE_CODE (*val
) == ADDR_EXPR
)
5989 && maybe_canonicalize_mem_ref_addr (val
, true))
5995 /* Canonicalize operand order. */
5996 tree lhs
= gimple_cond_lhs (stmt
);
5997 tree rhs
= gimple_cond_rhs (stmt
);
5998 if (tree_swap_operands_p (lhs
, rhs
))
6000 gcond
*gc
= as_a
<gcond
*> (stmt
);
6001 gimple_cond_set_lhs (gc
, rhs
);
6002 gimple_cond_set_rhs (gc
, lhs
);
6003 gimple_cond_set_code (gc
,
6004 swap_tree_comparison (gimple_cond_code (gc
)));
6011 /* Dispatch to pattern-based folding. */
/* The first operand of this condition (original line 6012, presumably a
   check like `if (is_gimple_call (stmt)') is missing from the extraction. */
6013 || is_gimple_assign (stmt
)
6014 || gimple_code (stmt
) == GIMPLE_COND
)
6016 gimple_seq seq
= NULL
;
6017 gimple_match_op res_op
;
/* In inplace mode no new statements may be emitted, so pass NULL
   instead of &seq.  */
6018 if (gimple_simplify (stmt
, &res_op
, inplace
? NULL
: &seq
,
6019 valueize
, valueize
))
6021 if (replace_stmt_with_simplification (gsi
, &res_op
, &seq
, inplace
))
/* Replacement failed: throw away any statements built for SEQ.  */
6024 gimple_seq_discard (seq
);
/* Re-fetch; the statement may have been replaced above.  */
6028 stmt
= gsi_stmt (*gsi
);
6030 /* Fold the main computation performed by the statement. */
6031 switch (gimple_code (stmt
))
6035 /* Try to canonicalize for boolean-typed X the comparisons
6036 X == 0, X == 1, X != 0, and X != 1. */
6037 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
6038 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
6040 tree lhs
= gimple_assign_lhs (stmt
);
6041 tree op1
= gimple_assign_rhs1 (stmt
);
6042 tree op2
= gimple_assign_rhs2 (stmt
);
6043 tree type
= TREE_TYPE (op1
);
6045 /* Check whether the comparison operands are of the same boolean
6046 type as the result type is.
6047 Check that second operand is an integer-constant with value
6049 if (TREE_CODE (op2
) == INTEGER_CST
6050 && (integer_zerop (op2
) || integer_onep (op2
))
6051 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
6053 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
6054 bool is_logical_not
= false;
6056 /* X == 0 and X != 1 is a logical-not of X
6057 X == 1 and X != 0 is X */
6058 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
6059 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
6060 is_logical_not
= true;
6062 if (is_logical_not
== false)
6063 gimple_assign_set_rhs_with_ops (gsi
, TREE_CODE (op1
), op1
);
6064 /* Only for one-bit precision typed X the transformation
6065 !X -> ~X is valid. */
6066 else if (TYPE_PRECISION (type
) == 1)
6067 gimple_assign_set_rhs_with_ops (gsi
, BIT_NOT_EXPR
, op1
);
6068 /* Otherwise we use !X -> X ^ 1. */
6070 gimple_assign_set_rhs_with_ops (gsi
, BIT_XOR_EXPR
, op1
,
6071 build_int_cst (type
, 1));
/* Generic assign folding: fold the RHS as a tree and put it back if it
   is no more complex than what was there before.  */
6077 unsigned old_num_ops
= gimple_num_ops (stmt
);
6078 tree lhs
= gimple_assign_lhs (stmt
);
6079 tree new_rhs
= fold_gimple_assign (gsi
);
6081 && !useless_type_conversion_p (TREE_TYPE (lhs
),
6082 TREE_TYPE (new_rhs
)))
6083 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
6086 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
6088 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
/* GIMPLE_CALL: delegate to the call folder.  */
6095 changed
|= gimple_fold_call (gsi
, inplace
);
6099 /* Fold *& in asm operands. */
6101 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
6103 const char **oconstraints
;
6104 const char *constraint
;
6105 bool allows_mem
, allows_reg
;
6107 noutputs
= gimple_asm_noutputs (asm_stmt
);
6108 oconstraints
= XALLOCAVEC (const char *, noutputs
);
6110 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
6112 tree link
= gimple_asm_output_op (asm_stmt
, i
);
6113 tree op
= TREE_VALUE (link
);
6115 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6116 if (REFERENCE_CLASS_P (op
)
6117 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
6119 TREE_VALUE (link
) = op
;
6123 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
6125 tree link
= gimple_asm_input_op (asm_stmt
, i
);
6126 tree op
= TREE_VALUE (link
);
6128 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
/* Inputs may be folded to a register only when the constraint allows
   a register; query the constraint first.  */
6129 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
6130 oconstraints
, &allows_mem
, &allows_reg
);
6131 if (REFERENCE_CLASS_P (op
)
6132 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
6135 TREE_VALUE (link
) = op
;
/* GIMPLE_DEBUG: fold references in the bound value, preserving the
   address-ness for ADDR_EXPR values.  */
6143 if (gimple_debug_bind_p (stmt
))
6145 tree val
= gimple_debug_bind_get_value (stmt
);
6147 && REFERENCE_CLASS_P (val
))
6149 tree tem
= maybe_fold_reference (val
, false);
6152 gimple_debug_bind_set_value (stmt
, tem
);
6157 && TREE_CODE (val
) == ADDR_EXPR
)
6159 tree ref
= TREE_OPERAND (val
, 0);
6160 tree tem
= maybe_fold_reference (ref
, false);
6163 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
6164 gimple_debug_bind_set_value (stmt
, tem
);
/* GIMPLE_RETURN: propagate a value into the return value if the
   valueizer provides one and copy propagation is legal.  */
6173 greturn
*ret_stmt
= as_a
<greturn
*> (stmt
);
6174 tree ret
= gimple_return_retval(ret_stmt
);
6176 if (ret
&& TREE_CODE (ret
) == SSA_NAME
&& valueize
)
6178 tree val
= valueize (ret
);
6179 if (val
&& val
!= ret
6180 && may_propagate_copy (ret
, val
))
6182 gimple_return_set_retval (ret_stmt
, val
);
/* Re-fetch again; folding above may have replaced the statement.  */
6192 stmt
= gsi_stmt (*gsi
);
6194 /* Fold *& on the lhs. */
6195 if (gimple_has_lhs (stmt
))
6197 tree lhs
= gimple_get_lhs (stmt
);
6198 if (lhs
&& REFERENCE_CLASS_P (lhs
))
6200 tree new_lhs
= maybe_fold_reference (lhs
, true);
6203 gimple_set_lhs (stmt
, new_lhs
);
/* Emit (or drop) warnings deferred at function entry; suppress them when
   nothing changed or the statement carries the no-warning flag.  */
6209 fold_undefer_overflow_warnings (changed
&& !nowarning
, stmt
, 0);
6213 /* Valueization callback that ends up not following SSA edges.
   NOTE(review): the body (presumably `return NULL_TREE;') was lost in
   extraction; only the signature survives.  The parameter is unnamed
   because it is unused.  */
6216 no_follow_ssa_edges (tree
)
6221 /* Valueization callback that ends up following single-use SSA edges only.
   NOTE(review): the return statements (original lines ~6228/6229) were lost
   in extraction; the surviving condition rejects multi-use SSA names.  */
6224 follow_single_use_edges (tree val
)
6226 if (TREE_CODE (val
) == SSA_NAME
6227 && !has_single_use (val
))
6232 /* Valueization callback that follows all SSA edges.
   NOTE(review): the body (presumably `return val;') was lost in
   extraction; only the signature survives.  */
6235 follow_all_ssa_edges (tree val
)
6240 /* Fold the statement pointed to by GSI. In some cases, this function may
6241 replace the whole statement with a new one. Returns true iff folding
6243 The statement pointed to by GSI should be in valid gimple form but may
6244 be in unfolded state as resulting from for example constant propagation
6245 which can produce *&x = 0. */
/* Convenience overload: delegates to fold_stmt_1 without following SSA
   edges and without the inplace restriction.  */
6248 fold_stmt (gimple_stmt_iterator
*gsi
)
6250 return fold_stmt_1 (gsi
, false, no_follow_ssa_edges
);
/* Overload of fold_stmt taking an explicit VALUEIZE callback used to map
   SSA names to values during simplification; not inplace.  */
6254 fold_stmt (gimple_stmt_iterator
*gsi
, tree (*valueize
) (tree
))
6256 return fold_stmt_1 (gsi
, false, valueize
);
6259 /* Perform the minimal folding on statement *GSI. Only operations like
6260 *&x created by constant propagation are handled. The statement cannot
6261 be replaced with a new one. Return true if the statement was
6262 changed, false otherwise.
6263 The statement *GSI should be in valid gimple form but may
6264 be in unfolded state as resulting from for example constant propagation
6265 which can produce *&x = 0. */
/* NOTE(review): the trailing `return changed;' line was lost in
   extraction; the assert below checks inplace folding did not replace
   the statement object.  */
6268 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
6270 gimple
*stmt
= gsi_stmt (*gsi
);
6271 bool changed
= fold_stmt_1 (gsi
, true, no_follow_ssa_edges
);
6272 gcc_assert (gsi_stmt (*gsi
) == stmt
);
6276 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6277 if EXPR is null or we don't know how.
6278 If non-null, the result always has boolean type. */
/* NOTE(review): damaged extraction -- the `if (invert)' head, braces, and
   fall-through `return NULL_TREE;' lines are missing.  The first branch
   group below is the inverting path (note nonzero -> false), the second is
   the non-inverting path.  Code kept byte-for-byte; only comments added.  */
6281 canonicalize_bool (tree expr
, bool invert
)
/* Inverted path: constants flip, SSA names become EXPR == 0, comparisons
   get their code inverted (non-floating-point inversion, hence `false').  */
6287 if (integer_nonzerop (expr
))
6288 return boolean_false_node
;
6289 else if (integer_zerop (expr
))
6290 return boolean_true_node
;
6291 else if (TREE_CODE (expr
) == SSA_NAME
)
6292 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
6293 build_int_cst (TREE_TYPE (expr
), 0));
6294 else if (COMPARISON_CLASS_P (expr
))
6295 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
6297 TREE_OPERAND (expr
, 0),
6298 TREE_OPERAND (expr
, 1));
/* Non-inverted path: an already-boolean expression is returned as-is
   (return lines lost); otherwise rebuild with boolean type.  */
6304 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
6306 if (integer_nonzerop (expr
))
6307 return boolean_true_node
;
6308 else if (integer_zerop (expr
))
6309 return boolean_false_node
;
6310 else if (TREE_CODE (expr
) == SSA_NAME
)
6311 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
6312 build_int_cst (TREE_TYPE (expr
), 0));
6313 else if (COMPARISON_CLASS_P (expr
))
6314 return fold_build2 (TREE_CODE (expr
),
6316 TREE_OPERAND (expr
, 0),
6317 TREE_OPERAND (expr
, 1));
6323 /* Check to see if a boolean expression EXPR is logically equivalent to the
6324 comparison (OP1 CODE OP2). Check for various identities involving
   SSA_NAMEs.  NOTE(review): damaged extraction -- `return true;'/`return
   false;' lines and the declaration of `s' are missing.  Code kept
   byte-for-byte; only comments added.  */
6328 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
6329 const_tree op1
, const_tree op2
)
6333 /* The obvious case. */
6334 if (TREE_CODE (expr
) == code
6335 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
6336 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
6339 /* Check for comparing (name, name != 0) and the case where expr
6340 is an SSA_NAME with a definition matching the comparison. */
6341 if (TREE_CODE (expr
) == SSA_NAME
6342 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
6344 if (operand_equal_p (expr
, op1
, 0))
6345 return ((code
== NE_EXPR
&& integer_zerop (op2
))
6346 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
6347 s
= SSA_NAME_DEF_STMT (expr
);
6348 if (is_gimple_assign (s
)
6349 && gimple_assign_rhs_code (s
) == code
6350 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
6351 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
6355 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6356 of name is a comparison, recurse. */
6357 if (TREE_CODE (op1
) == SSA_NAME
6358 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
6360 s
= SSA_NAME_DEF_STMT (op1
);
6361 if (is_gimple_assign (s
)
6362 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
6364 enum tree_code c
= gimple_assign_rhs_code (s
);
/* (name != 0) / (name == 1): recurse with the defining comparison.  */
6365 if ((c
== NE_EXPR
&& integer_zerop (op2
))
6366 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
6367 return same_bool_comparison_p (expr
, c
,
6368 gimple_assign_rhs1 (s
),
6369 gimple_assign_rhs2 (s
));
/* (name == 0) / (name != 1): recurse with the inverted comparison.  */
6370 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
6371 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
6372 return same_bool_comparison_p (expr
,
6373 invert_tree_comparison (c
, false),
6374 gimple_assign_rhs1 (s
),
6375 gimple_assign_rhs2 (s
));
6381 /* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  NOTE(review): damaged extraction -- the `return true;' and
   `return false;' lines are missing.  Code kept byte-for-byte.  */
6385 same_bool_result_p (const_tree op1
, const_tree op2
)
6387 /* Simple cases first. */
6388 if (operand_equal_p (op1
, op2
, 0))
6391 /* Check the cases where at least one of the operands is a comparison.
6392 These are a bit smarter than operand_equal_p in that they apply some
6393 identities on SSA_NAMEs. */
6394 if (COMPARISON_CLASS_P (op2
)
6395 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
6396 TREE_OPERAND (op2
, 0),
6397 TREE_OPERAND (op2
, 1)))
6399 if (COMPARISON_CLASS_P (op1
)
6400 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
6401 TREE_OPERAND (op1
, 0),
6402 TREE_OPERAND (op1
, 1)))
6409 /* Forward declarations for some mutually recursive functions. */
6412 and_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
6413 enum tree_code code2
, tree op2a
, tree op2b
);
6415 and_var_with_comparison (tree type
, tree var
, bool invert
,
6416 enum tree_code code2
, tree op2a
, tree op2b
);
6418 and_var_with_comparison_1 (tree type
, gimple
*stmt
,
6419 enum tree_code code2
, tree op2a
, tree op2b
);
6421 or_comparisons_1 (tree
, enum tree_code code1
, tree op1a
, tree op1b
,
6422 enum tree_code code2
, tree op2a
, tree op2b
);
6424 or_var_with_comparison (tree
, tree var
, bool invert
,
6425 enum tree_code code2
, tree op2a
, tree op2b
);
6427 or_var_with_comparison_1 (tree
, gimple
*stmt
,
6428 enum tree_code code2
, tree op2a
, tree op2b
);
6430 /* Helper function for and_comparisons_1: try to simplify the AND of the
6431 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6432 If INVERT is true, invert the value of the VAR before doing the AND.
6433 Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): damaged extraction -- the declaration of `t', the return
   for the non-assign case, and the `if (invert)' / `else' heads are
   missing.  Code kept byte-for-byte; only comments added.  */
6436 and_var_with_comparison (tree type
, tree var
, bool invert
,
6437 enum tree_code code2
, tree op2a
, tree op2b
)
6440 gimple
*stmt
= SSA_NAME_DEF_STMT (var
)
;
6442 /* We can only deal with variables whose definitions are assignments. */
6443 if (!is_gimple_assign (stmt
))
6446 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6447 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6448 Then we only have to consider the simpler non-inverted cases. */
6450 t
= or_var_with_comparison_1 (type
, stmt
,
6451 invert_tree_comparison (code2
, false),
6454 t
= and_var_with_comparison_1 (type
, stmt
, code2
, op2a
, op2b
);
/* canonicalize_bool also applies the pending inversion to T.  */
6455 return canonicalize_bool (t
, invert
);
6458 /* Try to simplify the AND of the ssa variable defined by the assignment
6459 STMT with the comparison specified by (OP2A CODE2 OP2B).
6460 Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): damaged extraction -- braces, several `return' lines,
   declarations of `s'/`t', and some condition heads are missing.  Code kept
   byte-for-byte; only comments added.  */
6463 and_var_with_comparison_1 (tree type
, gimple
*stmt
,
6464 enum tree_code code2
, tree op2a
, tree op2b
)
6466 tree var
= gimple_assign_lhs (stmt
);
6467 tree true_test_var
= NULL_TREE
;
6468 tree false_test_var
= NULL_TREE
;
6469 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
6471 /* Check for identities like (var AND (var == 0)) => false. */
6472 if (TREE_CODE (op2a
) == SSA_NAME
6473 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
/* (op2a != 0) / (op2a == 1) tests op2a for truth...  */
6475 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
6476 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
6478 true_test_var
= op2a
;
6479 if (var
== true_test_var
)
/* ...while (op2a == 0) / (op2a != 1) tests op2a for falsehood.  */
6482 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
6483 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
6485 false_test_var
= op2a
;
6486 if (var
== false_test_var
)
6487 return boolean_false_node
;
6491 /* If the definition is a comparison, recurse on it. */
6492 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
6494 tree t
= and_comparisons_1 (type
, innercode
,
6495 gimple_assign_rhs1 (stmt
),
6496 gimple_assign_rhs2 (stmt
),
6504 /* If the definition is an AND or OR expression, we may be able to
6505 simplify by reassociating. */
6506 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
6507 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
6509 tree inner1
= gimple_assign_rhs1 (stmt
);
6510 tree inner2
= gimple_assign_rhs2 (stmt
);
6513 tree partial
= NULL_TREE
;
6514 bool is_and
= (innercode
== BIT_AND_EXPR
);
6516 /* Check for boolean identities that don't require recursive examination
6518 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6519 inner1 AND (inner1 OR inner2) => inner1
6520 !inner1 AND (inner1 AND inner2) => false
6521 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6522 Likewise for similar cases involving inner2. */
6523 if (inner1
== true_test_var
)
6524 return (is_and
? var
: inner1
);
6525 else if (inner2
== true_test_var
)
6526 return (is_and
? var
: inner2
);
6527 else if (inner1
== false_test_var
)
6529 ? boolean_false_node
6530 : and_var_with_comparison (type
, inner2
, false, code2
, op2a
,
6532 else if (inner2
== false_test_var
)
6534 ? boolean_false_node
6535 : and_var_with_comparison (type
, inner1
, false, code2
, op2a
,
6538 /* Next, redistribute/reassociate the AND across the inner tests.
6539 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6540 if (TREE_CODE (inner1
) == SSA_NAME
6541 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
6542 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
6543 && (t
= maybe_fold_and_comparisons (type
, gimple_assign_rhs_code (s
),
6544 gimple_assign_rhs1 (s
),
6545 gimple_assign_rhs2 (s
),
6546 code2
, op2a
, op2b
)))
6548 /* Handle the AND case, where we are reassociating:
6549 (inner1 AND inner2) AND (op2a code2 op2b)
6551 If the partial result t is a constant, we win. Otherwise
6552 continue on to try reassociating with the other inner test. */
6555 if (integer_onep (t
))
6557 else if (integer_zerop (t
))
6558 return boolean_false_node
;
6561 /* Handle the OR case, where we are redistributing:
6562 (inner1 OR inner2) AND (op2a code2 op2b)
6563 => (t OR (inner2 AND (op2a code2 op2b))) */
6564 else if (integer_onep (t
))
6565 return boolean_true_node
;
6567 /* Save partial result for later. */
6571 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6572 if (TREE_CODE (inner2
) == SSA_NAME
6573 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
6574 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
6575 && (t
= maybe_fold_and_comparisons (type
, gimple_assign_rhs_code (s
),
6576 gimple_assign_rhs1 (s
),
6577 gimple_assign_rhs2 (s
),
6578 code2
, op2a
, op2b
)))
6580 /* Handle the AND case, where we are reassociating:
6581 (inner1 AND inner2) AND (op2a code2 op2b)
6582 => (inner1 AND t) */
6585 if (integer_onep (t
))
6587 else if (integer_zerop (t
))
6588 return boolean_false_node
;
6589 /* If both are the same, we can apply the identity
6591 else if (partial
&& same_bool_result_p (t
, partial
))
6595 /* Handle the OR case, where we are redistributing:
6596 (inner1 OR inner2) AND (op2a code2 op2b)
6597 => (t OR (inner1 AND (op2a code2 op2b)))
6598 => (t OR partial) */
6601 if (integer_onep (t
))
6602 return boolean_true_node
;
6605 /* We already got a simplification for the other
6606 operand to the redistributed OR expression. The
6607 interesting case is when at least one is false.
6608 Or, if both are the same, we can apply the identity
6610 if (integer_zerop (partial
))
6612 else if (integer_zerop (t
))
6614 else if (same_bool_result_p (t
, partial
))
6623 /* Try to simplify the AND of two comparisons defined by
6624 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6625 If this can be done without constructing an intermediate value,
6626 return the resulting tree; otherwise NULL_TREE is returned.
6627 This function is deliberately asymmetric as it recurses on SSA_DEFs
6628 in the first comparison but not the second. */
/* NOTE(review): damaged extraction -- braces, `case GIMPLE_ASSIGN:' /
   `case GIMPLE_PHI:' labels, several `return' lines and the declarations
   of `i'/`temp' are missing.  Code kept byte-for-byte; only comments
   added.  */
6631 and_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
6632 enum tree_code code2
, tree op2a
, tree op2b
)
6634 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
6636 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6637 if (operand_equal_p (op1a
, op2a
, 0)
6638 && operand_equal_p (op1b
, op2b
, 0))
6640 /* Result will be either NULL_TREE, or a combined comparison. */
6641 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
6642 TRUTH_ANDIF_EXPR
, code1
, code2
,
6643 truth_type
, op1a
, op1b
);
6648 /* Likewise the swapped case of the above. */
6649 if (operand_equal_p (op1a
, op2b
, 0)
6650 && operand_equal_p (op1b
, op2a
, 0))
6652 /* Result will be either NULL_TREE, or a combined comparison. */
6653 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
6654 TRUTH_ANDIF_EXPR
, code1
,
6655 swap_tree_comparison (code2
),
6656 truth_type
, op1a
, op1b
);
6661 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6662 NAME's definition is a truth value. See if there are any simplifications
6663 that can be done against the NAME's definition. */
6664 if (TREE_CODE (op1a
) == SSA_NAME
6665 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
6666 && (integer_zerop (op1b
) || integer_onep (op1b
)))
/* (NAME == 0) and (NAME != 1) mean the first test is !NAME.  */
6668 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
6669 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
6670 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
6671 switch (gimple_code (stmt
))
6674 /* Try to simplify by copy-propagating the definition. */
6675 return and_var_with_comparison (type
, op1a
, invert
, code2
, op2a
,
6679 /* If every argument to the PHI produces the same result when
6680 ANDed with the second comparison, we win.
6681 Do not do this unless the type is bool since we need a bool
6682 result here anyway. */
6683 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
6685 tree result
= NULL_TREE
;
6687 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
6689 tree arg
= gimple_phi_arg_def (stmt
, i
);
6691 /* If this PHI has itself as an argument, ignore it.
6692 If all the other args produce the same result,
6694 if (arg
== gimple_phi_result (stmt
))
6696 else if (TREE_CODE (arg
) == INTEGER_CST
)
/* A constant-false arg (after inversion) forces the whole AND
   to false for that edge.  */
6698 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
6701 result
= boolean_false_node
;
6702 else if (!integer_zerop (result
))
6706 result
= fold_build2 (code2
, boolean_type_node
,
6708 else if (!same_bool_comparison_p (result
,
6712 else if (TREE_CODE (arg
) == SSA_NAME
6713 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
6716 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
6717 /* In simple cases we can look through PHI nodes,
6718 but we have to be careful with loops.
6720 if (! dom_info_available_p (CDI_DOMINATORS
)
6721 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
6722 || dominated_by_p (CDI_DOMINATORS
,
6723 gimple_bb (def_stmt
),
6726 temp
= and_var_with_comparison (type
, arg
, invert
, code2
,
6732 else if (!same_bool_result_p (result
, temp
))
6748 /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
6749 : try to simplify the AND/OR (CODE is BIT_AND_EXPR or BIT_IOR_EXPR) of the
6750 comparisons (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) via match.pd.
6751 Return NULL_EXPR if we can't simplify this to a single expression.
6752 As we are going to lower the cost of building SSA names / gimple stmts
6753 significantly, we need to allocate them on the stack.  This will cause
   the code to be a bit ugly.
   NOTE(review): damaged extraction -- the `op2b' parameter line, the
   `gassign *stmt1'/`stmt2' declaration heads, braces and the trailing
   `return NULL_TREE;' are missing.  Code kept byte-for-byte.  */
6756 maybe_fold_comparisons_from_match_pd (tree type
, enum tree_code code
,
6757 enum tree_code code1
,
6758 tree op1a
, tree op1b
,
6759 enum tree_code code2
, tree op2a
,
6762 /* Allocate gimple stmt1 on the stack. */
6764 = (gassign
*) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN
, 3));
6765 gimple_init (stmt1
, GIMPLE_ASSIGN
, 3);
6766 gimple_assign_set_rhs_code (stmt1
, code1
);
6767 gimple_assign_set_rhs1 (stmt1
, op1a
);
6768 gimple_assign_set_rhs2 (stmt1
, op1b
);
6770 /* Allocate gimple stmt2 on the stack. */
6772 = (gassign
*) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN
, 3));
6773 gimple_init (stmt2
, GIMPLE_ASSIGN
, 3);
6774 gimple_assign_set_rhs_code (stmt2
, code2
);
6775 gimple_assign_set_rhs1 (stmt2
, op2a
);
6776 gimple_assign_set_rhs2 (stmt2
, op2b
);
6778 /* Allocate SSA names(lhs1) on the stack. */
6779 tree lhs1
= (tree
)XALLOCA (tree_ssa_name
);
6780 memset (lhs1
, 0, sizeof (tree_ssa_name
));
6781 TREE_SET_CODE (lhs1
, SSA_NAME
);
6782 TREE_TYPE (lhs1
) = type
;
6783 init_ssa_name_imm_use (lhs1
);
6785 /* Allocate SSA names(lhs2) on the stack. */
6786 tree lhs2
= (tree
)XALLOCA (tree_ssa_name
);
6787 memset (lhs2
, 0, sizeof (tree_ssa_name
));
6788 TREE_SET_CODE (lhs2
, SSA_NAME
);
6789 TREE_TYPE (lhs2
) = type
;
6790 init_ssa_name_imm_use (lhs2
);
6792 gimple_assign_set_lhs (stmt1
, lhs1
);
6793 gimple_assign_set_lhs (stmt2
, lhs2
);
/* Build `lhs1 CODE lhs2' and let the match.pd machinery simplify it,
   following all SSA edges (which reach the stack-allocated defs).  */
6795 gimple_match_op
op (gimple_match_cond::UNCOND
, code
,
6796 type
, gimple_assign_lhs (stmt1
),
6797 gimple_assign_lhs (stmt2
));
6798 if (op
.resimplify (NULL
, follow_all_ssa_edges
))
6800 if (gimple_simplified_result_is_gimple_val (&op
))
6802 tree res
= op
.ops
[0];
/* The fake lhs SSA names must not escape; translate them back to the
   original comparisons.  */
6804 return build2 (code1
, type
, op1a
, op1b
);
6805 else if (res
== lhs2
)
6806 return build2 (code2
, type
, op2a
, op2b
);
6810 else if (op
.code
.is_tree_code ()
6811 && TREE_CODE_CLASS ((tree_code
)op
.code
) == tcc_comparison
)
6813 tree op0
= op
.ops
[0];
6814 tree op1
= op
.ops
[1];
/* Reject results still referencing the stack-allocated SSA names.  */
6815 if (op0
== lhs1
|| op0
== lhs2
|| op1
== lhs1
|| op1
== lhs2
)
6816 return NULL_TREE
; /* not simple */
6818 return build2 ((enum tree_code
)op
.code
, op
.type
, op0
, op1
);
6825 /* Try to simplify the AND of two comparisons, specified by
6826 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6827 If this can be simplified to a single expression (without requiring
6828 introducing more SSA variables to hold intermediate values),
6829 return the resulting tree. Otherwise return NULL_TREE.
6830 If the result expression is non-null, it has boolean type. */
/* NOTE(review): damaged extraction -- the `return t;' lines following each
   `if' and the final closing are missing.  Tries and_comparisons_1 both
   ways round, then the match.pd-based helper.  Code kept byte-for-byte.  */
6833 maybe_fold_and_comparisons (tree type
,
6834 enum tree_code code1
, tree op1a
, tree op1b
,
6835 enum tree_code code2
, tree op2a
, tree op2b
)
6837 if (tree t
= and_comparisons_1 (type
, code1
, op1a
, op1b
, code2
, op2a
, op2b
))
/* and_comparisons_1 is asymmetric (recurses only on the first operand),
   so also try with the operands swapped.  */
6840 if (tree t
= and_comparisons_1 (type
, code2
, op2a
, op2b
, code1
, op1a
, op1b
))
6843 if (tree t
= maybe_fold_comparisons_from_match_pd (type
, BIT_AND_EXPR
, code1
,
6844 op1a
, op1b
, code2
, op2a
,
6851 /* Helper function for or_comparisons_1: try to simplify the OR of the
6852 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6853 If INVERT is true, invert the value of VAR before doing the OR.
6854 Return NULL_EXPR if we can't simplify this to a single expression. */
/* NOTE(review): damaged extraction -- the declaration of `t', the return
   for the non-assign case, and the `if (invert)' / `else' heads are
   missing.  Dual of and_var_with_comparison.  Code kept byte-for-byte.  */
6857 or_var_with_comparison (tree type
, tree var
, bool invert
,
6858 enum tree_code code2
, tree op2a
, tree op2b
)
6861 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
6863 /* We can only deal with variables whose definitions are assignments. */
6864 if (!is_gimple_assign (stmt
))
6867 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6868 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6869 Then we only have to consider the simpler non-inverted cases. */
6871 t
= and_var_with_comparison_1 (type
, stmt
,
6872 invert_tree_comparison (code2
, false),
6875 t
= or_var_with_comparison_1 (type
, stmt
, code2
, op2a
, op2b
);
/* canonicalize_bool also applies the pending inversion to T.  */
6876 return canonicalize_bool (t
, invert
);
6879 /* Try to simplify the OR of the ssa variable defined by the assignment
6880 STMT with the comparison specified by (OP2A CODE2 OP2B).
6881 Return NULL_EXPR if we can't simplify this to a single expression. */
6884 or_var_with_comparison_1 (tree type
, gimple
*stmt
,
6885 enum tree_code code2
, tree op2a
, tree op2b
)
6887 tree var
= gimple_assign_lhs (stmt
);
6888 tree true_test_var
= NULL_TREE
;
6889 tree false_test_var
= NULL_TREE
;
6890 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
6892 /* Check for identities like (var OR (var != 0)) => true . */
6893 if (TREE_CODE (op2a
) == SSA_NAME
6894 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
6896 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
6897 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
6899 true_test_var
= op2a
;
6900 if (var
== true_test_var
)
6903 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
6904 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
6906 false_test_var
= op2a
;
6907 if (var
== false_test_var
)
6908 return boolean_true_node
;
6912 /* If the definition is a comparison, recurse on it. */
6913 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
6915 tree t
= or_comparisons_1 (type
, innercode
,
6916 gimple_assign_rhs1 (stmt
),
6917 gimple_assign_rhs2 (stmt
),
6925 /* If the definition is an AND or OR expression, we may be able to
6926 simplify by reassociating. */
6927 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
6928 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
6930 tree inner1
= gimple_assign_rhs1 (stmt
);
6931 tree inner2
= gimple_assign_rhs2 (stmt
);
6934 tree partial
= NULL_TREE
;
6935 bool is_or
= (innercode
== BIT_IOR_EXPR
);
6937 /* Check for boolean identities that don't require recursive examination
6939 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
6940 inner1 OR (inner1 AND inner2) => inner1
6941 !inner1 OR (inner1 OR inner2) => true
6942 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
6944 if (inner1
== true_test_var
)
6945 return (is_or
? var
: inner1
);
6946 else if (inner2
== true_test_var
)
6947 return (is_or
? var
: inner2
);
6948 else if (inner1
== false_test_var
)
6951 : or_var_with_comparison (type
, inner2
, false, code2
, op2a
,
6953 else if (inner2
== false_test_var
)
6956 : or_var_with_comparison (type
, inner1
, false, code2
, op2a
,
6959 /* Next, redistribute/reassociate the OR across the inner tests.
6960 Compute the first partial result, (inner1 OR (op2a code op2b)) */
6961 if (TREE_CODE (inner1
) == SSA_NAME
6962 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
6963 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
6964 && (t
= maybe_fold_or_comparisons (type
, gimple_assign_rhs_code (s
),
6965 gimple_assign_rhs1 (s
),
6966 gimple_assign_rhs2 (s
),
6967 code2
, op2a
, op2b
)))
6969 /* Handle the OR case, where we are reassociating:
6970 (inner1 OR inner2) OR (op2a code2 op2b)
6972 If the partial result t is a constant, we win. Otherwise
6973 continue on to try reassociating with the other inner test. */
6976 if (integer_onep (t
))
6977 return boolean_true_node
;
6978 else if (integer_zerop (t
))
6982 /* Handle the AND case, where we are redistributing:
6983 (inner1 AND inner2) OR (op2a code2 op2b)
6984 => (t AND (inner2 OR (op2a code op2b))) */
6985 else if (integer_zerop (t
))
6986 return boolean_false_node
;
6988 /* Save partial result for later. */
6992 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
6993 if (TREE_CODE (inner2
) == SSA_NAME
6994 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
6995 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
6996 && (t
= maybe_fold_or_comparisons (type
, gimple_assign_rhs_code (s
),
6997 gimple_assign_rhs1 (s
),
6998 gimple_assign_rhs2 (s
),
6999 code2
, op2a
, op2b
)))
7001 /* Handle the OR case, where we are reassociating:
7002 (inner1 OR inner2) OR (op2a code2 op2b)
7004 => (t OR partial) */
7007 if (integer_zerop (t
))
7009 else if (integer_onep (t
))
7010 return boolean_true_node
;
7011 /* If both are the same, we can apply the identity
7013 else if (partial
&& same_bool_result_p (t
, partial
))
7017 /* Handle the AND case, where we are redistributing:
7018 (inner1 AND inner2) OR (op2a code2 op2b)
7019 => (t AND (inner1 OR (op2a code2 op2b)))
7020 => (t AND partial) */
7023 if (integer_zerop (t
))
7024 return boolean_false_node
;
7027 /* We already got a simplification for the other
7028 operand to the redistributed AND expression. The
7029 interesting case is when at least one is true.
7030 Or, if both are the same, we can apply the identity
7032 if (integer_onep (partial
))
7034 else if (integer_onep (t
))
7036 else if (same_bool_result_p (t
, partial
))
7045 /* Try to simplify the OR of two comparisons defined by
7046 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7047 If this can be done without constructing an intermediate value,
7048 return the resulting tree; otherwise NULL_TREE is returned.
7049 This function is deliberately asymmetric as it recurses on SSA_DEFs
7050 in the first comparison but not the second. */
7053 or_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
7054 enum tree_code code2
, tree op2a
, tree op2b
)
7056 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
7058 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7059 if (operand_equal_p (op1a
, op2a
, 0)
7060 && operand_equal_p (op1b
, op2b
, 0))
7062 /* Result will be either NULL_TREE, or a combined comparison. */
7063 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
7064 TRUTH_ORIF_EXPR
, code1
, code2
,
7065 truth_type
, op1a
, op1b
);
7070 /* Likewise the swapped case of the above. */
7071 if (operand_equal_p (op1a
, op2b
, 0)
7072 && operand_equal_p (op1b
, op2a
, 0))
7074 /* Result will be either NULL_TREE, or a combined comparison. */
7075 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
7076 TRUTH_ORIF_EXPR
, code1
,
7077 swap_tree_comparison (code2
),
7078 truth_type
, op1a
, op1b
);
7083 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7084 NAME's definition is a truth value. See if there are any simplifications
7085 that can be done against the NAME's definition. */
7086 if (TREE_CODE (op1a
) == SSA_NAME
7087 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
7088 && (integer_zerop (op1b
) || integer_onep (op1b
)))
7090 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
7091 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
7092 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
7093 switch (gimple_code (stmt
))
7096 /* Try to simplify by copy-propagating the definition. */
7097 return or_var_with_comparison (type
, op1a
, invert
, code2
, op2a
,
7101 /* If every argument to the PHI produces the same result when
7102 ORed with the second comparison, we win.
7103 Do not do this unless the type is bool since we need a bool
7104 result here anyway. */
7105 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
7107 tree result
= NULL_TREE
;
7109 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
7111 tree arg
= gimple_phi_arg_def (stmt
, i
);
7113 /* If this PHI has itself as an argument, ignore it.
7114 If all the other args produce the same result,
7116 if (arg
== gimple_phi_result (stmt
))
7118 else if (TREE_CODE (arg
) == INTEGER_CST
)
7120 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
7123 result
= boolean_true_node
;
7124 else if (!integer_onep (result
))
7128 result
= fold_build2 (code2
, boolean_type_node
,
7130 else if (!same_bool_comparison_p (result
,
7134 else if (TREE_CODE (arg
) == SSA_NAME
7135 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
7138 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
7139 /* In simple cases we can look through PHI nodes,
7140 but we have to be careful with loops.
7142 if (! dom_info_available_p (CDI_DOMINATORS
)
7143 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
7144 || dominated_by_p (CDI_DOMINATORS
,
7145 gimple_bb (def_stmt
),
7148 temp
= or_var_with_comparison (type
, arg
, invert
, code2
,
7154 else if (!same_bool_result_p (result
, temp
))
7170 /* Try to simplify the OR of two comparisons, specified by
7171 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
7172 If this can be simplified to a single expression (without requiring
7173 introducing more SSA variables to hold intermediate values),
7174 return the resulting tree. Otherwise return NULL_TREE.
7175 If the result expression is non-null, it has boolean type. */
7178 maybe_fold_or_comparisons (tree type
,
7179 enum tree_code code1
, tree op1a
, tree op1b
,
7180 enum tree_code code2
, tree op2a
, tree op2b
)
7182 if (tree t
= or_comparisons_1 (type
, code1
, op1a
, op1b
, code2
, op2a
, op2b
))
7185 if (tree t
= or_comparisons_1 (type
, code2
, op2a
, op2b
, code1
, op1a
, op1b
))
7188 if (tree t
= maybe_fold_comparisons_from_match_pd (type
, BIT_IOR_EXPR
, code1
,
7189 op1a
, op1b
, code2
, op2a
,
7196 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7198 Either NULL_TREE, a simplified but non-constant or a constant
7201 ??? This should go into a gimple-fold-inline.h file to be eventually
7202 privatized with the single valueize function used in the various TUs
7203 to avoid the indirect function call overhead. */
7206 gimple_fold_stmt_to_constant_1 (gimple
*stmt
, tree (*valueize
) (tree
),
7207 tree (*gvalueize
) (tree
))
7209 gimple_match_op res_op
;
7210 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7211 edges if there are intermediate VARYING defs. For this reason
7212 do not follow SSA edges here even though SCCVN can technically
7213 just deal fine with that. */
7214 if (gimple_simplify (stmt
, &res_op
, NULL
, gvalueize
, valueize
))
7216 tree res
= NULL_TREE
;
7217 if (gimple_simplified_result_is_gimple_val (&res_op
))
7218 res
= res_op
.ops
[0];
7219 else if (mprts_hook
)
7220 res
= mprts_hook (&res_op
);
7223 if (dump_file
&& dump_flags
& TDF_DETAILS
)
7225 fprintf (dump_file
, "Match-and-simplified ");
7226 print_gimple_expr (dump_file
, stmt
, 0, TDF_SLIM
);
7227 fprintf (dump_file
, " to ");
7228 print_generic_expr (dump_file
, res
);
7229 fprintf (dump_file
, "\n");
7235 location_t loc
= gimple_location (stmt
);
7236 switch (gimple_code (stmt
))
7240 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
7242 switch (get_gimple_rhs_class (subcode
))
7244 case GIMPLE_SINGLE_RHS
:
7246 tree rhs
= gimple_assign_rhs1 (stmt
);
7247 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
7249 if (TREE_CODE (rhs
) == SSA_NAME
)
7251 /* If the RHS is an SSA_NAME, return its known constant value,
7253 return (*valueize
) (rhs
);
7255 /* Handle propagating invariant addresses into address
7257 else if (TREE_CODE (rhs
) == ADDR_EXPR
7258 && !is_gimple_min_invariant (rhs
))
7260 poly_int64 offset
= 0;
7262 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
7266 && (CONSTANT_CLASS_P (base
)
7267 || decl_address_invariant_p (base
)))
7268 return build_invariant_address (TREE_TYPE (rhs
),
7271 else if (TREE_CODE (rhs
) == CONSTRUCTOR
7272 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
7273 && known_eq (CONSTRUCTOR_NELTS (rhs
),
7274 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
7279 nelts
= CONSTRUCTOR_NELTS (rhs
);
7280 tree_vector_builder
vec (TREE_TYPE (rhs
), nelts
, 1);
7281 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
7283 val
= (*valueize
) (val
);
7284 if (TREE_CODE (val
) == INTEGER_CST
7285 || TREE_CODE (val
) == REAL_CST
7286 || TREE_CODE (val
) == FIXED_CST
)
7287 vec
.quick_push (val
);
7292 return vec
.build ();
7294 if (subcode
== OBJ_TYPE_REF
)
7296 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
7297 /* If callee is constant, we can fold away the wrapper. */
7298 if (is_gimple_min_invariant (val
))
7302 if (kind
== tcc_reference
)
7304 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
7305 || TREE_CODE (rhs
) == REALPART_EXPR
7306 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
7307 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
7309 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
7310 return fold_unary_loc (EXPR_LOCATION (rhs
),
7312 TREE_TYPE (rhs
), val
);
7314 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
7315 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
7317 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
7318 return fold_ternary_loc (EXPR_LOCATION (rhs
),
7320 TREE_TYPE (rhs
), val
,
7321 TREE_OPERAND (rhs
, 1),
7322 TREE_OPERAND (rhs
, 2));
7324 else if (TREE_CODE (rhs
) == MEM_REF
7325 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
7327 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
7328 if (TREE_CODE (val
) == ADDR_EXPR
7329 && is_gimple_min_invariant (val
))
7331 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
7333 TREE_OPERAND (rhs
, 1));
7338 return fold_const_aggregate_ref_1 (rhs
, valueize
);
7340 else if (kind
== tcc_declaration
)
7341 return get_symbol_constant_value (rhs
);
7345 case GIMPLE_UNARY_RHS
:
7348 case GIMPLE_BINARY_RHS
:
7349 /* Translate &x + CST into an invariant form suitable for
7350 further propagation. */
7351 if (subcode
== POINTER_PLUS_EXPR
)
7353 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
7354 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
7355 if (TREE_CODE (op0
) == ADDR_EXPR
7356 && TREE_CODE (op1
) == INTEGER_CST
)
7358 tree off
= fold_convert (ptr_type_node
, op1
);
7360 (loc
, ADDR_EXPR
, TREE_TYPE (op0
),
7361 fold_build2 (MEM_REF
,
7362 TREE_TYPE (TREE_TYPE (op0
)),
7363 unshare_expr (op0
), off
));
7366 /* Canonicalize bool != 0 and bool == 0 appearing after
7367 valueization. While gimple_simplify handles this
7368 it can get confused by the ~X == 1 -> X == 0 transform
7369 which we cant reduce to a SSA name or a constant
7370 (and we have no way to tell gimple_simplify to not
7371 consider those transforms in the first place). */
7372 else if (subcode
== EQ_EXPR
7373 || subcode
== NE_EXPR
)
7375 tree lhs
= gimple_assign_lhs (stmt
);
7376 tree op0
= gimple_assign_rhs1 (stmt
);
7377 if (useless_type_conversion_p (TREE_TYPE (lhs
),
7380 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
7381 op0
= (*valueize
) (op0
);
7382 if (TREE_CODE (op0
) == INTEGER_CST
)
7383 std::swap (op0
, op1
);
7384 if (TREE_CODE (op1
) == INTEGER_CST
7385 && ((subcode
== NE_EXPR
&& integer_zerop (op1
))
7386 || (subcode
== EQ_EXPR
&& integer_onep (op1
))))
7392 case GIMPLE_TERNARY_RHS
:
7394 /* Handle ternary operators that can appear in GIMPLE form. */
7395 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
7396 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
7397 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
7398 return fold_ternary_loc (loc
, subcode
,
7399 gimple_expr_type (stmt
), op0
, op1
, op2
);
7410 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
7412 if (gimple_call_internal_p (stmt
))
7414 enum tree_code subcode
= ERROR_MARK
;
7415 switch (gimple_call_internal_fn (stmt
))
7417 case IFN_UBSAN_CHECK_ADD
:
7418 subcode
= PLUS_EXPR
;
7420 case IFN_UBSAN_CHECK_SUB
:
7421 subcode
= MINUS_EXPR
;
7423 case IFN_UBSAN_CHECK_MUL
:
7424 subcode
= MULT_EXPR
;
7426 case IFN_BUILTIN_EXPECT
:
7428 tree arg0
= gimple_call_arg (stmt
, 0);
7429 tree op0
= (*valueize
) (arg0
);
7430 if (TREE_CODE (op0
) == INTEGER_CST
)
7437 tree arg0
= gimple_call_arg (stmt
, 0);
7438 tree arg1
= gimple_call_arg (stmt
, 1);
7439 tree op0
= (*valueize
) (arg0
);
7440 tree op1
= (*valueize
) (arg1
);
7442 if (TREE_CODE (op0
) != INTEGER_CST
7443 || TREE_CODE (op1
) != INTEGER_CST
)
7448 /* x * 0 = 0 * x = 0 without overflow. */
7449 if (integer_zerop (op0
) || integer_zerop (op1
))
7450 return build_zero_cst (TREE_TYPE (arg0
));
7453 /* y - y = 0 without overflow. */
7454 if (operand_equal_p (op0
, op1
, 0))
7455 return build_zero_cst (TREE_TYPE (arg0
));
7462 = fold_binary_loc (loc
, subcode
, TREE_TYPE (arg0
), op0
, op1
);
7464 && TREE_CODE (res
) == INTEGER_CST
7465 && !TREE_OVERFLOW (res
))
7470 fn
= (*valueize
) (gimple_call_fn (stmt
));
7471 if (TREE_CODE (fn
) == ADDR_EXPR
7472 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
7473 && fndecl_built_in_p (TREE_OPERAND (fn
, 0))
7474 && gimple_builtin_call_types_compatible_p (stmt
,
7475 TREE_OPERAND (fn
, 0)))
7477 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
7480 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
7481 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
7482 retval
= fold_builtin_call_array (loc
,
7483 gimple_call_return_type (call_stmt
),
7484 fn
, gimple_call_num_args (stmt
), args
);
7487 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7488 STRIP_NOPS (retval
);
7489 retval
= fold_convert (gimple_call_return_type (call_stmt
),
7502 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7503 Returns NULL_TREE if folding to a constant is not possible, otherwise
7504 returns a constant according to is_gimple_min_invariant. */
7507 gimple_fold_stmt_to_constant (gimple
*stmt
, tree (*valueize
) (tree
))
7509 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
7510 if (res
&& is_gimple_min_invariant (res
))
7516 /* The following set of functions are supposed to fold references using
7517 their constant initializers. */
7519 /* See if we can find constructor defining value of BASE.
7520 When we know the consructor with constant offset (such as
7521 base is array[40] and we do know constructor of array), then
7522 BIT_OFFSET is adjusted accordingly.
7524 As a special case, return error_mark_node when constructor
7525 is not explicitly available, but it is known to be zero
7526 such as 'static const int a;'. */
7528 get_base_constructor (tree base
, poly_int64_pod
*bit_offset
,
7529 tree (*valueize
)(tree
))
7531 poly_int64 bit_offset2
, size
, max_size
;
7534 if (TREE_CODE (base
) == MEM_REF
)
7536 poly_offset_int boff
= *bit_offset
+ mem_ref_offset (base
) * BITS_PER_UNIT
;
7537 if (!boff
.to_shwi (bit_offset
))
7541 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
7542 base
= valueize (TREE_OPERAND (base
, 0));
7543 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
7545 base
= TREE_OPERAND (base
, 0);
7548 && TREE_CODE (base
) == SSA_NAME
)
7549 base
= valueize (base
);
7551 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7552 DECL_INITIAL. If BASE is a nested reference into another
7553 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7554 the inner reference. */
7555 switch (TREE_CODE (base
))
7560 tree init
= ctor_for_folding (base
);
7562 /* Our semantic is exact opposite of ctor_for_folding;
7563 NULL means unknown, while error_mark_node is 0. */
7564 if (init
== error_mark_node
)
7567 return error_mark_node
;
7571 case VIEW_CONVERT_EXPR
:
7572 return get_base_constructor (TREE_OPERAND (base
, 0),
7573 bit_offset
, valueize
);
7577 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
,
7579 if (!known_size_p (max_size
) || maybe_ne (size
, max_size
))
7581 *bit_offset
+= bit_offset2
;
7582 return get_base_constructor (base
, bit_offset
, valueize
);
7588 if (CONSTANT_CLASS_P (base
))
7595 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
7596 to the memory at bit OFFSET. When non-null, TYPE is the expected
7597 type of the reference; otherwise the type of the referenced element
7598 is used instead. When SIZE is zero, attempt to fold a reference to
7599 the entire element which OFFSET refers to. Increment *SUBOFF by
7600 the bit offset of the accessed element. */
7603 fold_array_ctor_reference (tree type
, tree ctor
,
7604 unsigned HOST_WIDE_INT offset
,
7605 unsigned HOST_WIDE_INT size
,
7607 unsigned HOST_WIDE_INT
*suboff
)
7609 offset_int low_bound
;
7610 offset_int elt_size
;
7611 offset_int access_index
;
7612 tree domain_type
= NULL_TREE
;
7613 HOST_WIDE_INT inner_offset
;
7615 /* Compute low bound and elt size. */
7616 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
7617 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
7618 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
7620 /* Static constructors for variably sized objects make no sense. */
7621 if (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) != INTEGER_CST
)
7623 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
7627 /* Static constructors for variably sized objects make no sense. */
7628 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
)))) != INTEGER_CST
)
7630 elt_size
= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
7632 /* When TYPE is non-null, verify that it specifies a constant-sized
7633 access of a multiple of the array element size. Avoid division
7634 by zero below when ELT_SIZE is zero, such as with the result of
7635 an initializer for a zero-length array or an empty struct. */
7638 && (!TYPE_SIZE_UNIT (type
)
7639 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
)))
7642 /* Compute the array index we look for. */
7643 access_index
= wi::udiv_trunc (offset_int (offset
/ BITS_PER_UNIT
),
7645 access_index
+= low_bound
;
7647 /* And offset within the access. */
7648 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
7650 unsigned HOST_WIDE_INT elt_sz
= elt_size
.to_uhwi ();
7651 if (size
> elt_sz
* BITS_PER_UNIT
)
7653 /* native_encode_expr constraints. */
7654 if (size
> MAX_BITSIZE_MODE_ANY_MODE
7655 || size
% BITS_PER_UNIT
!= 0
7656 || inner_offset
% BITS_PER_UNIT
!= 0
7657 || elt_sz
> MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
)
7661 tree val
= get_array_ctor_element_at_index (ctor
, access_index
,
7663 if (!val
&& ctor_idx
>= CONSTRUCTOR_NELTS (ctor
))
7664 return build_zero_cst (type
);
7666 /* native-encode adjacent ctor elements. */
7667 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
7668 unsigned bufoff
= 0;
7669 offset_int index
= 0;
7670 offset_int max_index
= access_index
;
7671 constructor_elt
*elt
= CONSTRUCTOR_ELT (ctor
, ctor_idx
);
7673 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
7674 else if (!CONSTANT_CLASS_P (val
))
7678 else if (TREE_CODE (elt
->index
) == RANGE_EXPR
)
7680 index
= wi::to_offset (TREE_OPERAND (elt
->index
, 0));
7681 max_index
= wi::to_offset (TREE_OPERAND (elt
->index
, 1));
7684 index
= max_index
= wi::to_offset (elt
->index
);
7685 index
= wi::umax (index
, access_index
);
7688 if (bufoff
+ elt_sz
> sizeof (buf
))
7689 elt_sz
= sizeof (buf
) - bufoff
;
7690 int len
= native_encode_expr (val
, buf
+ bufoff
, elt_sz
,
7691 inner_offset
/ BITS_PER_UNIT
);
7692 if (len
!= (int) elt_sz
- inner_offset
/ BITS_PER_UNIT
)
7698 if (wi::cmpu (access_index
, index
) == 0)
7700 else if (wi::cmpu (access_index
, max_index
) > 0)
7703 if (ctor_idx
>= CONSTRUCTOR_NELTS (ctor
))
7705 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
7710 elt
= CONSTRUCTOR_ELT (ctor
, ctor_idx
);
7712 max_index
= access_index
;
7715 else if (TREE_CODE (elt
->index
) == RANGE_EXPR
)
7717 index
= wi::to_offset (TREE_OPERAND (elt
->index
, 0));
7718 max_index
= wi::to_offset (TREE_OPERAND (elt
->index
, 1));
7721 index
= max_index
= wi::to_offset (elt
->index
);
7722 index
= wi::umax (index
, access_index
);
7723 if (wi::cmpu (access_index
, index
) == 0)
7726 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
7730 while (bufoff
< size
/ BITS_PER_UNIT
);
7732 return native_interpret_expr (type
, buf
, size
/ BITS_PER_UNIT
);
7735 if (tree val
= get_array_ctor_element_at_index (ctor
, access_index
))
7737 if (!size
&& TREE_CODE (val
) != CONSTRUCTOR
)
7739 /* For the final reference to the entire accessed element
7740 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
7741 may be null) in favor of the type of the element, and set
7742 SIZE to the size of the accessed element. */
7744 type
= TREE_TYPE (val
);
7745 size
= elt_sz
* BITS_PER_UNIT
;
7747 else if (size
&& access_index
< CONSTRUCTOR_NELTS (ctor
) - 1
7748 && TREE_CODE (val
) == CONSTRUCTOR
7749 && (elt_sz
* BITS_PER_UNIT
- inner_offset
) < size
)
7750 /* If this isn't the last element in the CTOR and a CTOR itself
7751 and it does not cover the whole object we are requesting give up
7752 since we're not set up for combining from multiple CTORs. */
7755 *suboff
+= access_index
.to_uhwi () * elt_sz
* BITS_PER_UNIT
;
7756 return fold_ctor_reference (type
, val
, inner_offset
, size
, from_decl
,
7760 /* Memory not explicitly mentioned in constructor is 0 (or
7761 the reference is out of range). */
7762 return type
? build_zero_cst (type
) : NULL_TREE
;
7765 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
7766 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
7767 is the expected type of the reference; otherwise the type of
7768 the referenced member is used instead. When SIZE is zero,
7769 attempt to fold a reference to the entire member which OFFSET
7770 refers to; in this case. Increment *SUBOFF by the bit offset
7771 of the accessed member. */
7774 fold_nonarray_ctor_reference (tree type
, tree ctor
,
7775 unsigned HOST_WIDE_INT offset
,
7776 unsigned HOST_WIDE_INT size
,
7778 unsigned HOST_WIDE_INT
*suboff
)
7780 unsigned HOST_WIDE_INT cnt
;
7783 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
7786 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
7787 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
7788 tree field_size
= DECL_SIZE (cfield
);
7792 /* Determine the size of the flexible array member from
7793 the size of the initializer provided for it. */
7794 field_size
= TYPE_SIZE (TREE_TYPE (cval
));
7797 /* Variable sized objects in static constructors makes no sense,
7798 but field_size can be NULL for flexible array members. */
7799 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
7800 && TREE_CODE (byte_offset
) == INTEGER_CST
7801 && (field_size
!= NULL_TREE
7802 ? TREE_CODE (field_size
) == INTEGER_CST
7803 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
7805 /* Compute bit offset of the field. */
7806 offset_int bitoffset
7807 = (wi::to_offset (field_offset
)
7808 + (wi::to_offset (byte_offset
) << LOG2_BITS_PER_UNIT
));
7809 /* Compute bit offset where the field ends. */
7810 offset_int bitoffset_end
;
7811 if (field_size
!= NULL_TREE
)
7812 bitoffset_end
= bitoffset
+ wi::to_offset (field_size
);
7816 /* Compute the bit offset of the end of the desired access.
7817 As a special case, if the size of the desired access is
7818 zero, assume the access is to the entire field (and let
7819 the caller make any necessary adjustments by storing
7820 the actual bounds of the field in FIELDBOUNDS). */
7821 offset_int access_end
= offset_int (offset
);
7825 access_end
= bitoffset_end
;
7827 /* Is there any overlap between the desired access at
7828 [OFFSET, OFFSET+SIZE) and the offset of the field within
7829 the object at [BITOFFSET, BITOFFSET_END)? */
7830 if (wi::cmps (access_end
, bitoffset
) > 0
7831 && (field_size
== NULL_TREE
7832 || wi::lts_p (offset
, bitoffset_end
)))
7834 *suboff
+= bitoffset
.to_uhwi ();
7836 if (!size
&& TREE_CODE (cval
) != CONSTRUCTOR
)
7838 /* For the final reference to the entire accessed member
7839 (SIZE is zero), reset OFFSET, disegard TYPE (which may
7840 be null) in favor of the type of the member, and set
7841 SIZE to the size of the accessed member. */
7842 offset
= bitoffset
.to_uhwi ();
7843 type
= TREE_TYPE (cval
);
7844 size
= (bitoffset_end
- bitoffset
).to_uhwi ();
7847 /* We do have overlap. Now see if the field is large enough
7848 to cover the access. Give up for accesses that extend
7849 beyond the end of the object or that span multiple fields. */
7850 if (wi::cmps (access_end
, bitoffset_end
) > 0)
7852 if (offset
< bitoffset
)
7855 offset_int inner_offset
= offset_int (offset
) - bitoffset
;
7856 return fold_ctor_reference (type
, cval
,
7857 inner_offset
.to_uhwi (), size
,
7865 return build_zero_cst (type
);
7868 /* CTOR is value initializing memory. Fold a reference of TYPE and
7869 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
7870 is zero, attempt to fold a reference to the entire subobject
7871 which OFFSET refers to. This is used when folding accesses to
7872 string members of aggregates. When non-null, set *SUBOFF to
7873 the bit offset of the accessed subobject. */
7876 fold_ctor_reference (tree type
, tree ctor
, const poly_uint64
&poly_offset
,
7877 const poly_uint64
&poly_size
, tree from_decl
,
7878 unsigned HOST_WIDE_INT
*suboff
/* = NULL */)
7882 /* We found the field with exact match. */
7884 && useless_type_conversion_p (type
, TREE_TYPE (ctor
))
7885 && known_eq (poly_offset
, 0U))
7886 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
7888 /* The remaining optimizations need a constant size and offset. */
7889 unsigned HOST_WIDE_INT size
, offset
;
7890 if (!poly_size
.is_constant (&size
) || !poly_offset
.is_constant (&offset
))
7893 /* We are at the end of walk, see if we can view convert the
7895 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
7896 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
7897 && !compare_tree_int (TYPE_SIZE (type
), size
)
7898 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor
)), size
))
7900 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
7903 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
7905 STRIP_USELESS_TYPE_CONVERSION (ret
);
7909 /* For constants and byte-aligned/sized reads try to go through
7910 native_encode/interpret. */
7911 if (CONSTANT_CLASS_P (ctor
)
7912 && BITS_PER_UNIT
== 8
7913 && offset
% BITS_PER_UNIT
== 0
7914 && offset
/ BITS_PER_UNIT
<= INT_MAX
7915 && size
% BITS_PER_UNIT
== 0
7916 && size
<= MAX_BITSIZE_MODE_ANY_MODE
7917 && can_native_interpret_type_p (type
))
7919 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
7920 int len
= native_encode_expr (ctor
, buf
, size
/ BITS_PER_UNIT
,
7921 offset
/ BITS_PER_UNIT
);
7923 return native_interpret_expr (type
, buf
, len
);
7925 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
7927 unsigned HOST_WIDE_INT dummy
= 0;
7932 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
7933 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
7934 ret
= fold_array_ctor_reference (type
, ctor
, offset
, size
,
7937 ret
= fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
7940 /* Fall back to native_encode_initializer. Needs to be done
7941 only in the outermost fold_ctor_reference call (because it itself
7942 recurses into CONSTRUCTORs) and doesn't update suboff. */
7943 if (ret
== NULL_TREE
7945 && BITS_PER_UNIT
== 8
7946 && offset
% BITS_PER_UNIT
== 0
7947 && offset
/ BITS_PER_UNIT
<= INT_MAX
7948 && size
% BITS_PER_UNIT
== 0
7949 && size
<= MAX_BITSIZE_MODE_ANY_MODE
7950 && can_native_interpret_type_p (type
))
7952 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
7953 int len
= native_encode_initializer (ctor
, buf
, size
/ BITS_PER_UNIT
,
7954 offset
/ BITS_PER_UNIT
);
7956 return native_interpret_expr (type
, buf
, len
);
7965 /* Return the tree representing the element referenced by T if T is an
7966 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
7967 names using VALUEIZE. Return NULL_TREE otherwise. */
7970 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
7972 tree ctor
, idx
, base
;
7973 poly_int64 offset
, size
, max_size
;
7977 if (TREE_THIS_VOLATILE (t
))
7981 return get_symbol_constant_value (t
);
7983 tem
= fold_read_from_constant_string (t
);
7987 switch (TREE_CODE (t
))
7990 case ARRAY_RANGE_REF
:
7991 /* Constant indexes are handled well by get_base_constructor.
7992 Only special case variable offsets.
7993 FIXME: This code can't handle nested references with variable indexes
7994 (they will be handled only by iteration of ccp). Perhaps we can bring
7995 get_ref_base_and_extent here and make it use a valueize callback. */
7996 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
7998 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
7999 && poly_int_tree_p (idx
))
8001 tree low_bound
, unit_size
;
8003 /* If the resulting bit-offset is constant, track it. */
8004 if ((low_bound
= array_ref_low_bound (t
),
8005 poly_int_tree_p (low_bound
))
8006 && (unit_size
= array_ref_element_size (t
),
8007 tree_fits_uhwi_p (unit_size
)))
8009 poly_offset_int woffset
8010 = wi::sext (wi::to_poly_offset (idx
)
8011 - wi::to_poly_offset (low_bound
),
8012 TYPE_PRECISION (sizetype
));
8013 woffset
*= tree_to_uhwi (unit_size
);
8014 woffset
*= BITS_PER_UNIT
;
8015 if (woffset
.to_shwi (&offset
))
8017 base
= TREE_OPERAND (t
, 0);
8018 ctor
= get_base_constructor (base
, &offset
, valueize
);
8019 /* Empty constructor. Always fold to 0. */
8020 if (ctor
== error_mark_node
)
8021 return build_zero_cst (TREE_TYPE (t
));
8022 /* Out of bound array access. Value is undefined,
8024 if (maybe_lt (offset
, 0))
8026 /* We cannot determine ctor. */
8029 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
8030 tree_to_uhwi (unit_size
)
8040 case TARGET_MEM_REF
:
8042 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
, &reverse
);
8043 ctor
= get_base_constructor (base
, &offset
, valueize
);
8045 /* Empty constructor. Always fold to 0. */
8046 if (ctor
== error_mark_node
)
8047 return build_zero_cst (TREE_TYPE (t
));
8048 /* We do not know precise address. */
8049 if (!known_size_p (max_size
) || maybe_ne (max_size
, size
))
8051 /* We cannot determine ctor. */
8055 /* Out of bound array access. Value is undefined, but don't fold. */
8056 if (maybe_lt (offset
, 0))
8059 tem
= fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
, base
);
8063 /* For bit field reads try to read the representative and
8065 if (TREE_CODE (t
) == COMPONENT_REF
8066 && DECL_BIT_FIELD (TREE_OPERAND (t
, 1))
8067 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t
, 1)))
8069 HOST_WIDE_INT csize
, coffset
;
8070 tree field
= TREE_OPERAND (t
, 1);
8071 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
8072 if (INTEGRAL_TYPE_P (TREE_TYPE (repr
))
8073 && size
.is_constant (&csize
)
8074 && offset
.is_constant (&coffset
)
8075 && (coffset
% BITS_PER_UNIT
!= 0
8076 || csize
% BITS_PER_UNIT
!= 0)
8078 && BYTES_BIG_ENDIAN
== WORDS_BIG_ENDIAN
)
8080 poly_int64 bitoffset
;
8081 poly_uint64 field_offset
, repr_offset
;
8082 if (poly_int_tree_p (DECL_FIELD_OFFSET (field
), &field_offset
)
8083 && poly_int_tree_p (DECL_FIELD_OFFSET (repr
), &repr_offset
))
8084 bitoffset
= (field_offset
- repr_offset
) * BITS_PER_UNIT
;
8087 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
))
8088 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
8089 HOST_WIDE_INT bitoff
;
8090 int diff
= (TYPE_PRECISION (TREE_TYPE (repr
))
8091 - TYPE_PRECISION (TREE_TYPE (field
)));
8092 if (bitoffset
.is_constant (&bitoff
)
8097 size
= tree_to_uhwi (DECL_SIZE (repr
));
8099 tem
= fold_ctor_reference (TREE_TYPE (repr
), ctor
, offset
,
8101 if (tem
&& TREE_CODE (tem
) == INTEGER_CST
)
8103 if (!BYTES_BIG_ENDIAN
)
8104 tem
= wide_int_to_tree (TREE_TYPE (field
),
8105 wi::lrshift (wi::to_wide (tem
),
8108 tem
= wide_int_to_tree (TREE_TYPE (field
),
8109 wi::lrshift (wi::to_wide (tem
),
8121 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
8122 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
8123 return fold_build1_loc (EXPR_LOCATION (t
),
8124 TREE_CODE (t
), TREE_TYPE (t
), c
);
8136 fold_const_aggregate_ref (tree t
)
8138 return fold_const_aggregate_ref_1 (t
, NULL
);
8141 /* Lookup virtual method with index TOKEN in a virtual table V
8143 Set CAN_REFER if non-NULL to false if method
8144 is not referable or if the virtual table is ill-formed (such as rewriten
8145 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
8148 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token
,
8150 unsigned HOST_WIDE_INT offset
,
8153 tree vtable
= v
, init
, fn
;
8154 unsigned HOST_WIDE_INT size
;
8155 unsigned HOST_WIDE_INT elt_size
, access_index
;
8161 /* First of all double check we have virtual table. */
8162 if (!VAR_P (v
) || !DECL_VIRTUAL_P (v
))
8164 /* Pass down that we lost track of the target. */
8170 init
= ctor_for_folding (v
);
8172 /* The virtual tables should always be born with constructors
8173 and we always should assume that they are avaialble for
8174 folding. At the moment we do not stream them in all cases,
8175 but it should never happen that ctor seem unreachable. */
8177 if (init
== error_mark_node
)
8179 /* Pass down that we lost track of the target. */
8184 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
8185 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
8186 offset
*= BITS_PER_UNIT
;
8187 offset
+= token
* size
;
8189 /* Lookup the value in the constructor that is assumed to be array.
8190 This is equivalent to
8191 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8192 offset, size, NULL);
8193 but in a constant time. We expect that frontend produced a simple
8194 array without indexed initializers. */
8196 gcc_checking_assert (TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
8197 domain_type
= TYPE_DOMAIN (TREE_TYPE (init
));
8198 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type
)));
8199 elt_size
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init
))));
8201 access_index
= offset
/ BITS_PER_UNIT
/ elt_size
;
8202 gcc_checking_assert (offset
% (elt_size
* BITS_PER_UNIT
) == 0);
8204 /* The C++ FE can now produce indexed fields, and we check if the indexes
8206 if (access_index
< CONSTRUCTOR_NELTS (init
))
8208 fn
= CONSTRUCTOR_ELT (init
, access_index
)->value
;
8209 tree idx
= CONSTRUCTOR_ELT (init
, access_index
)->index
;
8210 gcc_checking_assert (!idx
|| tree_to_uhwi (idx
) == access_index
);
8216 /* For type inconsistent program we may end up looking up virtual method
8217 in virtual table that does not contain TOKEN entries. We may overrun
8218 the virtual table and pick up a constant or RTTI info pointer.
8219 In any case the call is undefined. */
8221 || (TREE_CODE (fn
) != ADDR_EXPR
&& TREE_CODE (fn
) != FDESC_EXPR
)
8222 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
8223 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
8226 fn
= TREE_OPERAND (fn
, 0);
8228 /* When cgraph node is missing and function is not public, we cannot
8229 devirtualize. This can happen in WHOPR when the actual method
8230 ends up in other partition, because we found devirtualization
8231 possibility too late. */
8232 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
8243 /* Make sure we create a cgraph node for functions we'll reference.
8244 They can be non-existent if the reference comes from an entry
8245 of an external vtable for example. */
8246 cgraph_node::get_create (fn
);
8251 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8252 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8253 KNOWN_BINFO carries the binfo describing the true type of
8254 OBJ_TYPE_REF_OBJECT(REF).
8255 Set CAN_REFER if non-NULL to false if method
8256 is not referable or if the virtual table is ill-formed (such as rewriten
8257 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
8260 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
,
8263 unsigned HOST_WIDE_INT offset
;
8266 v
= BINFO_VTABLE (known_binfo
);
8267 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
8271 if (!vtable_pointer_value_to_vtable (v
, &v
, &offset
))
8277 return gimple_get_virt_method_for_vtable (token
, v
, offset
, can_refer
);
8280 /* Given a pointer value T, return a simplified version of an
8281 indirection through T, or NULL_TREE if no simplification is
8282 possible. Note that the resulting type may be different from
8283 the type pointed to in the sense that it is still compatible
8284 from the langhooks point of view. */
8287 gimple_fold_indirect_ref (tree t
)
8289 tree ptype
= TREE_TYPE (t
), type
= TREE_TYPE (ptype
);
8294 subtype
= TREE_TYPE (sub
);
8295 if (!POINTER_TYPE_P (subtype
)
8296 || TYPE_REF_CAN_ALIAS_ALL (ptype
))
8299 if (TREE_CODE (sub
) == ADDR_EXPR
)
8301 tree op
= TREE_OPERAND (sub
, 0);
8302 tree optype
= TREE_TYPE (op
);
8304 if (useless_type_conversion_p (type
, optype
))
8307 /* *(foo *)&fooarray => fooarray[0] */
8308 if (TREE_CODE (optype
) == ARRAY_TYPE
8309 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype
))) == INTEGER_CST
8310 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
8312 tree type_domain
= TYPE_DOMAIN (optype
);
8313 tree min_val
= size_zero_node
;
8314 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
8315 min_val
= TYPE_MIN_VALUE (type_domain
);
8316 if (TREE_CODE (min_val
) == INTEGER_CST
)
8317 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
8319 /* *(foo *)&complexfoo => __real__ complexfoo */
8320 else if (TREE_CODE (optype
) == COMPLEX_TYPE
8321 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
8322 return fold_build1 (REALPART_EXPR
, type
, op
);
8323 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8324 else if (TREE_CODE (optype
) == VECTOR_TYPE
8325 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
8327 tree part_width
= TYPE_SIZE (type
);
8328 tree index
= bitsize_int (0);
8329 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
8333 /* *(p + CST) -> ... */
8334 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
8335 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
8337 tree addr
= TREE_OPERAND (sub
, 0);
8338 tree off
= TREE_OPERAND (sub
, 1);
8342 addrtype
= TREE_TYPE (addr
);
8344 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8345 if (TREE_CODE (addr
) == ADDR_EXPR
8346 && TREE_CODE (TREE_TYPE (addrtype
)) == VECTOR_TYPE
8347 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
)))
8348 && tree_fits_uhwi_p (off
))
8350 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (off
);
8351 tree part_width
= TYPE_SIZE (type
);
8352 unsigned HOST_WIDE_INT part_widthi
8353 = tree_to_shwi (part_width
) / BITS_PER_UNIT
;
8354 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
8355 tree index
= bitsize_int (indexi
);
8356 if (known_lt (offset
/ part_widthi
,
8357 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype
))))
8358 return fold_build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (addr
, 0),
8362 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8363 if (TREE_CODE (addr
) == ADDR_EXPR
8364 && TREE_CODE (TREE_TYPE (addrtype
)) == COMPLEX_TYPE
8365 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
))))
8367 tree size
= TYPE_SIZE_UNIT (type
);
8368 if (tree_int_cst_equal (size
, off
))
8369 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (addr
, 0));
8372 /* *(p + CST) -> MEM_REF <p, CST>. */
8373 if (TREE_CODE (addr
) != ADDR_EXPR
8374 || DECL_P (TREE_OPERAND (addr
, 0)))
8375 return fold_build2 (MEM_REF
, type
,
8377 wide_int_to_tree (ptype
, wi::to_wide (off
)));
8380 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8381 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
8382 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype
)))) == INTEGER_CST
8383 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (subtype
))))
8386 tree min_val
= size_zero_node
;
8388 sub
= gimple_fold_indirect_ref (sub
);
8390 sub
= build1 (INDIRECT_REF
, TREE_TYPE (subtype
), osub
);
8391 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
8392 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
8393 min_val
= TYPE_MIN_VALUE (type_domain
);
8394 if (TREE_CODE (min_val
) == INTEGER_CST
)
8395 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
8401 /* Return true if CODE is an operation that when operating on signed
8402 integer types involves undefined behavior on overflow and the
8403 operation can be expressed with unsigned arithmetic. */
8406 arith_code_with_undefined_signed_overflow (tree_code code
)
8415 case POINTER_PLUS_EXPR
:
8422 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8423 operation that can be transformed to unsigned arithmetic by converting
8424 its operand, carrying out the operation in the corresponding unsigned
8425 type and converting the result back to the original type.
8427 Returns a sequence of statements that replace STMT and also contain
8428 a modified form of STMT itself. */
8431 rewrite_to_defined_overflow (gimple
*stmt
)
8433 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
8435 fprintf (dump_file
, "rewriting stmt with undefined signed "
8437 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
8440 tree lhs
= gimple_assign_lhs (stmt
);
8441 tree type
= unsigned_type_for (TREE_TYPE (lhs
));
8442 gimple_seq stmts
= NULL
;
8443 if (gimple_assign_rhs_code (stmt
) == ABS_EXPR
)
8444 gimple_assign_set_rhs_code (stmt
, ABSU_EXPR
);
8446 for (unsigned i
= 1; i
< gimple_num_ops (stmt
); ++i
)
8448 tree op
= gimple_op (stmt
, i
);
8449 op
= gimple_convert (&stmts
, type
, op
);
8450 gimple_set_op (stmt
, i
, op
);
8452 gimple_assign_set_lhs (stmt
, make_ssa_name (type
, stmt
));
8453 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
8454 gimple_assign_set_rhs_code (stmt
, PLUS_EXPR
);
8455 gimple_set_modified (stmt
, true);
8456 gimple_seq_add_stmt (&stmts
, stmt
);
8457 gimple
*cvt
= gimple_build_assign (lhs
, NOP_EXPR
, gimple_assign_lhs (stmt
));
8458 gimple_seq_add_stmt (&stmts
, cvt
);
8464 /* The valueization hook we use for the gimple_build API simplification.
8465 This makes us match fold_buildN behavior by only combining with
8466 statements in the sequence(s) we are currently building. */
8469 gimple_build_valueize (tree op
)
8471 if (gimple_bb (SSA_NAME_DEF_STMT (op
)) == NULL
)
8476 /* Build the expression CODE OP0 of type TYPE with location LOC,
8477 simplifying it first if possible. Returns the built
8478 expression value and appends statements possibly defining it
8482 gimple_build (gimple_seq
*seq
, location_t loc
,
8483 enum tree_code code
, tree type
, tree op0
)
8485 tree res
= gimple_simplify (code
, type
, op0
, seq
, gimple_build_valueize
);
8488 res
= create_tmp_reg_or_ssa_name (type
);
8490 if (code
== REALPART_EXPR
8491 || code
== IMAGPART_EXPR
8492 || code
== VIEW_CONVERT_EXPR
)
8493 stmt
= gimple_build_assign (res
, code
, build1 (code
, type
, op0
));
8495 stmt
= gimple_build_assign (res
, code
, op0
);
8496 gimple_set_location (stmt
, loc
);
8497 gimple_seq_add_stmt_without_update (seq
, stmt
);
8502 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8503 simplifying it first if possible. Returns the built
8504 expression value and appends statements possibly defining it
8508 gimple_build (gimple_seq
*seq
, location_t loc
,
8509 enum tree_code code
, tree type
, tree op0
, tree op1
)
8511 tree res
= gimple_simplify (code
, type
, op0
, op1
, seq
, gimple_build_valueize
);
8514 res
= create_tmp_reg_or_ssa_name (type
);
8515 gimple
*stmt
= gimple_build_assign (res
, code
, op0
, op1
);
8516 gimple_set_location (stmt
, loc
);
8517 gimple_seq_add_stmt_without_update (seq
, stmt
);
8522 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8523 simplifying it first if possible. Returns the built
8524 expression value and appends statements possibly defining it
8528 gimple_build (gimple_seq
*seq
, location_t loc
,
8529 enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
)
8531 tree res
= gimple_simplify (code
, type
, op0
, op1
, op2
,
8532 seq
, gimple_build_valueize
);
8535 res
= create_tmp_reg_or_ssa_name (type
);
8537 if (code
== BIT_FIELD_REF
)
8538 stmt
= gimple_build_assign (res
, code
,
8539 build3 (code
, type
, op0
, op1
, op2
));
8541 stmt
= gimple_build_assign (res
, code
, op0
, op1
, op2
);
8542 gimple_set_location (stmt
, loc
);
8543 gimple_seq_add_stmt_without_update (seq
, stmt
);
8548 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8549 void) with a location LOC. Returns the built expression value (or NULL_TREE
8550 if TYPE is void) and appends statements possibly defining it to SEQ. */
8553 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
, tree type
)
8555 tree res
= NULL_TREE
;
8557 if (internal_fn_p (fn
))
8558 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 0);
8561 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8562 stmt
= gimple_build_call (decl
, 0);
8564 if (!VOID_TYPE_P (type
))
8566 res
= create_tmp_reg_or_ssa_name (type
);
8567 gimple_call_set_lhs (stmt
, res
);
8569 gimple_set_location (stmt
, loc
);
8570 gimple_seq_add_stmt_without_update (seq
, stmt
);
8574 /* Build the call FN (ARG0) with a result of type TYPE
8575 (or no result if TYPE is void) with location LOC,
8576 simplifying it first if possible. Returns the built
8577 expression value (or NULL_TREE if TYPE is void) and appends
8578 statements possibly defining it to SEQ. */
8581 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
8582 tree type
, tree arg0
)
8584 tree res
= gimple_simplify (fn
, type
, arg0
, seq
, gimple_build_valueize
);
8588 if (internal_fn_p (fn
))
8589 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 1, arg0
);
8592 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8593 stmt
= gimple_build_call (decl
, 1, arg0
);
8595 if (!VOID_TYPE_P (type
))
8597 res
= create_tmp_reg_or_ssa_name (type
);
8598 gimple_call_set_lhs (stmt
, res
);
8600 gimple_set_location (stmt
, loc
);
8601 gimple_seq_add_stmt_without_update (seq
, stmt
);
8606 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8607 (or no result if TYPE is void) with location LOC,
8608 simplifying it first if possible. Returns the built
8609 expression value (or NULL_TREE if TYPE is void) and appends
8610 statements possibly defining it to SEQ. */
8613 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
8614 tree type
, tree arg0
, tree arg1
)
8616 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, seq
, gimple_build_valueize
);
8620 if (internal_fn_p (fn
))
8621 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 2, arg0
, arg1
);
8624 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8625 stmt
= gimple_build_call (decl
, 2, arg0
, arg1
);
8627 if (!VOID_TYPE_P (type
))
8629 res
= create_tmp_reg_or_ssa_name (type
);
8630 gimple_call_set_lhs (stmt
, res
);
8632 gimple_set_location (stmt
, loc
);
8633 gimple_seq_add_stmt_without_update (seq
, stmt
);
8638 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8639 (or no result if TYPE is void) with location LOC,
8640 simplifying it first if possible. Returns the built
8641 expression value (or NULL_TREE if TYPE is void) and appends
8642 statements possibly defining it to SEQ. */
8645 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
8646 tree type
, tree arg0
, tree arg1
, tree arg2
)
8648 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, arg2
,
8649 seq
, gimple_build_valueize
);
8653 if (internal_fn_p (fn
))
8654 stmt
= gimple_build_call_internal (as_internal_fn (fn
),
8655 3, arg0
, arg1
, arg2
);
8658 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8659 stmt
= gimple_build_call (decl
, 3, arg0
, arg1
, arg2
);
8661 if (!VOID_TYPE_P (type
))
8663 res
= create_tmp_reg_or_ssa_name (type
);
8664 gimple_call_set_lhs (stmt
, res
);
8666 gimple_set_location (stmt
, loc
);
8667 gimple_seq_add_stmt_without_update (seq
, stmt
);
8672 /* Build the conversion (TYPE) OP with a result of type TYPE
8673 with location LOC if such conversion is neccesary in GIMPLE,
8674 simplifying it first.
8675 Returns the built expression value and appends
8676 statements possibly defining it to SEQ. */
8679 gimple_convert (gimple_seq
*seq
, location_t loc
, tree type
, tree op
)
8681 if (useless_type_conversion_p (type
, TREE_TYPE (op
)))
8683 return gimple_build (seq
, loc
, NOP_EXPR
, type
, op
);
8686 /* Build the conversion (ptrofftype) OP with a result of a type
8687 compatible with ptrofftype with location LOC if such conversion
8688 is neccesary in GIMPLE, simplifying it first.
8689 Returns the built expression value and appends
8690 statements possibly defining it to SEQ. */
8693 gimple_convert_to_ptrofftype (gimple_seq
*seq
, location_t loc
, tree op
)
8695 if (ptrofftype_p (TREE_TYPE (op
)))
8697 return gimple_convert (seq
, loc
, sizetype
, op
);
8700 /* Build a vector of type TYPE in which each element has the value OP.
8701 Return a gimple value for the result, appending any new statements
8705 gimple_build_vector_from_val (gimple_seq
*seq
, location_t loc
, tree type
,
8708 if (!TYPE_VECTOR_SUBPARTS (type
).is_constant ()
8709 && !CONSTANT_CLASS_P (op
))
8710 return gimple_build (seq
, loc
, VEC_DUPLICATE_EXPR
, type
, op
);
8712 tree res
, vec
= build_vector_from_val (type
, op
);
8713 if (is_gimple_val (vec
))
8715 if (gimple_in_ssa_p (cfun
))
8716 res
= make_ssa_name (type
);
8718 res
= create_tmp_reg (type
);
8719 gimple
*stmt
= gimple_build_assign (res
, vec
);
8720 gimple_set_location (stmt
, loc
);
8721 gimple_seq_add_stmt_without_update (seq
, stmt
);
8725 /* Build a vector from BUILDER, handling the case in which some elements
8726 are non-constant. Return a gimple value for the result, appending any
8727 new instructions to SEQ.
8729 BUILDER must not have a stepped encoding on entry. This is because
8730 the function is not geared up to handle the arithmetic that would
8731 be needed in the variable case, and any code building a vector that
8732 is known to be constant should use BUILDER->build () directly. */
8735 gimple_build_vector (gimple_seq
*seq
, location_t loc
,
8736 tree_vector_builder
*builder
)
8738 gcc_assert (builder
->nelts_per_pattern () <= 2);
8739 unsigned int encoded_nelts
= builder
->encoded_nelts ();
8740 for (unsigned int i
= 0; i
< encoded_nelts
; ++i
)
8741 if (!CONSTANT_CLASS_P ((*builder
)[i
]))
8743 tree type
= builder
->type ();
8744 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
8745 vec
<constructor_elt
, va_gc
> *v
;
8746 vec_alloc (v
, nelts
);
8747 for (i
= 0; i
< nelts
; ++i
)
8748 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, builder
->elt (i
));
8751 if (gimple_in_ssa_p (cfun
))
8752 res
= make_ssa_name (type
);
8754 res
= create_tmp_reg (type
);
8755 gimple
*stmt
= gimple_build_assign (res
, build_constructor (type
, v
));
8756 gimple_set_location (stmt
, loc
);
8757 gimple_seq_add_stmt_without_update (seq
, stmt
);
8760 return builder
->build ();
8763 /* Emit gimple statements into &stmts that take a value given in OLD_SIZE
8764 and generate a value guaranteed to be rounded upwards to ALIGN.
8766 Return the tree node representing this size, it is of TREE_TYPE TYPE. */
8769 gimple_build_round_up (gimple_seq
*seq
, location_t loc
, tree type
,
8770 tree old_size
, unsigned HOST_WIDE_INT align
)
8772 unsigned HOST_WIDE_INT tg_mask
= align
- 1;
8773 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
8774 gcc_assert (INTEGRAL_TYPE_P (type
));
8775 tree tree_mask
= build_int_cst (type
, tg_mask
);
8776 tree oversize
= gimple_build (seq
, loc
, PLUS_EXPR
, type
, old_size
,
8779 tree mask
= build_int_cst (type
, -align
);
8780 return gimple_build (seq
, loc
, BIT_AND_EXPR
, type
, oversize
, mask
);
8783 /* Return true if the result of assignment STMT is known to be non-negative.
8784 If the return value is based on the assumption that signed overflow is
8785 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8786 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8789 gimple_assign_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
8792 enum tree_code code
= gimple_assign_rhs_code (stmt
);
8793 switch (get_gimple_rhs_class (code
))
8795 case GIMPLE_UNARY_RHS
:
8796 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
8797 gimple_expr_type (stmt
),
8798 gimple_assign_rhs1 (stmt
),
8799 strict_overflow_p
, depth
);
8800 case GIMPLE_BINARY_RHS
:
8801 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
8802 gimple_expr_type (stmt
),
8803 gimple_assign_rhs1 (stmt
),
8804 gimple_assign_rhs2 (stmt
),
8805 strict_overflow_p
, depth
);
8806 case GIMPLE_TERNARY_RHS
:
8808 case GIMPLE_SINGLE_RHS
:
8809 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt
),
8810 strict_overflow_p
, depth
);
8811 case GIMPLE_INVALID_RHS
:
8817 /* Return true if return value of call STMT is known to be non-negative.
8818 If the return value is based on the assumption that signed overflow is
8819 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8820 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8823 gimple_call_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
8826 tree arg0
= gimple_call_num_args (stmt
) > 0 ?
8827 gimple_call_arg (stmt
, 0) : NULL_TREE
;
8828 tree arg1
= gimple_call_num_args (stmt
) > 1 ?
8829 gimple_call_arg (stmt
, 1) : NULL_TREE
;
8831 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt
),
8832 gimple_call_combined_fn (stmt
),
8835 strict_overflow_p
, depth
);
8838 /* Return true if return value of call STMT is known to be non-negative.
8839 If the return value is based on the assumption that signed overflow is
8840 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8841 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8844 gimple_phi_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
8847 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
8849 tree arg
= gimple_phi_arg_def (stmt
, i
);
8850 if (!tree_single_nonnegative_warnv_p (arg
, strict_overflow_p
, depth
+ 1))
8856 /* Return true if STMT is known to compute a non-negative value.
8857 If the return value is based on the assumption that signed overflow is
8858 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8859 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8862 gimple_stmt_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
8865 switch (gimple_code (stmt
))
8868 return gimple_assign_nonnegative_warnv_p (stmt
, strict_overflow_p
,
8871 return gimple_call_nonnegative_warnv_p (stmt
, strict_overflow_p
,
8874 return gimple_phi_nonnegative_warnv_p (stmt
, strict_overflow_p
,
8881 /* Return true if the floating-point value computed by assignment STMT
8882 is known to have an integer value. We also allow +Inf, -Inf and NaN
8883 to be considered integer values. Return false for signaling NaN.
8885 DEPTH is the current nesting depth of the query. */
8888 gimple_assign_integer_valued_real_p (gimple
*stmt
, int depth
)
8890 enum tree_code code
= gimple_assign_rhs_code (stmt
);
8891 switch (get_gimple_rhs_class (code
))
8893 case GIMPLE_UNARY_RHS
:
8894 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt
),
8895 gimple_assign_rhs1 (stmt
), depth
);
8896 case GIMPLE_BINARY_RHS
:
8897 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt
),
8898 gimple_assign_rhs1 (stmt
),
8899 gimple_assign_rhs2 (stmt
), depth
);
8900 case GIMPLE_TERNARY_RHS
:
8902 case GIMPLE_SINGLE_RHS
:
8903 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt
), depth
);
8904 case GIMPLE_INVALID_RHS
:
8910 /* Return true if the floating-point value computed by call STMT is known
8911 to have an integer value. We also allow +Inf, -Inf and NaN to be
8912 considered integer values. Return false for signaling NaN.
8914 DEPTH is the current nesting depth of the query. */
8917 gimple_call_integer_valued_real_p (gimple
*stmt
, int depth
)
8919 tree arg0
= (gimple_call_num_args (stmt
) > 0
8920 ? gimple_call_arg (stmt
, 0)
8922 tree arg1
= (gimple_call_num_args (stmt
) > 1
8923 ? gimple_call_arg (stmt
, 1)
8925 return integer_valued_real_call_p (gimple_call_combined_fn (stmt
),
8929 /* Return true if the floating-point result of phi STMT is known to have
8930 an integer value. We also allow +Inf, -Inf and NaN to be considered
8931 integer values. Return false for signaling NaN.
8933 DEPTH is the current nesting depth of the query. */
8936 gimple_phi_integer_valued_real_p (gimple
*stmt
, int depth
)
8938 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
8940 tree arg
= gimple_phi_arg_def (stmt
, i
);
8941 if (!integer_valued_real_single_p (arg
, depth
+ 1))
8947 /* Return true if the floating-point value computed by STMT is known
8948 to have an integer value. We also allow +Inf, -Inf and NaN to be
8949 considered integer values. Return false for signaling NaN.
8951 DEPTH is the current nesting depth of the query. */
8954 gimple_stmt_integer_valued_real_p (gimple
*stmt
, int depth
)
8956 switch (gimple_code (stmt
))
8959 return gimple_assign_integer_valued_real_p (stmt
, depth
);
8961 return gimple_call_integer_valued_real_p (stmt
, depth
);
8963 return gimple_phi_integer_valued_real_p (stmt
, depth
);