1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2021 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
37 #include "stor-layout.h"
39 #include "gimple-fold.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
44 #include "tree-object-size.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
63 #include "diagnostic-core.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
70 enum strlen_range_kind
{
71 /* Compute the exact constant string length. */
73 /* Compute the maximum constant string length. */
75 /* Compute a range of string lengths bounded by object sizes. When
76 the length of a string cannot be determined, consider as the upper
77 bound the size of the enclosing object the string may be a member
78 or element of. Also determine the size of the largest character
79 array the string may refer to. */
81 /* Determine the integer value of the argument (not string length). */
86 get_range_strlen (tree
, bitmap
*, strlen_range_kind
, c_strlen_data
*, unsigned);
88 /* Return true when DECL can be referenced from current unit.
89 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
90 We can get declarations that are not possible to reference for various
93 1) When analyzing C++ virtual tables.
94 C++ virtual tables do have known constructors even
95 when they are keyed to other compilation unit.
96 Those tables can contain pointers to methods and vars
97 in other units. Those methods have both STATIC and EXTERNAL
99 2) In WHOPR mode devirtualization might lead to reference
100 to method that was partitioned elsehwere.
101 In this case we have static VAR_DECL or FUNCTION_DECL
102 that has no corresponding callgraph/varpool node
104 3) COMDAT functions referred by external vtables that
105 we devirtualize only during final compilation stage.
106 At this time we already decided that we will not output
107 the function body and thus we can't reference the symbol
111 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
114 struct cgraph_node
*node
;
117 if (DECL_ABSTRACT_P (decl
))
120 /* We are concerned only about static/external vars and functions. */
121 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
122 || !VAR_OR_FUNCTION_DECL_P (decl
))
125 /* Static objects can be referred only if they are defined and not optimized
127 if (!TREE_PUBLIC (decl
))
129 if (DECL_EXTERNAL (decl
))
131 /* Before we start optimizing unreachable code we can be sure all
132 static objects are defined. */
133 if (symtab
->function_flags_ready
)
135 snode
= symtab_node::get (decl
);
136 if (!snode
|| !snode
->definition
)
138 node
= dyn_cast
<cgraph_node
*> (snode
);
139 return !node
|| !node
->inlined_to
;
142 /* We will later output the initializer, so we can refer to it.
143 So we are concerned only when DECL comes from initializer of
144 external var or var that has been optimized out. */
146 || !VAR_P (from_decl
)
147 || (!DECL_EXTERNAL (from_decl
)
148 && (vnode
= varpool_node::get (from_decl
)) != NULL
149 && vnode
->definition
)
151 && (vnode
= varpool_node::get (from_decl
)) != NULL
152 && vnode
->in_other_partition
))
154 /* We are folding reference from external vtable. The vtable may reffer
155 to a symbol keyed to other compilation unit. The other compilation
156 unit may be in separate DSO and the symbol may be hidden. */
157 if (DECL_VISIBILITY_SPECIFIED (decl
)
158 && DECL_EXTERNAL (decl
)
159 && DECL_VISIBILITY (decl
) != VISIBILITY_DEFAULT
160 && (!(snode
= symtab_node::get (decl
)) || !snode
->in_other_partition
))
162 /* When function is public, we always can introduce new reference.
163 Exception are the COMDAT functions where introducing a direct
164 reference imply need to include function body in the curren tunit. */
165 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
167 /* We have COMDAT. We are going to check if we still have definition
168 or if the definition is going to be output in other partition.
169 Bypass this when gimplifying; all needed functions will be produced.
171 As observed in PR20991 for already optimized out comdat virtual functions
172 it may be tempting to not necessarily give up because the copy will be
173 output elsewhere when corresponding vtable is output.
174 This is however not possible - ABI specify that COMDATs are output in
175 units where they are used and when the other unit was compiled with LTO
176 it is possible that vtable was kept public while the function itself
178 if (!symtab
->function_flags_ready
)
181 snode
= symtab_node::get (decl
);
183 || ((!snode
->definition
|| DECL_EXTERNAL (decl
))
184 && (!snode
->in_other_partition
185 || (!snode
->forced_by_abi
&& !snode
->force_output
))))
187 node
= dyn_cast
<cgraph_node
*> (snode
);
188 return !node
|| !node
->inlined_to
;
191 /* Create a temporary for TYPE for a statement STMT. If the current function
192 is in SSA form, a SSA name is created. Otherwise a temporary register
196 create_tmp_reg_or_ssa_name (tree type
, gimple
*stmt
)
198 if (gimple_in_ssa_p (cfun
))
199 return make_ssa_name (type
, stmt
);
201 return create_tmp_reg (type
);
204 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
205 acceptable form for is_gimple_min_invariant.
206 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
209 canonicalize_constructor_val (tree cval
, tree from_decl
)
211 if (CONSTANT_CLASS_P (cval
))
214 tree orig_cval
= cval
;
216 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
217 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
219 tree ptr
= TREE_OPERAND (cval
, 0);
220 if (is_gimple_min_invariant (ptr
))
221 cval
= build1_loc (EXPR_LOCATION (cval
),
222 ADDR_EXPR
, TREE_TYPE (ptr
),
223 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
225 fold_convert (ptr_type_node
,
226 TREE_OPERAND (cval
, 1))));
228 if (TREE_CODE (cval
) == ADDR_EXPR
)
230 tree base
= NULL_TREE
;
231 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
233 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
235 TREE_OPERAND (cval
, 0) = base
;
238 base
= get_base_address (TREE_OPERAND (cval
, 0));
242 if (VAR_OR_FUNCTION_DECL_P (base
)
243 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
245 if (TREE_TYPE (base
) == error_mark_node
)
248 /* ??? We should be able to assert that TREE_ADDRESSABLE is set,
249 but since the use can be in a debug stmt we can't. */
251 else if (TREE_CODE (base
) == FUNCTION_DECL
)
253 /* Make sure we create a cgraph node for functions we'll reference.
254 They can be non-existent if the reference comes from an entry
255 of an external vtable for example. */
256 cgraph_node::get_create (base
);
258 /* Fixup types in global initializers. */
259 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
260 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
262 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
263 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
266 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
267 if (TREE_CODE (cval
) == INTEGER_CST
)
269 if (TREE_OVERFLOW_P (cval
))
270 cval
= drop_tree_overflow (cval
);
271 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
272 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
278 /* If SYM is a constant variable with known value, return the value.
279 NULL_TREE is returned otherwise. */
282 get_symbol_constant_value (tree sym
)
284 tree val
= ctor_for_folding (sym
);
285 if (val
!= error_mark_node
)
289 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
290 if (val
&& is_gimple_min_invariant (val
))
295 /* Variables declared 'const' without an initializer
296 have zero as the initializer if they may not be
297 overridden at link or run time. */
299 && is_gimple_reg_type (TREE_TYPE (sym
)))
300 return build_zero_cst (TREE_TYPE (sym
));
308 /* Subroutine of fold_stmt. We perform constant folding of the
309 memory reference tree EXPR. */
312 maybe_fold_reference (tree expr
)
314 tree result
= NULL_TREE
;
316 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
317 || TREE_CODE (expr
) == REALPART_EXPR
318 || TREE_CODE (expr
) == IMAGPART_EXPR
)
319 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
320 result
= fold_unary_loc (EXPR_LOCATION (expr
),
323 TREE_OPERAND (expr
, 0));
324 else if (TREE_CODE (expr
) == BIT_FIELD_REF
325 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
326 result
= fold_ternary_loc (EXPR_LOCATION (expr
),
329 TREE_OPERAND (expr
, 0),
330 TREE_OPERAND (expr
, 1),
331 TREE_OPERAND (expr
, 2));
333 result
= fold_const_aggregate_ref (expr
);
335 if (result
&& is_gimple_min_invariant (result
))
341 /* Return true if EXPR is an acceptable right-hand-side for a
342 GIMPLE assignment. We validate the entire tree, not just
343 the root node, thus catching expressions that embed complex
344 operands that are not permitted in GIMPLE. This function
345 is needed because the folding routines in fold-const.c
346 may return such expressions in some cases, e.g., an array
347 access with an embedded index addition. It may make more
348 sense to have folding routines that are sensitive to the
349 constraints on GIMPLE operands, rather than abandoning any
350 any attempt to fold if the usual folding turns out to be too
354 valid_gimple_rhs_p (tree expr
)
356 enum tree_code code
= TREE_CODE (expr
);
358 switch (TREE_CODE_CLASS (code
))
360 case tcc_declaration
:
361 if (!is_gimple_variable (expr
))
366 /* All constants are ok. */
370 /* GENERIC allows comparisons with non-boolean types, reject
371 those for GIMPLE. Let vector-typed comparisons pass - rules
372 for GENERIC and GIMPLE are the same here. */
373 if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr
))
374 && (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
375 || TYPE_PRECISION (TREE_TYPE (expr
)) == 1))
376 && ! VECTOR_TYPE_P (TREE_TYPE (expr
)))
381 if (!is_gimple_val (TREE_OPERAND (expr
, 0))
382 || !is_gimple_val (TREE_OPERAND (expr
, 1)))
387 if (!is_gimple_val (TREE_OPERAND (expr
, 0)))
397 if (is_gimple_min_invariant (expr
))
399 t
= TREE_OPERAND (expr
, 0);
400 while (handled_component_p (t
))
402 /* ??? More checks needed, see the GIMPLE verifier. */
403 if ((TREE_CODE (t
) == ARRAY_REF
404 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
405 && !is_gimple_val (TREE_OPERAND (t
, 1)))
407 t
= TREE_OPERAND (t
, 0);
409 if (!is_gimple_id (t
))
415 if (get_gimple_rhs_class (code
) == GIMPLE_TERNARY_RHS
)
417 if ((code
== COND_EXPR
418 ? !is_gimple_condexpr (TREE_OPERAND (expr
, 0))
419 : !is_gimple_val (TREE_OPERAND (expr
, 0)))
420 || !is_gimple_val (TREE_OPERAND (expr
, 1))
421 || !is_gimple_val (TREE_OPERAND (expr
, 2)))
432 case tcc_exceptional
:
433 if (code
== CONSTRUCTOR
)
437 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr
), i
, elt
)
438 if (!is_gimple_val (elt
))
442 if (code
!= SSA_NAME
)
447 if (code
== BIT_FIELD_REF
)
448 return is_gimple_val (TREE_OPERAND (expr
, 0));
459 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
460 replacement rhs for the statement or NULL_TREE if no simplification
461 could be made. It is assumed that the operands have been previously
465 fold_gimple_assign (gimple_stmt_iterator
*si
)
467 gimple
*stmt
= gsi_stmt (*si
);
468 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
469 location_t loc
= gimple_location (stmt
);
471 tree result
= NULL_TREE
;
473 switch (get_gimple_rhs_class (subcode
))
475 case GIMPLE_SINGLE_RHS
:
477 tree rhs
= gimple_assign_rhs1 (stmt
);
479 if (TREE_CLOBBER_P (rhs
))
482 if (REFERENCE_CLASS_P (rhs
))
483 return maybe_fold_reference (rhs
);
485 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
)
487 tree val
= OBJ_TYPE_REF_EXPR (rhs
);
488 if (is_gimple_min_invariant (val
))
490 else if (flag_devirtualize
&& virtual_method_call_p (rhs
))
493 vec
<cgraph_node
*>targets
494 = possible_polymorphic_call_targets (rhs
, stmt
, &final
);
495 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
497 if (dump_enabled_p ())
499 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, stmt
,
500 "resolving virtual function address "
501 "reference to function %s\n",
502 targets
.length () == 1
503 ? targets
[0]->name ()
506 if (targets
.length () == 1)
508 val
= fold_convert (TREE_TYPE (val
),
509 build_fold_addr_expr_loc
510 (loc
, targets
[0]->decl
));
511 STRIP_USELESS_TYPE_CONVERSION (val
);
514 /* We cannot use __builtin_unreachable here because it
515 cannot have address taken. */
516 val
= build_int_cst (TREE_TYPE (val
), 0);
522 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
524 tree ref
= TREE_OPERAND (rhs
, 0);
525 if (TREE_CODE (ref
) == MEM_REF
526 && integer_zerop (TREE_OPERAND (ref
, 1)))
528 result
= TREE_OPERAND (ref
, 0);
529 if (!useless_type_conversion_p (TREE_TYPE (rhs
),
531 result
= build1 (NOP_EXPR
, TREE_TYPE (rhs
), result
);
536 else if (TREE_CODE (rhs
) == CONSTRUCTOR
537 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
)
539 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
543 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
544 if (! CONSTANT_CLASS_P (val
))
547 return build_vector_from_ctor (TREE_TYPE (rhs
),
548 CONSTRUCTOR_ELTS (rhs
));
551 else if (DECL_P (rhs
)
552 && is_gimple_reg_type (TREE_TYPE (rhs
)))
553 return get_symbol_constant_value (rhs
);
557 case GIMPLE_UNARY_RHS
:
560 case GIMPLE_BINARY_RHS
:
563 case GIMPLE_TERNARY_RHS
:
564 result
= fold_ternary_loc (loc
, subcode
,
565 TREE_TYPE (gimple_assign_lhs (stmt
)),
566 gimple_assign_rhs1 (stmt
),
567 gimple_assign_rhs2 (stmt
),
568 gimple_assign_rhs3 (stmt
));
572 STRIP_USELESS_TYPE_CONVERSION (result
);
573 if (valid_gimple_rhs_p (result
))
578 case GIMPLE_INVALID_RHS
:
586 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
587 adjusting the replacement stmts location and virtual operands.
588 If the statement has a lhs the last stmt in the sequence is expected
589 to assign to that lhs. */
592 gsi_replace_with_seq_vops (gimple_stmt_iterator
*si_p
, gimple_seq stmts
)
594 gimple
*stmt
= gsi_stmt (*si_p
);
596 if (gimple_has_location (stmt
))
597 annotate_all_with_location (stmts
, gimple_location (stmt
));
599 /* First iterate over the replacement statements backward, assigning
600 virtual operands to their defining statements. */
601 gimple
*laststore
= NULL
;
602 for (gimple_stmt_iterator i
= gsi_last (stmts
);
603 !gsi_end_p (i
); gsi_prev (&i
))
605 gimple
*new_stmt
= gsi_stmt (i
);
606 if ((gimple_assign_single_p (new_stmt
)
607 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
608 || (is_gimple_call (new_stmt
)
609 && (gimple_call_flags (new_stmt
)
610 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
614 vdef
= gimple_vdef (stmt
);
616 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
617 gimple_set_vdef (new_stmt
, vdef
);
618 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
619 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
620 laststore
= new_stmt
;
624 /* Second iterate over the statements forward, assigning virtual
625 operands to their uses. */
626 tree reaching_vuse
= gimple_vuse (stmt
);
627 for (gimple_stmt_iterator i
= gsi_start (stmts
);
628 !gsi_end_p (i
); gsi_next (&i
))
630 gimple
*new_stmt
= gsi_stmt (i
);
631 /* If the new statement possibly has a VUSE, update it with exact SSA
632 name we know will reach this one. */
633 if (gimple_has_mem_ops (new_stmt
))
634 gimple_set_vuse (new_stmt
, reaching_vuse
);
635 gimple_set_modified (new_stmt
, true);
636 if (gimple_vdef (new_stmt
))
637 reaching_vuse
= gimple_vdef (new_stmt
);
640 /* If the new sequence does not do a store release the virtual
641 definition of the original statement. */
643 && reaching_vuse
== gimple_vuse (stmt
))
645 tree vdef
= gimple_vdef (stmt
);
647 && TREE_CODE (vdef
) == SSA_NAME
)
649 unlink_stmt_vdef (stmt
);
650 release_ssa_name (vdef
);
654 /* Finally replace the original statement with the sequence. */
655 gsi_replace_with_seq (si_p
, stmts
, false);
658 /* Helper function for update_gimple_call and
659 gimplify_and_update_call_from_tree. A GIMPLE_CALL STMT is being replaced
660 with GIMPLE_CALL NEW_STMT. */
663 finish_update_gimple_call (gimple_stmt_iterator
*si_p
, gimple
*new_stmt
,
666 tree lhs
= gimple_call_lhs (stmt
);
667 gimple_call_set_lhs (new_stmt
, lhs
);
668 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
669 SSA_NAME_DEF_STMT (lhs
) = new_stmt
;
670 gimple_move_vops (new_stmt
, stmt
);
671 gimple_set_location (new_stmt
, gimple_location (stmt
));
672 if (gimple_block (new_stmt
) == NULL_TREE
)
673 gimple_set_block (new_stmt
, gimple_block (stmt
));
674 gsi_replace (si_p
, new_stmt
, false);
677 /* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
678 with number of arguments NARGS, where the arguments in GIMPLE form
679 follow NARGS argument. */
682 update_gimple_call (gimple_stmt_iterator
*si_p
, tree fn
, int nargs
, ...)
685 gcall
*new_stmt
, *stmt
= as_a
<gcall
*> (gsi_stmt (*si_p
));
687 gcc_assert (is_gimple_call (stmt
));
688 va_start (ap
, nargs
);
689 new_stmt
= gimple_build_call_valist (fn
, nargs
, ap
);
690 finish_update_gimple_call (si_p
, new_stmt
, stmt
);
695 /* Return true if EXPR is a CALL_EXPR suitable for representation
696 as a single GIMPLE_CALL statement. If the arguments require
697 further gimplification, return false. */
700 valid_gimple_call_p (tree expr
)
704 if (TREE_CODE (expr
) != CALL_EXPR
)
707 nargs
= call_expr_nargs (expr
);
708 for (i
= 0; i
< nargs
; i
++)
710 tree arg
= CALL_EXPR_ARG (expr
, i
);
711 if (is_gimple_reg_type (TREE_TYPE (arg
)))
713 if (!is_gimple_val (arg
))
717 if (!is_gimple_lvalue (arg
))
724 /* Convert EXPR into a GIMPLE value suitable for substitution on the
725 RHS of an assignment. Insert the necessary statements before
726 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
727 is replaced. If the call is expected to produces a result, then it
728 is replaced by an assignment of the new RHS to the result variable.
729 If the result is to be ignored, then the call is replaced by a
730 GIMPLE_NOP. A proper VDEF chain is retained by making the first
731 VUSE and the last VDEF of the whole sequence be the same as the replaced
732 statement and using new SSA names for stores in between. */
735 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
738 gimple
*stmt
, *new_stmt
;
739 gimple_stmt_iterator i
;
740 gimple_seq stmts
= NULL
;
742 stmt
= gsi_stmt (*si_p
);
744 gcc_assert (is_gimple_call (stmt
));
746 if (valid_gimple_call_p (expr
))
748 /* The call has simplified to another call. */
749 tree fn
= CALL_EXPR_FN (expr
);
751 unsigned nargs
= call_expr_nargs (expr
);
752 vec
<tree
> args
= vNULL
;
758 args
.safe_grow_cleared (nargs
, true);
760 for (i
= 0; i
< nargs
; i
++)
761 args
[i
] = CALL_EXPR_ARG (expr
, i
);
764 new_stmt
= gimple_build_call_vec (fn
, args
);
765 finish_update_gimple_call (si_p
, new_stmt
, stmt
);
770 lhs
= gimple_call_lhs (stmt
);
771 if (lhs
== NULL_TREE
)
773 push_gimplify_context (gimple_in_ssa_p (cfun
));
774 gimplify_and_add (expr
, &stmts
);
775 pop_gimplify_context (NULL
);
777 /* We can end up with folding a memcpy of an empty class assignment
778 which gets optimized away by C++ gimplification. */
779 if (gimple_seq_empty_p (stmts
))
781 if (gimple_in_ssa_p (cfun
))
783 unlink_stmt_vdef (stmt
);
786 gsi_replace (si_p
, gimple_build_nop (), false);
792 tree tmp
= force_gimple_operand (expr
, &stmts
, false, NULL_TREE
);
793 new_stmt
= gimple_build_assign (lhs
, tmp
);
794 i
= gsi_last (stmts
);
795 gsi_insert_after_without_update (&i
, new_stmt
,
796 GSI_CONTINUE_LINKING
);
799 gsi_replace_with_seq_vops (si_p
, stmts
);
803 /* Replace the call at *GSI with the gimple value VAL. */
806 replace_call_with_value (gimple_stmt_iterator
*gsi
, tree val
)
808 gimple
*stmt
= gsi_stmt (*gsi
);
809 tree lhs
= gimple_call_lhs (stmt
);
813 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (val
)))
814 val
= fold_convert (TREE_TYPE (lhs
), val
);
815 repl
= gimple_build_assign (lhs
, val
);
818 repl
= gimple_build_nop ();
819 tree vdef
= gimple_vdef (stmt
);
820 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
822 unlink_stmt_vdef (stmt
);
823 release_ssa_name (vdef
);
825 gsi_replace (gsi
, repl
, false);
828 /* Replace the call at *GSI with the new call REPL and fold that
832 replace_call_with_call_and_fold (gimple_stmt_iterator
*gsi
, gimple
*repl
)
834 gimple
*stmt
= gsi_stmt (*gsi
);
835 gimple_call_set_lhs (repl
, gimple_call_lhs (stmt
));
836 gimple_set_location (repl
, gimple_location (stmt
));
837 gimple_move_vops (repl
, stmt
);
838 gsi_replace (gsi
, repl
, false);
842 /* Return true if VAR is a VAR_DECL or a component thereof. */
845 var_decl_component_p (tree var
)
848 while (handled_component_p (inner
))
849 inner
= TREE_OPERAND (inner
, 0);
850 return (DECL_P (inner
)
851 || (TREE_CODE (inner
) == MEM_REF
852 && TREE_CODE (TREE_OPERAND (inner
, 0)) == ADDR_EXPR
));
855 /* Return TRUE if the SIZE argument, representing the size of an
856 object, is in a range of values of which exactly zero is valid. */
859 size_must_be_zero_p (tree size
)
861 if (integer_zerop (size
))
864 if (TREE_CODE (size
) != SSA_NAME
|| !INTEGRAL_TYPE_P (TREE_TYPE (size
)))
867 tree type
= TREE_TYPE (size
);
868 int prec
= TYPE_PRECISION (type
);
870 /* Compute the value of SSIZE_MAX, the largest positive value that
871 can be stored in ssize_t, the signed counterpart of size_t. */
872 wide_int ssize_max
= wi::lshift (wi::one (prec
), prec
- 1) - 1;
873 value_range
valid_range (build_int_cst (type
, 0),
874 wide_int_to_tree (type
, ssize_max
));
876 get_range_info (size
, vr
);
877 vr
.intersect (&valid_range
);
881 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
882 diagnose (otherwise undefined) overlapping copies without preventing
883 folding. When folded, GCC guarantees that overlapping memcpy has
884 the same semantics as memmove. Call to the library memcpy need not
885 provide the same guarantee. Return false if no simplification can
889 gimple_fold_builtin_memory_op (gimple_stmt_iterator
*gsi
,
890 tree dest
, tree src
, enum built_in_function code
)
892 gimple
*stmt
= gsi_stmt (*gsi
);
893 tree lhs
= gimple_call_lhs (stmt
);
894 tree len
= gimple_call_arg (stmt
, 2);
895 location_t loc
= gimple_location (stmt
);
897 /* If the LEN parameter is a constant zero or in range where
898 the only valid value is zero, return DEST. */
899 if (size_must_be_zero_p (len
))
902 if (gimple_call_lhs (stmt
))
903 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
905 repl
= gimple_build_nop ();
906 tree vdef
= gimple_vdef (stmt
);
907 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
909 unlink_stmt_vdef (stmt
);
910 release_ssa_name (vdef
);
912 gsi_replace (gsi
, repl
, false);
916 /* If SRC and DEST are the same (and not volatile), return
917 DEST{,+LEN,+LEN-1}. */
918 if (operand_equal_p (src
, dest
, 0))
920 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
921 It's safe and may even be emitted by GCC itself (see bug
923 unlink_stmt_vdef (stmt
);
924 if (gimple_vdef (stmt
) && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
925 release_ssa_name (gimple_vdef (stmt
));
928 gsi_replace (gsi
, gimple_build_nop (), false);
935 /* We cannot (easily) change the type of the copy if it is a storage
936 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
937 modify the storage order of objects (see storage_order_barrier_p). */
939 = POINTER_TYPE_P (TREE_TYPE (src
))
940 ? TREE_TYPE (TREE_TYPE (src
)) : NULL_TREE
;
942 = POINTER_TYPE_P (TREE_TYPE (dest
))
943 ? TREE_TYPE (TREE_TYPE (dest
)) : NULL_TREE
;
944 tree destvar
, srcvar
, srcoff
;
945 unsigned int src_align
, dest_align
;
946 unsigned HOST_WIDE_INT tmp_len
;
949 /* Build accesses at offset zero with a ref-all character type. */
951 = build_int_cst (build_pointer_type_for_mode (char_type_node
,
954 /* If we can perform the copy efficiently with first doing all loads
955 and then all stores inline it that way. Currently efficiently
956 means that we can load all the memory into a single integer
957 register which is what MOVE_MAX gives us. */
958 src_align
= get_pointer_alignment (src
);
959 dest_align
= get_pointer_alignment (dest
);
960 if (tree_fits_uhwi_p (len
)
961 && compare_tree_int (len
, MOVE_MAX
) <= 0
962 /* FIXME: Don't transform copies from strings with known length.
963 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
964 from being handled, and the case was XFAILed for that reason.
965 Now that it is handled and the XFAIL removed, as soon as other
966 strlenopt tests that rely on it for passing are adjusted, this
967 hack can be removed. */
968 && !c_strlen (src
, 1)
969 && !((tmp_str
= getbyterep (src
, &tmp_len
)) != NULL
970 && memchr (tmp_str
, 0, tmp_len
) == NULL
)
972 && AGGREGATE_TYPE_P (srctype
)
973 && TYPE_REVERSE_STORAGE_ORDER (srctype
))
975 && AGGREGATE_TYPE_P (desttype
)
976 && TYPE_REVERSE_STORAGE_ORDER (desttype
)))
978 unsigned ilen
= tree_to_uhwi (len
);
979 if (pow2p_hwi (ilen
))
981 /* Detect out-of-bounds accesses without issuing warnings.
982 Avoid folding out-of-bounds copies but to avoid false
983 positives for unreachable code defer warning until after
984 DCE has worked its magic.
985 -Wrestrict is still diagnosed. */
986 if (int warning
= check_bounds_or_overlap (as_a
<gcall
*>(stmt
),
989 if (warning
!= OPT_Wrestrict
)
992 scalar_int_mode mode
;
993 tree type
= lang_hooks
.types
.type_for_size (ilen
* 8, 1);
995 && is_a
<scalar_int_mode
> (TYPE_MODE (type
), &mode
)
996 && GET_MODE_SIZE (mode
) * BITS_PER_UNIT
== ilen
* 8
997 /* If the destination pointer is not aligned we must be able
998 to emit an unaligned store. */
999 && (dest_align
>= GET_MODE_ALIGNMENT (mode
)
1000 || !targetm
.slow_unaligned_access (mode
, dest_align
)
1001 || (optab_handler (movmisalign_optab
, mode
)
1002 != CODE_FOR_nothing
)))
1004 tree srctype
= type
;
1005 tree desttype
= type
;
1006 if (src_align
< GET_MODE_ALIGNMENT (mode
))
1007 srctype
= build_aligned_type (type
, src_align
);
1008 tree srcmem
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1009 tree tem
= fold_const_aggregate_ref (srcmem
);
1012 else if (src_align
< GET_MODE_ALIGNMENT (mode
)
1013 && targetm
.slow_unaligned_access (mode
, src_align
)
1014 && (optab_handler (movmisalign_optab
, mode
)
1015 == CODE_FOR_nothing
))
1020 if (is_gimple_reg_type (TREE_TYPE (srcmem
)))
1022 new_stmt
= gimple_build_assign (NULL_TREE
, srcmem
);
1024 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem
),
1026 gimple_assign_set_lhs (new_stmt
, srcmem
);
1027 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1028 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1030 if (dest_align
< GET_MODE_ALIGNMENT (mode
))
1031 desttype
= build_aligned_type (type
, dest_align
);
1033 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
,
1036 gimple_move_vops (new_stmt
, stmt
);
1039 gsi_replace (gsi
, new_stmt
, false);
1042 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1049 if (code
== BUILT_IN_MEMMOVE
)
1051 /* Both DEST and SRC must be pointer types.
1052 ??? This is what old code did. Is the testing for pointer types
1055 If either SRC is readonly or length is 1, we can use memcpy. */
1056 if (!dest_align
|| !src_align
)
1058 if (readonly_data_expr (src
)
1059 || (tree_fits_uhwi_p (len
)
1060 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
1061 >= tree_to_uhwi (len
))))
1063 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1066 gimple_call_set_fndecl (stmt
, fn
);
1067 gimple_call_set_arg (stmt
, 0, dest
);
1068 gimple_call_set_arg (stmt
, 1, src
);
1073 /* If *src and *dest can't overlap, optimize into memcpy as well. */
1074 if (TREE_CODE (src
) == ADDR_EXPR
1075 && TREE_CODE (dest
) == ADDR_EXPR
)
1077 tree src_base
, dest_base
, fn
;
1078 poly_int64 src_offset
= 0, dest_offset
= 0;
1079 poly_uint64 maxsize
;
1081 srcvar
= TREE_OPERAND (src
, 0);
1082 src_base
= get_addr_base_and_unit_offset (srcvar
, &src_offset
);
1083 if (src_base
== NULL
)
1085 destvar
= TREE_OPERAND (dest
, 0);
1086 dest_base
= get_addr_base_and_unit_offset (destvar
,
1088 if (dest_base
== NULL
)
1089 dest_base
= destvar
;
1090 if (!poly_int_tree_p (len
, &maxsize
))
1092 if (SSA_VAR_P (src_base
)
1093 && SSA_VAR_P (dest_base
))
1095 if (operand_equal_p (src_base
, dest_base
, 0)
1096 && ranges_maybe_overlap_p (src_offset
, maxsize
,
1097 dest_offset
, maxsize
))
1100 else if (TREE_CODE (src_base
) == MEM_REF
1101 && TREE_CODE (dest_base
) == MEM_REF
)
1103 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
1104 TREE_OPERAND (dest_base
, 0), 0))
1106 poly_offset_int full_src_offset
1107 = mem_ref_offset (src_base
) + src_offset
;
1108 poly_offset_int full_dest_offset
1109 = mem_ref_offset (dest_base
) + dest_offset
;
1110 if (ranges_maybe_overlap_p (full_src_offset
, maxsize
,
1111 full_dest_offset
, maxsize
))
1117 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1120 gimple_call_set_fndecl (stmt
, fn
);
1121 gimple_call_set_arg (stmt
, 0, dest
);
1122 gimple_call_set_arg (stmt
, 1, src
);
1127 /* If the destination and source do not alias optimize into
1129 if ((is_gimple_min_invariant (dest
)
1130 || TREE_CODE (dest
) == SSA_NAME
)
1131 && (is_gimple_min_invariant (src
)
1132 || TREE_CODE (src
) == SSA_NAME
))
1135 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
1136 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
1137 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
1140 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1143 gimple_call_set_fndecl (stmt
, fn
);
1144 gimple_call_set_arg (stmt
, 0, dest
);
1145 gimple_call_set_arg (stmt
, 1, src
);
1154 if (!tree_fits_shwi_p (len
))
1157 || (AGGREGATE_TYPE_P (srctype
)
1158 && TYPE_REVERSE_STORAGE_ORDER (srctype
)))
1161 || (AGGREGATE_TYPE_P (desttype
)
1162 && TYPE_REVERSE_STORAGE_ORDER (desttype
)))
1164 /* In the following try to find a type that is most natural to be
1165 used for the memcpy source and destination and that allows
1166 the most optimization when memcpy is turned into a plain assignment
1167 using that type. In theory we could always use a char[len] type
1168 but that only gains us that the destination and source possibly
1169 no longer will have their address taken. */
1170 if (TREE_CODE (srctype
) == ARRAY_TYPE
1171 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
1172 srctype
= TREE_TYPE (srctype
);
1173 if (TREE_CODE (desttype
) == ARRAY_TYPE
1174 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
1175 desttype
= TREE_TYPE (desttype
);
1176 if (TREE_ADDRESSABLE (srctype
)
1177 || TREE_ADDRESSABLE (desttype
))
1180 /* Make sure we are not copying using a floating-point mode or
1181 a type whose size possibly does not match its precision. */
1182 if (FLOAT_MODE_P (TYPE_MODE (desttype
))
1183 || TREE_CODE (desttype
) == BOOLEAN_TYPE
1184 || TREE_CODE (desttype
) == ENUMERAL_TYPE
)
1185 desttype
= bitwise_type_for_mode (TYPE_MODE (desttype
));
1186 if (FLOAT_MODE_P (TYPE_MODE (srctype
))
1187 || TREE_CODE (srctype
) == BOOLEAN_TYPE
1188 || TREE_CODE (srctype
) == ENUMERAL_TYPE
)
1189 srctype
= bitwise_type_for_mode (TYPE_MODE (srctype
));
1197 src_align
= get_pointer_alignment (src
);
1198 dest_align
= get_pointer_alignment (dest
);
1200 /* Choose between src and destination type for the access based
1201 on alignment, whether the access constitutes a register access
1202 and whether it may actually expose a declaration for SSA rewrite
1203 or SRA decomposition. Also try to expose a string constant, we
1204 might be able to concatenate several of them later into a single
1206 destvar
= NULL_TREE
;
1208 if (TREE_CODE (dest
) == ADDR_EXPR
1209 && var_decl_component_p (TREE_OPERAND (dest
, 0))
1210 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
)
1211 && dest_align
>= TYPE_ALIGN (desttype
)
1212 && (is_gimple_reg_type (desttype
)
1213 || src_align
>= TYPE_ALIGN (desttype
)))
1214 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1215 else if (TREE_CODE (src
) == ADDR_EXPR
1216 && var_decl_component_p (TREE_OPERAND (src
, 0))
1217 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
)
1218 && src_align
>= TYPE_ALIGN (srctype
)
1219 && (is_gimple_reg_type (srctype
)
1220 || dest_align
>= TYPE_ALIGN (srctype
)))
1221 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1222 /* FIXME: Don't transform copies from strings with known original length.
1223 As soon as strlenopt tests that rely on it for passing are adjusted,
1224 this hack can be removed. */
1225 else if (gimple_call_alloca_for_var_p (stmt
)
1226 && (srcvar
= string_constant (src
, &srcoff
, NULL
, NULL
))
1227 && integer_zerop (srcoff
)
1228 && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar
)), len
)
1229 && dest_align
>= TYPE_ALIGN (TREE_TYPE (srcvar
)))
1230 srctype
= TREE_TYPE (srcvar
);
1234 /* Now that we chose an access type express the other side in
1235 terms of it if the target allows that with respect to alignment
1237 if (srcvar
== NULL_TREE
)
1239 if (src_align
>= TYPE_ALIGN (desttype
))
1240 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
1243 if (STRICT_ALIGNMENT
)
1245 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1247 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1250 else if (destvar
== NULL_TREE
)
1252 if (dest_align
>= TYPE_ALIGN (srctype
))
1253 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
1256 if (STRICT_ALIGNMENT
)
1258 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
1260 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1264 /* Same as above, detect out-of-bounds accesses without issuing
1265 warnings. Avoid folding out-of-bounds copies but to avoid
1266 false positives for unreachable code defer warning until
1267 after DCE has worked its magic.
1268 -Wrestrict is still diagnosed. */
1269 if (int warning
= check_bounds_or_overlap (as_a
<gcall
*>(stmt
),
1270 dest
, src
, len
, len
,
1272 if (warning
!= OPT_Wrestrict
)
1276 if (is_gimple_reg_type (TREE_TYPE (srcvar
)))
1278 tree tem
= fold_const_aggregate_ref (srcvar
);
1281 if (! is_gimple_min_invariant (srcvar
))
1283 new_stmt
= gimple_build_assign (NULL_TREE
, srcvar
);
1284 srcvar
= create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar
),
1286 gimple_assign_set_lhs (new_stmt
, srcvar
);
1287 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1288 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1290 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1291 goto set_vop_and_replace
;
1294 /* We get an aggregate copy. If the source is a STRING_CST, then
1295 directly use its type to perform the copy. */
1296 if (TREE_CODE (srcvar
) == STRING_CST
)
1299 /* Or else, use an unsigned char[] type to perform the copy in order
1300 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
1301 types or float modes behavior on copying. */
1304 desttype
= build_array_type_nelts (unsigned_char_type_node
,
1305 tree_to_uhwi (len
));
1307 if (src_align
> TYPE_ALIGN (srctype
))
1308 srctype
= build_aligned_type (srctype
, src_align
);
1309 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1312 if (dest_align
> TYPE_ALIGN (desttype
))
1313 desttype
= build_aligned_type (desttype
, dest_align
);
1314 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1315 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1317 set_vop_and_replace
:
1318 gimple_move_vops (new_stmt
, stmt
);
1321 gsi_replace (gsi
, new_stmt
, false);
1324 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1328 gimple_seq stmts
= NULL
;
1329 if (code
== BUILT_IN_MEMCPY
|| code
== BUILT_IN_MEMMOVE
)
1331 else if (code
== BUILT_IN_MEMPCPY
)
1333 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
1334 dest
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
1335 TREE_TYPE (dest
), dest
, len
);
1340 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1341 gimple
*repl
= gimple_build_assign (lhs
, dest
);
1342 gsi_replace (gsi
, repl
, false);
1346 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1347 to built-in memcmp (a, b, len). */
1350 gimple_fold_builtin_bcmp (gimple_stmt_iterator
*gsi
)
1352 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
1357 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1359 gimple
*stmt
= gsi_stmt (*gsi
);
1360 tree a
= gimple_call_arg (stmt
, 0);
1361 tree b
= gimple_call_arg (stmt
, 1);
1362 tree len
= gimple_call_arg (stmt
, 2);
1364 gimple
*repl
= gimple_build_call (fn
, 3, a
, b
, len
);
1365 replace_call_with_call_and_fold (gsi
, repl
);
1370 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1371 to built-in memmove (dest, src, len). */
1374 gimple_fold_builtin_bcopy (gimple_stmt_iterator
*gsi
)
1376 tree fn
= builtin_decl_implicit (BUILT_IN_MEMMOVE
);
1381 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1382 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1383 len) into memmove (dest, src, len). */
1385 gimple
*stmt
= gsi_stmt (*gsi
);
1386 tree src
= gimple_call_arg (stmt
, 0);
1387 tree dest
= gimple_call_arg (stmt
, 1);
1388 tree len
= gimple_call_arg (stmt
, 2);
1390 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1391 gimple_call_set_fntype (as_a
<gcall
*> (stmt
), TREE_TYPE (fn
));
1392 replace_call_with_call_and_fold (gsi
, repl
);
1397 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1398 to built-in memset (dest, 0, len). */
1401 gimple_fold_builtin_bzero (gimple_stmt_iterator
*gsi
)
1403 tree fn
= builtin_decl_implicit (BUILT_IN_MEMSET
);
1408 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1410 gimple
*stmt
= gsi_stmt (*gsi
);
1411 tree dest
= gimple_call_arg (stmt
, 0);
1412 tree len
= gimple_call_arg (stmt
, 1);
1414 gimple_seq seq
= NULL
;
1415 gimple
*repl
= gimple_build_call (fn
, 3, dest
, integer_zero_node
, len
);
1416 gimple_seq_add_stmt_without_update (&seq
, repl
);
1417 gsi_replace_with_seq_vops (gsi
, seq
);
1423 /* Fold function call to builtin memset or bzero at *GSI setting the
1424 memory of size LEN to VAL. Return whether a simplification was made. */
1427 gimple_fold_builtin_memset (gimple_stmt_iterator
*gsi
, tree c
, tree len
)
1429 gimple
*stmt
= gsi_stmt (*gsi
);
1431 unsigned HOST_WIDE_INT length
, cval
;
1433 /* If the LEN parameter is zero, return DEST. */
1434 if (integer_zerop (len
))
1436 replace_call_with_value (gsi
, gimple_call_arg (stmt
, 0));
1440 if (! tree_fits_uhwi_p (len
))
1443 if (TREE_CODE (c
) != INTEGER_CST
)
1446 tree dest
= gimple_call_arg (stmt
, 0);
1448 if (TREE_CODE (var
) != ADDR_EXPR
)
1451 var
= TREE_OPERAND (var
, 0);
1452 if (TREE_THIS_VOLATILE (var
))
1455 etype
= TREE_TYPE (var
);
1456 if (TREE_CODE (etype
) == ARRAY_TYPE
)
1457 etype
= TREE_TYPE (etype
);
1459 if (!INTEGRAL_TYPE_P (etype
)
1460 && !POINTER_TYPE_P (etype
))
1463 if (! var_decl_component_p (var
))
1466 length
= tree_to_uhwi (len
);
1467 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype
)) != length
1468 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype
))
1469 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype
)))
1470 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
1473 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
1476 if (!type_has_mode_precision_p (etype
))
1477 etype
= lang_hooks
.types
.type_for_mode (SCALAR_INT_TYPE_MODE (etype
),
1478 TYPE_UNSIGNED (etype
));
1480 if (integer_zerop (c
))
1484 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
1487 cval
= TREE_INT_CST_LOW (c
);
1491 cval
|= (cval
<< 31) << 1;
1494 var
= fold_build2 (MEM_REF
, etype
, dest
, build_int_cst (ptr_type_node
, 0));
1495 gimple
*store
= gimple_build_assign (var
, build_int_cst_type (etype
, cval
));
1496 gimple_move_vops (store
, stmt
);
1497 gsi_insert_before (gsi
, store
, GSI_SAME_STMT
);
1498 if (gimple_call_lhs (stmt
))
1500 gimple
*asgn
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
1501 gsi_replace (gsi
, asgn
, false);
1505 gimple_stmt_iterator gsi2
= *gsi
;
1507 gsi_remove (&gsi2
, true);
1513 /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
1516 get_range_strlen_tree (tree arg
, bitmap
*visited
, strlen_range_kind rkind
,
1517 c_strlen_data
*pdata
, unsigned eltsize
)
1519 gcc_assert (TREE_CODE (arg
) != SSA_NAME
);
1521 /* The length computed by this invocation of the function. */
1522 tree val
= NULL_TREE
;
1524 /* True if VAL is an optimistic (tight) bound determined from
1525 the size of the character array in which the string may be
1526 stored. In that case, the computed VAL is used to set
1528 bool tight_bound
= false;
1530 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1531 if (TREE_CODE (arg
) == ADDR_EXPR
1532 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
)
1534 tree op
= TREE_OPERAND (arg
, 0);
1535 if (integer_zerop (TREE_OPERAND (op
, 1)))
1537 tree aop0
= TREE_OPERAND (op
, 0);
1538 if (TREE_CODE (aop0
) == INDIRECT_REF
1539 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
1540 return get_range_strlen (TREE_OPERAND (aop0
, 0), visited
, rkind
,
1543 else if (TREE_CODE (TREE_OPERAND (op
, 0)) == COMPONENT_REF
1544 && rkind
== SRK_LENRANGE
)
1546 /* Fail if an array is the last member of a struct object
1547 since it could be treated as a (fake) flexible array
1549 tree idx
= TREE_OPERAND (op
, 1);
1551 arg
= TREE_OPERAND (op
, 0);
1552 tree optype
= TREE_TYPE (arg
);
1553 if (tree dom
= TYPE_DOMAIN (optype
))
1554 if (tree bound
= TYPE_MAX_VALUE (dom
))
1555 if (TREE_CODE (bound
) == INTEGER_CST
1556 && TREE_CODE (idx
) == INTEGER_CST
1557 && tree_int_cst_lt (bound
, idx
))
1562 if (rkind
== SRK_INT_VALUE
)
1564 /* We are computing the maximum value (not string length). */
1566 if (TREE_CODE (val
) != INTEGER_CST
1567 || tree_int_cst_sgn (val
) < 0)
1572 c_strlen_data lendata
= { };
1573 val
= c_strlen (arg
, 1, &lendata
, eltsize
);
1575 if (!val
&& lendata
.decl
)
1577 /* ARG refers to an unterminated const character array.
1578 DATA.DECL with size DATA.LEN. */
1579 val
= lendata
.minlen
;
1580 pdata
->decl
= lendata
.decl
;
1584 /* Set if VAL represents the maximum length based on array size (set
1585 when exact length cannot be determined). */
1586 bool maxbound
= false;
1588 if (!val
&& rkind
== SRK_LENRANGE
)
1590 if (TREE_CODE (arg
) == ADDR_EXPR
)
1591 return get_range_strlen (TREE_OPERAND (arg
, 0), visited
, rkind
,
1594 if (TREE_CODE (arg
) == ARRAY_REF
)
1596 tree optype
= TREE_TYPE (TREE_OPERAND (arg
, 0));
1598 /* Determine the "innermost" array type. */
1599 while (TREE_CODE (optype
) == ARRAY_TYPE
1600 && TREE_CODE (TREE_TYPE (optype
)) == ARRAY_TYPE
)
1601 optype
= TREE_TYPE (optype
);
1603 /* Avoid arrays of pointers. */
1604 tree eltype
= TREE_TYPE (optype
);
1605 if (TREE_CODE (optype
) != ARRAY_TYPE
1606 || !INTEGRAL_TYPE_P (eltype
))
1609 /* Fail when the array bound is unknown or zero. */
1610 val
= TYPE_SIZE_UNIT (optype
);
1612 || TREE_CODE (val
) != INTEGER_CST
1613 || integer_zerop (val
))
1616 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1619 /* Set the minimum size to zero since the string in
1620 the array could have zero length. */
1621 pdata
->minlen
= ssize_int (0);
1625 else if (TREE_CODE (arg
) == COMPONENT_REF
1626 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg
, 1)))
1629 /* Use the type of the member array to determine the upper
1630 bound on the length of the array. This may be overly
1631 optimistic if the array itself isn't NUL-terminated and
1632 the caller relies on the subsequent member to contain
1633 the NUL but that would only be considered valid if
1634 the array were the last member of a struct. */
1636 tree fld
= TREE_OPERAND (arg
, 1);
1638 tree optype
= TREE_TYPE (fld
);
1640 /* Determine the "innermost" array type. */
1641 while (TREE_CODE (optype
) == ARRAY_TYPE
1642 && TREE_CODE (TREE_TYPE (optype
)) == ARRAY_TYPE
)
1643 optype
= TREE_TYPE (optype
);
1645 /* Fail when the array bound is unknown or zero. */
1646 val
= TYPE_SIZE_UNIT (optype
);
1648 || TREE_CODE (val
) != INTEGER_CST
1649 || integer_zerop (val
))
1651 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1654 /* Set the minimum size to zero since the string in
1655 the array could have zero length. */
1656 pdata
->minlen
= ssize_int (0);
1658 /* The array size determined above is an optimistic bound
1659 on the length. If the array isn't nul-terminated the
1660 length computed by the library function would be greater.
1661 Even though using strlen to cross the subobject boundary
1662 is undefined, avoid drawing conclusions from the member
1663 type about the length here. */
1666 else if (TREE_CODE (arg
) == MEM_REF
1667 && TREE_CODE (TREE_TYPE (arg
)) == ARRAY_TYPE
1668 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == INTEGER_TYPE
1669 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ADDR_EXPR
)
1671 /* Handle a MEM_REF into a DECL accessing an array of integers,
1672 being conservative about references to extern structures with
1673 flexible array members that can be initialized to arbitrary
1674 numbers of elements as an extension (static structs are okay).
1675 FIXME: Make this less conservative -- see
1676 component_ref_size in tree.c. */
1677 tree ref
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
1678 if ((TREE_CODE (ref
) == PARM_DECL
|| VAR_P (ref
))
1679 && (decl_binds_to_current_def_p (ref
)
1680 || !array_at_struct_end_p (arg
)))
1682 /* Fail if the offset is out of bounds. Such accesses
1683 should be diagnosed at some point. */
1684 val
= DECL_SIZE_UNIT (ref
);
1686 || TREE_CODE (val
) != INTEGER_CST
1687 || integer_zerop (val
))
1690 poly_offset_int psiz
= wi::to_offset (val
);
1691 poly_offset_int poff
= mem_ref_offset (arg
);
1692 if (known_le (psiz
, poff
))
1695 pdata
->minlen
= ssize_int (0);
1697 /* Subtract the offset and one for the terminating nul. */
1700 val
= wide_int_to_tree (TREE_TYPE (val
), psiz
);
1701 /* Since VAL reflects the size of a declared object
1702 rather the type of the access it is not a tight bound. */
1705 else if (TREE_CODE (arg
) == PARM_DECL
|| VAR_P (arg
))
1707 /* Avoid handling pointers to arrays. GCC might misuse
1708 a pointer to an array of one bound to point to an array
1709 object of a greater bound. */
1710 tree argtype
= TREE_TYPE (arg
);
1711 if (TREE_CODE (argtype
) == ARRAY_TYPE
)
1713 val
= TYPE_SIZE_UNIT (argtype
);
1715 || TREE_CODE (val
) != INTEGER_CST
1716 || integer_zerop (val
))
1718 val
= wide_int_to_tree (TREE_TYPE (val
),
1719 wi::sub (wi::to_wide (val
), 1));
1721 /* Set the minimum size to zero since the string in
1722 the array could have zero length. */
1723 pdata
->minlen
= ssize_int (0);
1732 /* Adjust the lower bound on the string length as necessary. */
1734 || (rkind
!= SRK_STRLEN
1735 && TREE_CODE (pdata
->minlen
) == INTEGER_CST
1736 && TREE_CODE (val
) == INTEGER_CST
1737 && tree_int_cst_lt (val
, pdata
->minlen
)))
1738 pdata
->minlen
= val
;
1740 if (pdata
->maxbound
&& TREE_CODE (pdata
->maxbound
) == INTEGER_CST
)
1742 /* Adjust the tighter (more optimistic) string length bound
1743 if necessary and proceed to adjust the more conservative
1745 if (TREE_CODE (val
) == INTEGER_CST
)
1747 if (tree_int_cst_lt (pdata
->maxbound
, val
))
1748 pdata
->maxbound
= val
;
1751 pdata
->maxbound
= val
;
1753 else if (pdata
->maxbound
|| maxbound
)
1754 /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
1755 if VAL corresponds to the maximum length determined based
1756 on the type of the object. */
1757 pdata
->maxbound
= val
;
1761 /* VAL computed above represents an optimistically tight bound
1762 on the length of the string based on the referenced object's
1763 or subobject's type. Determine the conservative upper bound
1764 based on the enclosing object's size if possible. */
1765 if (rkind
== SRK_LENRANGE
)
1768 tree base
= get_addr_base_and_unit_offset (arg
, &offset
);
1771 /* When the call above fails due to a non-constant offset
1772 assume the offset is zero and use the size of the whole
1773 enclosing object instead. */
1774 base
= get_base_address (arg
);
1777 /* If the base object is a pointer no upper bound on the length
1778 can be determined. Otherwise the maximum length is equal to
1779 the size of the enclosing object minus the offset of
1780 the referenced subobject minus 1 (for the terminating nul). */
1781 tree type
= TREE_TYPE (base
);
1782 if (TREE_CODE (type
) == POINTER_TYPE
1783 || (TREE_CODE (base
) != PARM_DECL
&& !VAR_P (base
))
1784 || !(val
= DECL_SIZE_UNIT (base
)))
1785 val
= build_all_ones_cst (size_type_node
);
1788 val
= DECL_SIZE_UNIT (base
);
1789 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1790 size_int (offset
+ 1));
1799 /* Adjust the more conservative bound if possible/necessary
1800 and fail otherwise. */
1801 if (rkind
!= SRK_STRLEN
)
1803 if (TREE_CODE (pdata
->maxlen
) != INTEGER_CST
1804 || TREE_CODE (val
) != INTEGER_CST
)
1807 if (tree_int_cst_lt (pdata
->maxlen
, val
))
1808 pdata
->maxlen
= val
;
1811 else if (simple_cst_equal (val
, pdata
->maxlen
) != 1)
1813 /* Fail if the length of this ARG is different from that
1814 previously determined from another ARG. */
1819 pdata
->maxlen
= val
;
1820 return rkind
== SRK_LENRANGE
|| !integer_all_onesp (val
);
1823 /* For an ARG referencing one or more strings, try to obtain the range
1824 of their lengths, or the size of the largest array ARG referes to if
1825 the range of lengths cannot be determined, and store all in *PDATA.
1826 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1827 the maximum constant value.
1828 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1829 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1830 length or if we are unable to determine the length, return false.
1831 VISITED is a bitmap of visited variables.
1832 RKIND determines the kind of value or range to obtain (see
1834 Set PDATA->DECL if ARG refers to an unterminated constant array.
1835 On input, set ELTSIZE to 1 for normal single byte character strings,
1836 and either 2 or 4 for wide characer strings (the size of wchar_t).
1837 Return true if *PDATA was successfully populated and false otherwise. */
1840 get_range_strlen (tree arg
, bitmap
*visited
,
1841 strlen_range_kind rkind
,
1842 c_strlen_data
*pdata
, unsigned eltsize
)
1845 if (TREE_CODE (arg
) != SSA_NAME
)
1846 return get_range_strlen_tree (arg
, visited
, rkind
, pdata
, eltsize
);
1848 /* If ARG is registered for SSA update we cannot look at its defining
1850 if (name_registered_for_update_p (arg
))
1853 /* If we were already here, break the infinite cycle. */
1855 *visited
= BITMAP_ALLOC (NULL
);
1856 if (!bitmap_set_bit (*visited
, SSA_NAME_VERSION (arg
)))
1860 gimple
*def_stmt
= SSA_NAME_DEF_STMT (var
);
1862 switch (gimple_code (def_stmt
))
1865 /* The RHS of the statement defining VAR must either have a
1866 constant length or come from another SSA_NAME with a constant
1868 if (gimple_assign_single_p (def_stmt
)
1869 || gimple_assign_unary_nop_p (def_stmt
))
1871 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1872 return get_range_strlen (rhs
, visited
, rkind
, pdata
, eltsize
);
1874 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
1876 tree ops
[2] = { gimple_assign_rhs2 (def_stmt
),
1877 gimple_assign_rhs3 (def_stmt
) };
1879 for (unsigned int i
= 0; i
< 2; i
++)
1880 if (!get_range_strlen (ops
[i
], visited
, rkind
, pdata
, eltsize
))
1882 if (rkind
!= SRK_LENRANGE
)
1884 /* Set the upper bound to the maximum to prevent
1885 it from being adjusted in the next iteration but
1886 leave MINLEN and the more conservative MAXBOUND
1887 determined so far alone (or leave them null if
1888 they haven't been set yet). That the MINLEN is
1889 in fact zero can be determined from MAXLEN being
1890 unbounded but the discovered minimum is used for
1892 pdata
->maxlen
= build_all_ones_cst (size_type_node
);
1899 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1900 must have a constant length. */
1901 for (unsigned i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1903 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
1905 /* If this PHI has itself as an argument, we cannot
1906 determine the string length of this argument. However,
1907 if we can find a constant string length for the other
1908 PHI args then we can still be sure that this is a
1909 constant string length. So be optimistic and just
1910 continue with the next argument. */
1911 if (arg
== gimple_phi_result (def_stmt
))
1914 if (!get_range_strlen (arg
, visited
, rkind
, pdata
, eltsize
))
1916 if (rkind
!= SRK_LENRANGE
)
1918 /* Set the upper bound to the maximum to prevent
1919 it from being adjusted in the next iteration but
1920 leave MINLEN and the more conservative MAXBOUND
1921 determined so far alone (or leave them null if
1922 they haven't been set yet). That the MINLEN is
1923 in fact zero can be determined from MAXLEN being
1924 unbounded but the discovered minimum is used for
1926 pdata
->maxlen
= build_all_ones_cst (size_type_node
);
1936 /* Try to obtain the range of the lengths of the string(s) referenced
1937 by ARG, or the size of the largest array ARG refers to if the range
1938 of lengths cannot be determined, and store all in *PDATA which must
1939 be zero-initialized on input except PDATA->MAXBOUND may be set to
1940 a non-null tree node other than INTEGER_CST to request to have it
1941 set to the length of the longest string in a PHI. ELTSIZE is
1942 the expected size of the string element in bytes: 1 for char and
1943 some power of 2 for wide characters.
1944 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1945 for optimization. Returning false means that a nonzero PDATA->MINLEN
1946 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1947 is -1 (in that case, the actual range is indeterminate, i.e.,
1948 [0, PTRDIFF_MAX - 2]. */
1951 get_range_strlen (tree arg
, c_strlen_data
*pdata
, unsigned eltsize
)
1953 bitmap visited
= NULL
;
1954 tree maxbound
= pdata
->maxbound
;
1956 if (!get_range_strlen (arg
, &visited
, SRK_LENRANGE
, pdata
, eltsize
))
1958 /* On failure extend the length range to an impossible maximum
1959 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1960 members can stay unchanged regardless. */
1961 pdata
->minlen
= ssize_int (0);
1962 pdata
->maxlen
= build_all_ones_cst (size_type_node
);
1964 else if (!pdata
->minlen
)
1965 pdata
->minlen
= ssize_int (0);
1967 /* If it's unchanged from it initial non-null value, set the conservative
1968 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1969 if (maxbound
&& pdata
->maxbound
== maxbound
)
1970 pdata
->maxbound
= build_all_ones_cst (size_type_node
);
1973 BITMAP_FREE (visited
);
1975 return !integer_all_onesp (pdata
->maxlen
);
1978 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1979 For ARG of pointer types, NONSTR indicates if the caller is prepared
1980 to handle unterminated strings. For integer ARG and when RKIND ==
1981 SRK_INT_VALUE, NONSTR must be null.
1983 If an unterminated array is discovered and our caller handles
1984 unterminated arrays, then bubble up the offending DECL and
1985 return the maximum size. Otherwise return NULL. */
1988 get_maxval_strlen (tree arg
, strlen_range_kind rkind
, tree
*nonstr
= NULL
)
1990 /* A non-null NONSTR is meaningless when determining the maximum
1991 value of an integer ARG. */
1992 gcc_assert (rkind
!= SRK_INT_VALUE
|| nonstr
== NULL
);
1993 /* ARG must have an integral type when RKIND says so. */
1994 gcc_assert (rkind
!= SRK_INT_VALUE
|| INTEGRAL_TYPE_P (TREE_TYPE (arg
)));
1996 bitmap visited
= NULL
;
1998 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2000 c_strlen_data lendata
= { };
2001 if (!get_range_strlen (arg
, &visited
, rkind
, &lendata
, /* eltsize = */1))
2002 lendata
.maxlen
= NULL_TREE
;
2003 else if (lendata
.maxlen
&& integer_all_onesp (lendata
.maxlen
))
2004 lendata
.maxlen
= NULL_TREE
;
2007 BITMAP_FREE (visited
);
2011 /* For callers prepared to handle unterminated arrays set
2012 *NONSTR to point to the declaration of the array and return
2013 the maximum length/size. */
2014 *nonstr
= lendata
.decl
;
2015 return lendata
.maxlen
;
2018 /* Fail if the constant array isn't nul-terminated. */
2019 return lendata
.decl
? NULL_TREE
: lendata
.maxlen
;
2023 /* Fold function call to builtin strcpy with arguments DEST and SRC.
2024 If LEN is not NULL, it represents the length of the string to be
2025 copied. Return NULL_TREE if no simplification can be made. */
2028 gimple_fold_builtin_strcpy (gimple_stmt_iterator
*gsi
,
2029 tree dest
, tree src
)
2031 gimple
*stmt
= gsi_stmt (*gsi
);
2032 location_t loc
= gimple_location (stmt
);
2035 /* If SRC and DEST are the same (and not volatile), return DEST. */
2036 if (operand_equal_p (src
, dest
, 0))
2038 /* Issue -Wrestrict unless the pointers are null (those do
2039 not point to objects and so do not indicate an overlap;
2040 such calls could be the result of sanitization and jump
2042 if (!integer_zerop (dest
) && !gimple_no_warning_p (stmt
))
2044 tree func
= gimple_call_fndecl (stmt
);
2046 warning_at (loc
, OPT_Wrestrict
,
2047 "%qD source argument is the same as destination",
2051 replace_call_with_value (gsi
, dest
);
2055 if (optimize_function_for_size_p (cfun
))
2058 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2062 /* Set to non-null if ARG refers to an unterminated array. */
2064 tree len
= get_maxval_strlen (src
, SRK_STRLEN
, &nonstr
);
2068 /* Avoid folding calls with unterminated arrays. */
2069 if (!gimple_no_warning_p (stmt
))
2070 warn_string_no_nul (loc
, NULL_TREE
, "strcpy", src
, nonstr
);
2071 gimple_set_no_warning (stmt
, true);
2078 len
= fold_convert_loc (loc
, size_type_node
, len
);
2079 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
2080 len
= force_gimple_operand_gsi (gsi
, len
, true,
2081 NULL_TREE
, true, GSI_SAME_STMT
);
2082 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2083 replace_call_with_call_and_fold (gsi
, repl
);
2087 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
2088 If SLEN is not NULL, it represents the length of the source string.
2089 Return NULL_TREE if no simplification can be made. */
2092 gimple_fold_builtin_strncpy (gimple_stmt_iterator
*gsi
,
2093 tree dest
, tree src
, tree len
)
2095 gimple
*stmt
= gsi_stmt (*gsi
);
2096 location_t loc
= gimple_location (stmt
);
2097 bool nonstring
= get_attr_nonstring_decl (dest
) != NULL_TREE
;
2099 /* If the LEN parameter is zero, return DEST. */
2100 if (integer_zerop (len
))
2102 /* Avoid warning if the destination refers to an array/pointer
2103 decorate with attribute nonstring. */
2106 tree fndecl
= gimple_call_fndecl (stmt
);
2108 /* Warn about the lack of nul termination: the result is not
2109 a (nul-terminated) string. */
2110 tree slen
= get_maxval_strlen (src
, SRK_STRLEN
);
2111 if (slen
&& !integer_zerop (slen
))
2112 warning_at (loc
, OPT_Wstringop_truncation
,
2113 "%G%qD destination unchanged after copying no bytes "
2114 "from a string of length %E",
2115 stmt
, fndecl
, slen
);
2117 warning_at (loc
, OPT_Wstringop_truncation
,
2118 "%G%qD destination unchanged after copying no bytes",
2122 replace_call_with_value (gsi
, dest
);
2126 /* We can't compare slen with len as constants below if len is not a
2128 if (TREE_CODE (len
) != INTEGER_CST
)
2131 /* Now, we must be passed a constant src ptr parameter. */
2132 tree slen
= get_maxval_strlen (src
, SRK_STRLEN
);
2133 if (!slen
|| TREE_CODE (slen
) != INTEGER_CST
)
2136 /* The size of the source string including the terminating nul. */
2137 tree ssize
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
2139 /* We do not support simplification of this case, though we do
2140 support it when expanding trees into RTL. */
2141 /* FIXME: generate a call to __builtin_memset. */
2142 if (tree_int_cst_lt (ssize
, len
))
2145 /* Diagnose truncation that leaves the copy unterminated. */
2146 maybe_diag_stxncpy_trunc (*gsi
, src
, len
);
2148 /* OK transform into builtin memcpy. */
2149 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2153 len
= fold_convert_loc (loc
, size_type_node
, len
);
2154 len
= force_gimple_operand_gsi (gsi
, len
, true,
2155 NULL_TREE
, true, GSI_SAME_STMT
);
2156 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2157 replace_call_with_call_and_fold (gsi
, repl
);
2162 /* Fold function call to builtin strchr or strrchr.
2163 If both arguments are constant, evaluate and fold the result,
2164 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
2165 In general strlen is significantly faster than strchr
2166 due to being a simpler operation. */
2168 gimple_fold_builtin_strchr (gimple_stmt_iterator
*gsi
, bool is_strrchr
)
2170 gimple
*stmt
= gsi_stmt (*gsi
);
2171 tree str
= gimple_call_arg (stmt
, 0);
2172 tree c
= gimple_call_arg (stmt
, 1);
2173 location_t loc
= gimple_location (stmt
);
2177 if (!gimple_call_lhs (stmt
))
2180 /* Avoid folding if the first argument is not a nul-terminated array.
2181 Defer warning until later. */
2182 if (!check_nul_terminated_array (NULL_TREE
, str
))
2185 if ((p
= c_getstr (str
)) && target_char_cst_p (c
, &ch
))
2187 const char *p1
= is_strrchr
? strrchr (p
, ch
) : strchr (p
, ch
);
2191 replace_call_with_value (gsi
, integer_zero_node
);
2195 tree len
= build_int_cst (size_type_node
, p1
- p
);
2196 gimple_seq stmts
= NULL
;
2197 gimple
*new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
2198 POINTER_PLUS_EXPR
, str
, len
);
2199 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2200 gsi_replace_with_seq_vops (gsi
, stmts
);
2204 if (!integer_zerop (c
))
2207 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
2208 if (is_strrchr
&& optimize_function_for_size_p (cfun
))
2210 tree strchr_fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
2214 gimple
*repl
= gimple_build_call (strchr_fn
, 2, str
, c
);
2215 replace_call_with_call_and_fold (gsi
, repl
);
2223 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
2228 /* Create newstr = strlen (str). */
2229 gimple_seq stmts
= NULL
;
2230 gimple
*new_stmt
= gimple_build_call (strlen_fn
, 1, str
);
2231 gimple_set_location (new_stmt
, loc
);
2232 len
= create_tmp_reg_or_ssa_name (size_type_node
);
2233 gimple_call_set_lhs (new_stmt
, len
);
2234 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2236 /* Create (str p+ strlen (str)). */
2237 new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
2238 POINTER_PLUS_EXPR
, str
, len
);
2239 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2240 gsi_replace_with_seq_vops (gsi
, stmts
);
2241 /* gsi now points at the assignment to the lhs, get a
2242 stmt iterator to the strlen.
2243 ??? We can't use gsi_for_stmt as that doesn't work when the
2244 CFG isn't built yet. */
2245 gimple_stmt_iterator gsi2
= *gsi
;
2251 /* Fold function call to builtin strstr.
2252 If both arguments are constant, evaluate and fold the result,
2253 additionally fold strstr (x, "") into x and strstr (x, "c")
2254 into strchr (x, 'c'). */
2256 gimple_fold_builtin_strstr (gimple_stmt_iterator
*gsi
)
2258 gimple
*stmt
= gsi_stmt (*gsi
);
2259 if (!gimple_call_lhs (stmt
))
2262 tree haystack
= gimple_call_arg (stmt
, 0);
2263 tree needle
= gimple_call_arg (stmt
, 1);
2265 /* Avoid folding if either argument is not a nul-terminated array.
2266 Defer warning until later. */
2267 if (!check_nul_terminated_array (NULL_TREE
, haystack
)
2268 || !check_nul_terminated_array (NULL_TREE
, needle
))
2271 const char *q
= c_getstr (needle
);
2275 if (const char *p
= c_getstr (haystack
))
2277 const char *r
= strstr (p
, q
);
2281 replace_call_with_value (gsi
, integer_zero_node
);
2285 tree len
= build_int_cst (size_type_node
, r
- p
);
2286 gimple_seq stmts
= NULL
;
2288 = gimple_build_assign (gimple_call_lhs (stmt
), POINTER_PLUS_EXPR
,
2290 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2291 gsi_replace_with_seq_vops (gsi
, stmts
);
2295 /* For strstr (x, "") return x. */
2298 replace_call_with_value (gsi
, haystack
);
2302 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2305 tree strchr_fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
2308 tree c
= build_int_cst (integer_type_node
, q
[0]);
2309 gimple
*repl
= gimple_build_call (strchr_fn
, 2, haystack
, c
);
2310 replace_call_with_call_and_fold (gsi
, repl
);
2318 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2321 Return NULL_TREE if no simplification was possible, otherwise return the
2322 simplified form of the call as a tree.
2324 The simplified form may be a constant or other expression which
2325 computes the same value, but in a more efficient manner (including
2326 calls to other builtin functions).
2328 The call may contain arguments which need to be evaluated, but
2329 which are not useful to determine the result of the call. In
2330 this case we return a chain of COMPOUND_EXPRs. The LHS of each
2331 COMPOUND_EXPR will be an argument which must be evaluated.
2332 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
2333 COMPOUND_EXPR in the chain will contain the tree for the simplified
2334 form of the builtin function call. */
2337 gimple_fold_builtin_strcat (gimple_stmt_iterator
*gsi
, tree dst
, tree src
)
2339 gimple
*stmt
= gsi_stmt (*gsi
);
2340 location_t loc
= gimple_location (stmt
);
2342 const char *p
= c_getstr (src
);
2344 /* If the string length is zero, return the dst parameter. */
2345 if (p
&& *p
== '\0')
2347 replace_call_with_value (gsi
, dst
);
2351 if (!optimize_bb_for_speed_p (gimple_bb (stmt
)))
2354 /* See if we can store by pieces into (dst + strlen(dst)). */
2356 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
2357 tree memcpy_fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2359 if (!strlen_fn
|| !memcpy_fn
)
2362 /* If the length of the source string isn't computable don't
2363 split strcat into strlen and memcpy. */
2364 tree len
= get_maxval_strlen (src
, SRK_STRLEN
);
2368 /* Create strlen (dst). */
2369 gimple_seq stmts
= NULL
, stmts2
;
2370 gimple
*repl
= gimple_build_call (strlen_fn
, 1, dst
);
2371 gimple_set_location (repl
, loc
);
2372 newdst
= create_tmp_reg_or_ssa_name (size_type_node
);
2373 gimple_call_set_lhs (repl
, newdst
);
2374 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2376 /* Create (dst p+ strlen (dst)). */
2377 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
2378 newdst
= force_gimple_operand (newdst
, &stmts2
, true, NULL_TREE
);
2379 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
2381 len
= fold_convert_loc (loc
, size_type_node
, len
);
2382 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
2383 build_int_cst (size_type_node
, 1));
2384 len
= force_gimple_operand (len
, &stmts2
, true, NULL_TREE
);
2385 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
2387 repl
= gimple_build_call (memcpy_fn
, 3, newdst
, src
, len
);
2388 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2389 if (gimple_call_lhs (stmt
))
2391 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dst
);
2392 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2393 gsi_replace_with_seq_vops (gsi
, stmts
);
2394 /* gsi now points at the assignment to the lhs, get a
2395 stmt iterator to the memcpy call.
2396 ??? We can't use gsi_for_stmt as that doesn't work when the
2397 CFG isn't built yet. */
2398 gimple_stmt_iterator gsi2
= *gsi
;
2404 gsi_replace_with_seq_vops (gsi
, stmts
);
2410 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2411 are the arguments to the call. */
2414 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator
*gsi
)
2416 gimple
*stmt
= gsi_stmt (*gsi
);
2417 tree dest
= gimple_call_arg (stmt
, 0);
2418 tree src
= gimple_call_arg (stmt
, 1);
2419 tree size
= gimple_call_arg (stmt
, 2);
2425 /* If the SRC parameter is "", return DEST. */
2426 if (p
&& *p
== '\0')
2428 replace_call_with_value (gsi
, dest
);
2432 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
2435 /* If __builtin_strcat_chk is used, assume strcat is available. */
2436 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
2440 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
2441 replace_call_with_call_and_fold (gsi
, repl
);
2445 /* Simplify a call to the strncat builtin. */
2448 gimple_fold_builtin_strncat (gimple_stmt_iterator
*gsi
)
2450 gimple
*stmt
= gsi_stmt (*gsi
);
2451 tree dst
= gimple_call_arg (stmt
, 0);
2452 tree src
= gimple_call_arg (stmt
, 1);
2453 tree len
= gimple_call_arg (stmt
, 2);
2455 const char *p
= c_getstr (src
);
2457 /* If the requested length is zero, or the src parameter string
2458 length is zero, return the dst parameter. */
2459 if (integer_zerop (len
) || (p
&& *p
== '\0'))
2461 replace_call_with_value (gsi
, dst
);
2465 if (TREE_CODE (len
) != INTEGER_CST
|| !p
)
2468 unsigned srclen
= strlen (p
);
2470 int cmpsrc
= compare_tree_int (len
, srclen
);
2472 /* Return early if the requested len is less than the string length.
2473 Warnings will be issued elsewhere later. */
2477 unsigned HOST_WIDE_INT dstsize
;
2479 bool nowarn
= gimple_no_warning_p (stmt
);
2481 if (!nowarn
&& compute_builtin_object_size (dst
, 1, &dstsize
))
2483 int cmpdst
= compare_tree_int (len
, dstsize
);
2487 tree fndecl
= gimple_call_fndecl (stmt
);
2489 /* Strncat copies (at most) LEN bytes and always appends
2490 the terminating NUL so the specified bound should never
2491 be equal to (or greater than) the size of the destination.
2492 If it is, the copy could overflow. */
2493 location_t loc
= gimple_location (stmt
);
2494 nowarn
= warning_at (loc
, OPT_Wstringop_overflow_
,
2496 ? G_("%G%qD specified bound %E equals "
2498 : G_("%G%qD specified bound %E exceeds "
2499 "destination size %wu"),
2500 stmt
, fndecl
, len
, dstsize
);
2502 gimple_set_no_warning (stmt
, true);
2506 if (!nowarn
&& cmpsrc
== 0)
2508 tree fndecl
= gimple_call_fndecl (stmt
);
2509 location_t loc
= gimple_location (stmt
);
2511 /* To avoid possible overflow the specified bound should also
2512 not be equal to the length of the source, even when the size
2513 of the destination is unknown (it's not an uncommon mistake
2514 to specify as the bound to strncpy the length of the source). */
2515 if (warning_at (loc
, OPT_Wstringop_overflow_
,
2516 "%G%qD specified bound %E equals source length",
2518 gimple_set_no_warning (stmt
, true);
2521 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
2523 /* If the replacement _DECL isn't initialized, don't do the
2528 /* Otherwise, emit a call to strcat. */
2529 gcall
*repl
= gimple_build_call (fn
, 2, dst
, src
);
2530 replace_call_with_call_and_fold (gsi
, repl
);
2534 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2538 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator
*gsi
)
2540 gimple
*stmt
= gsi_stmt (*gsi
);
2541 tree dest
= gimple_call_arg (stmt
, 0);
2542 tree src
= gimple_call_arg (stmt
, 1);
2543 tree len
= gimple_call_arg (stmt
, 2);
2544 tree size
= gimple_call_arg (stmt
, 3);
2549 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2550 if ((p
&& *p
== '\0')
2551 || integer_zerop (len
))
2553 replace_call_with_value (gsi
, dest
);
2557 if (! tree_fits_uhwi_p (size
))
2560 if (! integer_all_onesp (size
))
2562 tree src_len
= c_strlen (src
, 1);
2564 && tree_fits_uhwi_p (src_len
)
2565 && tree_fits_uhwi_p (len
)
2566 && ! tree_int_cst_lt (len
, src_len
))
2568 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2569 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
2573 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
2574 replace_call_with_call_and_fold (gsi
, repl
);
2580 /* If __builtin_strncat_chk is used, assume strncat is available. */
2581 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
2585 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2586 replace_call_with_call_and_fold (gsi
, repl
);
2590 /* Build and append gimple statements to STMTS that would load a first
2591 character of a memory location identified by STR. LOC is location
2592 of the statement. */
2595 gimple_load_first_char (location_t loc
, tree str
, gimple_seq
*stmts
)
2599 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
2600 tree cst_uchar_ptr_node
2601 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
2602 tree off0
= build_int_cst (cst_uchar_ptr_node
, 0);
2604 tree temp
= fold_build2_loc (loc
, MEM_REF
, cst_uchar_node
, str
, off0
);
2605 gassign
*stmt
= gimple_build_assign (NULL_TREE
, temp
);
2606 var
= create_tmp_reg_or_ssa_name (cst_uchar_node
, stmt
);
2608 gimple_assign_set_lhs (stmt
, var
);
2609 gimple_seq_add_stmt_without_update (stmts
, stmt
);
2614 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
2617 gimple_fold_builtin_string_compare (gimple_stmt_iterator
*gsi
)
2619 gimple
*stmt
= gsi_stmt (*gsi
);
2620 tree callee
= gimple_call_fndecl (stmt
);
2621 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
2623 tree type
= integer_type_node
;
2624 tree str1
= gimple_call_arg (stmt
, 0);
2625 tree str2
= gimple_call_arg (stmt
, 1);
2626 tree lhs
= gimple_call_lhs (stmt
);
2628 tree bound_node
= NULL_TREE
;
2629 unsigned HOST_WIDE_INT bound
= HOST_WIDE_INT_M1U
;
2631 /* Handle strncmp and strncasecmp functions. */
2632 if (gimple_call_num_args (stmt
) == 3)
2634 bound_node
= gimple_call_arg (stmt
, 2);
2635 if (tree_fits_uhwi_p (bound_node
))
2636 bound
= tree_to_uhwi (bound_node
);
2639 /* If the BOUND parameter is zero, return zero. */
2642 replace_call_with_value (gsi
, integer_zero_node
);
2646 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2647 if (operand_equal_p (str1
, str2
, 0))
2649 replace_call_with_value (gsi
, integer_zero_node
);
2653 /* Initially set to the number of characters, including the terminating
2654 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2655 the array Sx is not terminated by a nul.
2656 For nul-terminated strings then adjusted to their length so that
2657 LENx == NULPOSx holds. */
2658 unsigned HOST_WIDE_INT len1
= HOST_WIDE_INT_MAX
, len2
= len1
;
2659 const char *p1
= getbyterep (str1
, &len1
);
2660 const char *p2
= getbyterep (str2
, &len2
);
2662 /* The position of the terminating nul character if one exists, otherwise
2663 a value greater than LENx. */
2664 unsigned HOST_WIDE_INT nulpos1
= HOST_WIDE_INT_MAX
, nulpos2
= nulpos1
;
2668 size_t n
= strnlen (p1
, len1
);
2675 size_t n
= strnlen (p2
, len2
);
2680 /* For known strings, return an immediate value. */
2684 bool known_result
= false;
2688 case BUILT_IN_STRCMP
:
2689 case BUILT_IN_STRCMP_EQ
:
2690 if (len1
!= nulpos1
|| len2
!= nulpos2
)
2693 r
= strcmp (p1
, p2
);
2694 known_result
= true;
2697 case BUILT_IN_STRNCMP
:
2698 case BUILT_IN_STRNCMP_EQ
:
2700 if (bound
== HOST_WIDE_INT_M1U
)
2703 /* Reduce the bound to be no more than the length
2704 of the shorter of the two strings, or the sizes
2705 of the unterminated arrays. */
2706 unsigned HOST_WIDE_INT n
= bound
;
2708 if (len1
== nulpos1
&& len1
< n
)
2710 if (len2
== nulpos2
&& len2
< n
)
2713 if (MIN (nulpos1
, nulpos2
) + 1 < n
)
2716 r
= strncmp (p1
, p2
, n
);
2717 known_result
= true;
2720 /* Only handleable situation is where the string are equal (result 0),
2721 which is already handled by operand_equal_p case. */
2722 case BUILT_IN_STRCASECMP
:
2724 case BUILT_IN_STRNCASECMP
:
2726 if (bound
== HOST_WIDE_INT_M1U
)
2728 r
= strncmp (p1
, p2
, bound
);
2730 known_result
= true;
2739 replace_call_with_value (gsi
, build_cmp_result (type
, r
));
2744 bool nonzero_bound
= (bound
>= 1 && bound
< HOST_WIDE_INT_M1U
)
2745 || fcode
== BUILT_IN_STRCMP
2746 || fcode
== BUILT_IN_STRCMP_EQ
2747 || fcode
== BUILT_IN_STRCASECMP
;
2749 location_t loc
= gimple_location (stmt
);
2751 /* If the second arg is "", return *(const unsigned char*)arg1. */
2752 if (p2
&& *p2
== '\0' && nonzero_bound
)
2754 gimple_seq stmts
= NULL
;
2755 tree var
= gimple_load_first_char (loc
, str1
, &stmts
);
2758 stmt
= gimple_build_assign (lhs
, NOP_EXPR
, var
);
2759 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2762 gsi_replace_with_seq_vops (gsi
, stmts
);
2766 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2767 if (p1
&& *p1
== '\0' && nonzero_bound
)
2769 gimple_seq stmts
= NULL
;
2770 tree var
= gimple_load_first_char (loc
, str2
, &stmts
);
2774 tree c
= create_tmp_reg_or_ssa_name (integer_type_node
);
2775 stmt
= gimple_build_assign (c
, NOP_EXPR
, var
);
2776 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2778 stmt
= gimple_build_assign (lhs
, NEGATE_EXPR
, c
);
2779 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2782 gsi_replace_with_seq_vops (gsi
, stmts
);
2786 /* If BOUND is one, return an expression corresponding to
2787 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2788 if (fcode
== BUILT_IN_STRNCMP
&& bound
== 1)
2790 gimple_seq stmts
= NULL
;
2791 tree temp1
= gimple_load_first_char (loc
, str1
, &stmts
);
2792 tree temp2
= gimple_load_first_char (loc
, str2
, &stmts
);
2796 tree c1
= create_tmp_reg_or_ssa_name (integer_type_node
);
2797 gassign
*convert1
= gimple_build_assign (c1
, NOP_EXPR
, temp1
);
2798 gimple_seq_add_stmt_without_update (&stmts
, convert1
);
2800 tree c2
= create_tmp_reg_or_ssa_name (integer_type_node
);
2801 gassign
*convert2
= gimple_build_assign (c2
, NOP_EXPR
, temp2
);
2802 gimple_seq_add_stmt_without_update (&stmts
, convert2
);
2804 stmt
= gimple_build_assign (lhs
, MINUS_EXPR
, c1
, c2
);
2805 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2808 gsi_replace_with_seq_vops (gsi
, stmts
);
2812 /* If BOUND is greater than the length of one constant string,
2813 and the other argument is also a nul-terminated string, replace
2814 strncmp with strcmp. */
2815 if (fcode
== BUILT_IN_STRNCMP
2816 && bound
> 0 && bound
< HOST_WIDE_INT_M1U
2817 && ((p2
&& len2
< bound
&& len2
== nulpos2
)
2818 || (p1
&& len1
< bound
&& len1
== nulpos1
)))
2820 tree fn
= builtin_decl_implicit (BUILT_IN_STRCMP
);
2823 gimple
*repl
= gimple_build_call (fn
, 2, str1
, str2
);
2824 replace_call_with_call_and_fold (gsi
, repl
);
2831 /* Fold a call to the memchr pointed by GSI iterator. */
2834 gimple_fold_builtin_memchr (gimple_stmt_iterator
*gsi
)
2836 gimple
*stmt
= gsi_stmt (*gsi
);
2837 tree lhs
= gimple_call_lhs (stmt
);
2838 tree arg1
= gimple_call_arg (stmt
, 0);
2839 tree arg2
= gimple_call_arg (stmt
, 1);
2840 tree len
= gimple_call_arg (stmt
, 2);
2842 /* If the LEN parameter is zero, return zero. */
2843 if (integer_zerop (len
))
2845 replace_call_with_value (gsi
, build_int_cst (ptr_type_node
, 0));
2850 if (TREE_CODE (arg2
) != INTEGER_CST
2851 || !tree_fits_uhwi_p (len
)
2852 || !target_char_cst_p (arg2
, &c
))
2855 unsigned HOST_WIDE_INT length
= tree_to_uhwi (len
);
2856 unsigned HOST_WIDE_INT string_length
;
2857 const char *p1
= getbyterep (arg1
, &string_length
);
2861 const char *r
= (const char *)memchr (p1
, c
, MIN (length
, string_length
));
2864 tree mem_size
, offset_node
;
2865 byte_representation (arg1
, &offset_node
, &mem_size
, NULL
);
2866 unsigned HOST_WIDE_INT offset
= (offset_node
== NULL_TREE
)
2867 ? 0 : tree_to_uhwi (offset_node
);
2868 /* MEM_SIZE is the size of the array the string literal
2870 unsigned HOST_WIDE_INT string_size
= tree_to_uhwi (mem_size
) - offset
;
2871 gcc_checking_assert (string_length
<= string_size
);
2872 if (length
<= string_size
)
2874 replace_call_with_value (gsi
, build_int_cst (ptr_type_node
, 0));
2880 unsigned HOST_WIDE_INT offset
= r
- p1
;
2881 gimple_seq stmts
= NULL
;
2882 if (lhs
!= NULL_TREE
)
2884 tree offset_cst
= build_int_cst (sizetype
, offset
);
2885 gassign
*stmt
= gimple_build_assign (lhs
, POINTER_PLUS_EXPR
,
2887 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2890 gimple_seq_add_stmt_without_update (&stmts
,
2891 gimple_build_nop ());
2893 gsi_replace_with_seq_vops (gsi
, stmts
);
2901 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2902 to the call. IGNORE is true if the value returned
2903 by the builtin will be ignored. UNLOCKED is true is true if this
2904 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2905 the known length of the string. Return NULL_TREE if no simplification
2909 gimple_fold_builtin_fputs (gimple_stmt_iterator
*gsi
,
2910 tree arg0
, tree arg1
,
2913 gimple
*stmt
= gsi_stmt (*gsi
);
2915 /* If we're using an unlocked function, assume the other unlocked
2916 functions exist explicitly. */
2917 tree
const fn_fputc
= (unlocked
2918 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
2919 : builtin_decl_implicit (BUILT_IN_FPUTC
));
2920 tree
const fn_fwrite
= (unlocked
2921 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
2922 : builtin_decl_implicit (BUILT_IN_FWRITE
));
2924 /* If the return value is used, don't do the transformation. */
2925 if (gimple_call_lhs (stmt
))
2928 /* Get the length of the string passed to fputs. If the length
2929 can't be determined, punt. */
2930 tree len
= get_maxval_strlen (arg0
, SRK_STRLEN
);
2932 || TREE_CODE (len
) != INTEGER_CST
)
2935 switch (compare_tree_int (len
, 1))
2937 case -1: /* length is 0, delete the call entirely . */
2938 replace_call_with_value (gsi
, integer_zero_node
);
2941 case 0: /* length is 1, call fputc. */
2943 const char *p
= c_getstr (arg0
);
2949 gimple
*repl
= gimple_build_call (fn_fputc
, 2,
2951 (integer_type_node
, p
[0]), arg1
);
2952 replace_call_with_call_and_fold (gsi
, repl
);
2957 case 1: /* length is greater than 1, call fwrite. */
2959 /* If optimizing for size keep fputs. */
2960 if (optimize_function_for_size_p (cfun
))
2962 /* New argument list transforming fputs(string, stream) to
2963 fwrite(string, 1, len, stream). */
2967 gimple
*repl
= gimple_build_call (fn_fwrite
, 4, arg0
,
2968 size_one_node
, len
, arg1
);
2969 replace_call_with_call_and_fold (gsi
, repl
);
2978 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2979 DEST, SRC, LEN, and SIZE are the arguments to the call.
2980 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2981 code of the builtin. If MAXLEN is not NULL, it is maximum length
2982 passed as third argument. */
2985 gimple_fold_builtin_memory_chk (gimple_stmt_iterator
*gsi
,
2986 tree dest
, tree src
, tree len
, tree size
,
2987 enum built_in_function fcode
)
2989 gimple
*stmt
= gsi_stmt (*gsi
);
2990 location_t loc
= gimple_location (stmt
);
2991 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
2994 /* If SRC and DEST are the same (and not volatile), return DEST
2995 (resp. DEST+LEN for __mempcpy_chk). */
2996 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
2998 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
3000 replace_call_with_value (gsi
, dest
);
3005 gimple_seq stmts
= NULL
;
3006 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
3007 tree temp
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
3008 TREE_TYPE (dest
), dest
, len
);
3009 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3010 replace_call_with_value (gsi
, temp
);
3015 if (! tree_fits_uhwi_p (size
))
3018 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
3019 if (! integer_all_onesp (size
))
3021 if (! tree_fits_uhwi_p (len
))
3023 /* If LEN is not constant, try MAXLEN too.
3024 For MAXLEN only allow optimizing into non-_ocs function
3025 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3026 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
3028 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
3030 /* (void) __mempcpy_chk () can be optimized into
3031 (void) __memcpy_chk (). */
3032 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
3036 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
3037 replace_call_with_call_and_fold (gsi
, repl
);
3046 if (tree_int_cst_lt (size
, maxlen
))
3051 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
3052 mem{cpy,pcpy,move,set} is available. */
3055 case BUILT_IN_MEMCPY_CHK
:
3056 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
3058 case BUILT_IN_MEMPCPY_CHK
:
3059 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
3061 case BUILT_IN_MEMMOVE_CHK
:
3062 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
3064 case BUILT_IN_MEMSET_CHK
:
3065 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
3074 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
3075 replace_call_with_call_and_fold (gsi
, repl
);
3079 /* Fold a call to the __st[rp]cpy_chk builtin.
3080 DEST, SRC, and SIZE are the arguments to the call.
3081 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
3082 code of the builtin. If MAXLEN is not NULL, it is maximum length of
3083 strings passed as second argument. */
3086 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator
*gsi
,
3088 tree src
, tree size
,
3089 enum built_in_function fcode
)
3091 gimple
*stmt
= gsi_stmt (*gsi
);
3092 location_t loc
= gimple_location (stmt
);
3093 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
3096 /* If SRC and DEST are the same (and not volatile), return DEST. */
3097 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
3099 /* Issue -Wrestrict unless the pointers are null (those do
3100 not point to objects and so do not indicate an overlap;
3101 such calls could be the result of sanitization and jump
3103 if (!integer_zerop (dest
) && !gimple_no_warning_p (stmt
))
3105 tree func
= gimple_call_fndecl (stmt
);
3107 warning_at (loc
, OPT_Wrestrict
,
3108 "%qD source argument is the same as destination",
3112 replace_call_with_value (gsi
, dest
);
3116 if (! tree_fits_uhwi_p (size
))
3119 tree maxlen
= get_maxval_strlen (src
, SRK_STRLENMAX
);
3120 if (! integer_all_onesp (size
))
3122 len
= c_strlen (src
, 1);
3123 if (! len
|| ! tree_fits_uhwi_p (len
))
3125 /* If LEN is not constant, try MAXLEN too.
3126 For MAXLEN only allow optimizing into non-_ocs function
3127 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3128 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
3130 if (fcode
== BUILT_IN_STPCPY_CHK
)
3135 /* If return value of __stpcpy_chk is ignored,
3136 optimize into __strcpy_chk. */
3137 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
3141 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
3142 replace_call_with_call_and_fold (gsi
, repl
);
3146 if (! len
|| TREE_SIDE_EFFECTS (len
))
3149 /* If c_strlen returned something, but not a constant,
3150 transform __strcpy_chk into __memcpy_chk. */
3151 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
3155 gimple_seq stmts
= NULL
;
3156 len
= force_gimple_operand (len
, &stmts
, true, NULL_TREE
);
3157 len
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
3158 len
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
, len
,
3159 build_int_cst (size_type_node
, 1));
3160 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3161 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
3162 replace_call_with_call_and_fold (gsi
, repl
);
3169 if (! tree_int_cst_lt (maxlen
, size
))
3173 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
3174 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
3175 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
3179 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
3180 replace_call_with_call_and_fold (gsi
, repl
);
3184 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3185 are the arguments to the call. If MAXLEN is not NULL, it is maximum
3186 length passed as third argument. IGNORE is true if return value can be
3187 ignored. FCODE is the BUILT_IN_* code of the builtin. */
3190 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator
*gsi
,
3191 tree dest
, tree src
,
3192 tree len
, tree size
,
3193 enum built_in_function fcode
)
3195 gimple
*stmt
= gsi_stmt (*gsi
);
3196 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
3199 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
3201 /* If return value of __stpncpy_chk is ignored,
3202 optimize into __strncpy_chk. */
3203 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
3206 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
3207 replace_call_with_call_and_fold (gsi
, repl
);
3212 if (! tree_fits_uhwi_p (size
))
3215 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
3216 if (! integer_all_onesp (size
))
3218 if (! tree_fits_uhwi_p (len
))
3220 /* If LEN is not constant, try MAXLEN too.
3221 For MAXLEN only allow optimizing into non-_ocs function
3222 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3223 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
3229 if (tree_int_cst_lt (size
, maxlen
))
3233 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3234 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
3235 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
3239 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
3240 replace_call_with_call_and_fold (gsi
, repl
);
3244 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3245 Return NULL_TREE if no simplification can be made. */
3248 gimple_fold_builtin_stpcpy (gimple_stmt_iterator
*gsi
)
3250 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3251 location_t loc
= gimple_location (stmt
);
3252 tree dest
= gimple_call_arg (stmt
, 0);
3253 tree src
= gimple_call_arg (stmt
, 1);
3256 /* If the result is unused, replace stpcpy with strcpy. */
3257 if (gimple_call_lhs (stmt
) == NULL_TREE
)
3259 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3262 gimple_call_set_fndecl (stmt
, fn
);
3267 /* Set to non-null if ARG refers to an unterminated array. */
3268 c_strlen_data data
= { };
3269 /* The size of the unterminated array if SRC referes to one. */
3271 /* True if the size is exact/constant, false if it's the lower bound
3274 tree len
= c_strlen (src
, 1, &data
, 1);
3276 || TREE_CODE (len
) != INTEGER_CST
)
3278 data
.decl
= unterminated_array (src
, &size
, &exact
);
3285 /* Avoid folding calls with unterminated arrays. */
3286 if (!gimple_no_warning_p (stmt
))
3287 warn_string_no_nul (loc
, NULL_TREE
, "stpcpy", src
, data
.decl
, size
,
3289 gimple_set_no_warning (stmt
, true);
3293 if (optimize_function_for_size_p (cfun
)
3294 /* If length is zero it's small enough. */
3295 && !integer_zerop (len
))
3298 /* If the source has a known length replace stpcpy with memcpy. */
3299 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3303 gimple_seq stmts
= NULL
;
3304 tree tem
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
3305 lenp1
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
,
3306 tem
, build_int_cst (size_type_node
, 1));
3307 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3308 gcall
*repl
= gimple_build_call (fn
, 3, dest
, src
, lenp1
);
3309 gimple_move_vops (repl
, stmt
);
3310 gsi_insert_before (gsi
, repl
, GSI_SAME_STMT
);
3311 /* Replace the result with dest + len. */
3313 tem
= gimple_convert (&stmts
, loc
, sizetype
, len
);
3314 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3315 gassign
*ret
= gimple_build_assign (gimple_call_lhs (stmt
),
3316 POINTER_PLUS_EXPR
, dest
, tem
);
3317 gsi_replace (gsi
, ret
, false);
3318 /* Finally fold the memcpy call. */
3319 gimple_stmt_iterator gsi2
= *gsi
;
3325 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
3326 NULL_TREE if a normal call should be emitted rather than expanding
3327 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
3328 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
3329 passed as second argument. */
3332 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator
*gsi
,
3333 enum built_in_function fcode
)
3335 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3336 tree dest
, size
, len
, fn
, fmt
, flag
;
3337 const char *fmt_str
;
3339 /* Verify the required arguments in the original call. */
3340 if (gimple_call_num_args (stmt
) < 5)
3343 dest
= gimple_call_arg (stmt
, 0);
3344 len
= gimple_call_arg (stmt
, 1);
3345 flag
= gimple_call_arg (stmt
, 2);
3346 size
= gimple_call_arg (stmt
, 3);
3347 fmt
= gimple_call_arg (stmt
, 4);
3349 if (! tree_fits_uhwi_p (size
))
3352 if (! integer_all_onesp (size
))
3354 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
3355 if (! tree_fits_uhwi_p (len
))
3357 /* If LEN is not constant, try MAXLEN too.
3358 For MAXLEN only allow optimizing into non-_ocs function
3359 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3360 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
3366 if (tree_int_cst_lt (size
, maxlen
))
3370 if (!init_target_chars ())
3373 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3374 or if format doesn't contain % chars or is "%s". */
3375 if (! integer_zerop (flag
))
3377 fmt_str
= c_getstr (fmt
);
3378 if (fmt_str
== NULL
)
3380 if (strchr (fmt_str
, target_percent
) != NULL
3381 && strcmp (fmt_str
, target_percent_s
))
3385 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3387 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
3388 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
3392 /* Replace the called function and the first 5 argument by 3 retaining
3393 trailing varargs. */
3394 gimple_call_set_fndecl (stmt
, fn
);
3395 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
3396 gimple_call_set_arg (stmt
, 0, dest
);
3397 gimple_call_set_arg (stmt
, 1, len
);
3398 gimple_call_set_arg (stmt
, 2, fmt
);
3399 for (unsigned i
= 3; i
< gimple_call_num_args (stmt
) - 2; ++i
)
3400 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
3401 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
3406 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
3407 Return NULL_TREE if a normal call should be emitted rather than
3408 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
3409 or BUILT_IN_VSPRINTF_CHK. */
3412 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator
*gsi
,
3413 enum built_in_function fcode
)
3415 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3416 tree dest
, size
, len
, fn
, fmt
, flag
;
3417 const char *fmt_str
;
3418 unsigned nargs
= gimple_call_num_args (stmt
);
3420 /* Verify the required arguments in the original call. */
3423 dest
= gimple_call_arg (stmt
, 0);
3424 flag
= gimple_call_arg (stmt
, 1);
3425 size
= gimple_call_arg (stmt
, 2);
3426 fmt
= gimple_call_arg (stmt
, 3);
3428 if (! tree_fits_uhwi_p (size
))
3433 if (!init_target_chars ())
3436 /* Check whether the format is a literal string constant. */
3437 fmt_str
= c_getstr (fmt
);
3438 if (fmt_str
!= NULL
)
3440 /* If the format doesn't contain % args or %%, we know the size. */
3441 if (strchr (fmt_str
, target_percent
) == 0)
3443 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
3444 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
3446 /* If the format is "%s" and first ... argument is a string literal,
3447 we know the size too. */
3448 else if (fcode
== BUILT_IN_SPRINTF_CHK
3449 && strcmp (fmt_str
, target_percent_s
) == 0)
3455 arg
= gimple_call_arg (stmt
, 4);
3456 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
3458 len
= c_strlen (arg
, 1);
3459 if (! len
|| ! tree_fits_uhwi_p (len
))
3466 if (! integer_all_onesp (size
))
3468 if (! len
|| ! tree_int_cst_lt (len
, size
))
3472 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3473 or if format doesn't contain % chars or is "%s". */
3474 if (! integer_zerop (flag
))
3476 if (fmt_str
== NULL
)
3478 if (strchr (fmt_str
, target_percent
) != NULL
3479 && strcmp (fmt_str
, target_percent_s
))
3483 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3484 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
3485 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
3489 /* Replace the called function and the first 4 argument by 2 retaining
3490 trailing varargs. */
3491 gimple_call_set_fndecl (stmt
, fn
);
3492 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
3493 gimple_call_set_arg (stmt
, 0, dest
);
3494 gimple_call_set_arg (stmt
, 1, fmt
);
3495 for (unsigned i
= 2; i
< gimple_call_num_args (stmt
) - 2; ++i
)
3496 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
3497 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
3502 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3503 ORIG may be null if this is a 2-argument call. We don't attempt to
3504 simplify calls with more than 3 arguments.
3506 Return true if simplification was possible, otherwise false. */
3509 gimple_fold_builtin_sprintf (gimple_stmt_iterator
*gsi
)
3511 gimple
*stmt
= gsi_stmt (*gsi
);
3512 tree dest
= gimple_call_arg (stmt
, 0);
3513 tree fmt
= gimple_call_arg (stmt
, 1);
3514 tree orig
= NULL_TREE
;
3515 const char *fmt_str
= NULL
;
3517 /* Verify the required arguments in the original call. We deal with two
3518 types of sprintf() calls: 'sprintf (str, fmt)' and
3519 'sprintf (dest, "%s", orig)'. */
3520 if (gimple_call_num_args (stmt
) > 3)
3523 if (gimple_call_num_args (stmt
) == 3)
3524 orig
= gimple_call_arg (stmt
, 2);
3526 /* Check whether the format is a literal string constant. */
3527 fmt_str
= c_getstr (fmt
);
3528 if (fmt_str
== NULL
)
3531 if (!init_target_chars ())
3534 /* If the format doesn't contain % args or %%, use strcpy. */
3535 if (strchr (fmt_str
, target_percent
) == NULL
)
3537 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3542 /* Don't optimize sprintf (buf, "abc", ptr++). */
3546 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3547 'format' is known to contain no % formats. */
3548 gimple_seq stmts
= NULL
;
3549 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
3551 /* Propagate the NO_WARNING bit to avoid issuing the same
3552 warning more than once. */
3553 if (gimple_no_warning_p (stmt
))
3554 gimple_set_no_warning (repl
, true);
3556 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3557 if (tree lhs
= gimple_call_lhs (stmt
))
3559 repl
= gimple_build_assign (lhs
, build_int_cst (TREE_TYPE (lhs
),
3561 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3562 gsi_replace_with_seq_vops (gsi
, stmts
);
3563 /* gsi now points at the assignment to the lhs, get a
3564 stmt iterator to the memcpy call.
3565 ??? We can't use gsi_for_stmt as that doesn't work when the
3566 CFG isn't built yet. */
3567 gimple_stmt_iterator gsi2
= *gsi
;
3573 gsi_replace_with_seq_vops (gsi
, stmts
);
3579 /* If the format is "%s", use strcpy if the result isn't used. */
3580 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
3583 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3588 /* Don't crash on sprintf (str1, "%s"). */
3592 tree orig_len
= NULL_TREE
;
3593 if (gimple_call_lhs (stmt
))
3595 orig_len
= get_maxval_strlen (orig
, SRK_STRLEN
);
3600 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3601 gimple_seq stmts
= NULL
;
3602 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
3604 /* Propagate the NO_WARNING bit to avoid issuing the same
3605 warning more than once. */
3606 if (gimple_no_warning_p (stmt
))
3607 gimple_set_no_warning (repl
, true);
3609 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3610 if (tree lhs
= gimple_call_lhs (stmt
))
3612 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
3613 TREE_TYPE (orig_len
)))
3614 orig_len
= fold_convert (TREE_TYPE (lhs
), orig_len
);
3615 repl
= gimple_build_assign (lhs
, orig_len
);
3616 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3617 gsi_replace_with_seq_vops (gsi
, stmts
);
3618 /* gsi now points at the assignment to the lhs, get a
3619 stmt iterator to the memcpy call.
3620 ??? We can't use gsi_for_stmt as that doesn't work when the
3621 CFG isn't built yet. */
3622 gimple_stmt_iterator gsi2
= *gsi
;
3628 gsi_replace_with_seq_vops (gsi
, stmts
);
3636 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3637 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3638 attempt to simplify calls with more than 4 arguments.
3640 Return true if simplification was possible, otherwise false. */
3643 gimple_fold_builtin_snprintf (gimple_stmt_iterator
*gsi
)
3645 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3646 tree dest
= gimple_call_arg (stmt
, 0);
3647 tree destsize
= gimple_call_arg (stmt
, 1);
3648 tree fmt
= gimple_call_arg (stmt
, 2);
3649 tree orig
= NULL_TREE
;
3650 const char *fmt_str
= NULL
;
3652 if (gimple_call_num_args (stmt
) > 4)
3655 if (gimple_call_num_args (stmt
) == 4)
3656 orig
= gimple_call_arg (stmt
, 3);
3658 if (!tree_fits_uhwi_p (destsize
))
3660 unsigned HOST_WIDE_INT destlen
= tree_to_uhwi (destsize
);
3662 /* Check whether the format is a literal string constant. */
3663 fmt_str
= c_getstr (fmt
);
3664 if (fmt_str
== NULL
)
3667 if (!init_target_chars ())
3670 /* If the format doesn't contain % args or %%, use strcpy. */
3671 if (strchr (fmt_str
, target_percent
) == NULL
)
3673 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3677 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3681 /* We could expand this as
3682 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3684 memcpy (str, fmt_with_nul_at_cstm1, cst);
3685 but in the former case that might increase code size
3686 and in the latter case grow .rodata section too much.
3688 size_t len
= strlen (fmt_str
);
3692 gimple_seq stmts
= NULL
;
3693 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
3694 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3695 if (tree lhs
= gimple_call_lhs (stmt
))
3697 repl
= gimple_build_assign (lhs
,
3698 build_int_cst (TREE_TYPE (lhs
), len
));
3699 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3700 gsi_replace_with_seq_vops (gsi
, stmts
);
3701 /* gsi now points at the assignment to the lhs, get a
3702 stmt iterator to the memcpy call.
3703 ??? We can't use gsi_for_stmt as that doesn't work when the
3704 CFG isn't built yet. */
3705 gimple_stmt_iterator gsi2
= *gsi
;
3711 gsi_replace_with_seq_vops (gsi
, stmts
);
3717 /* If the format is "%s", use strcpy if the result isn't used. */
3718 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
3720 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3724 /* Don't crash on snprintf (str1, cst, "%s"). */
3728 tree orig_len
= get_maxval_strlen (orig
, SRK_STRLEN
);
3729 if (!orig_len
|| TREE_CODE (orig_len
) != INTEGER_CST
)
3732 /* We could expand this as
3733 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3735 memcpy (str1, str2_with_nul_at_cstm1, cst);
3736 but in the former case that might increase code size
3737 and in the latter case grow .rodata section too much.
3739 if (compare_tree_int (orig_len
, destlen
) >= 0)
3742 /* Convert snprintf (str1, cst, "%s", str2) into
3743 strcpy (str1, str2) if strlen (str2) < cst. */
3744 gimple_seq stmts
= NULL
;
3745 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
3746 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3747 if (tree lhs
= gimple_call_lhs (stmt
))
3749 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
3750 TREE_TYPE (orig_len
)))
3751 orig_len
= fold_convert (TREE_TYPE (lhs
), orig_len
);
3752 repl
= gimple_build_assign (lhs
, orig_len
);
3753 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3754 gsi_replace_with_seq_vops (gsi
, stmts
);
3755 /* gsi now points at the assignment to the lhs, get a
3756 stmt iterator to the memcpy call.
3757 ??? We can't use gsi_for_stmt as that doesn't work when the
3758 CFG isn't built yet. */
3759 gimple_stmt_iterator gsi2
= *gsi
;
3765 gsi_replace_with_seq_vops (gsi
, stmts
);
3773 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3774 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3775 more than 3 arguments, and ARG may be null in the 2-argument case.
3777 Return NULL_TREE if no simplification was possible, otherwise return the
3778 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3779 code of the function to be simplified. */
3782 gimple_fold_builtin_fprintf (gimple_stmt_iterator
*gsi
,
3783 tree fp
, tree fmt
, tree arg
,
3784 enum built_in_function fcode
)
3786 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3787 tree fn_fputc
, fn_fputs
;
3788 const char *fmt_str
= NULL
;
3790 /* If the return value is used, don't do the transformation. */
3791 if (gimple_call_lhs (stmt
) != NULL_TREE
)
3794 /* Check whether the format is a literal string constant. */
3795 fmt_str
= c_getstr (fmt
);
3796 if (fmt_str
== NULL
)
3799 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
3801 /* If we're using an unlocked function, assume the other
3802 unlocked functions exist explicitly. */
3803 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
3804 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
3808 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
3809 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
3812 if (!init_target_chars ())
3815 /* If the format doesn't contain % args or %%, use strcpy. */
3816 if (strchr (fmt_str
, target_percent
) == NULL
)
3818 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
3822 /* If the format specifier was "", fprintf does nothing. */
3823 if (fmt_str
[0] == '\0')
3825 replace_call_with_value (gsi
, NULL_TREE
);
3829 /* When "string" doesn't contain %, replace all cases of
3830 fprintf (fp, string) with fputs (string, fp). The fputs
3831 builtin will take care of special cases like length == 1. */
3834 gcall
*repl
= gimple_build_call (fn_fputs
, 2, fmt
, fp
);
3835 replace_call_with_call_and_fold (gsi
, repl
);
3840 /* The other optimizations can be done only on the non-va_list variants. */
3841 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
3844 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3845 else if (strcmp (fmt_str
, target_percent_s
) == 0)
3847 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3851 gcall
*repl
= gimple_build_call (fn_fputs
, 2, arg
, fp
);
3852 replace_call_with_call_and_fold (gsi
, repl
);
3857 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3858 else if (strcmp (fmt_str
, target_percent_c
) == 0)
3861 || ! useless_type_conversion_p (integer_type_node
, TREE_TYPE (arg
)))
3865 gcall
*repl
= gimple_build_call (fn_fputc
, 2, arg
, fp
);
3866 replace_call_with_call_and_fold (gsi
, repl
);
3874 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3875 FMT and ARG are the arguments to the call; we don't fold cases with
3876 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3878 Return NULL_TREE if no simplification was possible, otherwise return the
3879 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3880 code of the function to be simplified. */
3883 gimple_fold_builtin_printf (gimple_stmt_iterator
*gsi
, tree fmt
,
3884 tree arg
, enum built_in_function fcode
)
3886 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3887 tree fn_putchar
, fn_puts
, newarg
;
3888 const char *fmt_str
= NULL
;
3890 /* If the return value is used, don't do the transformation. */
3891 if (gimple_call_lhs (stmt
) != NULL_TREE
)
3894 /* Check whether the format is a literal string constant. */
3895 fmt_str
= c_getstr (fmt
);
3896 if (fmt_str
== NULL
)
3899 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
3901 /* If we're using an unlocked function, assume the other
3902 unlocked functions exist explicitly. */
3903 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
3904 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
3908 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
3909 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
3912 if (!init_target_chars ())
3915 if (strcmp (fmt_str
, target_percent_s
) == 0
3916 || strchr (fmt_str
, target_percent
) == NULL
)
3920 if (strcmp (fmt_str
, target_percent_s
) == 0)
3922 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
3925 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3928 str
= c_getstr (arg
);
3934 /* The format specifier doesn't contain any '%' characters. */
3935 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
3941 /* If the string was "", printf does nothing. */
3944 replace_call_with_value (gsi
, NULL_TREE
);
3948 /* If the string has length of 1, call putchar. */
3951 /* Given printf("c"), (where c is any one character,)
3952 convert "c"[0] to an int and pass that to the replacement
3954 newarg
= build_int_cst (integer_type_node
, str
[0]);
3957 gcall
*repl
= gimple_build_call (fn_putchar
, 1, newarg
);
3958 replace_call_with_call_and_fold (gsi
, repl
);
3964 /* If the string was "string\n", call puts("string"). */
3965 size_t len
= strlen (str
);
3966 if ((unsigned char)str
[len
- 1] == target_newline
3967 && (size_t) (int) len
== len
3972 /* Create a NUL-terminated string that's one char shorter
3973 than the original, stripping off the trailing '\n'. */
3974 newstr
= xstrdup (str
);
3975 newstr
[len
- 1] = '\0';
3976 newarg
= build_string_literal (len
, newstr
);
3980 gcall
*repl
= gimple_build_call (fn_puts
, 1, newarg
);
3981 replace_call_with_call_and_fold (gsi
, repl
);
3986 /* We'd like to arrange to call fputs(string,stdout) here,
3987 but we need stdout and don't have a way to get it yet. */
3992 /* The other optimizations can be done only on the non-va_list variants. */
3993 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
3996 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3997 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
3999 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
4003 gcall
*repl
= gimple_build_call (fn_puts
, 1, arg
);
4004 replace_call_with_call_and_fold (gsi
, repl
);
4009 /* If the format specifier was "%c", call __builtin_putchar(arg). */
4010 else if (strcmp (fmt_str
, target_percent_c
) == 0)
4012 if (!arg
|| ! useless_type_conversion_p (integer_type_node
,
4017 gcall
*repl
= gimple_build_call (fn_putchar
, 1, arg
);
4018 replace_call_with_call_and_fold (gsi
, repl
);
4028 /* Fold a call to __builtin_strlen with known length LEN. */
4031 gimple_fold_builtin_strlen (gimple_stmt_iterator
*gsi
)
4033 gimple
*stmt
= gsi_stmt (*gsi
);
4034 tree arg
= gimple_call_arg (stmt
, 0);
4039 c_strlen_data lendata
= { };
4040 if (get_range_strlen (arg
, &lendata
, /* eltsize = */ 1)
4042 && lendata
.minlen
&& TREE_CODE (lendata
.minlen
) == INTEGER_CST
4043 && lendata
.maxlen
&& TREE_CODE (lendata
.maxlen
) == INTEGER_CST
)
4045 /* The range of lengths refers to either a single constant
4046 string or to the longest and shortest constant string
4047 referenced by the argument of the strlen() call, or to
4048 the strings that can possibly be stored in the arrays
4049 the argument refers to. */
4050 minlen
= wi::to_wide (lendata
.minlen
);
4051 maxlen
= wi::to_wide (lendata
.maxlen
);
4055 unsigned prec
= TYPE_PRECISION (sizetype
);
4057 minlen
= wi::shwi (0, prec
);
4058 maxlen
= wi::to_wide (max_object_size (), prec
) - 2;
4061 if (minlen
== maxlen
)
4063 /* Fold the strlen call to a constant. */
4064 tree type
= TREE_TYPE (lendata
.minlen
);
4065 tree len
= force_gimple_operand_gsi (gsi
,
4066 wide_int_to_tree (type
, minlen
),
4067 true, NULL
, true, GSI_SAME_STMT
);
4068 replace_call_with_value (gsi
, len
);
4072 /* Set the strlen() range to [0, MAXLEN]. */
4073 if (tree lhs
= gimple_call_lhs (stmt
))
4074 set_strlen_range (lhs
, minlen
, maxlen
);
4079 /* Fold a call to __builtin_acc_on_device. */
4082 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator
*gsi
, tree arg0
)
4084 /* Defer folding until we know which compiler we're in. */
4085 if (symtab
->state
!= EXPANSION
)
4088 unsigned val_host
= GOMP_DEVICE_HOST
;
4089 unsigned val_dev
= GOMP_DEVICE_NONE
;
4091 #ifdef ACCEL_COMPILER
4092 val_host
= GOMP_DEVICE_NOT_HOST
;
4093 val_dev
= ACCEL_COMPILER_acc_device
;
4096 location_t loc
= gimple_location (gsi_stmt (*gsi
));
4098 tree host_eq
= make_ssa_name (boolean_type_node
);
4099 gimple
*host_ass
= gimple_build_assign
4100 (host_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_host
));
4101 gimple_set_location (host_ass
, loc
);
4102 gsi_insert_before (gsi
, host_ass
, GSI_SAME_STMT
);
4104 tree dev_eq
= make_ssa_name (boolean_type_node
);
4105 gimple
*dev_ass
= gimple_build_assign
4106 (dev_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_dev
));
4107 gimple_set_location (dev_ass
, loc
);
4108 gsi_insert_before (gsi
, dev_ass
, GSI_SAME_STMT
);
4110 tree result
= make_ssa_name (boolean_type_node
);
4111 gimple
*result_ass
= gimple_build_assign
4112 (result
, BIT_IOR_EXPR
, host_eq
, dev_eq
);
4113 gimple_set_location (result_ass
, loc
);
4114 gsi_insert_before (gsi
, result_ass
, GSI_SAME_STMT
);
4116 replace_call_with_value (gsi
, result
);
4121 /* Fold realloc (0, n) -> malloc (n). */
4124 gimple_fold_builtin_realloc (gimple_stmt_iterator
*gsi
)
4126 gimple
*stmt
= gsi_stmt (*gsi
);
4127 tree arg
= gimple_call_arg (stmt
, 0);
4128 tree size
= gimple_call_arg (stmt
, 1);
4130 if (operand_equal_p (arg
, null_pointer_node
, 0))
4132 tree fn_malloc
= builtin_decl_implicit (BUILT_IN_MALLOC
);
4135 gcall
*repl
= gimple_build_call (fn_malloc
, 1, size
);
4136 replace_call_with_call_and_fold (gsi
, repl
);
4143 /* Number of bytes into which any type but aggregate or vector types
4145 static constexpr size_t clear_padding_unit
4146 = MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
;
4147 /* Buffer size on which __builtin_clear_padding folding code works. */
4148 static const size_t clear_padding_buf_size
= 32 * clear_padding_unit
;
4150 /* Data passed through __builtin_clear_padding folding. */
4151 struct clear_padding_struct
{
4153 /* 0 during __builtin_clear_padding folding, nonzero during
4154 clear_type_padding_in_mask. In that case, instead of clearing the
4155 non-padding bits in union_ptr array clear the padding bits in there. */
4159 gimple_stmt_iterator
*gsi
;
4160 /* Alignment of buf->base + 0. */
4162 /* Offset from buf->base. Should be always a multiple of UNITS_PER_WORD. */
4164 /* Number of padding bytes before buf->off that don't have padding clear
4165 code emitted yet. */
4166 HOST_WIDE_INT padding_bytes
;
4167 /* The size of the whole object. Never emit code to touch
4168 buf->base + buf->sz or following bytes. */
4170 /* Number of bytes recorded in buf->buf. */
4172 /* When inside union, instead of emitting code we and bits inside of
4173 the union_ptr array. */
4174 unsigned char *union_ptr
;
4175 /* Set bits mean padding bits that need to be cleared by the builtin. */
4176 unsigned char buf
[clear_padding_buf_size
+ clear_padding_unit
];
4179 /* Emit code to clear padding requested in BUF->buf - set bits
4180 in there stand for padding that should be cleared. FULL is true
4181 if everything from the buffer should be flushed, otherwise
4182 it can leave up to 2 * clear_padding_unit bytes for further
4186 clear_padding_flush (clear_padding_struct
*buf
, bool full
)
4188 gcc_assert ((clear_padding_unit
% UNITS_PER_WORD
) == 0);
4189 if (!full
&& buf
->size
< 2 * clear_padding_unit
)
4191 gcc_assert ((buf
->off
% UNITS_PER_WORD
) == 0);
4192 size_t end
= buf
->size
;
4194 end
= ((end
- clear_padding_unit
- 1) / clear_padding_unit
4195 * clear_padding_unit
);
4196 size_t padding_bytes
= buf
->padding_bytes
;
4199 if (buf
->clear_in_mask
)
4201 /* During clear_type_padding_in_mask, clear the padding
4202 bits set in buf->buf in the buf->union_ptr mask. */
4203 for (size_t i
= 0; i
< end
; i
++)
4205 if (buf
->buf
[i
] == (unsigned char) ~0)
4209 memset (&buf
->union_ptr
[buf
->off
+ i
- padding_bytes
],
4212 buf
->union_ptr
[buf
->off
+ i
] &= ~buf
->buf
[i
];
4217 memset (&buf
->union_ptr
[buf
->off
+ end
- padding_bytes
],
4221 buf
->padding_bytes
= 0;
4225 memmove (buf
->buf
, buf
->buf
+ end
, buf
->size
- end
);
4228 buf
->padding_bytes
= padding_bytes
;
4232 /* Inside of a union, instead of emitting any code, instead
4233 clear all bits in the union_ptr buffer that are clear
4234 in buf. Whole padding bytes don't clear anything. */
4235 for (size_t i
= 0; i
< end
; i
++)
4237 if (buf
->buf
[i
] == (unsigned char) ~0)
4242 buf
->union_ptr
[buf
->off
+ i
] &= buf
->buf
[i
];
4249 buf
->padding_bytes
= 0;
4253 memmove (buf
->buf
, buf
->buf
+ end
, buf
->size
- end
);
4256 buf
->padding_bytes
= padding_bytes
;
4260 size_t wordsize
= UNITS_PER_WORD
;
4261 for (size_t i
= 0; i
< end
; i
+= wordsize
)
4263 size_t nonzero_first
= wordsize
;
4264 size_t nonzero_last
= 0;
4265 size_t zero_first
= wordsize
;
4266 size_t zero_last
= 0;
4267 bool all_ones
= true, bytes_only
= true;
4268 if ((unsigned HOST_WIDE_INT
) (buf
->off
+ i
+ wordsize
)
4269 > (unsigned HOST_WIDE_INT
) buf
->sz
)
4271 gcc_assert (wordsize
> 1);
4276 for (size_t j
= i
; j
< i
+ wordsize
&& j
< end
; j
++)
4280 if (nonzero_first
== wordsize
)
4282 nonzero_first
= j
- i
;
4283 nonzero_last
= j
- i
;
4285 if (nonzero_last
!= j
- i
)
4287 nonzero_last
= j
+ 1 - i
;
4291 if (zero_first
== wordsize
)
4293 zero_last
= j
+ 1 - i
;
4295 if (buf
->buf
[j
] != 0 && buf
->buf
[j
] != (unsigned char) ~0)
4301 size_t padding_end
= i
;
4304 if (nonzero_first
== 0
4305 && nonzero_last
== wordsize
4308 /* All bits are padding and we had some padding
4309 before too. Just extend it. */
4310 padding_bytes
+= wordsize
;
4313 if (all_ones
&& nonzero_first
== 0)
4315 padding_bytes
+= nonzero_last
;
4316 padding_end
+= nonzero_last
;
4317 nonzero_first
= wordsize
;
4320 else if (bytes_only
&& nonzero_first
== 0)
4322 gcc_assert (zero_first
&& zero_first
!= wordsize
);
4323 padding_bytes
+= zero_first
;
4324 padding_end
+= zero_first
;
4327 if (padding_bytes
== 1)
4329 atype
= char_type_node
;
4330 src
= build_zero_cst (char_type_node
);
4334 atype
= build_array_type_nelts (char_type_node
, padding_bytes
);
4335 src
= build_constructor (atype
, NULL
);
4337 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
, buf
->base
,
4338 build_int_cst (buf
->alias_type
,
4339 buf
->off
+ padding_end
4341 gimple
*g
= gimple_build_assign (dst
, src
);
4342 gimple_set_location (g
, buf
->loc
);
4343 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4345 buf
->padding_bytes
= 0;
4347 if (nonzero_first
== wordsize
)
4348 /* All bits in a word are 0, there are no padding bits. */
4350 if (all_ones
&& nonzero_last
== wordsize
)
4352 /* All bits between nonzero_first and end of word are padding
4353 bits, start counting padding_bytes. */
4354 padding_bytes
= nonzero_last
- nonzero_first
;
4359 /* If bitfields aren't involved in this word, prefer storing
4360 individual bytes or groups of them over performing a RMW
4361 operation on the whole word. */
4362 gcc_assert (i
+ zero_last
<= end
);
4363 for (size_t j
= padding_end
; j
< i
+ zero_last
; j
++)
4368 for (k
= j
; k
< i
+ zero_last
; k
++)
4369 if (buf
->buf
[k
] == 0)
4371 HOST_WIDE_INT off
= buf
->off
+ j
;
4375 atype
= char_type_node
;
4376 src
= build_zero_cst (char_type_node
);
4380 atype
= build_array_type_nelts (char_type_node
, k
- j
);
4381 src
= build_constructor (atype
, NULL
);
4383 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
,
4385 build_int_cst (buf
->alias_type
, off
));
4386 gimple
*g
= gimple_build_assign (dst
, src
);
4387 gimple_set_location (g
, buf
->loc
);
4388 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4392 if (nonzero_last
== wordsize
)
4393 padding_bytes
= nonzero_last
- zero_last
;
4396 for (size_t eltsz
= 1; eltsz
<= wordsize
; eltsz
<<= 1)
4398 if (nonzero_last
- nonzero_first
<= eltsz
4399 && ((nonzero_first
& ~(eltsz
- 1))
4400 == ((nonzero_last
- 1) & ~(eltsz
- 1))))
4404 type
= char_type_node
;
4406 type
= lang_hooks
.types
.type_for_size (eltsz
* BITS_PER_UNIT
,
4408 size_t start
= nonzero_first
& ~(eltsz
- 1);
4409 HOST_WIDE_INT off
= buf
->off
+ i
+ start
;
4411 if (eltsz
> 1 && buf
->align
< TYPE_ALIGN (type
))
4412 atype
= build_aligned_type (type
, buf
->align
);
4413 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
, buf
->base
,
4414 build_int_cst (buf
->alias_type
, off
));
4418 && nonzero_first
== start
4419 && nonzero_last
== start
+ eltsz
)
4420 src
= build_zero_cst (type
);
4423 src
= make_ssa_name (type
);
4424 g
= gimple_build_assign (src
, unshare_expr (dst
));
4425 gimple_set_location (g
, buf
->loc
);
4426 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4427 tree mask
= native_interpret_expr (type
,
4428 buf
->buf
+ i
+ start
,
4430 gcc_assert (mask
&& TREE_CODE (mask
) == INTEGER_CST
);
4431 mask
= fold_build1 (BIT_NOT_EXPR
, type
, mask
);
4432 tree src_masked
= make_ssa_name (type
);
4433 g
= gimple_build_assign (src_masked
, BIT_AND_EXPR
,
4435 gimple_set_location (g
, buf
->loc
);
4436 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4439 g
= gimple_build_assign (dst
, src
);
4440 gimple_set_location (g
, buf
->loc
);
4441 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4451 if (padding_bytes
== 1)
4453 atype
= char_type_node
;
4454 src
= build_zero_cst (char_type_node
);
4458 atype
= build_array_type_nelts (char_type_node
, padding_bytes
);
4459 src
= build_constructor (atype
, NULL
);
4461 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
, buf
->base
,
4462 build_int_cst (buf
->alias_type
,
4465 gimple
*g
= gimple_build_assign (dst
, src
);
4466 gimple_set_location (g
, buf
->loc
);
4467 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4469 size_t end_rem
= end
% UNITS_PER_WORD
;
4470 buf
->off
+= end
- end_rem
;
4471 buf
->size
= end_rem
;
4472 memset (buf
->buf
, 0, buf
->size
);
4473 buf
->padding_bytes
= 0;
4477 memmove (buf
->buf
, buf
->buf
+ end
, buf
->size
- end
);
4480 buf
->padding_bytes
= padding_bytes
;
4484 /* Append PADDING_BYTES padding bytes. */
4487 clear_padding_add_padding (clear_padding_struct
*buf
,
4488 HOST_WIDE_INT padding_bytes
)
4490 if (padding_bytes
== 0)
4492 if ((unsigned HOST_WIDE_INT
) padding_bytes
+ buf
->size
4493 > (unsigned HOST_WIDE_INT
) clear_padding_buf_size
)
4494 clear_padding_flush (buf
, false);
4495 if ((unsigned HOST_WIDE_INT
) padding_bytes
+ buf
->size
4496 > (unsigned HOST_WIDE_INT
) clear_padding_buf_size
)
4498 memset (buf
->buf
+ buf
->size
, ~0, clear_padding_buf_size
- buf
->size
);
4499 padding_bytes
-= clear_padding_buf_size
- buf
->size
;
4500 buf
->size
= clear_padding_buf_size
;
4501 clear_padding_flush (buf
, false);
4502 gcc_assert (buf
->padding_bytes
);
4503 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4504 is guaranteed to be all ones. */
4505 padding_bytes
+= buf
->size
;
4506 buf
->size
= padding_bytes
% UNITS_PER_WORD
;
4507 memset (buf
->buf
, ~0, buf
->size
);
4508 buf
->off
+= padding_bytes
- buf
->size
;
4509 buf
->padding_bytes
+= padding_bytes
- buf
->size
;
4513 memset (buf
->buf
+ buf
->size
, ~0, padding_bytes
);
4514 buf
->size
+= padding_bytes
;
4518 static void clear_padding_type (clear_padding_struct
*, tree
, HOST_WIDE_INT
);
4520 /* Clear padding bits of union type TYPE. */
4523 clear_padding_union (clear_padding_struct
*buf
, tree type
, HOST_WIDE_INT sz
)
4525 clear_padding_struct
*union_buf
;
4526 HOST_WIDE_INT start_off
= 0, next_off
= 0;
4527 size_t start_size
= 0;
4530 start_off
= buf
->off
+ buf
->size
;
4531 next_off
= start_off
+ sz
;
4532 start_size
= start_off
% UNITS_PER_WORD
;
4533 start_off
-= start_size
;
4534 clear_padding_flush (buf
, true);
4539 if (sz
+ buf
->size
> clear_padding_buf_size
)
4540 clear_padding_flush (buf
, false);
4541 union_buf
= XALLOCA (clear_padding_struct
);
4542 union_buf
->loc
= buf
->loc
;
4543 union_buf
->clear_in_mask
= buf
->clear_in_mask
;
4544 union_buf
->base
= NULL_TREE
;
4545 union_buf
->alias_type
= NULL_TREE
;
4546 union_buf
->gsi
= NULL
;
4547 union_buf
->align
= 0;
4549 union_buf
->padding_bytes
= 0;
4551 union_buf
->size
= 0;
4552 if (sz
+ buf
->size
<= clear_padding_buf_size
)
4553 union_buf
->union_ptr
= buf
->buf
+ buf
->size
;
4555 union_buf
->union_ptr
= XNEWVEC (unsigned char, sz
);
4556 memset (union_buf
->union_ptr
, ~0, sz
);
4559 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
4560 if (TREE_CODE (field
) == FIELD_DECL
&& !DECL_PADDING_P (field
))
4562 if (DECL_SIZE_UNIT (field
) == NULL_TREE
)
4564 if (TREE_TYPE (field
) == error_mark_node
)
4566 gcc_assert (TREE_CODE (TREE_TYPE (field
)) == ARRAY_TYPE
4567 && !COMPLETE_TYPE_P (TREE_TYPE (field
)));
4568 if (!buf
->clear_in_mask
)
4569 error_at (buf
->loc
, "flexible array member %qD does not have "
4570 "well defined padding bits for %qs",
4571 field
, "__builtin_clear_padding");
4574 HOST_WIDE_INT fldsz
= tree_to_shwi (DECL_SIZE_UNIT (field
));
4575 gcc_assert (union_buf
->size
== 0);
4576 union_buf
->off
= start_off
;
4577 union_buf
->size
= start_size
;
4578 memset (union_buf
->buf
, ~0, start_size
);
4579 clear_padding_type (union_buf
, TREE_TYPE (field
), fldsz
);
4580 clear_padding_add_padding (union_buf
, sz
- fldsz
);
4581 clear_padding_flush (union_buf
, true);
4584 if (buf
== union_buf
)
4586 buf
->off
= next_off
;
4587 buf
->size
= next_off
% UNITS_PER_WORD
;
4588 buf
->off
-= buf
->size
;
4589 memset (buf
->buf
, ~0, buf
->size
);
4591 else if (sz
+ buf
->size
<= clear_padding_buf_size
)
4595 unsigned char *union_ptr
= union_buf
->union_ptr
;
4598 clear_padding_flush (buf
, false);
4599 HOST_WIDE_INT this_sz
4600 = MIN ((unsigned HOST_WIDE_INT
) sz
,
4601 clear_padding_buf_size
- buf
->size
);
4602 memcpy (buf
->buf
+ buf
->size
, union_ptr
, this_sz
);
4603 buf
->size
+= this_sz
;
4604 union_ptr
+= this_sz
;
4607 XDELETE (union_buf
->union_ptr
);
4611 /* The only known floating point formats with padding bits are the
4612 IEEE extended ones. */
4615 clear_padding_real_needs_padding_p (tree type
)
4617 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
4619 && fmt
->signbit_ro
== fmt
->signbit_rw
4620 && (fmt
->signbit_ro
== 79 || fmt
->signbit_ro
== 95));
4623 /* Return true if TYPE might contain any padding bits. */
4626 clear_padding_type_may_have_padding_p (tree type
)
4628 switch (TREE_CODE (type
))
4636 return clear_padding_type_may_have_padding_p (TREE_TYPE (type
));
4638 return clear_padding_real_needs_padding_p (type
);
4644 /* Emit a runtime loop:
4645 for (; buf.base != end; buf.base += sz)
4646 __builtin_clear_padding (buf.base); */
4649 clear_padding_emit_loop (clear_padding_struct
*buf
, tree type
, tree end
)
4651 tree l1
= create_artificial_label (buf
->loc
);
4652 tree l2
= create_artificial_label (buf
->loc
);
4653 tree l3
= create_artificial_label (buf
->loc
);
4654 gimple
*g
= gimple_build_goto (l2
);
4655 gimple_set_location (g
, buf
->loc
);
4656 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4657 g
= gimple_build_label (l1
);
4658 gimple_set_location (g
, buf
->loc
);
4659 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4660 clear_padding_type (buf
, type
, buf
->sz
);
4661 clear_padding_flush (buf
, true);
4662 g
= gimple_build_assign (buf
->base
, POINTER_PLUS_EXPR
, buf
->base
,
4663 size_int (buf
->sz
));
4664 gimple_set_location (g
, buf
->loc
);
4665 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4666 g
= gimple_build_label (l2
);
4667 gimple_set_location (g
, buf
->loc
);
4668 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4669 g
= gimple_build_cond (NE_EXPR
, buf
->base
, end
, l1
, l3
);
4670 gimple_set_location (g
, buf
->loc
);
4671 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4672 g
= gimple_build_label (l3
);
4673 gimple_set_location (g
, buf
->loc
);
4674 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4677 /* Clear padding bits for TYPE. Called recursively from
4678 gimple_fold_builtin_clear_padding. */
4681 clear_padding_type (clear_padding_struct
*buf
, tree type
, HOST_WIDE_INT sz
)
4683 switch (TREE_CODE (type
))
4686 HOST_WIDE_INT cur_pos
;
4688 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
4689 if (TREE_CODE (field
) == FIELD_DECL
&& !DECL_PADDING_P (field
))
4691 tree ftype
= TREE_TYPE (field
);
4692 if (DECL_BIT_FIELD (field
))
4694 HOST_WIDE_INT fldsz
= TYPE_PRECISION (ftype
);
4697 HOST_WIDE_INT pos
= int_byte_position (field
);
4699 = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
));
4700 bpos
%= BITS_PER_UNIT
;
4702 = ROUND_UP (bpos
+ fldsz
, BITS_PER_UNIT
) / BITS_PER_UNIT
;
4703 if (pos
+ end
> cur_pos
)
4705 clear_padding_add_padding (buf
, pos
+ end
- cur_pos
);
4706 cur_pos
= pos
+ end
;
4708 gcc_assert (cur_pos
> pos
4709 && ((unsigned HOST_WIDE_INT
) buf
->size
4710 >= (unsigned HOST_WIDE_INT
) cur_pos
- pos
));
4711 unsigned char *p
= buf
->buf
+ buf
->size
- (cur_pos
- pos
);
4712 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
)
4713 sorry_at (buf
->loc
, "PDP11 bit-field handling unsupported"
4714 " in %qs", "__builtin_clear_padding");
4715 else if (BYTES_BIG_ENDIAN
)
4718 if (bpos
+ fldsz
<= BITS_PER_UNIT
)
4719 *p
&= ~(((1 << fldsz
) - 1)
4720 << (BITS_PER_UNIT
- bpos
- fldsz
));
4725 *p
&= ~(((1U << BITS_PER_UNIT
) - 1) >> bpos
);
4727 fldsz
-= BITS_PER_UNIT
- bpos
;
4729 memset (p
, 0, fldsz
/ BITS_PER_UNIT
);
4730 p
+= fldsz
/ BITS_PER_UNIT
;
4731 fldsz
%= BITS_PER_UNIT
;
4733 *p
&= ((1U << BITS_PER_UNIT
) - 1) >> fldsz
;
4738 /* Little endian. */
4739 if (bpos
+ fldsz
<= BITS_PER_UNIT
)
4740 *p
&= ~(((1 << fldsz
) - 1) << bpos
);
4745 *p
&= ~(((1 << BITS_PER_UNIT
) - 1) << bpos
);
4747 fldsz
-= BITS_PER_UNIT
- bpos
;
4749 memset (p
, 0, fldsz
/ BITS_PER_UNIT
);
4750 p
+= fldsz
/ BITS_PER_UNIT
;
4751 fldsz
%= BITS_PER_UNIT
;
4753 *p
&= ~((1 << fldsz
) - 1);
4757 else if (DECL_SIZE_UNIT (field
) == NULL_TREE
)
4759 if (ftype
== error_mark_node
)
4761 gcc_assert (TREE_CODE (ftype
) == ARRAY_TYPE
4762 && !COMPLETE_TYPE_P (ftype
));
4763 if (!buf
->clear_in_mask
)
4764 error_at (buf
->loc
, "flexible array member %qD does not "
4765 "have well defined padding bits for %qs",
4766 field
, "__builtin_clear_padding");
4768 else if (is_empty_type (TREE_TYPE (field
)))
4772 HOST_WIDE_INT pos
= int_byte_position (field
);
4773 HOST_WIDE_INT fldsz
= tree_to_shwi (DECL_SIZE_UNIT (field
));
4774 gcc_assert (pos
>= 0 && fldsz
>= 0 && pos
>= cur_pos
);
4775 clear_padding_add_padding (buf
, pos
- cur_pos
);
4777 clear_padding_type (buf
, TREE_TYPE (field
), fldsz
);
4781 gcc_assert (sz
>= cur_pos
);
4782 clear_padding_add_padding (buf
, sz
- cur_pos
);
4785 HOST_WIDE_INT nelts
, fldsz
;
4786 fldsz
= int_size_in_bytes (TREE_TYPE (type
));
4791 && sz
> 8 * UNITS_PER_WORD
4792 && buf
->union_ptr
== NULL
4793 && clear_padding_type_may_have_padding_p (TREE_TYPE (type
)))
4795 /* For sufficiently large array of more than one elements,
4796 emit a runtime loop to keep code size manageable. */
4797 tree base
= buf
->base
;
4798 unsigned int prev_align
= buf
->align
;
4799 HOST_WIDE_INT off
= buf
->off
+ buf
->size
;
4800 HOST_WIDE_INT prev_sz
= buf
->sz
;
4801 clear_padding_flush (buf
, true);
4802 tree elttype
= TREE_TYPE (type
);
4803 buf
->base
= create_tmp_var (build_pointer_type (elttype
));
4804 tree end
= make_ssa_name (TREE_TYPE (buf
->base
));
4805 gimple
*g
= gimple_build_assign (buf
->base
, POINTER_PLUS_EXPR
,
4806 base
, size_int (off
));
4807 gimple_set_location (g
, buf
->loc
);
4808 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4809 g
= gimple_build_assign (end
, POINTER_PLUS_EXPR
, buf
->base
,
4811 gimple_set_location (g
, buf
->loc
);
4812 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4814 buf
->align
= TYPE_ALIGN (elttype
);
4817 clear_padding_emit_loop (buf
, elttype
, end
);
4820 buf
->align
= prev_align
;
4821 buf
->size
= off
% UNITS_PER_WORD
;
4822 buf
->off
= off
- buf
->size
;
4823 memset (buf
->buf
, 0, buf
->size
);
4826 for (HOST_WIDE_INT i
= 0; i
< nelts
; i
++)
4827 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
);
4830 clear_padding_union (buf
, type
, sz
);
4833 gcc_assert ((size_t) sz
<= clear_padding_unit
);
4834 if ((unsigned HOST_WIDE_INT
) sz
+ buf
->size
> clear_padding_buf_size
)
4835 clear_padding_flush (buf
, false);
4836 if (clear_padding_real_needs_padding_p (type
))
4838 /* Use native_interpret_expr + native_encode_expr to figure out
4839 which bits are padding. */
4840 memset (buf
->buf
+ buf
->size
, ~0, sz
);
4841 tree cst
= native_interpret_expr (type
, buf
->buf
+ buf
->size
, sz
);
4842 gcc_assert (cst
&& TREE_CODE (cst
) == REAL_CST
);
4843 int len
= native_encode_expr (cst
, buf
->buf
+ buf
->size
, sz
);
4844 gcc_assert (len
> 0 && (size_t) len
== (size_t) sz
);
4845 for (size_t i
= 0; i
< (size_t) sz
; i
++)
4846 buf
->buf
[buf
->size
+ i
] ^= ~0;
4849 memset (buf
->buf
+ buf
->size
, 0, sz
);
4853 fldsz
= int_size_in_bytes (TREE_TYPE (type
));
4854 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
);
4855 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
);
4858 nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
4859 fldsz
= int_size_in_bytes (TREE_TYPE (type
));
4860 for (HOST_WIDE_INT i
= 0; i
< nelts
; i
++)
4861 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
);
4864 gcc_assert ((size_t) sz
<= clear_padding_unit
);
4865 if ((unsigned HOST_WIDE_INT
) sz
+ buf
->size
> clear_padding_buf_size
)
4866 clear_padding_flush (buf
, false);
4867 memset (buf
->buf
+ buf
->size
, ~0, sz
);
4871 gcc_assert ((size_t) sz
<= clear_padding_unit
);
4872 if ((unsigned HOST_WIDE_INT
) sz
+ buf
->size
> clear_padding_buf_size
)
4873 clear_padding_flush (buf
, false);
4874 memset (buf
->buf
+ buf
->size
, 0, sz
);
4880 /* Clear padding bits of TYPE in MASK. */
4883 clear_type_padding_in_mask (tree type
, unsigned char *mask
)
4885 clear_padding_struct buf
;
4886 buf
.loc
= UNKNOWN_LOCATION
;
4887 buf
.clear_in_mask
= true;
4888 buf
.base
= NULL_TREE
;
4889 buf
.alias_type
= NULL_TREE
;
4893 buf
.padding_bytes
= 0;
4894 buf
.sz
= int_size_in_bytes (type
);
4896 buf
.union_ptr
= mask
;
4897 clear_padding_type (&buf
, type
, buf
.sz
);
4898 clear_padding_flush (&buf
, true);
4901 /* Fold __builtin_clear_padding builtin. */
4904 gimple_fold_builtin_clear_padding (gimple_stmt_iterator
*gsi
)
4906 gimple
*stmt
= gsi_stmt (*gsi
);
4907 gcc_assert (gimple_call_num_args (stmt
) == 2);
4908 tree ptr
= gimple_call_arg (stmt
, 0);
4909 tree typearg
= gimple_call_arg (stmt
, 1);
4910 tree type
= TREE_TYPE (TREE_TYPE (typearg
));
4911 location_t loc
= gimple_location (stmt
);
4912 clear_padding_struct buf
;
4913 gimple_stmt_iterator gsiprev
= *gsi
;
4914 /* This should be folded during the lower pass. */
4915 gcc_assert (!gimple_in_ssa_p (cfun
) && cfun
->cfg
== NULL
);
4916 gcc_assert (COMPLETE_TYPE_P (type
));
4917 gsi_prev (&gsiprev
);
4920 buf
.clear_in_mask
= false;
4922 buf
.alias_type
= NULL_TREE
;
4924 buf
.align
= get_pointer_alignment (ptr
);
4925 unsigned int talign
= min_align_of_type (type
) * BITS_PER_UNIT
;
4926 buf
.align
= MAX (buf
.align
, talign
);
4928 buf
.padding_bytes
= 0;
4930 buf
.sz
= int_size_in_bytes (type
);
4931 buf
.union_ptr
= NULL
;
4932 if (buf
.sz
< 0 && int_size_in_bytes (strip_array_types (type
)) < 0)
4933 sorry_at (loc
, "%s not supported for variable length aggregates",
4934 "__builtin_clear_padding");
4935 /* The implementation currently assumes 8-bit host and target
4936 chars which is the case for all currently supported targets
4937 and hosts and is required e.g. for native_{encode,interpret}* APIs. */
4938 else if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
4939 sorry_at (loc
, "%s not supported on this target",
4940 "__builtin_clear_padding");
4941 else if (!clear_padding_type_may_have_padding_p (type
))
4943 else if (TREE_CODE (type
) == ARRAY_TYPE
&& buf
.sz
< 0)
4945 tree sz
= TYPE_SIZE_UNIT (type
);
4946 tree elttype
= type
;
4947 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
4948 while (TREE_CODE (elttype
) == ARRAY_TYPE
4949 && int_size_in_bytes (elttype
) < 0)
4950 elttype
= TREE_TYPE (elttype
);
4951 HOST_WIDE_INT eltsz
= int_size_in_bytes (elttype
);
4952 gcc_assert (eltsz
>= 0);
4955 buf
.base
= create_tmp_var (build_pointer_type (elttype
));
4956 tree end
= make_ssa_name (TREE_TYPE (buf
.base
));
4957 gimple
*g
= gimple_build_assign (buf
.base
, ptr
);
4958 gimple_set_location (g
, loc
);
4959 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4960 g
= gimple_build_assign (end
, POINTER_PLUS_EXPR
, buf
.base
, sz
);
4961 gimple_set_location (g
, loc
);
4962 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4964 buf
.align
= TYPE_ALIGN (elttype
);
4965 buf
.alias_type
= build_pointer_type (elttype
);
4966 clear_padding_emit_loop (&buf
, elttype
, end
);
4971 if (!is_gimple_mem_ref_addr (buf
.base
))
4973 buf
.base
= make_ssa_name (TREE_TYPE (ptr
));
4974 gimple
*g
= gimple_build_assign (buf
.base
, ptr
);
4975 gimple_set_location (g
, loc
);
4976 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4978 buf
.alias_type
= build_pointer_type (type
);
4979 clear_padding_type (&buf
, type
, buf
.sz
);
4980 clear_padding_flush (&buf
, true);
4983 gimple_stmt_iterator gsiprev2
= *gsi
;
4984 gsi_prev (&gsiprev2
);
4985 if (gsi_stmt (gsiprev
) == gsi_stmt (gsiprev2
))
4986 gsi_replace (gsi
, gimple_build_nop (), true);
4989 gsi_remove (gsi
, true);
4995 /* Fold the non-target builtin at *GSI and return whether any simplification
4999 gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
5001 gcall
*stmt
= as_a
<gcall
*>(gsi_stmt (*gsi
));
5002 tree callee
= gimple_call_fndecl (stmt
);
5004 /* Give up for always_inline inline builtins until they are
5006 if (avoid_folding_inline_builtin (callee
))
5009 unsigned n
= gimple_call_num_args (stmt
);
5010 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
5014 return gimple_fold_builtin_bcmp (gsi
);
5015 case BUILT_IN_BCOPY
:
5016 return gimple_fold_builtin_bcopy (gsi
);
5017 case BUILT_IN_BZERO
:
5018 return gimple_fold_builtin_bzero (gsi
);
5020 case BUILT_IN_MEMSET
:
5021 return gimple_fold_builtin_memset (gsi
,
5022 gimple_call_arg (stmt
, 1),
5023 gimple_call_arg (stmt
, 2));
5024 case BUILT_IN_MEMCPY
:
5025 case BUILT_IN_MEMPCPY
:
5026 case BUILT_IN_MEMMOVE
:
5027 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
5028 gimple_call_arg (stmt
, 1), fcode
);
5029 case BUILT_IN_SPRINTF_CHK
:
5030 case BUILT_IN_VSPRINTF_CHK
:
5031 return gimple_fold_builtin_sprintf_chk (gsi
, fcode
);
5032 case BUILT_IN_STRCAT_CHK
:
5033 return gimple_fold_builtin_strcat_chk (gsi
);
5034 case BUILT_IN_STRNCAT_CHK
:
5035 return gimple_fold_builtin_strncat_chk (gsi
);
5036 case BUILT_IN_STRLEN
:
5037 return gimple_fold_builtin_strlen (gsi
);
5038 case BUILT_IN_STRCPY
:
5039 return gimple_fold_builtin_strcpy (gsi
,
5040 gimple_call_arg (stmt
, 0),
5041 gimple_call_arg (stmt
, 1));
5042 case BUILT_IN_STRNCPY
:
5043 return gimple_fold_builtin_strncpy (gsi
,
5044 gimple_call_arg (stmt
, 0),
5045 gimple_call_arg (stmt
, 1),
5046 gimple_call_arg (stmt
, 2));
5047 case BUILT_IN_STRCAT
:
5048 return gimple_fold_builtin_strcat (gsi
, gimple_call_arg (stmt
, 0),
5049 gimple_call_arg (stmt
, 1));
5050 case BUILT_IN_STRNCAT
:
5051 return gimple_fold_builtin_strncat (gsi
);
5052 case BUILT_IN_INDEX
:
5053 case BUILT_IN_STRCHR
:
5054 return gimple_fold_builtin_strchr (gsi
, false);
5055 case BUILT_IN_RINDEX
:
5056 case BUILT_IN_STRRCHR
:
5057 return gimple_fold_builtin_strchr (gsi
, true);
5058 case BUILT_IN_STRSTR
:
5059 return gimple_fold_builtin_strstr (gsi
);
5060 case BUILT_IN_STRCMP
:
5061 case BUILT_IN_STRCMP_EQ
:
5062 case BUILT_IN_STRCASECMP
:
5063 case BUILT_IN_STRNCMP
:
5064 case BUILT_IN_STRNCMP_EQ
:
5065 case BUILT_IN_STRNCASECMP
:
5066 return gimple_fold_builtin_string_compare (gsi
);
5067 case BUILT_IN_MEMCHR
:
5068 return gimple_fold_builtin_memchr (gsi
);
5069 case BUILT_IN_FPUTS
:
5070 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
5071 gimple_call_arg (stmt
, 1), false);
5072 case BUILT_IN_FPUTS_UNLOCKED
:
5073 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
5074 gimple_call_arg (stmt
, 1), true);
5075 case BUILT_IN_MEMCPY_CHK
:
5076 case BUILT_IN_MEMPCPY_CHK
:
5077 case BUILT_IN_MEMMOVE_CHK
:
5078 case BUILT_IN_MEMSET_CHK
:
5079 return gimple_fold_builtin_memory_chk (gsi
,
5080 gimple_call_arg (stmt
, 0),
5081 gimple_call_arg (stmt
, 1),
5082 gimple_call_arg (stmt
, 2),
5083 gimple_call_arg (stmt
, 3),
5085 case BUILT_IN_STPCPY
:
5086 return gimple_fold_builtin_stpcpy (gsi
);
5087 case BUILT_IN_STRCPY_CHK
:
5088 case BUILT_IN_STPCPY_CHK
:
5089 return gimple_fold_builtin_stxcpy_chk (gsi
,
5090 gimple_call_arg (stmt
, 0),
5091 gimple_call_arg (stmt
, 1),
5092 gimple_call_arg (stmt
, 2),
5094 case BUILT_IN_STRNCPY_CHK
:
5095 case BUILT_IN_STPNCPY_CHK
:
5096 return gimple_fold_builtin_stxncpy_chk (gsi
,
5097 gimple_call_arg (stmt
, 0),
5098 gimple_call_arg (stmt
, 1),
5099 gimple_call_arg (stmt
, 2),
5100 gimple_call_arg (stmt
, 3),
5102 case BUILT_IN_SNPRINTF_CHK
:
5103 case BUILT_IN_VSNPRINTF_CHK
:
5104 return gimple_fold_builtin_snprintf_chk (gsi
, fcode
);
5106 case BUILT_IN_FPRINTF
:
5107 case BUILT_IN_FPRINTF_UNLOCKED
:
5108 case BUILT_IN_VFPRINTF
:
5109 if (n
== 2 || n
== 3)
5110 return gimple_fold_builtin_fprintf (gsi
,
5111 gimple_call_arg (stmt
, 0),
5112 gimple_call_arg (stmt
, 1),
5114 ? gimple_call_arg (stmt
, 2)
5118 case BUILT_IN_FPRINTF_CHK
:
5119 case BUILT_IN_VFPRINTF_CHK
:
5120 if (n
== 3 || n
== 4)
5121 return gimple_fold_builtin_fprintf (gsi
,
5122 gimple_call_arg (stmt
, 0),
5123 gimple_call_arg (stmt
, 2),
5125 ? gimple_call_arg (stmt
, 3)
5129 case BUILT_IN_PRINTF
:
5130 case BUILT_IN_PRINTF_UNLOCKED
:
5131 case BUILT_IN_VPRINTF
:
5132 if (n
== 1 || n
== 2)
5133 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 0),
5135 ? gimple_call_arg (stmt
, 1)
5136 : NULL_TREE
, fcode
);
5138 case BUILT_IN_PRINTF_CHK
:
5139 case BUILT_IN_VPRINTF_CHK
:
5140 if (n
== 2 || n
== 3)
5141 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 1),
5143 ? gimple_call_arg (stmt
, 2)
5144 : NULL_TREE
, fcode
);
5146 case BUILT_IN_ACC_ON_DEVICE
:
5147 return gimple_fold_builtin_acc_on_device (gsi
,
5148 gimple_call_arg (stmt
, 0));
5149 case BUILT_IN_REALLOC
:
5150 return gimple_fold_builtin_realloc (gsi
);
5152 case BUILT_IN_CLEAR_PADDING
:
5153 return gimple_fold_builtin_clear_padding (gsi
);
5158 /* Try the generic builtin folder. */
5159 bool ignore
= (gimple_call_lhs (stmt
) == NULL
);
5160 tree result
= fold_call_stmt (stmt
, ignore
);
5164 STRIP_NOPS (result
);
5166 result
= fold_convert (gimple_call_return_type (stmt
), result
);
5167 gimplify_and_update_call_from_tree (gsi
, result
);
5174 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5175 function calls to constants, where possible. */
5178 fold_internal_goacc_dim (const gimple
*call
)
5180 int axis
= oacc_get_ifn_dim_arg (call
);
5181 int size
= oacc_get_fn_dim_size (current_function_decl
, axis
);
5182 tree result
= NULL_TREE
;
5183 tree type
= TREE_TYPE (gimple_call_lhs (call
));
5185 switch (gimple_call_internal_fn (call
))
5187 case IFN_GOACC_DIM_POS
:
5188 /* If the size is 1, we know the answer. */
5190 result
= build_int_cst (type
, 0);
5192 case IFN_GOACC_DIM_SIZE
:
5193 /* If the size is not dynamic, we know the answer. */
5195 result
= build_int_cst (type
, size
);
5204 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
5205 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
5206 &var where var is only addressable because of such calls. */
5209 optimize_atomic_compare_exchange_p (gimple
*stmt
)
5211 if (gimple_call_num_args (stmt
) != 6
5212 || !flag_inline_atomics
5214 || sanitize_flags_p (SANITIZE_THREAD
| SANITIZE_ADDRESS
)
5215 || !gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
)
5216 || !gimple_vdef (stmt
)
5217 || !gimple_vuse (stmt
))
5220 tree fndecl
= gimple_call_fndecl (stmt
);
5221 switch (DECL_FUNCTION_CODE (fndecl
))
5223 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
5224 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
5225 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
5226 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
5227 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
5233 tree expected
= gimple_call_arg (stmt
, 1);
5234 if (TREE_CODE (expected
) != ADDR_EXPR
5235 || !SSA_VAR_P (TREE_OPERAND (expected
, 0)))
5238 tree etype
= TREE_TYPE (TREE_OPERAND (expected
, 0));
5239 if (!is_gimple_reg_type (etype
)
5240 || !auto_var_in_fn_p (TREE_OPERAND (expected
, 0), current_function_decl
)
5241 || TREE_THIS_VOLATILE (etype
)
5242 || VECTOR_TYPE_P (etype
)
5243 || TREE_CODE (etype
) == COMPLEX_TYPE
5244 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
5245 might not preserve all the bits. See PR71716. */
5246 || SCALAR_FLOAT_TYPE_P (etype
)
5247 || maybe_ne (TYPE_PRECISION (etype
),
5248 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
5251 tree weak
= gimple_call_arg (stmt
, 3);
5252 if (!integer_zerop (weak
) && !integer_onep (weak
))
5255 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
5256 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
5257 machine_mode mode
= TYPE_MODE (itype
);
5259 if (direct_optab_handler (atomic_compare_and_swap_optab
, mode
)
5261 && optab_handler (sync_compare_and_swap_optab
, mode
) == CODE_FOR_nothing
)
5264 if (maybe_ne (int_size_in_bytes (etype
), GET_MODE_SIZE (mode
)))
5271 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5273 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5274 i = IMAGPART_EXPR <t>;
5276 e = REALPART_EXPR <t>; */
5279 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator
*gsi
)
5281 gimple
*stmt
= gsi_stmt (*gsi
);
5282 tree fndecl
= gimple_call_fndecl (stmt
);
5283 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
5284 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
5285 tree ctype
= build_complex_type (itype
);
5286 tree expected
= TREE_OPERAND (gimple_call_arg (stmt
, 1), 0);
5287 bool throws
= false;
5289 gimple
*g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
5291 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
5292 gimple_stmt_iterator gsiret
= gsi_for_stmt (g
);
5293 if (!useless_type_conversion_p (itype
, TREE_TYPE (expected
)))
5295 g
= gimple_build_assign (make_ssa_name (itype
), VIEW_CONVERT_EXPR
,
5296 build1 (VIEW_CONVERT_EXPR
, itype
,
5297 gimple_assign_lhs (g
)));
5298 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
5300 int flag
= (integer_onep (gimple_call_arg (stmt
, 3)) ? 256 : 0)
5301 + int_size_in_bytes (itype
);
5302 g
= gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE
, 6,
5303 gimple_call_arg (stmt
, 0),
5304 gimple_assign_lhs (g
),
5305 gimple_call_arg (stmt
, 2),
5306 build_int_cst (integer_type_node
, flag
),
5307 gimple_call_arg (stmt
, 4),
5308 gimple_call_arg (stmt
, 5));
5309 tree lhs
= make_ssa_name (ctype
);
5310 gimple_call_set_lhs (g
, lhs
);
5311 gimple_move_vops (g
, stmt
);
5312 tree oldlhs
= gimple_call_lhs (stmt
);
5313 if (stmt_can_throw_internal (cfun
, stmt
))
5316 e
= find_fallthru_edge (gsi_bb (*gsi
)->succs
);
5318 gimple_call_set_nothrow (as_a
<gcall
*> (g
),
5319 gimple_call_nothrow_p (as_a
<gcall
*> (stmt
)));
5320 gimple_call_set_lhs (stmt
, NULL_TREE
);
5321 gsi_replace (gsi
, g
, true);
5324 g
= gimple_build_assign (make_ssa_name (itype
), IMAGPART_EXPR
,
5325 build1 (IMAGPART_EXPR
, itype
, lhs
));
5328 gsi_insert_on_edge_immediate (e
, g
);
5329 *gsi
= gsi_for_stmt (g
);
5332 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5333 g
= gimple_build_assign (oldlhs
, NOP_EXPR
, gimple_assign_lhs (g
));
5334 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5336 g
= gimple_build_assign (make_ssa_name (itype
), REALPART_EXPR
,
5337 build1 (REALPART_EXPR
, itype
, lhs
));
5338 if (throws
&& oldlhs
== NULL_TREE
)
5340 gsi_insert_on_edge_immediate (e
, g
);
5341 *gsi
= gsi_for_stmt (g
);
5344 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5345 if (!useless_type_conversion_p (TREE_TYPE (expected
), itype
))
5347 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
5349 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (expected
),
5350 gimple_assign_lhs (g
)));
5351 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5353 g
= gimple_build_assign (expected
, SSA_NAME
, gimple_assign_lhs (g
));
5354 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5358 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
5359 doesn't fit into TYPE. The test for overflow should be regardless of
5360 -fwrapv, and even for unsigned types. */
5363 arith_overflowed_p (enum tree_code code
, const_tree type
,
5364 const_tree arg0
, const_tree arg1
)
5366 widest2_int warg0
= widest2_int_cst (arg0
);
5367 widest2_int warg1
= widest2_int_cst (arg1
);
5371 case PLUS_EXPR
: wres
= wi::add (warg0
, warg1
); break;
5372 case MINUS_EXPR
: wres
= wi::sub (warg0
, warg1
); break;
5373 case MULT_EXPR
: wres
= wi::mul (warg0
, warg1
); break;
5374 default: gcc_unreachable ();
5376 signop sign
= TYPE_SIGN (type
);
5377 if (sign
== UNSIGNED
&& wi::neg_p (wres
))
5379 return wi::min_precision (wres
, sign
) > TYPE_PRECISION (type
);
5382 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5383 for the memory it references, otherwise return null. VECTYPE is the
5384 type of the memory vector. */
5387 gimple_fold_mask_load_store_mem_ref (gcall
*call
, tree vectype
)
5389 tree ptr
= gimple_call_arg (call
, 0);
5390 tree alias_align
= gimple_call_arg (call
, 1);
5391 tree mask
= gimple_call_arg (call
, 2);
5392 if (!tree_fits_uhwi_p (alias_align
) || !integer_all_onesp (mask
))
5395 unsigned HOST_WIDE_INT align
= tree_to_uhwi (alias_align
);
5396 if (TYPE_ALIGN (vectype
) != align
)
5397 vectype
= build_aligned_type (vectype
, align
);
5398 tree offset
= build_zero_cst (TREE_TYPE (alias_align
));
5399 return fold_build2 (MEM_REF
, vectype
, ptr
, offset
);
5402 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5405 gimple_fold_mask_load (gimple_stmt_iterator
*gsi
, gcall
*call
)
5407 tree lhs
= gimple_call_lhs (call
);
5411 if (tree rhs
= gimple_fold_mask_load_store_mem_ref (call
, TREE_TYPE (lhs
)))
5413 gassign
*new_stmt
= gimple_build_assign (lhs
, rhs
);
5414 gimple_set_location (new_stmt
, gimple_location (call
));
5415 gimple_move_vops (new_stmt
, call
);
5416 gsi_replace (gsi
, new_stmt
, false);
5422 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5425 gimple_fold_mask_store (gimple_stmt_iterator
*gsi
, gcall
*call
)
5427 tree rhs
= gimple_call_arg (call
, 3);
5428 if (tree lhs
= gimple_fold_mask_load_store_mem_ref (call
, TREE_TYPE (rhs
)))
5430 gassign
*new_stmt
= gimple_build_assign (lhs
, rhs
);
5431 gimple_set_location (new_stmt
, gimple_location (call
));
5432 gimple_move_vops (new_stmt
, call
);
5433 gsi_replace (gsi
, new_stmt
, false);
5439 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5440 The statement may be replaced by another statement, e.g., if the call
5441 simplifies to a constant value. Return true if any changes were made.
5442 It is assumed that the operands have been previously folded. */
5445 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
5447 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
5449 bool changed
= false;
5451 /* Check for virtual calls that became direct calls. */
5452 callee
= gimple_call_fn (stmt
);
5453 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
5455 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
5457 if (dump_file
&& virtual_method_call_p (callee
)
5458 && !possible_polymorphic_call_target_p
5459 (callee
, stmt
, cgraph_node::get (gimple_call_addr_fndecl
5460 (OBJ_TYPE_REF_EXPR (callee
)))))
5463 "Type inheritance inconsistent devirtualization of ");
5464 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
5465 fprintf (dump_file
, " to ");
5466 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
5467 fprintf (dump_file
, "\n");
5470 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
5473 else if (flag_devirtualize
&& !inplace
&& virtual_method_call_p (callee
))
5476 vec
<cgraph_node
*>targets
5477 = possible_polymorphic_call_targets (callee
, stmt
, &final
);
5478 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
5480 tree lhs
= gimple_call_lhs (stmt
);
5481 if (dump_enabled_p ())
5483 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, stmt
,
5484 "folding virtual function call to %s\n",
5485 targets
.length () == 1
5486 ? targets
[0]->name ()
5487 : "__builtin_unreachable");
5489 if (targets
.length () == 1)
5491 tree fndecl
= targets
[0]->decl
;
5492 gimple_call_set_fndecl (stmt
, fndecl
);
5494 /* If changing the call to __cxa_pure_virtual
5495 or similar noreturn function, adjust gimple_call_fntype
5497 if (gimple_call_noreturn_p (stmt
)
5498 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
5499 && TYPE_ARG_TYPES (TREE_TYPE (fndecl
))
5500 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
5502 gimple_call_set_fntype (stmt
, TREE_TYPE (fndecl
));
5503 /* If the call becomes noreturn, remove the lhs. */
5505 && gimple_call_noreturn_p (stmt
)
5506 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt
)))
5507 || should_remove_lhs_p (lhs
)))
5509 if (TREE_CODE (lhs
) == SSA_NAME
)
5511 tree var
= create_tmp_var (TREE_TYPE (lhs
));
5512 tree def
= get_or_create_ssa_default_def (cfun
, var
);
5513 gimple
*new_stmt
= gimple_build_assign (lhs
, def
);
5514 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
5516 gimple_call_set_lhs (stmt
, NULL_TREE
);
5518 maybe_remove_unused_call_args (cfun
, stmt
);
5522 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
5523 gimple
*new_stmt
= gimple_build_call (fndecl
, 0);
5524 gimple_set_location (new_stmt
, gimple_location (stmt
));
5525 /* If the call had a SSA name as lhs morph that into
5526 an uninitialized value. */
5527 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
5529 tree var
= create_tmp_var (TREE_TYPE (lhs
));
5530 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs
, var
);
5531 SSA_NAME_DEF_STMT (lhs
) = gimple_build_nop ();
5532 set_ssa_default_def (cfun
, var
, lhs
);
5534 gimple_move_vops (new_stmt
, stmt
);
5535 gsi_replace (gsi
, new_stmt
, false);
5542 /* Check for indirect calls that became direct calls, and then
5543 no longer require a static chain. */
5544 if (gimple_call_chain (stmt
))
5546 tree fn
= gimple_call_fndecl (stmt
);
5547 if (fn
&& !DECL_STATIC_CHAIN (fn
))
5549 gimple_call_set_chain (stmt
, NULL
);
5557 /* Check for builtins that CCP can handle using information not
5558 available in the generic fold routines. */
5559 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
5561 if (gimple_fold_builtin (gsi
))
5564 else if (gimple_call_builtin_p (stmt
, BUILT_IN_MD
))
5566 changed
|= targetm
.gimple_fold_builtin (gsi
);
5568 else if (gimple_call_internal_p (stmt
))
5570 enum tree_code subcode
= ERROR_MARK
;
5571 tree result
= NULL_TREE
;
5572 bool cplx_result
= false;
5573 tree overflow
= NULL_TREE
;
5574 switch (gimple_call_internal_fn (stmt
))
5576 case IFN_BUILTIN_EXPECT
:
5577 result
= fold_builtin_expect (gimple_location (stmt
),
5578 gimple_call_arg (stmt
, 0),
5579 gimple_call_arg (stmt
, 1),
5580 gimple_call_arg (stmt
, 2),
5583 case IFN_UBSAN_OBJECT_SIZE
:
5585 tree offset
= gimple_call_arg (stmt
, 1);
5586 tree objsize
= gimple_call_arg (stmt
, 2);
5587 if (integer_all_onesp (objsize
)
5588 || (TREE_CODE (offset
) == INTEGER_CST
5589 && TREE_CODE (objsize
) == INTEGER_CST
5590 && tree_int_cst_le (offset
, objsize
)))
5592 replace_call_with_value (gsi
, NULL_TREE
);
5598 if (integer_zerop (gimple_call_arg (stmt
, 1)))
5600 replace_call_with_value (gsi
, NULL_TREE
);
5604 case IFN_UBSAN_BOUNDS
:
5606 tree index
= gimple_call_arg (stmt
, 1);
5607 tree bound
= gimple_call_arg (stmt
, 2);
5608 if (TREE_CODE (index
) == INTEGER_CST
5609 && TREE_CODE (bound
) == INTEGER_CST
)
5611 index
= fold_convert (TREE_TYPE (bound
), index
);
5612 if (TREE_CODE (index
) == INTEGER_CST
5613 && tree_int_cst_le (index
, bound
))
5615 replace_call_with_value (gsi
, NULL_TREE
);
5621 case IFN_GOACC_DIM_SIZE
:
5622 case IFN_GOACC_DIM_POS
:
5623 result
= fold_internal_goacc_dim (stmt
);
5625 case IFN_UBSAN_CHECK_ADD
:
5626 subcode
= PLUS_EXPR
;
5628 case IFN_UBSAN_CHECK_SUB
:
5629 subcode
= MINUS_EXPR
;
5631 case IFN_UBSAN_CHECK_MUL
:
5632 subcode
= MULT_EXPR
;
5634 case IFN_ADD_OVERFLOW
:
5635 subcode
= PLUS_EXPR
;
5638 case IFN_SUB_OVERFLOW
:
5639 subcode
= MINUS_EXPR
;
5642 case IFN_MUL_OVERFLOW
:
5643 subcode
= MULT_EXPR
;
5647 changed
|= gimple_fold_mask_load (gsi
, stmt
);
5649 case IFN_MASK_STORE
:
5650 changed
|= gimple_fold_mask_store (gsi
, stmt
);
5655 if (subcode
!= ERROR_MARK
)
5657 tree arg0
= gimple_call_arg (stmt
, 0);
5658 tree arg1
= gimple_call_arg (stmt
, 1);
5659 tree type
= TREE_TYPE (arg0
);
5662 tree lhs
= gimple_call_lhs (stmt
);
5663 if (lhs
== NULL_TREE
)
5666 type
= TREE_TYPE (TREE_TYPE (lhs
));
5668 if (type
== NULL_TREE
)
5670 /* x = y + 0; x = y - 0; x = y * 0; */
5671 else if (integer_zerop (arg1
))
5672 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg0
;
5673 /* x = 0 + y; x = 0 * y; */
5674 else if (subcode
!= MINUS_EXPR
&& integer_zerop (arg0
))
5675 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg1
;
5677 else if (subcode
== MINUS_EXPR
&& operand_equal_p (arg0
, arg1
, 0))
5678 result
= integer_zero_node
;
5679 /* x = y * 1; x = 1 * y; */
5680 else if (subcode
== MULT_EXPR
&& integer_onep (arg1
))
5682 else if (subcode
== MULT_EXPR
&& integer_onep (arg0
))
5684 else if (TREE_CODE (arg0
) == INTEGER_CST
5685 && TREE_CODE (arg1
) == INTEGER_CST
)
5688 result
= int_const_binop (subcode
, fold_convert (type
, arg0
),
5689 fold_convert (type
, arg1
));
5691 result
= int_const_binop (subcode
, arg0
, arg1
);
5692 if (result
&& arith_overflowed_p (subcode
, type
, arg0
, arg1
))
5695 overflow
= build_one_cst (type
);
5702 if (result
== integer_zero_node
)
5703 result
= build_zero_cst (type
);
5704 else if (cplx_result
&& TREE_TYPE (result
) != type
)
5706 if (TREE_CODE (result
) == INTEGER_CST
)
5708 if (arith_overflowed_p (PLUS_EXPR
, type
, result
,
5710 overflow
= build_one_cst (type
);
5712 else if ((!TYPE_UNSIGNED (TREE_TYPE (result
))
5713 && TYPE_UNSIGNED (type
))
5714 || (TYPE_PRECISION (type
)
5715 < (TYPE_PRECISION (TREE_TYPE (result
))
5716 + (TYPE_UNSIGNED (TREE_TYPE (result
))
5717 && !TYPE_UNSIGNED (type
)))))
5720 result
= fold_convert (type
, result
);
5727 if (TREE_CODE (result
) == INTEGER_CST
&& TREE_OVERFLOW (result
))
5728 result
= drop_tree_overflow (result
);
5731 if (overflow
== NULL_TREE
)
5732 overflow
= build_zero_cst (TREE_TYPE (result
));
5733 tree ctype
= build_complex_type (TREE_TYPE (result
));
5734 if (TREE_CODE (result
) == INTEGER_CST
5735 && TREE_CODE (overflow
) == INTEGER_CST
)
5736 result
= build_complex (ctype
, result
, overflow
);
5738 result
= build2_loc (gimple_location (stmt
), COMPLEX_EXPR
,
5739 ctype
, result
, overflow
);
5741 gimplify_and_update_call_from_tree (gsi
, result
);
5750 /* Return true whether NAME has a use on STMT. */
5753 has_use_on_stmt (tree name
, gimple
*stmt
)
5755 imm_use_iterator iter
;
5756 use_operand_p use_p
;
5757 FOR_EACH_IMM_USE_FAST (use_p
, iter
, name
)
5758 if (USE_STMT (use_p
) == stmt
)
5763 /* Worker for fold_stmt_1 dispatch to pattern based folding with
5766 Replaces *GSI with the simplification result in RCODE and OPS
5767 and the associated statements in *SEQ. Does the replacement
5768 according to INPLACE and returns true if the operation succeeded. */
5771 replace_stmt_with_simplification (gimple_stmt_iterator
*gsi
,
5772 gimple_match_op
*res_op
,
5773 gimple_seq
*seq
, bool inplace
)
5775 gimple
*stmt
= gsi_stmt (*gsi
);
5776 tree
*ops
= res_op
->ops
;
5777 unsigned int num_ops
= res_op
->num_ops
;
5779 /* Play safe and do not allow abnormals to be mentioned in
5780 newly created statements. See also maybe_push_res_to_seq.
5781 As an exception allow such uses if there was a use of the
5782 same SSA name on the old stmt. */
5783 for (unsigned int i
= 0; i
< num_ops
; ++i
)
5784 if (TREE_CODE (ops
[i
]) == SSA_NAME
5785 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[i
])
5786 && !has_use_on_stmt (ops
[i
], stmt
))
5789 if (num_ops
> 0 && COMPARISON_CLASS_P (ops
[0]))
5790 for (unsigned int i
= 0; i
< 2; ++i
)
5791 if (TREE_CODE (TREE_OPERAND (ops
[0], i
)) == SSA_NAME
5792 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops
[0], i
))
5793 && !has_use_on_stmt (TREE_OPERAND (ops
[0], i
), stmt
))
5796 /* Don't insert new statements when INPLACE is true, even if we could
5797 reuse STMT for the final statement. */
5798 if (inplace
&& !gimple_seq_empty_p (*seq
))
5801 if (gcond
*cond_stmt
= dyn_cast
<gcond
*> (stmt
))
5803 gcc_assert (res_op
->code
.is_tree_code ());
5804 if (TREE_CODE_CLASS ((enum tree_code
) res_op
->code
) == tcc_comparison
5805 /* GIMPLE_CONDs condition may not throw. */
5806 && (!flag_exceptions
5807 || !cfun
->can_throw_non_call_exceptions
5808 || !operation_could_trap_p (res_op
->code
,
5809 FLOAT_TYPE_P (TREE_TYPE (ops
[0])),
5811 gimple_cond_set_condition (cond_stmt
, res_op
->code
, ops
[0], ops
[1]);
5812 else if (res_op
->code
== SSA_NAME
)
5813 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, ops
[0],
5814 build_zero_cst (TREE_TYPE (ops
[0])));
5815 else if (res_op
->code
== INTEGER_CST
)
5817 if (integer_zerop (ops
[0]))
5818 gimple_cond_make_false (cond_stmt
);
5820 gimple_cond_make_true (cond_stmt
);
5824 tree res
= maybe_push_res_to_seq (res_op
, seq
);
5827 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, res
,
5828 build_zero_cst (TREE_TYPE (res
)));
5832 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5834 fprintf (dump_file
, "gimple_simplified to ");
5835 if (!gimple_seq_empty_p (*seq
))
5836 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5837 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
5840 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
5843 else if (is_gimple_assign (stmt
)
5844 && res_op
->code
.is_tree_code ())
5847 || gimple_num_ops (stmt
) > get_gimple_rhs_num_ops (res_op
->code
))
5849 maybe_build_generic_op (res_op
);
5850 gimple_assign_set_rhs_with_ops (gsi
, res_op
->code
,
5851 res_op
->op_or_null (0),
5852 res_op
->op_or_null (1),
5853 res_op
->op_or_null (2));
5854 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5856 fprintf (dump_file
, "gimple_simplified to ");
5857 if (!gimple_seq_empty_p (*seq
))
5858 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5859 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
5862 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
5866 else if (res_op
->code
.is_fn_code ()
5867 && gimple_call_combined_fn (stmt
) == res_op
->code
)
5869 gcc_assert (num_ops
== gimple_call_num_args (stmt
));
5870 for (unsigned int i
= 0; i
< num_ops
; ++i
)
5871 gimple_call_set_arg (stmt
, i
, ops
[i
]);
5872 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5874 fprintf (dump_file
, "gimple_simplified to ");
5875 if (!gimple_seq_empty_p (*seq
))
5876 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5877 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
), 0, TDF_SLIM
);
5879 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
5884 if (gimple_has_lhs (stmt
))
5886 tree lhs
= gimple_get_lhs (stmt
);
5887 if (!maybe_push_res_to_seq (res_op
, seq
, lhs
))
5889 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5891 fprintf (dump_file
, "gimple_simplified to ");
5892 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5894 gsi_replace_with_seq_vops (gsi
, *seq
);
5904 /* Canonicalize MEM_REFs invariant address operand after propagation. */
5907 maybe_canonicalize_mem_ref_addr (tree
*t
, bool is_debug
= false)
5912 if (TREE_CODE (*t
) == ADDR_EXPR
)
5913 t
= &TREE_OPERAND (*t
, 0);
5915 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5916 generic vector extension. The actual vector referenced is
5917 view-converted to an array type for this purpose. If the index
5918 is constant the canonical representation in the middle-end is a
5919 BIT_FIELD_REF so re-write the former to the latter here. */
5920 if (TREE_CODE (*t
) == ARRAY_REF
5921 && TREE_CODE (TREE_OPERAND (*t
, 0)) == VIEW_CONVERT_EXPR
5922 && TREE_CODE (TREE_OPERAND (*t
, 1)) == INTEGER_CST
5923 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0))))
5925 tree vtype
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0));
5926 if (VECTOR_TYPE_P (vtype
))
5928 tree low
= array_ref_low_bound (*t
);
5929 if (TREE_CODE (low
) == INTEGER_CST
)
5931 if (tree_int_cst_le (low
, TREE_OPERAND (*t
, 1)))
5933 widest_int idx
= wi::sub (wi::to_widest (TREE_OPERAND (*t
, 1)),
5934 wi::to_widest (low
));
5935 idx
= wi::mul (idx
, wi::to_widest
5936 (TYPE_SIZE (TREE_TYPE (*t
))));
5938 = wi::add (idx
, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t
))));
5939 if (wi::les_p (ext
, wi::to_widest (TYPE_SIZE (vtype
))))
5941 *t
= build3_loc (EXPR_LOCATION (*t
), BIT_FIELD_REF
,
5943 TREE_OPERAND (TREE_OPERAND (*t
, 0), 0),
5944 TYPE_SIZE (TREE_TYPE (*t
)),
5945 wide_int_to_tree (bitsizetype
, idx
));
5953 while (handled_component_p (*t
))
5954 t
= &TREE_OPERAND (*t
, 0);
5956 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
5957 of invariant addresses into a SSA name MEM_REF address. */
5958 if (TREE_CODE (*t
) == MEM_REF
5959 || TREE_CODE (*t
) == TARGET_MEM_REF
)
5961 tree addr
= TREE_OPERAND (*t
, 0);
5962 if (TREE_CODE (addr
) == ADDR_EXPR
5963 && (TREE_CODE (TREE_OPERAND (addr
, 0)) == MEM_REF
5964 || handled_component_p (TREE_OPERAND (addr
, 0))))
5968 base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
5977 TREE_OPERAND (*t
, 0) = build_fold_addr_expr (base
);
5978 TREE_OPERAND (*t
, 1) = int_const_binop (PLUS_EXPR
,
5979 TREE_OPERAND (*t
, 1),
5980 size_int (coffset
));
5983 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t
, 0)) == DEBUG_EXPR_DECL
5984 || is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)));
5987 /* Canonicalize back MEM_REFs to plain reference trees if the object
5988 accessed is a decl that has the same access semantics as the MEM_REF. */
5989 if (TREE_CODE (*t
) == MEM_REF
5990 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
5991 && integer_zerop (TREE_OPERAND (*t
, 1))
5992 && MR_DEPENDENCE_CLIQUE (*t
) == 0)
5994 tree decl
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
5995 tree alias_type
= TREE_TYPE (TREE_OPERAND (*t
, 1));
5996 if (/* Same volatile qualification. */
5997 TREE_THIS_VOLATILE (*t
) == TREE_THIS_VOLATILE (decl
)
5998 /* Same TBAA behavior with -fstrict-aliasing. */
5999 && !TYPE_REF_CAN_ALIAS_ALL (alias_type
)
6000 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl
))
6001 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type
)))
6002 /* Same alignment. */
6003 && TYPE_ALIGN (TREE_TYPE (decl
)) == TYPE_ALIGN (TREE_TYPE (*t
))
6004 /* We have to look out here to not drop a required conversion
6005 from the rhs to the lhs if *t appears on the lhs or vice-versa
6006 if it appears on the rhs. Thus require strict type
6008 && types_compatible_p (TREE_TYPE (*t
), TREE_TYPE (decl
)))
6010 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
6015 else if (TREE_CODE (*orig_t
) == ADDR_EXPR
6016 && TREE_CODE (*t
) == MEM_REF
6017 && TREE_CODE (TREE_OPERAND (*t
, 0)) == INTEGER_CST
)
6021 base
= get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t
, 0),
6025 gcc_assert (TREE_CODE (base
) == MEM_REF
);
6027 if (mem_ref_offset (base
).to_shwi (&moffset
))
6030 if (wi::to_poly_wide (TREE_OPERAND (base
, 0)).to_shwi (&moffset
))
6033 *orig_t
= build_int_cst (TREE_TYPE (*orig_t
), coffset
);
6040 /* Canonicalize TARGET_MEM_REF in particular with respect to
6041 the indexes becoming constant. */
6042 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
6044 tree tem
= maybe_fold_tmr (*t
);
6048 if (TREE_CODE (*orig_t
) == ADDR_EXPR
)
6049 recompute_tree_invariant_for_addr_expr (*orig_t
);
6057 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6058 distinguishes both cases. */
6061 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
, tree (*valueize
) (tree
))
6063 bool changed
= false;
6064 gimple
*stmt
= gsi_stmt (*gsi
);
6065 bool nowarning
= gimple_no_warning_p (stmt
);
6067 fold_defer_overflow_warnings ();
6069 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6071 ??? This shouldn't be done in generic folding but in the
6072 propagation helpers which also know whether an address was
6074 Also canonicalize operand order. */
6075 switch (gimple_code (stmt
))
6078 if (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
6080 tree
*rhs
= gimple_assign_rhs1_ptr (stmt
);
6081 if ((REFERENCE_CLASS_P (*rhs
)
6082 || TREE_CODE (*rhs
) == ADDR_EXPR
)
6083 && maybe_canonicalize_mem_ref_addr (rhs
))
6085 tree
*lhs
= gimple_assign_lhs_ptr (stmt
);
6086 if (REFERENCE_CLASS_P (*lhs
)
6087 && maybe_canonicalize_mem_ref_addr (lhs
))
6092 /* Canonicalize operand order. */
6093 enum tree_code code
= gimple_assign_rhs_code (stmt
);
6094 if (TREE_CODE_CLASS (code
) == tcc_comparison
6095 || commutative_tree_code (code
)
6096 || commutative_ternary_tree_code (code
))
6098 tree rhs1
= gimple_assign_rhs1 (stmt
);
6099 tree rhs2
= gimple_assign_rhs2 (stmt
);
6100 if (tree_swap_operands_p (rhs1
, rhs2
))
6102 gimple_assign_set_rhs1 (stmt
, rhs2
);
6103 gimple_assign_set_rhs2 (stmt
, rhs1
);
6104 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
6105 gimple_assign_set_rhs_code (stmt
,
6106 swap_tree_comparison (code
));
6114 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
6116 tree
*arg
= gimple_call_arg_ptr (stmt
, i
);
6117 if (REFERENCE_CLASS_P (*arg
)
6118 && maybe_canonicalize_mem_ref_addr (arg
))
6121 tree
*lhs
= gimple_call_lhs_ptr (stmt
);
6123 && REFERENCE_CLASS_P (*lhs
)
6124 && maybe_canonicalize_mem_ref_addr (lhs
))
6130 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
6131 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
6133 tree link
= gimple_asm_output_op (asm_stmt
, i
);
6134 tree op
= TREE_VALUE (link
);
6135 if (REFERENCE_CLASS_P (op
)
6136 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
6139 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
6141 tree link
= gimple_asm_input_op (asm_stmt
, i
);
6142 tree op
= TREE_VALUE (link
);
6143 if ((REFERENCE_CLASS_P (op
)
6144 || TREE_CODE (op
) == ADDR_EXPR
)
6145 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
6151 if (gimple_debug_bind_p (stmt
))
6153 tree
*val
= gimple_debug_bind_get_value_ptr (stmt
);
6155 && (REFERENCE_CLASS_P (*val
)
6156 || TREE_CODE (*val
) == ADDR_EXPR
)
6157 && maybe_canonicalize_mem_ref_addr (val
, true))
6163 /* Canonicalize operand order. */
6164 tree lhs
= gimple_cond_lhs (stmt
);
6165 tree rhs
= gimple_cond_rhs (stmt
);
6166 if (tree_swap_operands_p (lhs
, rhs
))
6168 gcond
*gc
= as_a
<gcond
*> (stmt
);
6169 gimple_cond_set_lhs (gc
, rhs
);
6170 gimple_cond_set_rhs (gc
, lhs
);
6171 gimple_cond_set_code (gc
,
6172 swap_tree_comparison (gimple_cond_code (gc
)));
6179 /* Dispatch to pattern-based folding. */
6181 || is_gimple_assign (stmt
)
6182 || gimple_code (stmt
) == GIMPLE_COND
)
6184 gimple_seq seq
= NULL
;
6185 gimple_match_op res_op
;
6186 if (gimple_simplify (stmt
, &res_op
, inplace
? NULL
: &seq
,
6187 valueize
, valueize
))
6189 if (replace_stmt_with_simplification (gsi
, &res_op
, &seq
, inplace
))
6192 gimple_seq_discard (seq
);
6196 stmt
= gsi_stmt (*gsi
);
6198 /* Fold the main computation performed by the statement. */
6199 switch (gimple_code (stmt
))
6203 /* Try to canonicalize for boolean-typed X the comparisons
6204 X == 0, X == 1, X != 0, and X != 1. */
6205 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
6206 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
6208 tree lhs
= gimple_assign_lhs (stmt
);
6209 tree op1
= gimple_assign_rhs1 (stmt
);
6210 tree op2
= gimple_assign_rhs2 (stmt
);
6211 tree type
= TREE_TYPE (op1
);
6213 /* Check whether the comparison operands are of the same boolean
6214 type as the result type is.
6215 Check that second operand is an integer-constant with value
6217 if (TREE_CODE (op2
) == INTEGER_CST
6218 && (integer_zerop (op2
) || integer_onep (op2
))
6219 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
6221 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
6222 bool is_logical_not
= false;
6224 /* X == 0 and X != 1 is a logical-not.of X
6225 X == 1 and X != 0 is X */
6226 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
6227 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
6228 is_logical_not
= true;
6230 if (is_logical_not
== false)
6231 gimple_assign_set_rhs_with_ops (gsi
, TREE_CODE (op1
), op1
);
6232 /* Only for one-bit precision typed X the transformation
6233 !X -> ~X is valied. */
6234 else if (TYPE_PRECISION (type
) == 1)
6235 gimple_assign_set_rhs_with_ops (gsi
, BIT_NOT_EXPR
, op1
);
6236 /* Otherwise we use !X -> X ^ 1. */
6238 gimple_assign_set_rhs_with_ops (gsi
, BIT_XOR_EXPR
, op1
,
6239 build_int_cst (type
, 1));
6245 unsigned old_num_ops
= gimple_num_ops (stmt
);
6246 tree lhs
= gimple_assign_lhs (stmt
);
6247 tree new_rhs
= fold_gimple_assign (gsi
);
6249 && !useless_type_conversion_p (TREE_TYPE (lhs
),
6250 TREE_TYPE (new_rhs
)))
6251 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
6254 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
6256 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
6263 changed
|= gimple_fold_call (gsi
, inplace
);
6267 if (gimple_debug_bind_p (stmt
))
6269 tree val
= gimple_debug_bind_get_value (stmt
);
6271 && REFERENCE_CLASS_P (val
))
6273 tree tem
= maybe_fold_reference (val
);
6276 gimple_debug_bind_set_value (stmt
, tem
);
6281 && TREE_CODE (val
) == ADDR_EXPR
)
6283 tree ref
= TREE_OPERAND (val
, 0);
6284 tree tem
= maybe_fold_reference (ref
);
6287 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
6288 gimple_debug_bind_set_value (stmt
, tem
);
6297 greturn
*ret_stmt
= as_a
<greturn
*> (stmt
);
6298 tree ret
= gimple_return_retval(ret_stmt
);
6300 if (ret
&& TREE_CODE (ret
) == SSA_NAME
&& valueize
)
6302 tree val
= valueize (ret
);
6303 if (val
&& val
!= ret
6304 && may_propagate_copy (ret
, val
))
6306 gimple_return_set_retval (ret_stmt
, val
);
6316 stmt
= gsi_stmt (*gsi
);
6318 fold_undefer_overflow_warnings (changed
&& !nowarning
, stmt
, 0);
6322 /* Valueziation callback that ends up not following SSA edges. */
6325 no_follow_ssa_edges (tree
)
6330 /* Valueization callback that ends up following single-use SSA edges only. */
6333 follow_single_use_edges (tree val
)
6335 if (TREE_CODE (val
) == SSA_NAME
6336 && !has_single_use (val
))
6341 /* Valueization callback that follows all SSA edges. */
6344 follow_all_ssa_edges (tree val
)
6349 /* Fold the statement pointed to by GSI. In some cases, this function may
6350 replace the whole statement with a new one. Returns true iff folding
6352 The statement pointed to by GSI should be in valid gimple form but may
6353 be in unfolded state as resulting from for example constant propagation
6354 which can produce *&x = 0. */
6357 fold_stmt (gimple_stmt_iterator
*gsi
)
6359 return fold_stmt_1 (gsi
, false, no_follow_ssa_edges
);
6363 fold_stmt (gimple_stmt_iterator
*gsi
, tree (*valueize
) (tree
))
6365 return fold_stmt_1 (gsi
, false, valueize
);
6368 /* Perform the minimal folding on statement *GSI. Only operations like
6369 *&x created by constant propagation are handled. The statement cannot
6370 be replaced with a new one. Return true if the statement was
6371 changed, false otherwise.
6372 The statement *GSI should be in valid gimple form but may
6373 be in unfolded state as resulting from for example constant propagation
6374 which can produce *&x = 0. */
6377 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
6379 gimple
*stmt
= gsi_stmt (*gsi
);
6380 bool changed
= fold_stmt_1 (gsi
, true, no_follow_ssa_edges
);
6381 gcc_assert (gsi_stmt (*gsi
) == stmt
);
6385 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6386 if EXPR is null or we don't know how.
6387 If non-null, the result always has boolean type. */
6390 canonicalize_bool (tree expr
, bool invert
)
6396 if (integer_nonzerop (expr
))
6397 return boolean_false_node
;
6398 else if (integer_zerop (expr
))
6399 return boolean_true_node
;
6400 else if (TREE_CODE (expr
) == SSA_NAME
)
6401 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
6402 build_int_cst (TREE_TYPE (expr
), 0));
6403 else if (COMPARISON_CLASS_P (expr
))
6404 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
6406 TREE_OPERAND (expr
, 0),
6407 TREE_OPERAND (expr
, 1));
6413 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
6415 if (integer_nonzerop (expr
))
6416 return boolean_true_node
;
6417 else if (integer_zerop (expr
))
6418 return boolean_false_node
;
6419 else if (TREE_CODE (expr
) == SSA_NAME
)
6420 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
6421 build_int_cst (TREE_TYPE (expr
), 0));
6422 else if (COMPARISON_CLASS_P (expr
))
6423 return fold_build2 (TREE_CODE (expr
),
6425 TREE_OPERAND (expr
, 0),
6426 TREE_OPERAND (expr
, 1));
6432 /* Check to see if a boolean expression EXPR is logically equivalent to the
6433 comparison (OP1 CODE OP2). Check for various identities involving
6437 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
6438 const_tree op1
, const_tree op2
)
6442 /* The obvious case. */
6443 if (TREE_CODE (expr
) == code
6444 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
6445 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
6448 /* Check for comparing (name, name != 0) and the case where expr
6449 is an SSA_NAME with a definition matching the comparison. */
6450 if (TREE_CODE (expr
) == SSA_NAME
6451 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
6453 if (operand_equal_p (expr
, op1
, 0))
6454 return ((code
== NE_EXPR
&& integer_zerop (op2
))
6455 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
6456 s
= SSA_NAME_DEF_STMT (expr
);
6457 if (is_gimple_assign (s
)
6458 && gimple_assign_rhs_code (s
) == code
6459 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
6460 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
6464 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6465 of name is a comparison, recurse. */
6466 if (TREE_CODE (op1
) == SSA_NAME
6467 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
6469 s
= SSA_NAME_DEF_STMT (op1
);
6470 if (is_gimple_assign (s
)
6471 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
6473 enum tree_code c
= gimple_assign_rhs_code (s
);
6474 if ((c
== NE_EXPR
&& integer_zerop (op2
))
6475 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
6476 return same_bool_comparison_p (expr
, c
,
6477 gimple_assign_rhs1 (s
),
6478 gimple_assign_rhs2 (s
));
6479 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
6480 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
6481 return same_bool_comparison_p (expr
,
6482 invert_tree_comparison (c
, false),
6483 gimple_assign_rhs1 (s
),
6484 gimple_assign_rhs2 (s
));
6490 /* Check to see if two boolean expressions OP1 and OP2 are logically
6494 same_bool_result_p (const_tree op1
, const_tree op2
)
6496 /* Simple cases first. */
6497 if (operand_equal_p (op1
, op2
, 0))
6500 /* Check the cases where at least one of the operands is a comparison.
6501 These are a bit smarter than operand_equal_p in that they apply some
6502 identifies on SSA_NAMEs. */
6503 if (COMPARISON_CLASS_P (op2
)
6504 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
6505 TREE_OPERAND (op2
, 0),
6506 TREE_OPERAND (op2
, 1)))
6508 if (COMPARISON_CLASS_P (op1
)
6509 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
6510 TREE_OPERAND (op1
, 0),
6511 TREE_OPERAND (op1
, 1)))
6518 /* Forward declarations for some mutually recursive functions. */
6521 and_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
6522 enum tree_code code2
, tree op2a
, tree op2b
);
6524 and_var_with_comparison (tree type
, tree var
, bool invert
,
6525 enum tree_code code2
, tree op2a
, tree op2b
);
6527 and_var_with_comparison_1 (tree type
, gimple
*stmt
,
6528 enum tree_code code2
, tree op2a
, tree op2b
);
6530 or_comparisons_1 (tree
, enum tree_code code1
, tree op1a
, tree op1b
,
6531 enum tree_code code2
, tree op2a
, tree op2b
);
6533 or_var_with_comparison (tree
, tree var
, bool invert
,
6534 enum tree_code code2
, tree op2a
, tree op2b
);
6536 or_var_with_comparison_1 (tree
, gimple
*stmt
,
6537 enum tree_code code2
, tree op2a
, tree op2b
);
6539 /* Helper function for and_comparisons_1: try to simplify the AND of the
6540 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6541 If INVERT is true, invert the value of the VAR before doing the AND.
6542 Return NULL_EXPR if we can't simplify this to a single expression. */
6545 and_var_with_comparison (tree type
, tree var
, bool invert
,
6546 enum tree_code code2
, tree op2a
, tree op2b
)
6549 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
6551 /* We can only deal with variables whose definitions are assignments. */
6552 if (!is_gimple_assign (stmt
))
6555 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6556 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6557 Then we only have to consider the simpler non-inverted cases. */
6559 t
= or_var_with_comparison_1 (type
, stmt
,
6560 invert_tree_comparison (code2
, false),
6563 t
= and_var_with_comparison_1 (type
, stmt
, code2
, op2a
, op2b
);
6564 return canonicalize_bool (t
, invert
);
6567 /* Try to simplify the AND of the ssa variable defined by the assignment
6568 STMT with the comparison specified by (OP2A CODE2 OP2B).
6569 Return NULL_EXPR if we can't simplify this to a single expression. */
6572 and_var_with_comparison_1 (tree type
, gimple
*stmt
,
6573 enum tree_code code2
, tree op2a
, tree op2b
)
6575 tree var
= gimple_assign_lhs (stmt
);
6576 tree true_test_var
= NULL_TREE
;
6577 tree false_test_var
= NULL_TREE
;
6578 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
6580 /* Check for identities like (var AND (var == 0)) => false. */
6581 if (TREE_CODE (op2a
) == SSA_NAME
6582 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
6584 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
6585 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
6587 true_test_var
= op2a
;
6588 if (var
== true_test_var
)
6591 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
6592 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
6594 false_test_var
= op2a
;
6595 if (var
== false_test_var
)
6596 return boolean_false_node
;
6600 /* If the definition is a comparison, recurse on it. */
6601 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
6603 tree t
= and_comparisons_1 (type
, innercode
,
6604 gimple_assign_rhs1 (stmt
),
6605 gimple_assign_rhs2 (stmt
),
6613 /* If the definition is an AND or OR expression, we may be able to
6614 simplify by reassociating. */
6615 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
6616 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
6618 tree inner1
= gimple_assign_rhs1 (stmt
);
6619 tree inner2
= gimple_assign_rhs2 (stmt
);
6622 tree partial
= NULL_TREE
;
6623 bool is_and
= (innercode
== BIT_AND_EXPR
);
6625 /* Check for boolean identities that don't require recursive examination
6627 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6628 inner1 AND (inner1 OR inner2) => inner1
6629 !inner1 AND (inner1 AND inner2) => false
6630 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6631 Likewise for similar cases involving inner2. */
6632 if (inner1
== true_test_var
)
6633 return (is_and
? var
: inner1
);
6634 else if (inner2
== true_test_var
)
6635 return (is_and
? var
: inner2
);
6636 else if (inner1
== false_test_var
)
6638 ? boolean_false_node
6639 : and_var_with_comparison (type
, inner2
, false, code2
, op2a
,
6641 else if (inner2
== false_test_var
)
6643 ? boolean_false_node
6644 : and_var_with_comparison (type
, inner1
, false, code2
, op2a
,
6647 /* Next, redistribute/reassociate the AND across the inner tests.
6648 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6649 if (TREE_CODE (inner1
) == SSA_NAME
6650 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
6651 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
6652 && (t
= maybe_fold_and_comparisons (type
, gimple_assign_rhs_code (s
),
6653 gimple_assign_rhs1 (s
),
6654 gimple_assign_rhs2 (s
),
6655 code2
, op2a
, op2b
)))
6657 /* Handle the AND case, where we are reassociating:
6658 (inner1 AND inner2) AND (op2a code2 op2b)
6660 If the partial result t is a constant, we win. Otherwise
6661 continue on to try reassociating with the other inner test. */
6664 if (integer_onep (t
))
6666 else if (integer_zerop (t
))
6667 return boolean_false_node
;
6670 /* Handle the OR case, where we are redistributing:
6671 (inner1 OR inner2) AND (op2a code2 op2b)
6672 => (t OR (inner2 AND (op2a code2 op2b))) */
6673 else if (integer_onep (t
))
6674 return boolean_true_node
;
6676 /* Save partial result for later. */
6680 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6681 if (TREE_CODE (inner2
) == SSA_NAME
6682 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
6683 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
6684 && (t
= maybe_fold_and_comparisons (type
, gimple_assign_rhs_code (s
),
6685 gimple_assign_rhs1 (s
),
6686 gimple_assign_rhs2 (s
),
6687 code2
, op2a
, op2b
)))
6689 /* Handle the AND case, where we are reassociating:
6690 (inner1 AND inner2) AND (op2a code2 op2b)
6691 => (inner1 AND t) */
6694 if (integer_onep (t
))
6696 else if (integer_zerop (t
))
6697 return boolean_false_node
;
6698 /* If both are the same, we can apply the identity
6700 else if (partial
&& same_bool_result_p (t
, partial
))
6704 /* Handle the OR case. where we are redistributing:
6705 (inner1 OR inner2) AND (op2a code2 op2b)
6706 => (t OR (inner1 AND (op2a code2 op2b)))
6707 => (t OR partial) */
6710 if (integer_onep (t
))
6711 return boolean_true_node
;
6714 /* We already got a simplification for the other
6715 operand to the redistributed OR expression. The
6716 interesting case is when at least one is false.
6717 Or, if both are the same, we can apply the identity
6719 if (integer_zerop (partial
))
6721 else if (integer_zerop (t
))
6723 else if (same_bool_result_p (t
, partial
))
6732 /* Try to simplify the AND of two comparisons defined by
6733 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6734 If this can be done without constructing an intermediate value,
6735 return the resulting tree; otherwise NULL_TREE is returned.
6736 This function is deliberately asymmetric as it recurses on SSA_DEFs
6737 in the first comparison but not the second. */
6740 and_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
6741 enum tree_code code2
, tree op2a
, tree op2b
)
6743 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
6745 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6746 if (operand_equal_p (op1a
, op2a
, 0)
6747 && operand_equal_p (op1b
, op2b
, 0))
6749 /* Result will be either NULL_TREE, or a combined comparison. */
6750 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
6751 TRUTH_ANDIF_EXPR
, code1
, code2
,
6752 truth_type
, op1a
, op1b
);
6757 /* Likewise the swapped case of the above. */
6758 if (operand_equal_p (op1a
, op2b
, 0)
6759 && operand_equal_p (op1b
, op2a
, 0))
6761 /* Result will be either NULL_TREE, or a combined comparison. */
6762 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
6763 TRUTH_ANDIF_EXPR
, code1
,
6764 swap_tree_comparison (code2
),
6765 truth_type
, op1a
, op1b
);
6770 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6771 NAME's definition is a truth value. See if there are any simplifications
6772 that can be done against the NAME's definition. */
6773 if (TREE_CODE (op1a
) == SSA_NAME
6774 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
6775 && (integer_zerop (op1b
) || integer_onep (op1b
)))
6777 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
6778 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
6779 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
6780 switch (gimple_code (stmt
))
6783 /* Try to simplify by copy-propagating the definition. */
6784 return and_var_with_comparison (type
, op1a
, invert
, code2
, op2a
,
6788 /* If every argument to the PHI produces the same result when
6789 ANDed with the second comparison, we win.
6790 Do not do this unless the type is bool since we need a bool
6791 result here anyway. */
6792 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
6794 tree result
= NULL_TREE
;
6796 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
6798 tree arg
= gimple_phi_arg_def (stmt
, i
);
6800 /* If this PHI has itself as an argument, ignore it.
6801 If all the other args produce the same result,
6803 if (arg
== gimple_phi_result (stmt
))
6805 else if (TREE_CODE (arg
) == INTEGER_CST
)
6807 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
6810 result
= boolean_false_node
;
6811 else if (!integer_zerop (result
))
6815 result
= fold_build2 (code2
, boolean_type_node
,
6817 else if (!same_bool_comparison_p (result
,
6821 else if (TREE_CODE (arg
) == SSA_NAME
6822 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
6825 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
6826 /* In simple cases we can look through PHI nodes,
6827 but we have to be careful with loops.
6829 if (! dom_info_available_p (CDI_DOMINATORS
)
6830 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
6831 || dominated_by_p (CDI_DOMINATORS
,
6832 gimple_bb (def_stmt
),
6835 temp
= and_var_with_comparison (type
, arg
, invert
, code2
,
6841 else if (!same_bool_result_p (result
, temp
))
6857 /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
6858 : try to simplify the AND/OR of the ssa variable VAR with the comparison
6859 specified by (OP2A CODE2 OP2B) from match.pd. Return NULL_EXPR if we can't
6860 simplify this to a single expression. As we are going to lower the cost
6861 of building SSA names / gimple stmts significantly, we need to allocate
6862 them ont the stack. This will cause the code to be a bit ugly. */
6865 maybe_fold_comparisons_from_match_pd (tree type
, enum tree_code code
,
6866 enum tree_code code1
,
6867 tree op1a
, tree op1b
,
6868 enum tree_code code2
, tree op2a
,
6871 /* Allocate gimple stmt1 on the stack. */
6873 = (gassign
*) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN
, 3));
6874 gimple_init (stmt1
, GIMPLE_ASSIGN
, 3);
6875 gimple_assign_set_rhs_code (stmt1
, code1
);
6876 gimple_assign_set_rhs1 (stmt1
, op1a
);
6877 gimple_assign_set_rhs2 (stmt1
, op1b
);
6879 /* Allocate gimple stmt2 on the stack. */
6881 = (gassign
*) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN
, 3));
6882 gimple_init (stmt2
, GIMPLE_ASSIGN
, 3);
6883 gimple_assign_set_rhs_code (stmt2
, code2
);
6884 gimple_assign_set_rhs1 (stmt2
, op2a
);
6885 gimple_assign_set_rhs2 (stmt2
, op2b
);
6887 /* Allocate SSA names(lhs1) on the stack. */
6888 tree lhs1
= (tree
)XALLOCA (tree_ssa_name
);
6889 memset (lhs1
, 0, sizeof (tree_ssa_name
));
6890 TREE_SET_CODE (lhs1
, SSA_NAME
);
6891 TREE_TYPE (lhs1
) = type
;
6892 init_ssa_name_imm_use (lhs1
);
6894 /* Allocate SSA names(lhs2) on the stack. */
6895 tree lhs2
= (tree
)XALLOCA (tree_ssa_name
);
6896 memset (lhs2
, 0, sizeof (tree_ssa_name
));
6897 TREE_SET_CODE (lhs2
, SSA_NAME
);
6898 TREE_TYPE (lhs2
) = type
;
6899 init_ssa_name_imm_use (lhs2
);
6901 gimple_assign_set_lhs (stmt1
, lhs1
);
6902 gimple_assign_set_lhs (stmt2
, lhs2
);
6904 gimple_match_op
op (gimple_match_cond::UNCOND
, code
,
6905 type
, gimple_assign_lhs (stmt1
),
6906 gimple_assign_lhs (stmt2
));
6907 if (op
.resimplify (NULL
, follow_all_ssa_edges
))
6909 if (gimple_simplified_result_is_gimple_val (&op
))
6911 tree res
= op
.ops
[0];
6913 return build2 (code1
, type
, op1a
, op1b
);
6914 else if (res
== lhs2
)
6915 return build2 (code2
, type
, op2a
, op2b
);
6919 else if (op
.code
.is_tree_code ()
6920 && TREE_CODE_CLASS ((tree_code
)op
.code
) == tcc_comparison
)
6922 tree op0
= op
.ops
[0];
6923 tree op1
= op
.ops
[1];
6924 if (op0
== lhs1
|| op0
== lhs2
|| op1
== lhs1
|| op1
== lhs2
)
6925 return NULL_TREE
; /* not simple */
6927 return build2 ((enum tree_code
)op
.code
, op
.type
, op0
, op1
);
6934 /* Try to simplify the AND of two comparisons, specified by
6935 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6936 If this can be simplified to a single expression (without requiring
6937 introducing more SSA variables to hold intermediate values),
6938 return the resulting tree. Otherwise return NULL_TREE.
6939 If the result expression is non-null, it has boolean type. */
6942 maybe_fold_and_comparisons (tree type
,
6943 enum tree_code code1
, tree op1a
, tree op1b
,
6944 enum tree_code code2
, tree op2a
, tree op2b
)
6946 if (tree t
= and_comparisons_1 (type
, code1
, op1a
, op1b
, code2
, op2a
, op2b
))
6949 if (tree t
= and_comparisons_1 (type
, code2
, op2a
, op2b
, code1
, op1a
, op1b
))
6952 if (tree t
= maybe_fold_comparisons_from_match_pd (type
, BIT_AND_EXPR
, code1
,
6953 op1a
, op1b
, code2
, op2a
,
6960 /* Helper function for or_comparisons_1: try to simplify the OR of the
6961 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6962 If INVERT is true, invert the value of VAR before doing the OR.
6963 Return NULL_EXPR if we can't simplify this to a single expression. */
6966 or_var_with_comparison (tree type
, tree var
, bool invert
,
6967 enum tree_code code2
, tree op2a
, tree op2b
)
6970 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
6972 /* We can only deal with variables whose definitions are assignments. */
6973 if (!is_gimple_assign (stmt
))
6976 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6977 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6978 Then we only have to consider the simpler non-inverted cases. */
6980 t
= and_var_with_comparison_1 (type
, stmt
,
6981 invert_tree_comparison (code2
, false),
6984 t
= or_var_with_comparison_1 (type
, stmt
, code2
, op2a
, op2b
);
6985 return canonicalize_bool (t
, invert
);
6988 /* Try to simplify the OR of the ssa variable defined by the assignment
6989 STMT with the comparison specified by (OP2A CODE2 OP2B).
6990 Return NULL_EXPR if we can't simplify this to a single expression. */
6993 or_var_with_comparison_1 (tree type
, gimple
*stmt
,
6994 enum tree_code code2
, tree op2a
, tree op2b
)
6996 tree var
= gimple_assign_lhs (stmt
);
6997 tree true_test_var
= NULL_TREE
;
6998 tree false_test_var
= NULL_TREE
;
6999 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
7001 /* Check for identities like (var OR (var != 0)) => true . */
7002 if (TREE_CODE (op2a
) == SSA_NAME
7003 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
7005 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
7006 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
7008 true_test_var
= op2a
;
7009 if (var
== true_test_var
)
7012 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
7013 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
7015 false_test_var
= op2a
;
7016 if (var
== false_test_var
)
7017 return boolean_true_node
;
7021 /* If the definition is a comparison, recurse on it. */
7022 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
7024 tree t
= or_comparisons_1 (type
, innercode
,
7025 gimple_assign_rhs1 (stmt
),
7026 gimple_assign_rhs2 (stmt
),
7034 /* If the definition is an AND or OR expression, we may be able to
7035 simplify by reassociating. */
7036 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
7037 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
7039 tree inner1
= gimple_assign_rhs1 (stmt
);
7040 tree inner2
= gimple_assign_rhs2 (stmt
);
7043 tree partial
= NULL_TREE
;
7044 bool is_or
= (innercode
== BIT_IOR_EXPR
);
7046 /* Check for boolean identities that don't require recursive examination
7048 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7049 inner1 OR (inner1 AND inner2) => inner1
7050 !inner1 OR (inner1 OR inner2) => true
7051 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
7053 if (inner1
== true_test_var
)
7054 return (is_or
? var
: inner1
);
7055 else if (inner2
== true_test_var
)
7056 return (is_or
? var
: inner2
);
7057 else if (inner1
== false_test_var
)
7060 : or_var_with_comparison (type
, inner2
, false, code2
, op2a
,
7062 else if (inner2
== false_test_var
)
7065 : or_var_with_comparison (type
, inner1
, false, code2
, op2a
,
7068 /* Next, redistribute/reassociate the OR across the inner tests.
7069 Compute the first partial result, (inner1 OR (op2a code op2b)) */
7070 if (TREE_CODE (inner1
) == SSA_NAME
7071 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
7072 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
7073 && (t
= maybe_fold_or_comparisons (type
, gimple_assign_rhs_code (s
),
7074 gimple_assign_rhs1 (s
),
7075 gimple_assign_rhs2 (s
),
7076 code2
, op2a
, op2b
)))
7078 /* Handle the OR case, where we are reassociating:
7079 (inner1 OR inner2) OR (op2a code2 op2b)
7081 If the partial result t is a constant, we win. Otherwise
7082 continue on to try reassociating with the other inner test. */
7085 if (integer_onep (t
))
7086 return boolean_true_node
;
7087 else if (integer_zerop (t
))
7091 /* Handle the AND case, where we are redistributing:
7092 (inner1 AND inner2) OR (op2a code2 op2b)
7093 => (t AND (inner2 OR (op2a code op2b))) */
7094 else if (integer_zerop (t
))
7095 return boolean_false_node
;
7097 /* Save partial result for later. */
7101 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
7102 if (TREE_CODE (inner2
) == SSA_NAME
7103 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
7104 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
7105 && (t
= maybe_fold_or_comparisons (type
, gimple_assign_rhs_code (s
),
7106 gimple_assign_rhs1 (s
),
7107 gimple_assign_rhs2 (s
),
7108 code2
, op2a
, op2b
)))
7110 /* Handle the OR case, where we are reassociating:
7111 (inner1 OR inner2) OR (op2a code2 op2b)
7113 => (t OR partial) */
7116 if (integer_zerop (t
))
7118 else if (integer_onep (t
))
7119 return boolean_true_node
;
7120 /* If both are the same, we can apply the identity
7122 else if (partial
&& same_bool_result_p (t
, partial
))
7126 /* Handle the AND case, where we are redistributing:
7127 (inner1 AND inner2) OR (op2a code2 op2b)
7128 => (t AND (inner1 OR (op2a code2 op2b)))
7129 => (t AND partial) */
7132 if (integer_zerop (t
))
7133 return boolean_false_node
;
7136 /* We already got a simplification for the other
7137 operand to the redistributed AND expression. The
7138 interesting case is when at least one is true.
7139 Or, if both are the same, we can apply the identity
7141 if (integer_onep (partial
))
7143 else if (integer_onep (t
))
7145 else if (same_bool_result_p (t
, partial
))
7154 /* Try to simplify the OR of two comparisons defined by
7155 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7156 If this can be done without constructing an intermediate value,
7157 return the resulting tree; otherwise NULL_TREE is returned.
7158 This function is deliberately asymmetric as it recurses on SSA_DEFs
7159 in the first comparison but not the second. */
7162 or_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
7163 enum tree_code code2
, tree op2a
, tree op2b
)
7165 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
7167 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7168 if (operand_equal_p (op1a
, op2a
, 0)
7169 && operand_equal_p (op1b
, op2b
, 0))
7171 /* Result will be either NULL_TREE, or a combined comparison. */
7172 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
7173 TRUTH_ORIF_EXPR
, code1
, code2
,
7174 truth_type
, op1a
, op1b
);
7179 /* Likewise the swapped case of the above. */
7180 if (operand_equal_p (op1a
, op2b
, 0)
7181 && operand_equal_p (op1b
, op2a
, 0))
7183 /* Result will be either NULL_TREE, or a combined comparison. */
7184 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
7185 TRUTH_ORIF_EXPR
, code1
,
7186 swap_tree_comparison (code2
),
7187 truth_type
, op1a
, op1b
);
7192 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7193 NAME's definition is a truth value. See if there are any simplifications
7194 that can be done against the NAME's definition. */
7195 if (TREE_CODE (op1a
) == SSA_NAME
7196 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
7197 && (integer_zerop (op1b
) || integer_onep (op1b
)))
7199 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
7200 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
7201 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
7202 switch (gimple_code (stmt
))
7205 /* Try to simplify by copy-propagating the definition. */
7206 return or_var_with_comparison (type
, op1a
, invert
, code2
, op2a
,
7210 /* If every argument to the PHI produces the same result when
7211 ORed with the second comparison, we win.
7212 Do not do this unless the type is bool since we need a bool
7213 result here anyway. */
7214 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
7216 tree result
= NULL_TREE
;
7218 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
7220 tree arg
= gimple_phi_arg_def (stmt
, i
);
7222 /* If this PHI has itself as an argument, ignore it.
7223 If all the other args produce the same result,
7225 if (arg
== gimple_phi_result (stmt
))
7227 else if (TREE_CODE (arg
) == INTEGER_CST
)
7229 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
7232 result
= boolean_true_node
;
7233 else if (!integer_onep (result
))
7237 result
= fold_build2 (code2
, boolean_type_node
,
7239 else if (!same_bool_comparison_p (result
,
7243 else if (TREE_CODE (arg
) == SSA_NAME
7244 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
7247 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
7248 /* In simple cases we can look through PHI nodes,
7249 but we have to be careful with loops.
7251 if (! dom_info_available_p (CDI_DOMINATORS
)
7252 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
7253 || dominated_by_p (CDI_DOMINATORS
,
7254 gimple_bb (def_stmt
),
7257 temp
= or_var_with_comparison (type
, arg
, invert
, code2
,
7263 else if (!same_bool_result_p (result
, temp
))
7279 /* Try to simplify the OR of two comparisons, specified by
7280 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
7281 If this can be simplified to a single expression (without requiring
7282 introducing more SSA variables to hold intermediate values),
7283 return the resulting tree. Otherwise return NULL_TREE.
7284 If the result expression is non-null, it has boolean type. */
7287 maybe_fold_or_comparisons (tree type
,
7288 enum tree_code code1
, tree op1a
, tree op1b
,
7289 enum tree_code code2
, tree op2a
, tree op2b
)
7291 if (tree t
= or_comparisons_1 (type
, code1
, op1a
, op1b
, code2
, op2a
, op2b
))
7294 if (tree t
= or_comparisons_1 (type
, code2
, op2a
, op2b
, code1
, op1a
, op1b
))
7297 if (tree t
= maybe_fold_comparisons_from_match_pd (type
, BIT_IOR_EXPR
, code1
,
7298 op1a
, op1b
, code2
, op2a
,
7305 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7307 Either NULL_TREE, a simplified but non-constant or a constant
7310 ??? This should go into a gimple-fold-inline.h file to be eventually
7311 privatized with the single valueize function used in the various TUs
7312 to avoid the indirect function call overhead. */
7315 gimple_fold_stmt_to_constant_1 (gimple
*stmt
, tree (*valueize
) (tree
),
7316 tree (*gvalueize
) (tree
))
7318 gimple_match_op res_op
;
7319 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7320 edges if there are intermediate VARYING defs. For this reason
7321 do not follow SSA edges here even though SCCVN can technically
7322 just deal fine with that. */
7323 if (gimple_simplify (stmt
, &res_op
, NULL
, gvalueize
, valueize
))
7325 tree res
= NULL_TREE
;
7326 if (gimple_simplified_result_is_gimple_val (&res_op
))
7327 res
= res_op
.ops
[0];
7328 else if (mprts_hook
)
7329 res
= mprts_hook (&res_op
);
7332 if (dump_file
&& dump_flags
& TDF_DETAILS
)
7334 fprintf (dump_file
, "Match-and-simplified ");
7335 print_gimple_expr (dump_file
, stmt
, 0, TDF_SLIM
);
7336 fprintf (dump_file
, " to ");
7337 print_generic_expr (dump_file
, res
);
7338 fprintf (dump_file
, "\n");
7344 location_t loc
= gimple_location (stmt
);
7345 switch (gimple_code (stmt
))
7349 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
7351 switch (get_gimple_rhs_class (subcode
))
7353 case GIMPLE_SINGLE_RHS
:
7355 tree rhs
= gimple_assign_rhs1 (stmt
);
7356 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
7358 if (TREE_CODE (rhs
) == SSA_NAME
)
7360 /* If the RHS is an SSA_NAME, return its known constant value,
7362 return (*valueize
) (rhs
);
7364 /* Handle propagating invariant addresses into address
7366 else if (TREE_CODE (rhs
) == ADDR_EXPR
7367 && !is_gimple_min_invariant (rhs
))
7369 poly_int64 offset
= 0;
7371 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
7375 && (CONSTANT_CLASS_P (base
)
7376 || decl_address_invariant_p (base
)))
7377 return build_invariant_address (TREE_TYPE (rhs
),
7380 else if (TREE_CODE (rhs
) == CONSTRUCTOR
7381 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
7382 && known_eq (CONSTRUCTOR_NELTS (rhs
),
7383 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
7388 nelts
= CONSTRUCTOR_NELTS (rhs
);
7389 tree_vector_builder
vec (TREE_TYPE (rhs
), nelts
, 1);
7390 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
7392 val
= (*valueize
) (val
);
7393 if (TREE_CODE (val
) == INTEGER_CST
7394 || TREE_CODE (val
) == REAL_CST
7395 || TREE_CODE (val
) == FIXED_CST
)
7396 vec
.quick_push (val
);
7401 return vec
.build ();
7403 if (subcode
== OBJ_TYPE_REF
)
7405 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
7406 /* If callee is constant, we can fold away the wrapper. */
7407 if (is_gimple_min_invariant (val
))
7411 if (kind
== tcc_reference
)
7413 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
7414 || TREE_CODE (rhs
) == REALPART_EXPR
7415 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
7416 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
7418 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
7419 return fold_unary_loc (EXPR_LOCATION (rhs
),
7421 TREE_TYPE (rhs
), val
);
7423 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
7424 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
7426 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
7427 return fold_ternary_loc (EXPR_LOCATION (rhs
),
7429 TREE_TYPE (rhs
), val
,
7430 TREE_OPERAND (rhs
, 1),
7431 TREE_OPERAND (rhs
, 2));
7433 else if (TREE_CODE (rhs
) == MEM_REF
7434 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
7436 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
7437 if (TREE_CODE (val
) == ADDR_EXPR
7438 && is_gimple_min_invariant (val
))
7440 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
7442 TREE_OPERAND (rhs
, 1));
7447 return fold_const_aggregate_ref_1 (rhs
, valueize
);
7449 else if (kind
== tcc_declaration
)
7450 return get_symbol_constant_value (rhs
);
7454 case GIMPLE_UNARY_RHS
:
7457 case GIMPLE_BINARY_RHS
:
7458 /* Translate &x + CST into an invariant form suitable for
7459 further propagation. */
7460 if (subcode
== POINTER_PLUS_EXPR
)
7462 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
7463 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
7464 if (TREE_CODE (op0
) == ADDR_EXPR
7465 && TREE_CODE (op1
) == INTEGER_CST
)
7467 tree off
= fold_convert (ptr_type_node
, op1
);
7469 (loc
, ADDR_EXPR
, TREE_TYPE (op0
),
7470 fold_build2 (MEM_REF
,
7471 TREE_TYPE (TREE_TYPE (op0
)),
7472 unshare_expr (op0
), off
));
7475 /* Canonicalize bool != 0 and bool == 0 appearing after
7476 valueization. While gimple_simplify handles this
7477 it can get confused by the ~X == 1 -> X == 0 transform
7478 which we cant reduce to a SSA name or a constant
7479 (and we have no way to tell gimple_simplify to not
7480 consider those transforms in the first place). */
7481 else if (subcode
== EQ_EXPR
7482 || subcode
== NE_EXPR
)
7484 tree lhs
= gimple_assign_lhs (stmt
);
7485 tree op0
= gimple_assign_rhs1 (stmt
);
7486 if (useless_type_conversion_p (TREE_TYPE (lhs
),
7489 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
7490 op0
= (*valueize
) (op0
);
7491 if (TREE_CODE (op0
) == INTEGER_CST
)
7492 std::swap (op0
, op1
);
7493 if (TREE_CODE (op1
) == INTEGER_CST
7494 && ((subcode
== NE_EXPR
&& integer_zerop (op1
))
7495 || (subcode
== EQ_EXPR
&& integer_onep (op1
))))
7501 case GIMPLE_TERNARY_RHS
:
7503 /* Handle ternary operators that can appear in GIMPLE form. */
7504 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
7505 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
7506 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
7507 return fold_ternary_loc (loc
, subcode
,
7508 gimple_expr_type (stmt
), op0
, op1
, op2
);
7519 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
7521 if (gimple_call_internal_p (stmt
))
7523 enum tree_code subcode
= ERROR_MARK
;
7524 switch (gimple_call_internal_fn (stmt
))
7526 case IFN_UBSAN_CHECK_ADD
:
7527 subcode
= PLUS_EXPR
;
7529 case IFN_UBSAN_CHECK_SUB
:
7530 subcode
= MINUS_EXPR
;
7532 case IFN_UBSAN_CHECK_MUL
:
7533 subcode
= MULT_EXPR
;
7535 case IFN_BUILTIN_EXPECT
:
7537 tree arg0
= gimple_call_arg (stmt
, 0);
7538 tree op0
= (*valueize
) (arg0
);
7539 if (TREE_CODE (op0
) == INTEGER_CST
)
7546 tree arg0
= gimple_call_arg (stmt
, 0);
7547 tree arg1
= gimple_call_arg (stmt
, 1);
7548 tree op0
= (*valueize
) (arg0
);
7549 tree op1
= (*valueize
) (arg1
);
7551 if (TREE_CODE (op0
) != INTEGER_CST
7552 || TREE_CODE (op1
) != INTEGER_CST
)
7557 /* x * 0 = 0 * x = 0 without overflow. */
7558 if (integer_zerop (op0
) || integer_zerop (op1
))
7559 return build_zero_cst (TREE_TYPE (arg0
));
7562 /* y - y = 0 without overflow. */
7563 if (operand_equal_p (op0
, op1
, 0))
7564 return build_zero_cst (TREE_TYPE (arg0
));
7571 = fold_binary_loc (loc
, subcode
, TREE_TYPE (arg0
), op0
, op1
);
7573 && TREE_CODE (res
) == INTEGER_CST
7574 && !TREE_OVERFLOW (res
))
7579 fn
= (*valueize
) (gimple_call_fn (stmt
));
7580 if (TREE_CODE (fn
) == ADDR_EXPR
7581 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
7582 && fndecl_built_in_p (TREE_OPERAND (fn
, 0))
7583 && gimple_builtin_call_types_compatible_p (stmt
,
7584 TREE_OPERAND (fn
, 0)))
7586 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
7589 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
7590 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
7591 retval
= fold_builtin_call_array (loc
,
7592 gimple_call_return_type (call_stmt
),
7593 fn
, gimple_call_num_args (stmt
), args
);
7596 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7597 STRIP_NOPS (retval
);
7598 retval
= fold_convert (gimple_call_return_type (call_stmt
),
7611 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7612 Returns NULL_TREE if folding to a constant is not possible, otherwise
7613 returns a constant according to is_gimple_min_invariant. */
7616 gimple_fold_stmt_to_constant (gimple
*stmt
, tree (*valueize
) (tree
))
7618 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
7619 if (res
&& is_gimple_min_invariant (res
))
7625 /* The following set of functions are supposed to fold references using
7626 their constant initializers. */
7628 /* See if we can find constructor defining value of BASE.
7629 When we know the consructor with constant offset (such as
7630 base is array[40] and we do know constructor of array), then
7631 BIT_OFFSET is adjusted accordingly.
7633 As a special case, return error_mark_node when constructor
7634 is not explicitly available, but it is known to be zero
7635 such as 'static const int a;'. */
7637 get_base_constructor (tree base
, poly_int64_pod
*bit_offset
,
7638 tree (*valueize
)(tree
))
7640 poly_int64 bit_offset2
, size
, max_size
;
7643 if (TREE_CODE (base
) == MEM_REF
)
7645 poly_offset_int boff
= *bit_offset
+ mem_ref_offset (base
) * BITS_PER_UNIT
;
7646 if (!boff
.to_shwi (bit_offset
))
7650 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
7651 base
= valueize (TREE_OPERAND (base
, 0));
7652 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
7654 base
= TREE_OPERAND (base
, 0);
7657 && TREE_CODE (base
) == SSA_NAME
)
7658 base
= valueize (base
);
7660 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7661 DECL_INITIAL. If BASE is a nested reference into another
7662 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7663 the inner reference. */
7664 switch (TREE_CODE (base
))
7669 tree init
= ctor_for_folding (base
);
7671 /* Our semantic is exact opposite of ctor_for_folding;
7672 NULL means unknown, while error_mark_node is 0. */
7673 if (init
== error_mark_node
)
7676 return error_mark_node
;
7680 case VIEW_CONVERT_EXPR
:
7681 return get_base_constructor (TREE_OPERAND (base
, 0),
7682 bit_offset
, valueize
);
7686 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
,
7688 if (!known_size_p (max_size
) || maybe_ne (size
, max_size
))
7690 *bit_offset
+= bit_offset2
;
7691 return get_base_constructor (base
, bit_offset
, valueize
);
7697 if (CONSTANT_CLASS_P (base
))
7704 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
7705 to the memory at bit OFFSET. When non-null, TYPE is the expected
7706 type of the reference; otherwise the type of the referenced element
7707 is used instead. When SIZE is zero, attempt to fold a reference to
7708 the entire element which OFFSET refers to. Increment *SUBOFF by
7709 the bit offset of the accessed element. */
7712 fold_array_ctor_reference (tree type
, tree ctor
,
7713 unsigned HOST_WIDE_INT offset
,
7714 unsigned HOST_WIDE_INT size
,
7716 unsigned HOST_WIDE_INT
*suboff
)
7718 offset_int low_bound
;
7719 offset_int elt_size
;
7720 offset_int access_index
;
7721 tree domain_type
= NULL_TREE
;
7722 HOST_WIDE_INT inner_offset
;
7724 /* Compute low bound and elt size. */
7725 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
7726 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
7727 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
7729 /* Static constructors for variably sized objects make no sense. */
7730 if (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) != INTEGER_CST
)
7732 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
7736 /* Static constructors for variably sized objects make no sense. */
7737 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
)))) != INTEGER_CST
)
7739 elt_size
= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
7741 /* When TYPE is non-null, verify that it specifies a constant-sized
7742 access of a multiple of the array element size. Avoid division
7743 by zero below when ELT_SIZE is zero, such as with the result of
7744 an initializer for a zero-length array or an empty struct. */
7747 && (!TYPE_SIZE_UNIT (type
)
7748 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
)))
7751 /* Compute the array index we look for. */
7752 access_index
= wi::udiv_trunc (offset_int (offset
/ BITS_PER_UNIT
),
7754 access_index
+= low_bound
;
7756 /* And offset within the access. */
7757 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
7759 unsigned HOST_WIDE_INT elt_sz
= elt_size
.to_uhwi ();
7760 if (size
> elt_sz
* BITS_PER_UNIT
)
7762 /* native_encode_expr constraints. */
7763 if (size
> MAX_BITSIZE_MODE_ANY_MODE
7764 || size
% BITS_PER_UNIT
!= 0
7765 || inner_offset
% BITS_PER_UNIT
!= 0
7766 || elt_sz
> MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
)
7770 tree val
= get_array_ctor_element_at_index (ctor
, access_index
,
7772 if (!val
&& ctor_idx
>= CONSTRUCTOR_NELTS (ctor
))
7773 return build_zero_cst (type
);
7775 /* native-encode adjacent ctor elements. */
7776 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
7777 unsigned bufoff
= 0;
7778 offset_int index
= 0;
7779 offset_int max_index
= access_index
;
7780 constructor_elt
*elt
= CONSTRUCTOR_ELT (ctor
, ctor_idx
);
7782 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
7783 else if (!CONSTANT_CLASS_P (val
))
7787 else if (TREE_CODE (elt
->index
) == RANGE_EXPR
)
7789 index
= wi::to_offset (TREE_OPERAND (elt
->index
, 0));
7790 max_index
= wi::to_offset (TREE_OPERAND (elt
->index
, 1));
7793 index
= max_index
= wi::to_offset (elt
->index
);
7794 index
= wi::umax (index
, access_index
);
7797 if (bufoff
+ elt_sz
> sizeof (buf
))
7798 elt_sz
= sizeof (buf
) - bufoff
;
7799 int len
= native_encode_expr (val
, buf
+ bufoff
, elt_sz
,
7800 inner_offset
/ BITS_PER_UNIT
);
7801 if (len
!= (int) elt_sz
- inner_offset
/ BITS_PER_UNIT
)
7807 if (wi::cmpu (access_index
, index
) == 0)
7809 else if (wi::cmpu (access_index
, max_index
) > 0)
7812 if (ctor_idx
>= CONSTRUCTOR_NELTS (ctor
))
7814 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
7819 elt
= CONSTRUCTOR_ELT (ctor
, ctor_idx
);
7821 max_index
= access_index
;
7824 else if (TREE_CODE (elt
->index
) == RANGE_EXPR
)
7826 index
= wi::to_offset (TREE_OPERAND (elt
->index
, 0));
7827 max_index
= wi::to_offset (TREE_OPERAND (elt
->index
, 1));
7830 index
= max_index
= wi::to_offset (elt
->index
);
7831 index
= wi::umax (index
, access_index
);
7832 if (wi::cmpu (access_index
, index
) == 0)
7835 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
7839 while (bufoff
< size
/ BITS_PER_UNIT
);
7841 return native_interpret_expr (type
, buf
, size
/ BITS_PER_UNIT
);
7844 if (tree val
= get_array_ctor_element_at_index (ctor
, access_index
))
7846 if (!size
&& TREE_CODE (val
) != CONSTRUCTOR
)
7848 /* For the final reference to the entire accessed element
7849 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
7850 may be null) in favor of the type of the element, and set
7851 SIZE to the size of the accessed element. */
7853 type
= TREE_TYPE (val
);
7854 size
= elt_sz
* BITS_PER_UNIT
;
7856 else if (size
&& access_index
< CONSTRUCTOR_NELTS (ctor
) - 1
7857 && TREE_CODE (val
) == CONSTRUCTOR
7858 && (elt_sz
* BITS_PER_UNIT
- inner_offset
) < size
)
7859 /* If this isn't the last element in the CTOR and a CTOR itself
7860 and it does not cover the whole object we are requesting give up
7861 since we're not set up for combining from multiple CTORs. */
7864 *suboff
+= access_index
.to_uhwi () * elt_sz
* BITS_PER_UNIT
;
7865 return fold_ctor_reference (type
, val
, inner_offset
, size
, from_decl
,
7869 /* Memory not explicitly mentioned in constructor is 0 (or
7870 the reference is out of range). */
7871 return type
? build_zero_cst (type
) : NULL_TREE
;
7874 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
7875 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
7876 is the expected type of the reference; otherwise the type of
7877 the referenced member is used instead. When SIZE is zero,
7878 attempt to fold a reference to the entire member which OFFSET
7879 refers to; in this case. Increment *SUBOFF by the bit offset
7880 of the accessed member. */
7883 fold_nonarray_ctor_reference (tree type
, tree ctor
,
7884 unsigned HOST_WIDE_INT offset
,
7885 unsigned HOST_WIDE_INT size
,
7887 unsigned HOST_WIDE_INT
*suboff
)
7889 unsigned HOST_WIDE_INT cnt
;
7892 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
7895 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
7896 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
7897 tree field_size
= DECL_SIZE (cfield
);
7901 /* Determine the size of the flexible array member from
7902 the size of the initializer provided for it. */
7903 field_size
= TYPE_SIZE (TREE_TYPE (cval
));
7906 /* Variable sized objects in static constructors makes no sense,
7907 but field_size can be NULL for flexible array members. */
7908 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
7909 && TREE_CODE (byte_offset
) == INTEGER_CST
7910 && (field_size
!= NULL_TREE
7911 ? TREE_CODE (field_size
) == INTEGER_CST
7912 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
7914 /* Compute bit offset of the field. */
7915 offset_int bitoffset
7916 = (wi::to_offset (field_offset
)
7917 + (wi::to_offset (byte_offset
) << LOG2_BITS_PER_UNIT
));
7918 /* Compute bit offset where the field ends. */
7919 offset_int bitoffset_end
;
7920 if (field_size
!= NULL_TREE
)
7921 bitoffset_end
= bitoffset
+ wi::to_offset (field_size
);
7925 /* Compute the bit offset of the end of the desired access.
7926 As a special case, if the size of the desired access is
7927 zero, assume the access is to the entire field (and let
7928 the caller make any necessary adjustments by storing
7929 the actual bounds of the field in FIELDBOUNDS). */
7930 offset_int access_end
= offset_int (offset
);
7934 access_end
= bitoffset_end
;
7936 /* Is there any overlap between the desired access at
7937 [OFFSET, OFFSET+SIZE) and the offset of the field within
7938 the object at [BITOFFSET, BITOFFSET_END)? */
7939 if (wi::cmps (access_end
, bitoffset
) > 0
7940 && (field_size
== NULL_TREE
7941 || wi::lts_p (offset
, bitoffset_end
)))
7943 *suboff
+= bitoffset
.to_uhwi ();
7945 if (!size
&& TREE_CODE (cval
) != CONSTRUCTOR
)
7947 /* For the final reference to the entire accessed member
7948 (SIZE is zero), reset OFFSET, disegard TYPE (which may
7949 be null) in favor of the type of the member, and set
7950 SIZE to the size of the accessed member. */
7951 offset
= bitoffset
.to_uhwi ();
7952 type
= TREE_TYPE (cval
);
7953 size
= (bitoffset_end
- bitoffset
).to_uhwi ();
7956 /* We do have overlap. Now see if the field is large enough
7957 to cover the access. Give up for accesses that extend
7958 beyond the end of the object or that span multiple fields. */
7959 if (wi::cmps (access_end
, bitoffset_end
) > 0)
7961 if (offset
< bitoffset
)
7964 offset_int inner_offset
= offset_int (offset
) - bitoffset
;
7965 return fold_ctor_reference (type
, cval
,
7966 inner_offset
.to_uhwi (), size
,
7974 return build_zero_cst (type
);
7977 /* CTOR is value initializing memory. Fold a reference of TYPE and
7978 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
7979 is zero, attempt to fold a reference to the entire subobject
7980 which OFFSET refers to. This is used when folding accesses to
7981 string members of aggregates. When non-null, set *SUBOFF to
7982 the bit offset of the accessed subobject. */
7985 fold_ctor_reference (tree type
, tree ctor
, const poly_uint64
&poly_offset
,
7986 const poly_uint64
&poly_size
, tree from_decl
,
7987 unsigned HOST_WIDE_INT
*suboff
/* = NULL */)
/* If CTOR already has a type compatible with TYPE and the access starts
   at bit offset zero, fold directly to the canonicalized value.  */
7991 /* We found the field with exact match. */
7993 && useless_type_conversion_p (type
, TREE_TYPE (ctor
))
7994 && known_eq (poly_offset
, 0U))
7995 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
7997 /* The remaining optimizations need a constant size and offset. */
7998 unsigned HOST_WIDE_INT size
, offset
;
7999 if (!poly_size
.is_constant (&size
) || !poly_offset
.is_constant (&offset
))
/* A non-aggregate CTOR of the same bit size as TYPE can be
   view-converted rather than walked into.  */
8002 /* We are at the end of walk, see if we can view convert the
8004 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
8005 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
8006 && !compare_tree_int (TYPE_SIZE (type
), size
)
8007 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor
)), size
))
8009 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
8012 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
8014 STRIP_USELESS_TYPE_CONVERSION (ret
);
8018 /* For constants and byte-aligned/sized reads try to go through
8019 native_encode/interpret. */
8020 if (CONSTANT_CLASS_P (ctor
)
8021 && BITS_PER_UNIT
== 8
8022 && offset
% BITS_PER_UNIT
== 0
8023 && offset
/ BITS_PER_UNIT
<= INT_MAX
8024 && size
% BITS_PER_UNIT
== 0
8025 && size
<= MAX_BITSIZE_MODE_ANY_MODE
8026 && can_native_interpret_type_p (type
))
/* Encode the constant's bytes into BUF and reinterpret them as TYPE.  */
8028 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
8029 int len
= native_encode_expr (ctor
, buf
, size
/ BITS_PER_UNIT
,
8030 offset
/ BITS_PER_UNIT
);
8032 return native_interpret_expr (type
, buf
, len
);
/* Aggregate CONSTRUCTORs are dispatched to the array/vector walker or
   the record walker, depending on the constructor's type.  */
8034 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
8036 unsigned HOST_WIDE_INT dummy
= 0;
8041 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
8042 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
8043 ret
= fold_array_ctor_reference (type
, ctor
, offset
, size
,
8046 ret
= fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
8049 /* Fall back to native_encode_initializer. Needs to be done
8050 only in the outermost fold_ctor_reference call (because it itself
8051 recurses into CONSTRUCTORs) and doesn't update suboff. */
8052 if (ret
== NULL_TREE
8054 && BITS_PER_UNIT
== 8
8055 && offset
% BITS_PER_UNIT
== 0
8056 && offset
/ BITS_PER_UNIT
<= INT_MAX
8057 && size
% BITS_PER_UNIT
== 0
8058 && size
<= MAX_BITSIZE_MODE_ANY_MODE
8059 && can_native_interpret_type_p (type
))
8061 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
8062 int len
= native_encode_initializer (ctor
, buf
, size
/ BITS_PER_UNIT
,
8063 offset
/ BITS_PER_UNIT
);
8065 return native_interpret_expr (type
, buf
, len
);
8074 /* Return the tree representing the element referenced by T if T is an
8075 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
8076 names using VALUEIZE. Return NULL_TREE otherwise. */
8079 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
8081 tree ctor
, idx
, base
;
8082 poly_int64 offset
, size
, max_size
;
/* Never fold away a volatile access.  */
8086 if (TREE_THIS_VOLATILE (t
))
8090 return get_symbol_constant_value (t
);
8092 tem
= fold_read_from_constant_string (t
);
8096 switch (TREE_CODE (t
))
8099 case ARRAY_RANGE_REF
:
8100 /* Constant indexes are handled well by get_base_constructor.
8101 Only special case variable offsets.
8102 FIXME: This code can't handle nested references with variable indexes
8103 (they will be handled only by iteration of ccp). Perhaps we can bring
8104 get_ref_base_and_extent here and make it use a valueize callback. */
8105 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
8107 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
8108 && poly_int_tree_p (idx
))
8110 tree low_bound
, unit_size
;
8112 /* If the resulting bit-offset is constant, track it. */
8113 if ((low_bound
= array_ref_low_bound (t
),
8114 poly_int_tree_p (low_bound
))
8115 && (unit_size
= array_ref_element_size (t
),
8116 tree_fits_uhwi_p (unit_size
)))
/* Bit offset of the element: (idx - low_bound) * unit_size * 8,
   sign-extended to sizetype precision.  */
8118 poly_offset_int woffset
8119 = wi::sext (wi::to_poly_offset (idx
)
8120 - wi::to_poly_offset (low_bound
),
8121 TYPE_PRECISION (sizetype
));
8122 woffset
*= tree_to_uhwi (unit_size
);
8123 woffset
*= BITS_PER_UNIT
;
8124 if (woffset
.to_shwi (&offset
))
8126 base
= TREE_OPERAND (t
, 0);
8127 ctor
= get_base_constructor (base
, &offset
, valueize
);
8128 /* Empty constructor. Always fold to 0. */
8129 if (ctor
== error_mark_node
)
8130 return build_zero_cst (TREE_TYPE (t
));
8131 /* Out of bound array access. Value is undefined,
8133 if (maybe_lt (offset
, 0))
8135 /* We cannot determine ctor. */
8138 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
8139 tree_to_uhwi (unit_size
)
8149 case TARGET_MEM_REF
:
8151 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
, &reverse
);
8152 ctor
= get_base_constructor (base
, &offset
, valueize
);
8154 /* Empty constructor. Always fold to 0. */
8155 if (ctor
== error_mark_node
)
8156 return build_zero_cst (TREE_TYPE (t
));
8157 /* We do not know precise address. */
8158 if (!known_size_p (max_size
) || maybe_ne (max_size
, size
))
8160 /* We cannot determine ctor. */
8164 /* Out of bound array access. Value is undefined, but don't fold. */
8165 if (maybe_lt (offset
, 0))
8168 tem
= fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
, base
);
8172 /* For bit field reads try to read the representative and
8174 if (TREE_CODE (t
) == COMPONENT_REF
8175 && DECL_BIT_FIELD (TREE_OPERAND (t
, 1))
8176 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t
, 1)))
8178 HOST_WIDE_INT csize
, coffset
;
8179 tree field
= TREE_OPERAND (t
, 1);
8180 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
8181 if (INTEGRAL_TYPE_P (TREE_TYPE (repr
))
8182 && size
.is_constant (&csize
)
8183 && offset
.is_constant (&coffset
)
8184 && (coffset
% BITS_PER_UNIT
!= 0
8185 || csize
% BITS_PER_UNIT
!= 0)
8187 && BYTES_BIG_ENDIAN
== WORDS_BIG_ENDIAN
)
/* Bit offset of FIELD within its representative REPR.  */
8189 poly_int64 bitoffset
;
8190 poly_uint64 field_offset
, repr_offset
;
8191 if (poly_int_tree_p (DECL_FIELD_OFFSET (field
), &field_offset
)
8192 && poly_int_tree_p (DECL_FIELD_OFFSET (repr
), &repr_offset
))
8193 bitoffset
= (field_offset
- repr_offset
) * BITS_PER_UNIT
;
8196 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
))
8197 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
8198 HOST_WIDE_INT bitoff
;
8199 int diff
= (TYPE_PRECISION (TREE_TYPE (repr
))
8200 - TYPE_PRECISION (TREE_TYPE (field
)));
8201 if (bitoffset
.is_constant (&bitoff
)
8206 size
= tree_to_uhwi (DECL_SIZE (repr
));
8208 tem
= fold_ctor_reference (TREE_TYPE (repr
), ctor
, offset
,
/* Shift the folded representative value to extract FIELD's bits;
   the shift amount differs between little- and big-endian.  */
8210 if (tem
&& TREE_CODE (tem
) == INTEGER_CST
)
8212 if (!BYTES_BIG_ENDIAN
)
8213 tem
= wide_int_to_tree (TREE_TYPE (field
),
8214 wi::lrshift (wi::to_wide (tem
),
8217 tem
= wide_int_to_tree (TREE_TYPE (field
),
8218 wi::lrshift (wi::to_wide (tem
),
/* Unary references (case labels elided from this view — presumably
   REALPART_EXPR/IMAGPART_EXPR): fold through a constant complex
   operand.  */
8230 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
8231 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
8232 return fold_build1_loc (EXPR_LOCATION (t
),
8233 TREE_CODE (t
), TREE_TYPE (t
), c
);
/* Like fold_const_aggregate_ref_1 but with no SSA-name valueization
   callback.  */
8245 fold_const_aggregate_ref (tree t
)
8247 return fold_const_aggregate_ref_1 (t
, NULL
);
8250 /* Lookup virtual method with index TOKEN in a virtual table V
8252 Set CAN_REFER if non-NULL to false if method
8253 is not referable or if the virtual table is ill-formed (such as rewritten
8254 by non-C++ produced symbol). Otherwise just return NULL in that case. */
8257 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token
,
8259 unsigned HOST_WIDE_INT offset
,
8262 tree vtable
= v
, init
, fn
;
8263 unsigned HOST_WIDE_INT size
;
8264 unsigned HOST_WIDE_INT elt_size
, access_index
;
8270 /* First of all double check we have virtual table. */
8271 if (!VAR_P (v
) || !DECL_VIRTUAL_P (v
))
8273 /* Pass down that we lost track of the target. */
8279 init
= ctor_for_folding (v
);
8281 /* The virtual tables should always be born with constructors
8282 and we always should assume that they are available for
8283 folding. At the moment we do not stream them in all cases,
8284 but it should never happen that ctor seem unreachable. */
8286 if (init
== error_mark_node
)
8288 /* Pass down that we lost track of the target. */
/* Convert the byte OFFSET plus TOKEN slots into a bit offset within
   the vtable array.  */
8293 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
8294 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
8295 offset
*= BITS_PER_UNIT
;
8296 offset
+= token
* size
;
8298 /* Lookup the value in the constructor that is assumed to be array.
8299 This is equivalent to
8300 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8301 offset, size, NULL);
8302 but in constant time. We expect that frontend produced a simple
8303 array without indexed initializers. */
8305 gcc_checking_assert (TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
8306 domain_type
= TYPE_DOMAIN (TREE_TYPE (init
));
8307 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type
)));
8308 elt_size
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init
))));
8310 access_index
= offset
/ BITS_PER_UNIT
/ elt_size
;
8311 gcc_checking_assert (offset
% (elt_size
* BITS_PER_UNIT
) == 0);
8313 /* The C++ FE can now produce indexed fields, and we check if the indexes
8315 if (access_index
< CONSTRUCTOR_NELTS (init
))
8317 fn
= CONSTRUCTOR_ELT (init
, access_index
)->value
;
8318 tree idx
= CONSTRUCTOR_ELT (init
, access_index
)->index
;
8319 gcc_checking_assert (!idx
|| tree_to_uhwi (idx
) == access_index
);
8325 /* For type inconsistent program we may end up looking up virtual method
8326 in virtual table that does not contain TOKEN entries. We may overrun
8327 the virtual table and pick up a constant or RTTI info pointer.
8328 In any case the call is undefined. */
8330 || (TREE_CODE (fn
) != ADDR_EXPR
&& TREE_CODE (fn
) != FDESC_EXPR
)
8331 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
8332 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
8335 fn
= TREE_OPERAND (fn
, 0);
8337 /* When cgraph node is missing and function is not public, we cannot
8338 devirtualize. This can happen in WHOPR when the actual method
8339 ends up in other partition, because we found devirtualization
8340 possibility too late. */
8341 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
8352 /* Make sure we create a cgraph node for functions we'll reference.
8353 They can be non-existent if the reference comes from an entry
8354 of an external vtable for example. */
8355 cgraph_node::get_create (fn
);
8360 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8361 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8362 KNOWN_BINFO carries the binfo describing the true type of
8363 OBJ_TYPE_REF_OBJECT(REF).
8364 Set CAN_REFER if non-NULL to false if method
8365 is not referable or if the virtual table is ill-formed (such as rewritten
8366 by non-C++ produced symbol). Otherwise just return NULL in that case. */
8369 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
,
8372 unsigned HOST_WIDE_INT offset
;
8375 v
= BINFO_VTABLE (known_binfo
);
8376 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
/* Decompose the vtable pointer expression into the vtable VAR_DECL
   and a byte offset, then defer to the vtable-based lookup.  */
8380 if (!vtable_pointer_value_to_vtable (v
, &v
, &offset
))
8386 return gimple_get_virt_method_for_vtable (token
, v
, offset
, can_refer
);
8389 /* Given a pointer value T, return a simplified version of an
8390 indirection through T, or NULL_TREE if no simplification is
8391 possible. Note that the resulting type may be different from
8392 the type pointed to in the sense that it is still compatible
8393 from the langhooks point of view. */
8396 gimple_fold_indirect_ref (tree t
)
8398 tree ptype
= TREE_TYPE (t
), type
= TREE_TYPE (ptype
);
8403 subtype
= TREE_TYPE (sub
);
8404 if (!POINTER_TYPE_P (subtype
)
8405 || TYPE_REF_CAN_ALIAS_ALL (ptype
))
/* Case 1: the pointer is the address of an object, *&op.  */
8408 if (TREE_CODE (sub
) == ADDR_EXPR
)
8410 tree op
= TREE_OPERAND (sub
, 0);
8411 tree optype
= TREE_TYPE (op
);
8413 if (useless_type_conversion_p (type
, optype
))
8416 /* *(foo *)&fooarray => fooarray[0] */
8417 if (TREE_CODE (optype
) == ARRAY_TYPE
8418 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype
))) == INTEGER_CST
8419 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
8421 tree type_domain
= TYPE_DOMAIN (optype
);
8422 tree min_val
= size_zero_node
;
8423 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
8424 min_val
= TYPE_MIN_VALUE (type_domain
);
8425 if (TREE_CODE (min_val
) == INTEGER_CST
)
8426 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
8428 /* *(foo *)&complexfoo => __real__ complexfoo */
8429 else if (TREE_CODE (optype
) == COMPLEX_TYPE
8430 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
8431 return fold_build1 (REALPART_EXPR
, type
, op
);
8432 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8433 else if (TREE_CODE (optype
) == VECTOR_TYPE
8434 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
8436 tree part_width
= TYPE_SIZE (type
);
8437 tree index
= bitsize_int (0);
8438 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
/* Case 2: the pointer is a POINTER_PLUS with a constant offset.  */
8442 /* *(p + CST) -> ... */
8443 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
8444 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
8446 tree addr
= TREE_OPERAND (sub
, 0);
8447 tree off
= TREE_OPERAND (sub
, 1);
8451 addrtype
= TREE_TYPE (addr
);
8453 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8454 if (TREE_CODE (addr
) == ADDR_EXPR
8455 && TREE_CODE (TREE_TYPE (addrtype
)) == VECTOR_TYPE
8456 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
)))
8457 && tree_fits_uhwi_p (off
))
8459 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (off
);
8460 tree part_width
= TYPE_SIZE (type
);
8461 unsigned HOST_WIDE_INT part_widthi
8462 = tree_to_shwi (part_width
) / BITS_PER_UNIT
;
8463 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
8464 tree index
= bitsize_int (indexi
);
/* Only valid while the element index stays within the vector.  */
8465 if (known_lt (offset
/ part_widthi
,
8466 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype
))))
8467 return fold_build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (addr
, 0),
8471 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8472 if (TREE_CODE (addr
) == ADDR_EXPR
8473 && TREE_CODE (TREE_TYPE (addrtype
)) == COMPLEX_TYPE
8474 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
))))
8476 tree size
= TYPE_SIZE_UNIT (type
);
8477 if (tree_int_cst_equal (size
, off
))
8478 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (addr
, 0));
8481 /* *(p + CST) -> MEM_REF <p, CST>. */
8482 if (TREE_CODE (addr
) != ADDR_EXPR
8483 || DECL_P (TREE_OPERAND (addr
, 0)))
8484 return fold_build2 (MEM_REF
, type
,
8486 wide_int_to_tree (ptype
, wi::to_wide (off
)));
8489 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8490 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
8491 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype
)))) == INTEGER_CST
8492 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (subtype
))))
8495 tree min_val
= size_zero_node
;
/* Simplify the inner indirection first; fall back to building an
   INDIRECT_REF if that fails.  */
8497 sub
= gimple_fold_indirect_ref (sub
);
8499 sub
= build1 (INDIRECT_REF
, TREE_TYPE (subtype
), osub
);
8500 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
8501 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
8502 min_val
= TYPE_MIN_VALUE (type_domain
);
8503 if (TREE_CODE (min_val
) == INTEGER_CST
)
8504 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
8510 /* Return true if CODE is an operation that when operating on signed
8511 integer types involves undefined behavior on overflow and the
8512 operation can be expressed with unsigned arithmetic. */
8515 arith_code_with_undefined_signed_overflow (tree_code code
)
/* POINTER_PLUS_EXPR is included: rewrite_to_defined_overflow below
   turns it into an unsigned PLUS_EXPR.  */
8524 case POINTER_PLUS_EXPR
:
8531 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8532 operation that can be transformed to unsigned arithmetic by converting
8533 its operand, carrying out the operation in the corresponding unsigned
8534 type and converting the result back to the original type.
8536 Returns a sequence of statements that replace STMT and also contain
8537 a modified form of STMT itself. */
8540 rewrite_to_defined_overflow (gimple
*stmt
)
8542 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
8544 fprintf (dump_file
, "rewriting stmt with undefined signed "
8546 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
8549 tree lhs
= gimple_assign_lhs (stmt
);
8550 tree type
= unsigned_type_for (TREE_TYPE (lhs
));
8551 gimple_seq stmts
= NULL
;
/* ABS on a signed value becomes ABSU, which yields the unsigned type.  */
8552 if (gimple_assign_rhs_code (stmt
) == ABS_EXPR
)
8553 gimple_assign_set_rhs_code (stmt
, ABSU_EXPR
)
;
/* Convert every operand of STMT to the unsigned type.  */
8555 for (unsigned i
= 1; i
< gimple_num_ops (stmt
); ++i
)
8557 tree op
= gimple_op (stmt
, i
);
8558 op
= gimple_convert (&stmts
, type
, op
);
8559 gimple_set_op (stmt
, i
, op
);
/* Give STMT a fresh unsigned LHS; pointer addition becomes plain
   unsigned PLUS.  */
8561 gimple_assign_set_lhs (stmt
, make_ssa_name (type
, stmt
));
8562 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
8563 gimple_assign_set_rhs_code (stmt
, PLUS_EXPR
);
8564 gimple_set_modified (stmt
, true);
8565 gimple_seq_add_stmt (&stmts
, stmt
);
/* Finally convert the unsigned result back to the original LHS type.  */
8566 gimple
*cvt
= gimple_build_assign (lhs
, NOP_EXPR
, gimple_assign_lhs (stmt
));
8567 gimple_seq_add_stmt (&stmts
, cvt
);
8573 /* The valueization hook we use for the gimple_build API simplification.
8574 This makes us match fold_buildN behavior by only combining with
8575 statements in the sequence(s) we are currently building. */
8578 gimple_build_valueize (tree op
)
/* Statements still being built have no basic block yet.  */
8580 if (gimple_bb (SSA_NAME_DEF_STMT (op
)) == NULL
)
8585 /* Build the expression CODE OP0 of type TYPE with location LOC,
8586 simplifying it first if possible. Returns the built
8587 expression value and appends statements possibly defining it
8591 gimple_build (gimple_seq
*seq
, location_t loc
,
8592 enum tree_code code
, tree type
, tree op0
)
/* Try to fold first; only emit a statement when simplification fails.  */
8594 tree res
= gimple_simplify (code
, type
, op0
, seq
, gimple_build_valueize
);
8597 res
= create_tmp_reg_or_ssa_name (type
);
/* These codes are references, not operations, so they must be wrapped
   in a GENERIC tree on the RHS.  */
8599 if (code
== REALPART_EXPR
8600 || code
== IMAGPART_EXPR
8601 || code
== VIEW_CONVERT_EXPR
)
8602 stmt
= gimple_build_assign (res
, code
, build1 (code
, type
, op0
));
8604 stmt
= gimple_build_assign (res
, code
, op0
);
8605 gimple_set_location (stmt
, loc
);
8606 gimple_seq_add_stmt_without_update (seq
, stmt
);
8611 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8612 simplifying it first if possible. Returns the built
8613 expression value and appends statements possibly defining it
8617 gimple_build (gimple_seq
*seq
, location_t loc
,
8618 enum tree_code code
, tree type
, tree op0
, tree op1
)
/* Try to fold first; only emit a statement when simplification fails.  */
8620 tree res
= gimple_simplify (code
, type
, op0
, op1
, seq
, gimple_build_valueize
);
8623 res
= create_tmp_reg_or_ssa_name (type
);
8624 gimple
*stmt
= gimple_build_assign (res
, code
, op0
, op1
);
8625 gimple_set_location (stmt
, loc
);
8626 gimple_seq_add_stmt_without_update (seq
, stmt
);
8631 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8632 simplifying it first if possible. Returns the built
8633 expression value and appends statements possibly defining it
8637 gimple_build (gimple_seq
*seq
, location_t loc
,
8638 enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
)
/* Try to fold first; only emit a statement when simplification fails.  */
8640 tree res
= gimple_simplify (code
, type
, op0
, op1
, op2
,
8641 seq
, gimple_build_valueize
);
8644 res
= create_tmp_reg_or_ssa_name (type
);
/* BIT_FIELD_REF is a reference, not an operation, so it must be
   wrapped in a GENERIC tree on the RHS.  */
8646 if (code
== BIT_FIELD_REF
)
8647 stmt
= gimple_build_assign (res
, code
,
8648 build3 (code
, type
, op0
, op1
, op2
));
8650 stmt
= gimple_build_assign (res
, code
, op0
, op1
, op2
);
8651 gimple_set_location (stmt
, loc
);
8652 gimple_seq_add_stmt_without_update (seq
, stmt
);
8657 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8658 void) with a location LOC. Returns the built expression value (or NULL_TREE
8659 if TYPE is void) and appends statements possibly defining it to SEQ. */
8662 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
, tree type
)
8664 tree res
= NULL_TREE
;
/* Internal functions and builtins are built differently.  */
8666 if (internal_fn_p (fn
))
8667 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 0);
8670 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8671 stmt
= gimple_build_call (decl
, 0);
8673 if (!VOID_TYPE_P (type
))
8675 res
= create_tmp_reg_or_ssa_name (type
);
8676 gimple_call_set_lhs (stmt
, res
);
8678 gimple_set_location (stmt
, loc
);
8679 gimple_seq_add_stmt_without_update (seq
, stmt
);
8683 /* Build the call FN (ARG0) with a result of type TYPE
8684 (or no result if TYPE is void) with location LOC,
8685 simplifying it first if possible. Returns the built
8686 expression value (or NULL_TREE if TYPE is void) and appends
8687 statements possibly defining it to SEQ. */
8690 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
8691 tree type
, tree arg0
)
/* Try to fold the call first; only emit it when that fails.  */
8693 tree res
= gimple_simplify (fn
, type
, arg0
, seq
, gimple_build_valueize
);
8697 if (internal_fn_p (fn
))
8698 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 1, arg0
);
8701 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8702 stmt
= gimple_build_call (decl
, 1, arg0
);
8704 if (!VOID_TYPE_P (type
))
8706 res
= create_tmp_reg_or_ssa_name (type
);
8707 gimple_call_set_lhs (stmt
, res
);
8709 gimple_set_location (stmt
, loc
);
8710 gimple_seq_add_stmt_without_update (seq
, stmt
);
8715 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8716 (or no result if TYPE is void) with location LOC,
8717 simplifying it first if possible. Returns the built
8718 expression value (or NULL_TREE if TYPE is void) and appends
8719 statements possibly defining it to SEQ. */
8722 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
8723 tree type
, tree arg0
, tree arg1
)
/* Try to fold the call first; only emit it when that fails.  */
8725 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, seq
, gimple_build_valueize
);
8729 if (internal_fn_p (fn
))
8730 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 2, arg0
, arg1
);
8733 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8734 stmt
= gimple_build_call (decl
, 2, arg0
, arg1
);
8736 if (!VOID_TYPE_P (type
))
8738 res
= create_tmp_reg_or_ssa_name (type
);
8739 gimple_call_set_lhs (stmt
, res
);
8741 gimple_set_location (stmt
, loc
);
8742 gimple_seq_add_stmt_without_update (seq
, stmt
);
8747 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8748 (or no result if TYPE is void) with location LOC,
8749 simplifying it first if possible. Returns the built
8750 expression value (or NULL_TREE if TYPE is void) and appends
8751 statements possibly defining it to SEQ. */
8754 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
8755 tree type
, tree arg0
, tree arg1
, tree arg2
)
/* Try to fold the call first; only emit it when that fails.  */
8757 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, arg2
,
8758 seq
, gimple_build_valueize
);
8762 if (internal_fn_p (fn
))
8763 stmt
= gimple_build_call_internal (as_internal_fn (fn
),
8764 3, arg0
, arg1
, arg2
);
8767 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8768 stmt
= gimple_build_call (decl
, 3, arg0
, arg1
, arg2
);
8770 if (!VOID_TYPE_P (type
))
8772 res
= create_tmp_reg_or_ssa_name (type
);
8773 gimple_call_set_lhs (stmt
, res
);
8775 gimple_set_location (stmt
, loc
);
8776 gimple_seq_add_stmt_without_update (seq
, stmt
);
8781 /* Build the conversion (TYPE) OP with a result of type TYPE
8782 with location LOC if such conversion is necessary in GIMPLE,
8783 simplifying it first.
8784 Returns the built expression value and appends
8785 statements possibly defining it to SEQ. */
8788 gimple_convert (gimple_seq
*seq
, location_t loc
, tree type
, tree op
)
/* No statement is needed when the types are already compatible.  */
8790 if (useless_type_conversion_p (type
, TREE_TYPE (op
)))
8792 return gimple_build (seq
, loc
, NOP_EXPR
, type
, op
);
8795 /* Build the conversion (ptrofftype) OP with a result of a type
8796 compatible with ptrofftype with location LOC if such conversion
8797 is necessary in GIMPLE, simplifying it first.
8798 Returns the built expression value and appends
8799 statements possibly defining it to SEQ. */
8802 gimple_convert_to_ptrofftype (gimple_seq
*seq
, location_t loc
, tree op
)
8804 if (ptrofftype_p (TREE_TYPE (op
)))
8806 return gimple_convert (seq
, loc
, sizetype
, op
);
8809 /* Build a vector of type TYPE in which each element has the value OP.
8810 Return a gimple value for the result, appending any new statements
/* Variable-length vectors with a non-constant element cannot be
   expressed as a VECTOR_CST; use VEC_DUPLICATE_EXPR instead.  */
8814 gimple_build_vector_from_val (gimple_seq
*seq
, location_t loc
, tree type
,
8817 if (!TYPE_VECTOR_SUBPARTS (type
).is_constant ()
8818 && !CONSTANT_CLASS_P (op
))
8819 return gimple_build (seq
, loc
, VEC_DUPLICATE_EXPR
, type
, op
);
/* Otherwise materialize the vector; if it is not a gimple value,
   assign it to a fresh temporary/SSA name.  */
8821 tree res
, vec
= build_vector_from_val (type
, op
);
8822 if (is_gimple_val (vec
))
8824 if (gimple_in_ssa_p (cfun
))
8825 res
= make_ssa_name (type
);
8827 res
= create_tmp_reg (type
);
8828 gimple
*stmt
= gimple_build_assign (res
, vec
);
8829 gimple_set_location (stmt
, loc
);
8830 gimple_seq_add_stmt_without_update (seq
, stmt
);
8834 /* Build a vector from BUILDER, handling the case in which some elements
8835 are non-constant. Return a gimple value for the result, appending any
8836 new instructions to SEQ.
8838 BUILDER must not have a stepped encoding on entry. This is because
8839 the function is not geared up to handle the arithmetic that would
8840 be needed in the variable case, and any code building a vector that
8841 is known to be constant should use BUILDER->build () directly. */
8844 gimple_build_vector (gimple_seq
*seq
, location_t loc
,
8845 tree_vector_builder
*builder
)
8847 gcc_assert (builder
->nelts_per_pattern () <= 2);
/* If any encoded element is non-constant, build a CONSTRUCTOR and
   assign it to a fresh temporary/SSA name.  */
8848 unsigned int encoded_nelts
= builder
->encoded_nelts ();
8849 for (unsigned int i
= 0; i
< encoded_nelts
; ++i
)
8850 if (!CONSTANT_CLASS_P ((*builder
)[i
]))
8852 tree type
= builder
->type ();
8853 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
8854 vec
<constructor_elt
, va_gc
> *v
;
8855 vec_alloc (v
, nelts
);
8856 for (i
= 0; i
< nelts
; ++i
)
8857 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, builder
->elt (i
));
8860 if (gimple_in_ssa_p (cfun
))
8861 res
= make_ssa_name (type
);
8863 res
= create_tmp_reg (type
);
8864 gimple
*stmt
= gimple_build_assign (res
, build_constructor (type
, v
));
8865 gimple_set_location (stmt
, loc
);
8866 gimple_seq_add_stmt_without_update (seq
, stmt
);
/* All elements constant: the builder can produce a VECTOR_CST.  */
8869 return builder
->build ();
8872 /* Emit gimple statements into &stmts that take a value given in OLD_SIZE
8873 and generate a value guaranteed to be rounded upwards to ALIGN.
8875 Return the tree node representing this size, it is of TREE_TYPE TYPE. */
8878 gimple_build_round_up (gimple_seq
*seq
, location_t loc
, tree type
,
8879 tree old_size
, unsigned HOST_WIDE_INT align
)
/* NOTE(review): assumes ALIGN is a power of two — the mask trick below
   is only correct in that case; confirm at callers.  */
8881 unsigned HOST_WIDE_INT tg_mask
= align
- 1;
8882 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
8883 gcc_assert (INTEGRAL_TYPE_P (type
));
8884 tree tree_mask
= build_int_cst (type
, tg_mask
);
8885 tree oversize
= gimple_build (seq
, loc
, PLUS_EXPR
, type
, old_size
,
/* -align == ~tg_mask in two's complement.  */
8888 tree mask
= build_int_cst (type
, -align
);
8889 return gimple_build (seq
, loc
, BIT_AND_EXPR
, type
, oversize
, mask
);
8892 /* Return true if the result of assignment STMT is known to be non-negative.
8893 If the return value is based on the assumption that signed overflow is
8894 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8895 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8898 gimple_assign_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
/* Dispatch on the shape of the RHS to the matching fold-const query.  */
8901 enum tree_code code
= gimple_assign_rhs_code (stmt
);
8902 switch (get_gimple_rhs_class (code
))
8904 case GIMPLE_UNARY_RHS
:
8905 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
8906 gimple_expr_type (stmt
),
8907 gimple_assign_rhs1 (stmt
),
8908 strict_overflow_p
, depth
);
8909 case GIMPLE_BINARY_RHS
:
8910 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
8911 gimple_expr_type (stmt
),
8912 gimple_assign_rhs1 (stmt
),
8913 gimple_assign_rhs2 (stmt
),
8914 strict_overflow_p
, depth
);
8915 case GIMPLE_TERNARY_RHS
:
8917 case GIMPLE_SINGLE_RHS
:
8918 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt
),
8919 strict_overflow_p
, depth
);
8920 case GIMPLE_INVALID_RHS
:
8926 /* Return true if return value of call STMT is known to be non-negative.
8927 If the return value is based on the assumption that signed overflow is
8928 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8929 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8932 gimple_call_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
/* Only the first two arguments are consulted; missing ones are passed
   as NULL_TREE.  */
8935 tree arg0
= gimple_call_num_args (stmt
) > 0 ?
8936 gimple_call_arg (stmt
, 0) : NULL_TREE
;
8937 tree arg1
= gimple_call_num_args (stmt
) > 1 ?
8938 gimple_call_arg (stmt
, 1) : NULL_TREE
;
8940 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt
),
8941 gimple_call_combined_fn (stmt
),
8944 strict_overflow_p
, depth
);
8947 /* Return true if return value of call STMT is known to be non-negative.
8948 If the return value is based on the assumption that signed overflow is
8949 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8950 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8953 gimple_phi_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
/* A PHI is non-negative only if every incoming argument is.  */
8956 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
8958 tree arg
= gimple_phi_arg_def (stmt
, i
);
8959 if (!tree_single_nonnegative_warnv_p (arg
, strict_overflow_p
, depth
+ 1))
8965 /* Return true if STMT is known to compute a non-negative value.
8966 If the return value is based on the assumption that signed overflow is
8967 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8968 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8971 gimple_stmt_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
/* Dispatch on the statement kind (assign/call/PHI).  */
8974 switch (gimple_code (stmt
))
8977 return gimple_assign_nonnegative_warnv_p (stmt
, strict_overflow_p
,
8980 return gimple_call_nonnegative_warnv_p (stmt
, strict_overflow_p
,
8983 return gimple_phi_nonnegative_warnv_p (stmt
, strict_overflow_p
,
8990 /* Return true if the floating-point value computed by assignment STMT
8991 is known to have an integer value. We also allow +Inf, -Inf and NaN
8992 to be considered integer values. Return false for signaling NaN.
8994 DEPTH is the current nesting depth of the query. */
8997 gimple_assign_integer_valued_real_p (gimple
*stmt
, int depth
)
/* Dispatch on the shape of the RHS to the matching fold-const query.  */
8999 enum tree_code code
= gimple_assign_rhs_code (stmt
);
9000 switch (get_gimple_rhs_class (code
))
9002 case GIMPLE_UNARY_RHS
:
9003 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt
),
9004 gimple_assign_rhs1 (stmt
), depth
);
9005 case GIMPLE_BINARY_RHS
:
9006 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt
),
9007 gimple_assign_rhs1 (stmt
),
9008 gimple_assign_rhs2 (stmt
), depth
);
9009 case GIMPLE_TERNARY_RHS
:
9011 case GIMPLE_SINGLE_RHS
:
9012 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt
), depth
);
9013 case GIMPLE_INVALID_RHS
:
9019 /* Return true if the floating-point value computed by call STMT is known
9020 to have an integer value. We also allow +Inf, -Inf and NaN to be
9021 considered integer values. Return false for signaling NaN.
9023 DEPTH is the current nesting depth of the query. */
9026 gimple_call_integer_valued_real_p (gimple
*stmt
, int depth
)
/* Only the first two arguments are consulted; missing ones are passed
   as a null default.  */
9028 tree arg0
= (gimple_call_num_args (stmt
) > 0
9029 ? gimple_call_arg (stmt
, 0)
9031 tree arg1
= (gimple_call_num_args (stmt
) > 1
9032 ? gimple_call_arg (stmt
, 1)
9034 return integer_valued_real_call_p (gimple_call_combined_fn (stmt
),
9038 /* Return true if the floating-point result of phi STMT is known to have
9039 an integer value. We also allow +Inf, -Inf and NaN to be considered
9040 integer values. Return false for signaling NaN.
9042 DEPTH is the current nesting depth of the query. */
9045 gimple_phi_integer_valued_real_p (gimple
*stmt
, int depth
)
/* A PHI result is integer-valued only if every incoming argument is.  */
9047 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
9049 tree arg
= gimple_phi_arg_def (stmt
, i
);
9050 if (!integer_valued_real_single_p (arg
, depth
+ 1))
9056 /* Return true if the floating-point value computed by STMT is known
9057 to have an integer value. We also allow +Inf, -Inf and NaN to be
9058 considered integer values. Return false for signaling NaN.
9060 DEPTH is the current nesting depth of the query. */
9063 gimple_stmt_integer_valued_real_p (gimple
*stmt
, int depth
)
9065 switch (gimple_code (stmt
))
9068 return gimple_assign_integer_valued_real_p (stmt
, depth
);
9070 return gimple_call_integer_valued_real_p (stmt
, depth
);
9072 return gimple_phi_integer_valued_real_p (stmt
, depth
);