1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
30 #include "fold-const.h"
32 #include "insn-config.h"
41 #include "stor-layout.h"
43 #include "dominance.h"
44 #include "internal-fn.h"
45 #include "gimple-fold.h"
47 #include "gimple-iterator.h"
48 #include "tree-into-ssa.h"
51 #include "tree-ssa-propagate.h"
54 #include "ipa-utils.h"
55 #include "gimple-pretty-print.h"
56 #include "tree-ssa-address.h"
57 #include "langhooks.h"
58 #include "gimplify-me.h"
63 #include "gimple-match.h"
65 /* Return true when DECL can be referenced from current unit.
66 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
67 We can get declarations that are not possible to reference for various
70 1) When analyzing C++ virtual tables.
71 C++ virtual tables do have known constructors even
72 when they are keyed to other compilation unit.
73 Those tables can contain pointers to methods and vars
74 in other units. Those methods have both STATIC and EXTERNAL
76 2) In WHOPR mode devirtualization might lead to reference
77 to method that was partitioned elsehwere.
78 In this case we have static VAR_DECL or FUNCTION_DECL
79 that has no corresponding callgraph/varpool node
81 3) COMDAT functions referred by external vtables that
82 we devirtualize only during final compilation stage.
83 At this time we already decided that we will not output
84 the function body and thus we can't reference the symbol
88 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
91 struct cgraph_node
*node
;
94 if (DECL_ABSTRACT_P (decl
))
97 /* We are concerned only about static/external vars and functions. */
98 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
99 || (TREE_CODE (decl
) != VAR_DECL
&& TREE_CODE (decl
) != FUNCTION_DECL
))
102 /* Static objects can be referred only if they was not optimized out yet. */
103 if (!TREE_PUBLIC (decl
) && !DECL_EXTERNAL (decl
))
105 /* Before we start optimizing unreachable code we can be sure all
106 static objects are defined. */
107 if (symtab
->function_flags_ready
)
109 snode
= symtab_node::get (decl
);
110 if (!snode
|| !snode
->definition
)
112 node
= dyn_cast
<cgraph_node
*> (snode
);
113 return !node
|| !node
->global
.inlined_to
;
116 /* We will later output the initializer, so we can refer to it.
117 So we are concerned only when DECL comes from initializer of
118 external var or var that has been optimized out. */
120 || TREE_CODE (from_decl
) != VAR_DECL
121 || (!DECL_EXTERNAL (from_decl
)
122 && (vnode
= varpool_node::get (from_decl
)) != NULL
123 && vnode
->definition
)
125 && (vnode
= varpool_node::get (from_decl
)) != NULL
126 && vnode
->in_other_partition
))
128 /* We are folding reference from external vtable. The vtable may reffer
129 to a symbol keyed to other compilation unit. The other compilation
130 unit may be in separate DSO and the symbol may be hidden. */
131 if (DECL_VISIBILITY_SPECIFIED (decl
)
132 && DECL_EXTERNAL (decl
)
133 && DECL_VISIBILITY (decl
) != VISIBILITY_DEFAULT
134 && (!(snode
= symtab_node::get (decl
)) || !snode
->in_other_partition
))
136 /* When function is public, we always can introduce new reference.
137 Exception are the COMDAT functions where introducing a direct
138 reference imply need to include function body in the curren tunit. */
139 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
141 /* We have COMDAT. We are going to check if we still have definition
142 or if the definition is going to be output in other partition.
143 Bypass this when gimplifying; all needed functions will be produced.
145 As observed in PR20991 for already optimized out comdat virtual functions
146 it may be tempting to not necessarily give up because the copy will be
147 output elsewhere when corresponding vtable is output.
148 This is however not possible - ABI specify that COMDATs are output in
149 units where they are used and when the other unit was compiled with LTO
150 it is possible that vtable was kept public while the function itself
152 if (!symtab
->function_flags_ready
)
155 snode
= symtab_node::get (decl
);
157 || ((!snode
->definition
|| DECL_EXTERNAL (decl
))
158 && (!snode
->in_other_partition
159 || (!snode
->forced_by_abi
&& !snode
->force_output
))))
161 node
= dyn_cast
<cgraph_node
*> (snode
);
162 return !node
|| !node
->global
.inlined_to
;
165 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
166 acceptable form for is_gimple_min_invariant.
167 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
170 canonicalize_constructor_val (tree cval
, tree from_decl
)
172 tree orig_cval
= cval
;
174 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
175 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
177 tree ptr
= TREE_OPERAND (cval
, 0);
178 if (is_gimple_min_invariant (ptr
))
179 cval
= build1_loc (EXPR_LOCATION (cval
),
180 ADDR_EXPR
, TREE_TYPE (ptr
),
181 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
183 fold_convert (ptr_type_node
,
184 TREE_OPERAND (cval
, 1))));
186 if (TREE_CODE (cval
) == ADDR_EXPR
)
188 tree base
= NULL_TREE
;
189 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
191 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
193 TREE_OPERAND (cval
, 0) = base
;
196 base
= get_base_address (TREE_OPERAND (cval
, 0));
200 if ((TREE_CODE (base
) == VAR_DECL
201 || TREE_CODE (base
) == FUNCTION_DECL
)
202 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
204 if (TREE_CODE (base
) == VAR_DECL
)
205 TREE_ADDRESSABLE (base
) = 1;
206 else if (TREE_CODE (base
) == FUNCTION_DECL
)
208 /* Make sure we create a cgraph node for functions we'll reference.
209 They can be non-existent if the reference comes from an entry
210 of an external vtable for example. */
211 cgraph_node::get_create (base
);
213 /* Fixup types in global initializers. */
214 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
215 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
217 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
218 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
221 if (TREE_OVERFLOW_P (cval
))
222 return drop_tree_overflow (cval
);
226 /* If SYM is a constant variable with known value, return the value.
227 NULL_TREE is returned otherwise. */
230 get_symbol_constant_value (tree sym
)
232 tree val
= ctor_for_folding (sym
);
233 if (val
!= error_mark_node
)
237 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
238 if (val
&& is_gimple_min_invariant (val
))
243 /* Variables declared 'const' without an initializer
244 have zero as the initializer if they may not be
245 overridden at link or run time. */
247 && is_gimple_reg_type (TREE_TYPE (sym
)))
248 return build_zero_cst (TREE_TYPE (sym
));
256 /* Subroutine of fold_stmt. We perform several simplifications of the
257 memory reference tree EXPR and make sure to re-gimplify them properly
258 after propagation of constant addresses. IS_LHS is true if the
259 reference is supposed to be an lvalue. */
262 maybe_fold_reference (tree expr
, bool is_lhs
)
266 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
267 || TREE_CODE (expr
) == REALPART_EXPR
268 || TREE_CODE (expr
) == IMAGPART_EXPR
)
269 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
270 return fold_unary_loc (EXPR_LOCATION (expr
),
273 TREE_OPERAND (expr
, 0));
274 else if (TREE_CODE (expr
) == BIT_FIELD_REF
275 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
276 return fold_ternary_loc (EXPR_LOCATION (expr
),
279 TREE_OPERAND (expr
, 0),
280 TREE_OPERAND (expr
, 1),
281 TREE_OPERAND (expr
, 2));
284 && (result
= fold_const_aggregate_ref (expr
))
285 && is_gimple_min_invariant (result
))
292 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
293 replacement rhs for the statement or NULL_TREE if no simplification
294 could be made. It is assumed that the operands have been previously
298 fold_gimple_assign (gimple_stmt_iterator
*si
)
300 gimple stmt
= gsi_stmt (*si
);
301 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
302 location_t loc
= gimple_location (stmt
);
304 tree result
= NULL_TREE
;
306 switch (get_gimple_rhs_class (subcode
))
308 case GIMPLE_SINGLE_RHS
:
310 tree rhs
= gimple_assign_rhs1 (stmt
);
312 if (TREE_CLOBBER_P (rhs
))
315 if (REFERENCE_CLASS_P (rhs
))
316 return maybe_fold_reference (rhs
, false);
318 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
)
320 tree val
= OBJ_TYPE_REF_EXPR (rhs
);
321 if (is_gimple_min_invariant (val
))
323 else if (flag_devirtualize
&& virtual_method_call_p (rhs
))
326 vec
<cgraph_node
*>targets
327 = possible_polymorphic_call_targets (rhs
, stmt
, &final
);
328 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
330 if (dump_enabled_p ())
332 location_t loc
= gimple_location_safe (stmt
);
333 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
334 "resolving virtual function address "
335 "reference to function %s\n",
336 targets
.length () == 1
337 ? targets
[0]->name ()
340 if (targets
.length () == 1)
342 val
= fold_convert (TREE_TYPE (val
),
343 build_fold_addr_expr_loc
344 (loc
, targets
[0]->decl
));
345 STRIP_USELESS_TYPE_CONVERSION (val
);
348 /* We can not use __builtin_unreachable here because it
349 can not have address taken. */
350 val
= build_int_cst (TREE_TYPE (val
), 0);
356 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
358 tree ref
= TREE_OPERAND (rhs
, 0);
359 tree tem
= maybe_fold_reference (ref
, true);
361 && TREE_CODE (tem
) == MEM_REF
362 && integer_zerop (TREE_OPERAND (tem
, 1)))
363 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
365 result
= fold_convert (TREE_TYPE (rhs
),
366 build_fold_addr_expr_loc (loc
, tem
));
367 else if (TREE_CODE (ref
) == MEM_REF
368 && integer_zerop (TREE_OPERAND (ref
, 1)))
369 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
372 else if (TREE_CODE (rhs
) == CONSTRUCTOR
373 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
374 && (CONSTRUCTOR_NELTS (rhs
)
375 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
377 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
381 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
382 if (TREE_CODE (val
) != INTEGER_CST
383 && TREE_CODE (val
) != REAL_CST
384 && TREE_CODE (val
) != FIXED_CST
)
387 return build_vector_from_ctor (TREE_TYPE (rhs
),
388 CONSTRUCTOR_ELTS (rhs
));
391 else if (DECL_P (rhs
))
392 return get_symbol_constant_value (rhs
);
394 /* If we couldn't fold the RHS, hand over to the generic
396 if (result
== NULL_TREE
)
399 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
400 that may have been added by fold, and "useless" type
401 conversions that might now be apparent due to propagation. */
402 STRIP_USELESS_TYPE_CONVERSION (result
);
404 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
411 case GIMPLE_UNARY_RHS
:
414 case GIMPLE_BINARY_RHS
:
415 /* Try to canonicalize for boolean-typed X the comparisons
416 X == 0, X == 1, X != 0, and X != 1. */
417 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
418 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
420 tree lhs
= gimple_assign_lhs (stmt
);
421 tree op1
= gimple_assign_rhs1 (stmt
);
422 tree op2
= gimple_assign_rhs2 (stmt
);
423 tree type
= TREE_TYPE (op1
);
425 /* Check whether the comparison operands are of the same boolean
426 type as the result type is.
427 Check that second operand is an integer-constant with value
429 if (TREE_CODE (op2
) == INTEGER_CST
430 && (integer_zerop (op2
) || integer_onep (op2
))
431 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
433 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
434 bool is_logical_not
= false;
436 /* X == 0 and X != 1 is a logical-not.of X
437 X == 1 and X != 0 is X */
438 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
439 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
440 is_logical_not
= true;
442 if (is_logical_not
== false)
444 /* Only for one-bit precision typed X the transformation
445 !X -> ~X is valied. */
446 else if (TYPE_PRECISION (type
) == 1)
447 result
= build1_loc (gimple_location (stmt
), BIT_NOT_EXPR
,
449 /* Otherwise we use !X -> X ^ 1. */
451 result
= build2_loc (gimple_location (stmt
), BIT_XOR_EXPR
,
452 type
, op1
, build_int_cst (type
, 1));
458 result
= fold_binary_loc (loc
, subcode
,
459 TREE_TYPE (gimple_assign_lhs (stmt
)),
460 gimple_assign_rhs1 (stmt
),
461 gimple_assign_rhs2 (stmt
));
465 STRIP_USELESS_TYPE_CONVERSION (result
);
466 if (valid_gimple_rhs_p (result
))
471 case GIMPLE_TERNARY_RHS
:
472 /* Try to fold a conditional expression. */
473 if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
475 tree op0
= gimple_assign_rhs1 (stmt
);
478 location_t cond_loc
= gimple_location (stmt
);
480 if (COMPARISON_CLASS_P (op0
))
482 fold_defer_overflow_warnings ();
483 tem
= fold_binary_loc (cond_loc
,
484 TREE_CODE (op0
), TREE_TYPE (op0
),
485 TREE_OPERAND (op0
, 0),
486 TREE_OPERAND (op0
, 1));
487 /* This is actually a conditional expression, not a GIMPLE
488 conditional statement, however, the valid_gimple_rhs_p
489 test still applies. */
490 set
= (tem
&& is_gimple_condexpr (tem
)
491 && valid_gimple_rhs_p (tem
));
492 fold_undefer_overflow_warnings (set
, stmt
, 0);
494 else if (is_gimple_min_invariant (op0
))
503 result
= fold_build3_loc (cond_loc
, COND_EXPR
,
504 TREE_TYPE (gimple_assign_lhs (stmt
)), tem
,
505 gimple_assign_rhs2 (stmt
),
506 gimple_assign_rhs3 (stmt
));
510 result
= fold_ternary_loc (loc
, subcode
,
511 TREE_TYPE (gimple_assign_lhs (stmt
)),
512 gimple_assign_rhs1 (stmt
),
513 gimple_assign_rhs2 (stmt
),
514 gimple_assign_rhs3 (stmt
));
518 STRIP_USELESS_TYPE_CONVERSION (result
);
519 if (valid_gimple_rhs_p (result
))
524 case GIMPLE_INVALID_RHS
:
531 /* Attempt to fold a conditional statement. Return true if any changes were
532 made. We only attempt to fold the condition expression, and do not perform
533 any transformation that would require alteration of the cfg. It is
534 assumed that the operands have been previously folded. */
537 fold_gimple_cond (gcond
*stmt
)
539 tree result
= fold_binary_loc (gimple_location (stmt
),
540 gimple_cond_code (stmt
),
542 gimple_cond_lhs (stmt
),
543 gimple_cond_rhs (stmt
));
547 STRIP_USELESS_TYPE_CONVERSION (result
);
548 if (is_gimple_condexpr (result
) && valid_gimple_rhs_p (result
))
550 gimple_cond_set_condition_from_tree (stmt
, result
);
559 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
560 adjusting the replacement stmts location and virtual operands.
561 If the statement has a lhs the last stmt in the sequence is expected
562 to assign to that lhs. */
565 gsi_replace_with_seq_vops (gimple_stmt_iterator
*si_p
, gimple_seq stmts
)
567 gimple stmt
= gsi_stmt (*si_p
);
569 if (gimple_has_location (stmt
))
570 annotate_all_with_location (stmts
, gimple_location (stmt
));
572 /* First iterate over the replacement statements backward, assigning
573 virtual operands to their defining statements. */
574 gimple laststore
= NULL
;
575 for (gimple_stmt_iterator i
= gsi_last (stmts
);
576 !gsi_end_p (i
); gsi_prev (&i
))
578 gimple new_stmt
= gsi_stmt (i
);
579 if ((gimple_assign_single_p (new_stmt
)
580 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
581 || (is_gimple_call (new_stmt
)
582 && (gimple_call_flags (new_stmt
)
583 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
587 vdef
= gimple_vdef (stmt
);
589 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
590 gimple_set_vdef (new_stmt
, vdef
);
591 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
592 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
593 laststore
= new_stmt
;
597 /* Second iterate over the statements forward, assigning virtual
598 operands to their uses. */
599 tree reaching_vuse
= gimple_vuse (stmt
);
600 for (gimple_stmt_iterator i
= gsi_start (stmts
);
601 !gsi_end_p (i
); gsi_next (&i
))
603 gimple new_stmt
= gsi_stmt (i
);
604 /* If the new statement possibly has a VUSE, update it with exact SSA
605 name we know will reach this one. */
606 if (gimple_has_mem_ops (new_stmt
))
607 gimple_set_vuse (new_stmt
, reaching_vuse
);
608 gimple_set_modified (new_stmt
, true);
609 if (gimple_vdef (new_stmt
))
610 reaching_vuse
= gimple_vdef (new_stmt
);
613 /* If the new sequence does not do a store release the virtual
614 definition of the original statement. */
616 && reaching_vuse
== gimple_vuse (stmt
))
618 tree vdef
= gimple_vdef (stmt
);
620 && TREE_CODE (vdef
) == SSA_NAME
)
622 unlink_stmt_vdef (stmt
);
623 release_ssa_name (vdef
);
627 /* Finally replace the original statement with the sequence. */
628 gsi_replace_with_seq (si_p
, stmts
, false);
631 /* Convert EXPR into a GIMPLE value suitable for substitution on the
632 RHS of an assignment. Insert the necessary statements before
633 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
634 is replaced. If the call is expected to produces a result, then it
635 is replaced by an assignment of the new RHS to the result variable.
636 If the result is to be ignored, then the call is replaced by a
637 GIMPLE_NOP. A proper VDEF chain is retained by making the first
638 VUSE and the last VDEF of the whole sequence be the same as the replaced
639 statement and using new SSA names for stores in between. */
642 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
645 gimple stmt
, new_stmt
;
646 gimple_stmt_iterator i
;
647 gimple_seq stmts
= NULL
;
649 stmt
= gsi_stmt (*si_p
);
651 gcc_assert (is_gimple_call (stmt
));
653 push_gimplify_context (gimple_in_ssa_p (cfun
));
655 lhs
= gimple_call_lhs (stmt
);
656 if (lhs
== NULL_TREE
)
658 gimplify_and_add (expr
, &stmts
);
659 /* We can end up with folding a memcpy of an empty class assignment
660 which gets optimized away by C++ gimplification. */
661 if (gimple_seq_empty_p (stmts
))
663 pop_gimplify_context (NULL
);
664 if (gimple_in_ssa_p (cfun
))
666 unlink_stmt_vdef (stmt
);
669 gsi_replace (si_p
, gimple_build_nop (), true);
675 tree tmp
= get_initialized_tmp_var (expr
, &stmts
, NULL
);
676 new_stmt
= gimple_build_assign (lhs
, tmp
);
677 i
= gsi_last (stmts
);
678 gsi_insert_after_without_update (&i
, new_stmt
,
679 GSI_CONTINUE_LINKING
);
682 pop_gimplify_context (NULL
);
684 gsi_replace_with_seq_vops (si_p
, stmts
);
688 /* Replace the call at *GSI with the gimple value VAL. */
691 replace_call_with_value (gimple_stmt_iterator
*gsi
, tree val
)
693 gimple stmt
= gsi_stmt (*gsi
);
694 tree lhs
= gimple_call_lhs (stmt
);
698 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (val
)))
699 val
= fold_convert (TREE_TYPE (lhs
), val
);
700 repl
= gimple_build_assign (lhs
, val
);
703 repl
= gimple_build_nop ();
704 tree vdef
= gimple_vdef (stmt
);
705 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
707 unlink_stmt_vdef (stmt
);
708 release_ssa_name (vdef
);
710 gsi_replace (gsi
, repl
, true);
713 /* Replace the call at *GSI with the new call REPL and fold that
717 replace_call_with_call_and_fold (gimple_stmt_iterator
*gsi
, gimple repl
)
719 gimple stmt
= gsi_stmt (*gsi
);
720 gimple_call_set_lhs (repl
, gimple_call_lhs (stmt
));
721 gimple_set_location (repl
, gimple_location (stmt
));
722 if (gimple_vdef (stmt
)
723 && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
725 gimple_set_vdef (repl
, gimple_vdef (stmt
));
726 gimple_set_vuse (repl
, gimple_vuse (stmt
));
727 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
729 gsi_replace (gsi
, repl
, true);
733 /* Return true if VAR is a VAR_DECL or a component thereof. */
736 var_decl_component_p (tree var
)
739 while (handled_component_p (inner
))
740 inner
= TREE_OPERAND (inner
, 0);
741 return SSA_VAR_P (inner
);
744 /* Fold function call to builtin mem{{,p}cpy,move}. Return
745 false if no simplification can be made.
746 If ENDP is 0, return DEST (like memcpy).
747 If ENDP is 1, return DEST+LEN (like mempcpy).
748 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
749 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
753 gimple_fold_builtin_memory_op (gimple_stmt_iterator
*gsi
,
754 tree dest
, tree src
, int endp
)
756 gimple stmt
= gsi_stmt (*gsi
);
757 tree lhs
= gimple_call_lhs (stmt
);
758 tree len
= gimple_call_arg (stmt
, 2);
759 tree destvar
, srcvar
;
760 location_t loc
= gimple_location (stmt
);
762 /* If the LEN parameter is zero, return DEST. */
763 if (integer_zerop (len
))
766 if (gimple_call_lhs (stmt
))
767 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
769 repl
= gimple_build_nop ();
770 tree vdef
= gimple_vdef (stmt
);
771 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
773 unlink_stmt_vdef (stmt
);
774 release_ssa_name (vdef
);
776 gsi_replace (gsi
, repl
, true);
780 /* If SRC and DEST are the same (and not volatile), return
781 DEST{,+LEN,+LEN-1}. */
782 if (operand_equal_p (src
, dest
, 0))
784 unlink_stmt_vdef (stmt
);
785 if (gimple_vdef (stmt
) && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
786 release_ssa_name (gimple_vdef (stmt
));
789 gsi_replace (gsi
, gimple_build_nop (), true);
796 tree srctype
, desttype
;
797 unsigned int src_align
, dest_align
;
800 /* Build accesses at offset zero with a ref-all character type. */
801 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
804 /* If we can perform the copy efficiently with first doing all loads
805 and then all stores inline it that way. Currently efficiently
806 means that we can load all the memory into a single integer
807 register which is what MOVE_MAX gives us. */
808 src_align
= get_pointer_alignment (src
);
809 dest_align
= get_pointer_alignment (dest
);
810 if (tree_fits_uhwi_p (len
)
811 && compare_tree_int (len
, MOVE_MAX
) <= 0
812 /* ??? Don't transform copies from strings with known length this
813 confuses the tree-ssa-strlen.c. This doesn't handle
814 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
816 && !c_strlen (src
, 2))
818 unsigned ilen
= tree_to_uhwi (len
);
819 if (exact_log2 (ilen
) != -1)
821 tree type
= lang_hooks
.types
.type_for_size (ilen
* 8, 1);
823 && TYPE_MODE (type
) != BLKmode
824 && (GET_MODE_SIZE (TYPE_MODE (type
)) * BITS_PER_UNIT
826 /* If the destination pointer is not aligned we must be able
827 to emit an unaligned store. */
828 && (dest_align
>= GET_MODE_ALIGNMENT (TYPE_MODE (type
))
829 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type
), dest_align
)))
832 tree desttype
= type
;
833 if (src_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
834 srctype
= build_aligned_type (type
, src_align
);
835 tree srcmem
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
836 tree tem
= fold_const_aggregate_ref (srcmem
);
839 else if (src_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
))
840 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type
),
846 if (is_gimple_reg_type (TREE_TYPE (srcmem
)))
848 new_stmt
= gimple_build_assign (NULL_TREE
, srcmem
);
849 if (gimple_in_ssa_p (cfun
))
850 srcmem
= make_ssa_name (TREE_TYPE (srcmem
),
853 srcmem
= create_tmp_reg (TREE_TYPE (srcmem
));
854 gimple_assign_set_lhs (new_stmt
, srcmem
);
855 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
856 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
858 if (dest_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
859 desttype
= build_aligned_type (type
, dest_align
);
861 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
,
864 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
865 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
866 if (gimple_vdef (new_stmt
)
867 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
868 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
871 gsi_replace (gsi
, new_stmt
, true);
874 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
883 /* Both DEST and SRC must be pointer types.
884 ??? This is what old code did. Is the testing for pointer types
887 If either SRC is readonly or length is 1, we can use memcpy. */
888 if (!dest_align
|| !src_align
)
890 if (readonly_data_expr (src
)
891 || (tree_fits_uhwi_p (len
)
892 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
893 >= tree_to_uhwi (len
))))
895 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
898 gimple_call_set_fndecl (stmt
, fn
);
899 gimple_call_set_arg (stmt
, 0, dest
);
900 gimple_call_set_arg (stmt
, 1, src
);
905 /* If *src and *dest can't overlap, optimize into memcpy as well. */
906 if (TREE_CODE (src
) == ADDR_EXPR
907 && TREE_CODE (dest
) == ADDR_EXPR
)
909 tree src_base
, dest_base
, fn
;
910 HOST_WIDE_INT src_offset
= 0, dest_offset
= 0;
911 HOST_WIDE_INT size
= -1;
912 HOST_WIDE_INT maxsize
= -1;
914 srcvar
= TREE_OPERAND (src
, 0);
915 src_base
= get_ref_base_and_extent (srcvar
, &src_offset
,
917 destvar
= TREE_OPERAND (dest
, 0);
918 dest_base
= get_ref_base_and_extent (destvar
, &dest_offset
,
920 if (tree_fits_uhwi_p (len
))
921 maxsize
= tree_to_uhwi (len
);
924 src_offset
/= BITS_PER_UNIT
;
925 dest_offset
/= BITS_PER_UNIT
;
926 if (SSA_VAR_P (src_base
)
927 && SSA_VAR_P (dest_base
))
929 if (operand_equal_p (src_base
, dest_base
, 0)
930 && ranges_overlap_p (src_offset
, maxsize
,
931 dest_offset
, maxsize
))
934 else if (TREE_CODE (src_base
) == MEM_REF
935 && TREE_CODE (dest_base
) == MEM_REF
)
937 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
938 TREE_OPERAND (dest_base
, 0), 0))
940 offset_int off
= mem_ref_offset (src_base
) + src_offset
;
941 if (!wi::fits_shwi_p (off
))
943 src_offset
= off
.to_shwi ();
945 off
= mem_ref_offset (dest_base
) + dest_offset
;
946 if (!wi::fits_shwi_p (off
))
948 dest_offset
= off
.to_shwi ();
949 if (ranges_overlap_p (src_offset
, maxsize
,
950 dest_offset
, maxsize
))
956 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
959 gimple_call_set_fndecl (stmt
, fn
);
960 gimple_call_set_arg (stmt
, 0, dest
);
961 gimple_call_set_arg (stmt
, 1, src
);
966 /* If the destination and source do not alias optimize into
968 if ((is_gimple_min_invariant (dest
)
969 || TREE_CODE (dest
) == SSA_NAME
)
970 && (is_gimple_min_invariant (src
)
971 || TREE_CODE (src
) == SSA_NAME
))
974 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
975 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
976 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
979 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
982 gimple_call_set_fndecl (stmt
, fn
);
983 gimple_call_set_arg (stmt
, 0, dest
);
984 gimple_call_set_arg (stmt
, 1, src
);
993 if (!tree_fits_shwi_p (len
))
996 This logic lose for arguments like (type *)malloc (sizeof (type)),
997 since we strip the casts of up to VOID return value from malloc.
998 Perhaps we ought to inherit type from non-VOID argument here? */
1001 if (!POINTER_TYPE_P (TREE_TYPE (src
))
1002 || !POINTER_TYPE_P (TREE_TYPE (dest
)))
1004 /* In the following try to find a type that is most natural to be
1005 used for the memcpy source and destination and that allows
1006 the most optimization when memcpy is turned into a plain assignment
1007 using that type. In theory we could always use a char[len] type
1008 but that only gains us that the destination and source possibly
1009 no longer will have their address taken. */
1010 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1011 if (TREE_CODE (src
) == POINTER_PLUS_EXPR
)
1013 tree tem
= TREE_OPERAND (src
, 0);
1015 if (tem
!= TREE_OPERAND (src
, 0))
1016 src
= build1 (NOP_EXPR
, TREE_TYPE (tem
), src
);
1018 if (TREE_CODE (dest
) == POINTER_PLUS_EXPR
)
1020 tree tem
= TREE_OPERAND (dest
, 0);
1022 if (tem
!= TREE_OPERAND (dest
, 0))
1023 dest
= build1 (NOP_EXPR
, TREE_TYPE (tem
), dest
);
1025 srctype
= TREE_TYPE (TREE_TYPE (src
));
1026 if (TREE_CODE (srctype
) == ARRAY_TYPE
1027 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
1029 srctype
= TREE_TYPE (srctype
);
1031 src
= build1 (NOP_EXPR
, build_pointer_type (srctype
), src
);
1033 desttype
= TREE_TYPE (TREE_TYPE (dest
));
1034 if (TREE_CODE (desttype
) == ARRAY_TYPE
1035 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
1037 desttype
= TREE_TYPE (desttype
);
1039 dest
= build1 (NOP_EXPR
, build_pointer_type (desttype
), dest
);
1041 if (TREE_ADDRESSABLE (srctype
)
1042 || TREE_ADDRESSABLE (desttype
))
1045 /* Make sure we are not copying using a floating-point mode or
1046 a type whose size possibly does not match its precision. */
1047 if (FLOAT_MODE_P (TYPE_MODE (desttype
))
1048 || TREE_CODE (desttype
) == BOOLEAN_TYPE
1049 || TREE_CODE (desttype
) == ENUMERAL_TYPE
)
1050 desttype
= bitwise_type_for_mode (TYPE_MODE (desttype
));
1051 if (FLOAT_MODE_P (TYPE_MODE (srctype
))
1052 || TREE_CODE (srctype
) == BOOLEAN_TYPE
1053 || TREE_CODE (srctype
) == ENUMERAL_TYPE
)
1054 srctype
= bitwise_type_for_mode (TYPE_MODE (srctype
));
1062 src_align
= get_pointer_alignment (src
);
1063 dest_align
= get_pointer_alignment (dest
);
1064 if (dest_align
< TYPE_ALIGN (desttype
)
1065 || src_align
< TYPE_ALIGN (srctype
))
1069 STRIP_NOPS (destvar
);
1070 if (TREE_CODE (destvar
) == ADDR_EXPR
1071 && var_decl_component_p (TREE_OPERAND (destvar
, 0))
1072 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
1073 destvar
= fold_build2 (MEM_REF
, desttype
, destvar
, off0
);
1075 destvar
= NULL_TREE
;
1078 STRIP_NOPS (srcvar
);
1079 if (TREE_CODE (srcvar
) == ADDR_EXPR
1080 && var_decl_component_p (TREE_OPERAND (srcvar
, 0))
1081 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
1084 || src_align
>= TYPE_ALIGN (desttype
))
1085 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
1087 else if (!STRICT_ALIGNMENT
)
1089 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1091 srcvar
= fold_build2 (MEM_REF
, srctype
, srcvar
, off0
);
1099 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
1102 if (srcvar
== NULL_TREE
)
1105 if (src_align
>= TYPE_ALIGN (desttype
))
1106 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
1109 if (STRICT_ALIGNMENT
)
1111 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1113 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1116 else if (destvar
== NULL_TREE
)
1119 if (dest_align
>= TYPE_ALIGN (srctype
))
1120 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
1123 if (STRICT_ALIGNMENT
)
1125 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
1127 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1132 if (is_gimple_reg_type (TREE_TYPE (srcvar
)))
1134 new_stmt
= gimple_build_assign (NULL_TREE
, srcvar
);
1135 if (gimple_in_ssa_p (cfun
))
1136 srcvar
= make_ssa_name (TREE_TYPE (srcvar
), new_stmt
);
1138 srcvar
= create_tmp_reg (TREE_TYPE (srcvar
));
1139 gimple_assign_set_lhs (new_stmt
, srcvar
);
1140 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1141 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1143 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1144 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1145 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
1146 if (gimple_vdef (new_stmt
)
1147 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
1148 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
1151 gsi_replace (gsi
, new_stmt
, true);
1154 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1158 if (endp
== 0 || endp
== 3)
1161 len
= fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
1163 if (endp
== 2 || endp
== 1)
1164 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
1166 dest
= force_gimple_operand_gsi (gsi
, dest
, false, NULL_TREE
, true,
1168 gimple repl
= gimple_build_assign (lhs
, dest
);
1169 gsi_replace (gsi
, repl
, true);
1173 /* Fold function call to builtin memset or bzero at *GSI setting the
1174 memory of size LEN to VAL. Return whether a simplification was made. */
1177 gimple_fold_builtin_memset (gimple_stmt_iterator
*gsi
, tree c
, tree len
)
1179 gimple stmt
= gsi_stmt (*gsi
);
1181 unsigned HOST_WIDE_INT length
, cval
;
1183 /* If the LEN parameter is zero, return DEST. */
1184 if (integer_zerop (len
))
1186 replace_call_with_value (gsi
, gimple_call_arg (stmt
, 0));
1190 if (! tree_fits_uhwi_p (len
))
1193 if (TREE_CODE (c
) != INTEGER_CST
)
1196 tree dest
= gimple_call_arg (stmt
, 0);
1198 if (TREE_CODE (var
) != ADDR_EXPR
)
1201 var
= TREE_OPERAND (var
, 0);
1202 if (TREE_THIS_VOLATILE (var
))
1205 etype
= TREE_TYPE (var
);
1206 if (TREE_CODE (etype
) == ARRAY_TYPE
)
1207 etype
= TREE_TYPE (etype
);
1209 if (!INTEGRAL_TYPE_P (etype
)
1210 && !POINTER_TYPE_P (etype
))
1213 if (! var_decl_component_p (var
))
1216 length
= tree_to_uhwi (len
);
1217 if (GET_MODE_SIZE (TYPE_MODE (etype
)) != length
1218 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
1221 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
1224 if (integer_zerop (c
))
1228 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
1231 cval
= TREE_INT_CST_LOW (c
);
1235 cval
|= (cval
<< 31) << 1;
1238 var
= fold_build2 (MEM_REF
, etype
, dest
, build_int_cst (ptr_type_node
, 0));
1239 gimple store
= gimple_build_assign (var
, build_int_cst_type (etype
, cval
));
1240 gimple_set_vuse (store
, gimple_vuse (stmt
));
1241 tree vdef
= gimple_vdef (stmt
);
1242 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
1244 gimple_set_vdef (store
, gimple_vdef (stmt
));
1245 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = store
;
1247 gsi_insert_before (gsi
, store
, GSI_SAME_STMT
);
1248 if (gimple_call_lhs (stmt
))
1250 gimple asgn
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
1251 gsi_replace (gsi
, asgn
, true);
1255 gimple_stmt_iterator gsi2
= *gsi
;
1257 gsi_remove (&gsi2
, true);
1264 /* Return the string length, maximum string length or maximum value of
1266 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1267 is not NULL and, for TYPE == 0, its value is not equal to the length
1268 we determine or if we are unable to determine the length or value,
1269 return false. VISITED is a bitmap of visited variables.
1270 TYPE is 0 if string length should be returned, 1 for maximum string
1271 length and 2 for maximum value ARG can have. */
1274 get_maxval_strlen (tree arg
, tree
*length
, bitmap
*visited
, int type
)
1279 if (TREE_CODE (arg
) != SSA_NAME
)
1281 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1282 if (TREE_CODE (arg
) == ADDR_EXPR
1283 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
1284 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg
, 0), 1)))
1286 tree aop0
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
1287 if (TREE_CODE (aop0
) == INDIRECT_REF
1288 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
1289 return get_maxval_strlen (TREE_OPERAND (aop0
, 0),
1290 length
, visited
, type
);
1296 if (TREE_CODE (val
) != INTEGER_CST
1297 || tree_int_cst_sgn (val
) < 0)
1301 val
= c_strlen (arg
, 1);
1309 if (TREE_CODE (*length
) != INTEGER_CST
1310 || TREE_CODE (val
) != INTEGER_CST
)
1313 if (tree_int_cst_lt (*length
, val
))
1317 else if (simple_cst_equal (val
, *length
) != 1)
1325 /* If ARG is registered for SSA update we cannot look at its defining
1327 if (name_registered_for_update_p (arg
))
1330 /* If we were already here, break the infinite cycle. */
1332 *visited
= BITMAP_ALLOC (NULL
);
1333 if (!bitmap_set_bit (*visited
, SSA_NAME_VERSION (arg
)))
1337 def_stmt
= SSA_NAME_DEF_STMT (var
);
1339 switch (gimple_code (def_stmt
))
1342 /* The RHS of the statement defining VAR must either have a
1343 constant length or come from another SSA_NAME with a constant
1345 if (gimple_assign_single_p (def_stmt
)
1346 || gimple_assign_unary_nop_p (def_stmt
))
1348 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1349 return get_maxval_strlen (rhs
, length
, visited
, type
);
1351 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
1353 tree op2
= gimple_assign_rhs2 (def_stmt
);
1354 tree op3
= gimple_assign_rhs3 (def_stmt
);
1355 return get_maxval_strlen (op2
, length
, visited
, type
)
1356 && get_maxval_strlen (op3
, length
, visited
, type
);
1362 /* All the arguments of the PHI node must have the same constant
1366 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1368 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
1370 /* If this PHI has itself as an argument, we cannot
1371 determine the string length of this argument. However,
1372 if we can find a constant string length for the other
1373 PHI args then we can still be sure that this is a
1374 constant string length. So be optimistic and just
1375 continue with the next argument. */
1376 if (arg
== gimple_phi_result (def_stmt
))
1379 if (!get_maxval_strlen (arg
, length
, visited
, type
))
1391 get_maxval_strlen (tree arg
, int type
)
1393 bitmap visited
= NULL
;
1394 tree len
= NULL_TREE
;
1395 if (!get_maxval_strlen (arg
, &len
, &visited
, type
))
1398 BITMAP_FREE (visited
);
1404 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1405 If LEN is not NULL, it represents the length of the string to be
1406 copied. Return NULL_TREE if no simplification can be made. */
1409 gimple_fold_builtin_strcpy (gimple_stmt_iterator
*gsi
,
1410 tree dest
, tree src
)
1412 location_t loc
= gimple_location (gsi_stmt (*gsi
));
1415 /* If SRC and DEST are the same (and not volatile), return DEST. */
1416 if (operand_equal_p (src
, dest
, 0))
1418 replace_call_with_value (gsi
, dest
);
1422 if (optimize_function_for_size_p (cfun
))
1425 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1429 tree len
= get_maxval_strlen (src
, 0);
1433 len
= fold_convert_loc (loc
, size_type_node
, len
);
1434 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
1435 len
= force_gimple_operand_gsi (gsi
, len
, true,
1436 NULL_TREE
, true, GSI_SAME_STMT
);
1437 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1438 replace_call_with_call_and_fold (gsi
, repl
);
1442 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1443 If SLEN is not NULL, it represents the length of the source string.
1444 Return NULL_TREE if no simplification can be made. */
1447 gimple_fold_builtin_strncpy (gimple_stmt_iterator
*gsi
,
1448 tree dest
, tree src
, tree len
)
1450 location_t loc
= gimple_location (gsi_stmt (*gsi
));
1453 /* If the LEN parameter is zero, return DEST. */
1454 if (integer_zerop (len
))
1456 replace_call_with_value (gsi
, dest
);
1460 /* We can't compare slen with len as constants below if len is not a
1462 if (TREE_CODE (len
) != INTEGER_CST
)
1465 /* Now, we must be passed a constant src ptr parameter. */
1466 tree slen
= get_maxval_strlen (src
, 0);
1467 if (!slen
|| TREE_CODE (slen
) != INTEGER_CST
)
1470 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
1472 /* We do not support simplification of this case, though we do
1473 support it when expanding trees into RTL. */
1474 /* FIXME: generate a call to __builtin_memset. */
1475 if (tree_int_cst_lt (slen
, len
))
1478 /* OK transform into builtin memcpy. */
1479 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1483 len
= fold_convert_loc (loc
, size_type_node
, len
);
1484 len
= force_gimple_operand_gsi (gsi
, len
, true,
1485 NULL_TREE
, true, GSI_SAME_STMT
);
1486 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1487 replace_call_with_call_and_fold (gsi
, repl
);
1491 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1494 Return NULL_TREE if no simplification was possible, otherwise return the
1495 simplified form of the call as a tree.
1497 The simplified form may be a constant or other expression which
1498 computes the same value, but in a more efficient manner (including
1499 calls to other builtin functions).
1501 The call may contain arguments which need to be evaluated, but
1502 which are not useful to determine the result of the call. In
1503 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1504 COMPOUND_EXPR will be an argument which must be evaluated.
1505 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1506 COMPOUND_EXPR in the chain will contain the tree for the simplified
1507 form of the builtin function call. */
1510 gimple_fold_builtin_strcat (gimple_stmt_iterator
*gsi
, tree dst
, tree src
)
1512 gimple stmt
= gsi_stmt (*gsi
);
1513 location_t loc
= gimple_location (stmt
);
1515 const char *p
= c_getstr (src
);
1517 /* If the string length is zero, return the dst parameter. */
1518 if (p
&& *p
== '\0')
1520 replace_call_with_value (gsi
, dst
);
1524 if (!optimize_bb_for_speed_p (gimple_bb (stmt
)))
1527 /* See if we can store by pieces into (dst + strlen(dst)). */
1529 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
1530 tree memcpy_fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1532 if (!strlen_fn
|| !memcpy_fn
)
1535 /* If the length of the source string isn't computable don't
1536 split strcat into strlen and memcpy. */
1537 tree len
= get_maxval_strlen (src
, 0);
1541 /* Create strlen (dst). */
1542 gimple_seq stmts
= NULL
, stmts2
;
1543 gimple repl
= gimple_build_call (strlen_fn
, 1, dst
);
1544 gimple_set_location (repl
, loc
);
1545 if (gimple_in_ssa_p (cfun
))
1546 newdst
= make_ssa_name (size_type_node
);
1548 newdst
= create_tmp_reg (size_type_node
);
1549 gimple_call_set_lhs (repl
, newdst
);
1550 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1552 /* Create (dst p+ strlen (dst)). */
1553 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
1554 newdst
= force_gimple_operand (newdst
, &stmts2
, true, NULL_TREE
);
1555 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
1557 len
= fold_convert_loc (loc
, size_type_node
, len
);
1558 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
1559 build_int_cst (size_type_node
, 1));
1560 len
= force_gimple_operand (len
, &stmts2
, true, NULL_TREE
);
1561 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
1563 repl
= gimple_build_call (memcpy_fn
, 3, newdst
, src
, len
);
1564 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1565 if (gimple_call_lhs (stmt
))
1567 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dst
);
1568 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1569 gsi_replace_with_seq_vops (gsi
, stmts
);
1570 /* gsi now points at the assignment to the lhs, get a
1571 stmt iterator to the memcpy call.
1572 ??? We can't use gsi_for_stmt as that doesn't work when the
1573 CFG isn't built yet. */
1574 gimple_stmt_iterator gsi2
= *gsi
;
1580 gsi_replace_with_seq_vops (gsi
, stmts
);
1586 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1587 are the arguments to the call. */
1590 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator
*gsi
)
1592 gimple stmt
= gsi_stmt (*gsi
);
1593 tree dest
= gimple_call_arg (stmt
, 0);
1594 tree src
= gimple_call_arg (stmt
, 1);
1595 tree size
= gimple_call_arg (stmt
, 2);
1601 /* If the SRC parameter is "", return DEST. */
1602 if (p
&& *p
== '\0')
1604 replace_call_with_value (gsi
, dest
);
1608 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
1611 /* If __builtin_strcat_chk is used, assume strcat is available. */
1612 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
1616 gimple repl
= gimple_build_call (fn
, 2, dest
, src
);
1617 replace_call_with_call_and_fold (gsi
, repl
);
1621 /* Simplify a call to the strncat builtin. */
1624 gimple_fold_builtin_strncat (gimple_stmt_iterator
*gsi
)
1626 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
1627 tree dst
= gimple_call_arg (stmt
, 0);
1628 tree src
= gimple_call_arg (stmt
, 1);
1629 tree len
= gimple_call_arg (stmt
, 2);
1631 const char *p
= c_getstr (src
);
1633 /* If the requested length is zero, or the src parameter string
1634 length is zero, return the dst parameter. */
1635 if (integer_zerop (len
) || (p
&& *p
== '\0'))
1637 replace_call_with_value (gsi
, dst
);
1641 /* If the requested len is greater than or equal to the string
1642 length, call strcat. */
1643 if (TREE_CODE (len
) == INTEGER_CST
&& p
1644 && compare_tree_int (len
, strlen (p
)) >= 0)
1646 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
1648 /* If the replacement _DECL isn't initialized, don't do the
1653 gcall
*repl
= gimple_build_call (fn
, 2, dst
, src
);
1654 replace_call_with_call_and_fold (gsi
, repl
);
1661 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1665 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator
*gsi
)
1667 gimple stmt
= gsi_stmt (*gsi
);
1668 tree dest
= gimple_call_arg (stmt
, 0);
1669 tree src
= gimple_call_arg (stmt
, 1);
1670 tree len
= gimple_call_arg (stmt
, 2);
1671 tree size
= gimple_call_arg (stmt
, 3);
1676 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1677 if ((p
&& *p
== '\0')
1678 || integer_zerop (len
))
1680 replace_call_with_value (gsi
, dest
);
1684 if (! tree_fits_uhwi_p (size
))
1687 if (! integer_all_onesp (size
))
1689 tree src_len
= c_strlen (src
, 1);
1691 && tree_fits_uhwi_p (src_len
)
1692 && tree_fits_uhwi_p (len
)
1693 && ! tree_int_cst_lt (len
, src_len
))
1695 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1696 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
1700 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
1701 replace_call_with_call_and_fold (gsi
, repl
);
1707 /* If __builtin_strncat_chk is used, assume strncat is available. */
1708 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
1712 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1713 replace_call_with_call_and_fold (gsi
, repl
);
1717 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1718 to the call. IGNORE is true if the value returned
1719 by the builtin will be ignored. UNLOCKED is true is true if this
1720 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1721 the known length of the string. Return NULL_TREE if no simplification
1725 gimple_fold_builtin_fputs (gimple_stmt_iterator
*gsi
,
1726 tree arg0
, tree arg1
,
1729 gimple stmt
= gsi_stmt (*gsi
);
1731 /* If we're using an unlocked function, assume the other unlocked
1732 functions exist explicitly. */
1733 tree
const fn_fputc
= (unlocked
1734 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
1735 : builtin_decl_implicit (BUILT_IN_FPUTC
));
1736 tree
const fn_fwrite
= (unlocked
1737 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
1738 : builtin_decl_implicit (BUILT_IN_FWRITE
));
1740 /* If the return value is used, don't do the transformation. */
1741 if (gimple_call_lhs (stmt
))
1744 /* Get the length of the string passed to fputs. If the length
1745 can't be determined, punt. */
1746 tree len
= get_maxval_strlen (arg0
, 0);
1748 || TREE_CODE (len
) != INTEGER_CST
)
1751 switch (compare_tree_int (len
, 1))
1753 case -1: /* length is 0, delete the call entirely . */
1754 replace_call_with_value (gsi
, integer_zero_node
);
1757 case 0: /* length is 1, call fputc. */
1759 const char *p
= c_getstr (arg0
);
1765 gimple repl
= gimple_build_call (fn_fputc
, 2,
1767 (integer_type_node
, p
[0]), arg1
);
1768 replace_call_with_call_and_fold (gsi
, repl
);
1773 case 1: /* length is greater than 1, call fwrite. */
1775 /* If optimizing for size keep fputs. */
1776 if (optimize_function_for_size_p (cfun
))
1778 /* New argument list transforming fputs(string, stream) to
1779 fwrite(string, 1, len, stream). */
1783 gimple repl
= gimple_build_call (fn_fwrite
, 4, arg0
,
1784 size_one_node
, len
, arg1
);
1785 replace_call_with_call_and_fold (gsi
, repl
);
1794 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1795 DEST, SRC, LEN, and SIZE are the arguments to the call.
1796 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1797 code of the builtin. If MAXLEN is not NULL, it is maximum length
1798 passed as third argument. */
1801 gimple_fold_builtin_memory_chk (gimple_stmt_iterator
*gsi
,
1802 tree dest
, tree src
, tree len
, tree size
,
1803 enum built_in_function fcode
)
1805 gimple stmt
= gsi_stmt (*gsi
);
1806 location_t loc
= gimple_location (stmt
);
1807 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1810 /* If SRC and DEST are the same (and not volatile), return DEST
1811 (resp. DEST+LEN for __mempcpy_chk). */
1812 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
1814 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
1816 replace_call_with_value (gsi
, dest
);
1821 tree temp
= fold_build_pointer_plus_loc (loc
, dest
, len
);
1822 temp
= force_gimple_operand_gsi (gsi
, temp
,
1823 false, NULL_TREE
, true,
1825 replace_call_with_value (gsi
, temp
);
1830 if (! tree_fits_uhwi_p (size
))
1833 tree maxlen
= get_maxval_strlen (len
, 2);
1834 if (! integer_all_onesp (size
))
1836 if (! tree_fits_uhwi_p (len
))
1838 /* If LEN is not constant, try MAXLEN too.
1839 For MAXLEN only allow optimizing into non-_ocs function
1840 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1841 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1843 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
1845 /* (void) __mempcpy_chk () can be optimized into
1846 (void) __memcpy_chk (). */
1847 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
1851 gimple repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1852 replace_call_with_call_and_fold (gsi
, repl
);
1861 if (tree_int_cst_lt (size
, maxlen
))
1866 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1867 mem{cpy,pcpy,move,set} is available. */
1870 case BUILT_IN_MEMCPY_CHK
:
1871 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
1873 case BUILT_IN_MEMPCPY_CHK
:
1874 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
1876 case BUILT_IN_MEMMOVE_CHK
:
1877 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
1879 case BUILT_IN_MEMSET_CHK
:
1880 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
1889 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1890 replace_call_with_call_and_fold (gsi
, repl
);
1894 /* Fold a call to the __st[rp]cpy_chk builtin.
1895 DEST, SRC, and SIZE are the arguments to the call.
1896 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1897 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1898 strings passed as second argument. */
1901 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator
*gsi
,
1903 tree src
, tree size
,
1904 enum built_in_function fcode
)
1906 gimple stmt
= gsi_stmt (*gsi
);
1907 location_t loc
= gimple_location (stmt
);
1908 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1911 /* If SRC and DEST are the same (and not volatile), return DEST. */
1912 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
1914 replace_call_with_value (gsi
, dest
);
1918 if (! tree_fits_uhwi_p (size
))
1921 tree maxlen
= get_maxval_strlen (src
, 1);
1922 if (! integer_all_onesp (size
))
1924 len
= c_strlen (src
, 1);
1925 if (! len
|| ! tree_fits_uhwi_p (len
))
1927 /* If LEN is not constant, try MAXLEN too.
1928 For MAXLEN only allow optimizing into non-_ocs function
1929 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1930 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1932 if (fcode
== BUILT_IN_STPCPY_CHK
)
1937 /* If return value of __stpcpy_chk is ignored,
1938 optimize into __strcpy_chk. */
1939 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
1943 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
1944 replace_call_with_call_and_fold (gsi
, repl
);
1948 if (! len
|| TREE_SIDE_EFFECTS (len
))
1951 /* If c_strlen returned something, but not a constant,
1952 transform __strcpy_chk into __memcpy_chk. */
1953 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
1957 len
= fold_convert_loc (loc
, size_type_node
, len
);
1958 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
1959 build_int_cst (size_type_node
, 1));
1960 len
= force_gimple_operand_gsi (gsi
, len
, true, NULL_TREE
,
1961 true, GSI_SAME_STMT
);
1962 gimple repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1963 replace_call_with_call_and_fold (gsi
, repl
);
1970 if (! tree_int_cst_lt (maxlen
, size
))
1974 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1975 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
1976 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
1980 gimple repl
= gimple_build_call (fn
, 2, dest
, src
);
1981 replace_call_with_call_and_fold (gsi
, repl
);
1985 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1986 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1987 length passed as third argument. IGNORE is true if return value can be
1988 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1991 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator
*gsi
,
1992 tree dest
, tree src
,
1993 tree len
, tree size
,
1994 enum built_in_function fcode
)
1996 gimple stmt
= gsi_stmt (*gsi
);
1997 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
2000 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
2002 /* If return value of __stpncpy_chk is ignored,
2003 optimize into __strncpy_chk. */
2004 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
2007 gimple repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
2008 replace_call_with_call_and_fold (gsi
, repl
);
2013 if (! tree_fits_uhwi_p (size
))
2016 tree maxlen
= get_maxval_strlen (len
, 2);
2017 if (! integer_all_onesp (size
))
2019 if (! tree_fits_uhwi_p (len
))
2021 /* If LEN is not constant, try MAXLEN too.
2022 For MAXLEN only allow optimizing into non-_ocs function
2023 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2024 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2030 if (tree_int_cst_lt (size
, maxlen
))
2034 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2035 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
2036 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
2040 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2041 replace_call_with_call_and_fold (gsi
, repl
);
2045 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2046 Return NULL_TREE if no simplification can be made. */
2049 gimple_fold_builtin_stpcpy (gimple_stmt_iterator
*gsi
)
2051 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2052 location_t loc
= gimple_location (stmt
);
2053 tree dest
= gimple_call_arg (stmt
, 0);
2054 tree src
= gimple_call_arg (stmt
, 1);
2055 tree fn
, len
, lenp1
;
2057 /* If the result is unused, replace stpcpy with strcpy. */
2058 if (gimple_call_lhs (stmt
) == NULL_TREE
)
2060 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2063 gimple_call_set_fndecl (stmt
, fn
);
2068 len
= c_strlen (src
, 1);
2070 || TREE_CODE (len
) != INTEGER_CST
)
2073 if (optimize_function_for_size_p (cfun
)
2074 /* If length is zero it's small enough. */
2075 && !integer_zerop (len
))
2078 /* If the source has a known length replace stpcpy with memcpy. */
2079 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2083 gimple_seq stmts
= NULL
;
2084 tree tem
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
2085 lenp1
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
,
2086 tem
, build_int_cst (size_type_node
, 1));
2087 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2088 gcall
*repl
= gimple_build_call (fn
, 3, dest
, src
, lenp1
);
2089 gimple_set_vuse (repl
, gimple_vuse (stmt
));
2090 gimple_set_vdef (repl
, gimple_vdef (stmt
));
2091 if (gimple_vdef (repl
)
2092 && TREE_CODE (gimple_vdef (repl
)) == SSA_NAME
)
2093 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
2094 gsi_insert_before (gsi
, repl
, GSI_SAME_STMT
);
2095 /* Replace the result with dest + len. */
2097 tem
= gimple_convert (&stmts
, loc
, sizetype
, len
);
2098 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2099 gassign
*ret
= gimple_build_assign (gimple_call_lhs (stmt
),
2100 POINTER_PLUS_EXPR
, dest
, tem
);
2101 gsi_replace (gsi
, ret
, true);
2102 /* Finally fold the memcpy call. */
2103 gimple_stmt_iterator gsi2
= *gsi
;
2109 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2110 NULL_TREE if a normal call should be emitted rather than expanding
2111 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2112 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2113 passed as second argument. */
2116 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator
*gsi
,
2117 enum built_in_function fcode
)
2119 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2120 tree dest
, size
, len
, fn
, fmt
, flag
;
2121 const char *fmt_str
;
2123 /* Verify the required arguments in the original call. */
2124 if (gimple_call_num_args (stmt
) < 5)
2127 dest
= gimple_call_arg (stmt
, 0);
2128 len
= gimple_call_arg (stmt
, 1);
2129 flag
= gimple_call_arg (stmt
, 2);
2130 size
= gimple_call_arg (stmt
, 3);
2131 fmt
= gimple_call_arg (stmt
, 4);
2133 if (! tree_fits_uhwi_p (size
))
2136 if (! integer_all_onesp (size
))
2138 tree maxlen
= get_maxval_strlen (len
, 2);
2139 if (! tree_fits_uhwi_p (len
))
2141 /* If LEN is not constant, try MAXLEN too.
2142 For MAXLEN only allow optimizing into non-_ocs function
2143 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2144 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2150 if (tree_int_cst_lt (size
, maxlen
))
2154 if (!init_target_chars ())
2157 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2158 or if format doesn't contain % chars or is "%s". */
2159 if (! integer_zerop (flag
))
2161 fmt_str
= c_getstr (fmt
);
2162 if (fmt_str
== NULL
)
2164 if (strchr (fmt_str
, target_percent
) != NULL
2165 && strcmp (fmt_str
, target_percent_s
))
2169 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2171 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
2172 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
2176 /* Replace the called function and the first 5 argument by 3 retaining
2177 trailing varargs. */
2178 gimple_call_set_fndecl (stmt
, fn
);
2179 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
2180 gimple_call_set_arg (stmt
, 0, dest
);
2181 gimple_call_set_arg (stmt
, 1, len
);
2182 gimple_call_set_arg (stmt
, 2, fmt
);
2183 for (unsigned i
= 3; i
< gimple_call_num_args (stmt
) - 2; ++i
)
2184 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
2185 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
2190 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2191 Return NULL_TREE if a normal call should be emitted rather than
2192 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2193 or BUILT_IN_VSPRINTF_CHK. */
2196 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator
*gsi
,
2197 enum built_in_function fcode
)
2199 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2200 tree dest
, size
, len
, fn
, fmt
, flag
;
2201 const char *fmt_str
;
2202 unsigned nargs
= gimple_call_num_args (stmt
);
2204 /* Verify the required arguments in the original call. */
2207 dest
= gimple_call_arg (stmt
, 0);
2208 flag
= gimple_call_arg (stmt
, 1);
2209 size
= gimple_call_arg (stmt
, 2);
2210 fmt
= gimple_call_arg (stmt
, 3);
2212 if (! tree_fits_uhwi_p (size
))
2217 if (!init_target_chars ())
2220 /* Check whether the format is a literal string constant. */
2221 fmt_str
= c_getstr (fmt
);
2222 if (fmt_str
!= NULL
)
2224 /* If the format doesn't contain % args or %%, we know the size. */
2225 if (strchr (fmt_str
, target_percent
) == 0)
2227 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
2228 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
2230 /* If the format is "%s" and first ... argument is a string literal,
2231 we know the size too. */
2232 else if (fcode
== BUILT_IN_SPRINTF_CHK
2233 && strcmp (fmt_str
, target_percent_s
) == 0)
2239 arg
= gimple_call_arg (stmt
, 4);
2240 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
2242 len
= c_strlen (arg
, 1);
2243 if (! len
|| ! tree_fits_uhwi_p (len
))
2250 if (! integer_all_onesp (size
))
2252 if (! len
|| ! tree_int_cst_lt (len
, size
))
2256 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2257 or if format doesn't contain % chars or is "%s". */
2258 if (! integer_zerop (flag
))
2260 if (fmt_str
== NULL
)
2262 if (strchr (fmt_str
, target_percent
) != NULL
2263 && strcmp (fmt_str
, target_percent_s
))
2267 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2268 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
2269 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
2273 /* Replace the called function and the first 4 argument by 2 retaining
2274 trailing varargs. */
2275 gimple_call_set_fndecl (stmt
, fn
);
2276 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
2277 gimple_call_set_arg (stmt
, 0, dest
);
2278 gimple_call_set_arg (stmt
, 1, fmt
);
2279 for (unsigned i
= 2; i
< gimple_call_num_args (stmt
) - 2; ++i
)
2280 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
2281 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
2286 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2287 ORIG may be null if this is a 2-argument call. We don't attempt to
2288 simplify calls with more than 3 arguments.
2290 Return NULL_TREE if no simplification was possible, otherwise return the
2291 simplified form of the call as a tree. If IGNORED is true, it means that
2292 the caller does not use the returned value of the function. */
2295 gimple_fold_builtin_sprintf (gimple_stmt_iterator
*gsi
)
2297 gimple stmt
= gsi_stmt (*gsi
);
2298 tree dest
= gimple_call_arg (stmt
, 0);
2299 tree fmt
= gimple_call_arg (stmt
, 1);
2300 tree orig
= NULL_TREE
;
2301 const char *fmt_str
= NULL
;
2303 /* Verify the required arguments in the original call. We deal with two
2304 types of sprintf() calls: 'sprintf (str, fmt)' and
2305 'sprintf (dest, "%s", orig)'. */
2306 if (gimple_call_num_args (stmt
) > 3)
2309 if (gimple_call_num_args (stmt
) == 3)
2310 orig
= gimple_call_arg (stmt
, 2);
2312 /* Check whether the format is a literal string constant. */
2313 fmt_str
= c_getstr (fmt
);
2314 if (fmt_str
== NULL
)
2317 if (!init_target_chars ())
2320 /* If the format doesn't contain % args or %%, use strcpy. */
2321 if (strchr (fmt_str
, target_percent
) == NULL
)
2323 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2328 /* Don't optimize sprintf (buf, "abc", ptr++). */
2332 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2333 'format' is known to contain no % formats. */
2334 gimple_seq stmts
= NULL
;
2335 gimple repl
= gimple_build_call (fn
, 2, dest
, fmt
);
2336 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2337 if (gimple_call_lhs (stmt
))
2339 repl
= gimple_build_assign (gimple_call_lhs (stmt
),
2340 build_int_cst (integer_type_node
,
2342 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2343 gsi_replace_with_seq_vops (gsi
, stmts
);
2344 /* gsi now points at the assignment to the lhs, get a
2345 stmt iterator to the memcpy call.
2346 ??? We can't use gsi_for_stmt as that doesn't work when the
2347 CFG isn't built yet. */
2348 gimple_stmt_iterator gsi2
= *gsi
;
2354 gsi_replace_with_seq_vops (gsi
, stmts
);
2360 /* If the format is "%s", use strcpy if the result isn't used. */
2361 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
2364 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2369 /* Don't crash on sprintf (str1, "%s"). */
2373 tree orig_len
= NULL_TREE
;
2374 if (gimple_call_lhs (stmt
))
2376 orig_len
= get_maxval_strlen (orig
, 0);
2381 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2382 gimple_seq stmts
= NULL
;
2383 gimple repl
= gimple_build_call (fn
, 2, dest
, orig
);
2384 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2385 if (gimple_call_lhs (stmt
))
2387 if (!useless_type_conversion_p (integer_type_node
,
2388 TREE_TYPE (orig_len
)))
2389 orig_len
= fold_convert (integer_type_node
, orig_len
);
2390 repl
= gimple_build_assign (gimple_call_lhs (stmt
), orig_len
);
2391 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2392 gsi_replace_with_seq_vops (gsi
, stmts
);
2393 /* gsi now points at the assignment to the lhs, get a
2394 stmt iterator to the memcpy call.
2395 ??? We can't use gsi_for_stmt as that doesn't work when the
2396 CFG isn't built yet. */
2397 gimple_stmt_iterator gsi2
= *gsi
;
2403 gsi_replace_with_seq_vops (gsi
, stmts
);
2411 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2412 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2413 attempt to simplify calls with more than 4 arguments.
2415 Return NULL_TREE if no simplification was possible, otherwise return the
2416 simplified form of the call as a tree. If IGNORED is true, it means that
2417 the caller does not use the returned value of the function. */
2420 gimple_fold_builtin_snprintf (gimple_stmt_iterator
*gsi
)
2422 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2423 tree dest
= gimple_call_arg (stmt
, 0);
2424 tree destsize
= gimple_call_arg (stmt
, 1);
2425 tree fmt
= gimple_call_arg (stmt
, 2);
2426 tree orig
= NULL_TREE
;
2427 const char *fmt_str
= NULL
;
2429 if (gimple_call_num_args (stmt
) > 4)
2432 if (gimple_call_num_args (stmt
) == 4)
2433 orig
= gimple_call_arg (stmt
, 3);
2435 if (!tree_fits_uhwi_p (destsize
))
2437 unsigned HOST_WIDE_INT destlen
= tree_to_uhwi (destsize
);
2439 /* Check whether the format is a literal string constant. */
2440 fmt_str
= c_getstr (fmt
);
2441 if (fmt_str
== NULL
)
2444 if (!init_target_chars ())
2447 /* If the format doesn't contain % args or %%, use strcpy. */
2448 if (strchr (fmt_str
, target_percent
) == NULL
)
2450 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2454 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2458 /* We could expand this as
2459 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2461 memcpy (str, fmt_with_nul_at_cstm1, cst);
2462 but in the former case that might increase code size
2463 and in the latter case grow .rodata section too much.
2465 size_t len
= strlen (fmt_str
);
2469 gimple_seq stmts
= NULL
;
2470 gimple repl
= gimple_build_call (fn
, 2, dest
, fmt
);
2471 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2472 if (gimple_call_lhs (stmt
))
2474 repl
= gimple_build_assign (gimple_call_lhs (stmt
),
2475 build_int_cst (integer_type_node
, len
));
2476 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2477 gsi_replace_with_seq_vops (gsi
, stmts
);
2478 /* gsi now points at the assignment to the lhs, get a
2479 stmt iterator to the memcpy call.
2480 ??? We can't use gsi_for_stmt as that doesn't work when the
2481 CFG isn't built yet. */
2482 gimple_stmt_iterator gsi2
= *gsi
;
2488 gsi_replace_with_seq_vops (gsi
, stmts
);
2494 /* If the format is "%s", use strcpy if the result isn't used. */
2495 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
2497 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2501 /* Don't crash on snprintf (str1, cst, "%s"). */
2505 tree orig_len
= get_maxval_strlen (orig
, 0);
2506 if (!orig_len
|| TREE_CODE (orig_len
) != INTEGER_CST
)
2509 /* We could expand this as
2510 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2512 memcpy (str1, str2_with_nul_at_cstm1, cst);
2513 but in the former case that might increase code size
2514 and in the latter case grow .rodata section too much.
2516 if (compare_tree_int (orig_len
, destlen
) >= 0)
2519 /* Convert snprintf (str1, cst, "%s", str2) into
2520 strcpy (str1, str2) if strlen (str2) < cst. */
2521 gimple_seq stmts
= NULL
;
2522 gimple repl
= gimple_build_call (fn
, 2, dest
, orig
);
2523 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2524 if (gimple_call_lhs (stmt
))
2526 if (!useless_type_conversion_p (integer_type_node
,
2527 TREE_TYPE (orig_len
)))
2528 orig_len
= fold_convert (integer_type_node
, orig_len
);
2529 repl
= gimple_build_assign (gimple_call_lhs (stmt
), orig_len
);
2530 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2531 gsi_replace_with_seq_vops (gsi
, stmts
);
2532 /* gsi now points at the assignment to the lhs, get a
2533 stmt iterator to the memcpy call.
2534 ??? We can't use gsi_for_stmt as that doesn't work when the
2535 CFG isn't built yet. */
2536 gimple_stmt_iterator gsi2
= *gsi
;
2542 gsi_replace_with_seq_vops (gsi
, stmts
);
2550 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2551 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2552 more than 3 arguments, and ARG may be null in the 2-argument case.
2554 Return NULL_TREE if no simplification was possible, otherwise return the
2555 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2556 code of the function to be simplified. */
2559 gimple_fold_builtin_fprintf (gimple_stmt_iterator
*gsi
,
2560 tree fp
, tree fmt
, tree arg
,
2561 enum built_in_function fcode
)
2563 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2564 tree fn_fputc
, fn_fputs
;
2565 const char *fmt_str
= NULL
;
2567 /* If the return value is used, don't do the transformation. */
2568 if (gimple_call_lhs (stmt
) != NULL_TREE
)
2571 /* Check whether the format is a literal string constant. */
2572 fmt_str
= c_getstr (fmt
);
2573 if (fmt_str
== NULL
)
2576 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
2578 /* If we're using an unlocked function, assume the other
2579 unlocked functions exist explicitly. */
2580 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
2581 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
2585 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
2586 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
2589 if (!init_target_chars ())
2592 /* If the format doesn't contain % args or %%, use strcpy. */
2593 if (strchr (fmt_str
, target_percent
) == NULL
)
2595 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
2599 /* If the format specifier was "", fprintf does nothing. */
2600 if (fmt_str
[0] == '\0')
2602 replace_call_with_value (gsi
, NULL_TREE
);
2606 /* When "string" doesn't contain %, replace all cases of
2607 fprintf (fp, string) with fputs (string, fp). The fputs
2608 builtin will take care of special cases like length == 1. */
2611 gcall
*repl
= gimple_build_call (fn_fputs
, 2, fmt
, fp
);
2612 replace_call_with_call_and_fold (gsi
, repl
);
2617 /* The other optimizations can be done only on the non-va_list variants. */
2618 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
2621 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2622 else if (strcmp (fmt_str
, target_percent_s
) == 0)
2624 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
2628 gcall
*repl
= gimple_build_call (fn_fputs
, 2, arg
, fp
);
2629 replace_call_with_call_and_fold (gsi
, repl
);
2634 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2635 else if (strcmp (fmt_str
, target_percent_c
) == 0)
2638 || ! useless_type_conversion_p (integer_type_node
, TREE_TYPE (arg
)))
2642 gcall
*repl
= gimple_build_call (fn_fputc
, 2, arg
, fp
);
2643 replace_call_with_call_and_fold (gsi
, repl
);
2651 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2652 FMT and ARG are the arguments to the call; we don't fold cases with
2653 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2655 Return NULL_TREE if no simplification was possible, otherwise return the
2656 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2657 code of the function to be simplified. */
2660 gimple_fold_builtin_printf (gimple_stmt_iterator
*gsi
, tree fmt
,
2661 tree arg
, enum built_in_function fcode
)
2663 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2664 tree fn_putchar
, fn_puts
, newarg
;
2665 const char *fmt_str
= NULL
;
2667 /* If the return value is used, don't do the transformation. */
2668 if (gimple_call_lhs (stmt
) != NULL_TREE
)
2671 /* Check whether the format is a literal string constant. */
2672 fmt_str
= c_getstr (fmt
);
2673 if (fmt_str
== NULL
)
2676 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
2678 /* If we're using an unlocked function, assume the other
2679 unlocked functions exist explicitly. */
2680 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
2681 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
2685 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
2686 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
2689 if (!init_target_chars ())
2692 if (strcmp (fmt_str
, target_percent_s
) == 0
2693 || strchr (fmt_str
, target_percent
) == NULL
)
2697 if (strcmp (fmt_str
, target_percent_s
) == 0)
2699 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
2702 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
2705 str
= c_getstr (arg
);
2711 /* The format specifier doesn't contain any '%' characters. */
2712 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
2718 /* If the string was "", printf does nothing. */
2721 replace_call_with_value (gsi
, NULL_TREE
);
2725 /* If the string has length of 1, call putchar. */
2728 /* Given printf("c"), (where c is any one character,)
2729 convert "c"[0] to an int and pass that to the replacement
2731 newarg
= build_int_cst (integer_type_node
, str
[0]);
2734 gcall
*repl
= gimple_build_call (fn_putchar
, 1, newarg
);
2735 replace_call_with_call_and_fold (gsi
, repl
);
2741 /* If the string was "string\n", call puts("string"). */
2742 size_t len
= strlen (str
);
2743 if ((unsigned char)str
[len
- 1] == target_newline
2744 && (size_t) (int) len
== len
2748 tree offset_node
, string_cst
;
2750 /* Create a NUL-terminated string that's one char shorter
2751 than the original, stripping off the trailing '\n'. */
2752 newarg
= build_string_literal (len
, str
);
2753 string_cst
= string_constant (newarg
, &offset_node
);
2754 gcc_checking_assert (string_cst
2755 && (TREE_STRING_LENGTH (string_cst
)
2757 && integer_zerop (offset_node
)
2759 TREE_STRING_POINTER (string_cst
)[len
- 1]
2761 /* build_string_literal creates a new STRING_CST,
2762 modify it in place to avoid double copying. */
2763 newstr
= CONST_CAST (char *, TREE_STRING_POINTER (string_cst
));
2764 newstr
[len
- 1] = '\0';
2767 gcall
*repl
= gimple_build_call (fn_puts
, 1, newarg
);
2768 replace_call_with_call_and_fold (gsi
, repl
);
2773 /* We'd like to arrange to call fputs(string,stdout) here,
2774 but we need stdout and don't have a way to get it yet. */
2779 /* The other optimizations can be done only on the non-va_list variants. */
2780 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
2783 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2784 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
2786 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
2790 gcall
*repl
= gimple_build_call (fn_puts
, 1, arg
);
2791 replace_call_with_call_and_fold (gsi
, repl
);
2796 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2797 else if (strcmp (fmt_str
, target_percent_c
) == 0)
2799 if (!arg
|| ! useless_type_conversion_p (integer_type_node
,
2804 gcall
*repl
= gimple_build_call (fn_putchar
, 1, arg
);
2805 replace_call_with_call_and_fold (gsi
, repl
);
2815 /* Fold a call to __builtin_strlen with known length LEN. */
2818 gimple_fold_builtin_strlen (gimple_stmt_iterator
*gsi
)
2820 gimple stmt
= gsi_stmt (*gsi
);
2821 tree len
= get_maxval_strlen (gimple_call_arg (stmt
, 0), 0);
2824 len
= force_gimple_operand_gsi (gsi
, len
, true, NULL
, true, GSI_SAME_STMT
);
2825 replace_call_with_value (gsi
, len
);
2830 /* Fold the non-target builtin at *GSI and return whether any simplification
2834 gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
2836 gcall
*stmt
= as_a
<gcall
*>(gsi_stmt (*gsi
));
2837 tree callee
= gimple_call_fndecl (stmt
);
2839 /* Give up for always_inline inline builtins until they are
2841 if (avoid_folding_inline_builtin (callee
))
2844 unsigned n
= gimple_call_num_args (stmt
);
2845 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
2848 case BUILT_IN_BZERO
:
2849 return gimple_fold_builtin_memset (gsi
, integer_zero_node
,
2850 gimple_call_arg (stmt
, 1));
2851 case BUILT_IN_MEMSET
:
2852 return gimple_fold_builtin_memset (gsi
,
2853 gimple_call_arg (stmt
, 1),
2854 gimple_call_arg (stmt
, 2));
2855 case BUILT_IN_BCOPY
:
2856 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 1),
2857 gimple_call_arg (stmt
, 0), 3);
2858 case BUILT_IN_MEMCPY
:
2859 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2860 gimple_call_arg (stmt
, 1), 0);
2861 case BUILT_IN_MEMPCPY
:
2862 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2863 gimple_call_arg (stmt
, 1), 1);
2864 case BUILT_IN_MEMMOVE
:
2865 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2866 gimple_call_arg (stmt
, 1), 3);
2867 case BUILT_IN_SPRINTF_CHK
:
2868 case BUILT_IN_VSPRINTF_CHK
:
2869 return gimple_fold_builtin_sprintf_chk (gsi
, fcode
);
2870 case BUILT_IN_STRCAT_CHK
:
2871 return gimple_fold_builtin_strcat_chk (gsi
);
2872 case BUILT_IN_STRNCAT_CHK
:
2873 return gimple_fold_builtin_strncat_chk (gsi
);
2874 case BUILT_IN_STRLEN
:
2875 return gimple_fold_builtin_strlen (gsi
);
2876 case BUILT_IN_STRCPY
:
2877 return gimple_fold_builtin_strcpy (gsi
,
2878 gimple_call_arg (stmt
, 0),
2879 gimple_call_arg (stmt
, 1));
2880 case BUILT_IN_STRNCPY
:
2881 return gimple_fold_builtin_strncpy (gsi
,
2882 gimple_call_arg (stmt
, 0),
2883 gimple_call_arg (stmt
, 1),
2884 gimple_call_arg (stmt
, 2));
2885 case BUILT_IN_STRCAT
:
2886 return gimple_fold_builtin_strcat (gsi
, gimple_call_arg (stmt
, 0),
2887 gimple_call_arg (stmt
, 1));
2888 case BUILT_IN_STRNCAT
:
2889 return gimple_fold_builtin_strncat (gsi
);
2890 case BUILT_IN_FPUTS
:
2891 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
2892 gimple_call_arg (stmt
, 1), false);
2893 case BUILT_IN_FPUTS_UNLOCKED
:
2894 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
2895 gimple_call_arg (stmt
, 1), true);
2896 case BUILT_IN_MEMCPY_CHK
:
2897 case BUILT_IN_MEMPCPY_CHK
:
2898 case BUILT_IN_MEMMOVE_CHK
:
2899 case BUILT_IN_MEMSET_CHK
:
2900 return gimple_fold_builtin_memory_chk (gsi
,
2901 gimple_call_arg (stmt
, 0),
2902 gimple_call_arg (stmt
, 1),
2903 gimple_call_arg (stmt
, 2),
2904 gimple_call_arg (stmt
, 3),
2906 case BUILT_IN_STPCPY
:
2907 return gimple_fold_builtin_stpcpy (gsi
);
2908 case BUILT_IN_STRCPY_CHK
:
2909 case BUILT_IN_STPCPY_CHK
:
2910 return gimple_fold_builtin_stxcpy_chk (gsi
,
2911 gimple_call_arg (stmt
, 0),
2912 gimple_call_arg (stmt
, 1),
2913 gimple_call_arg (stmt
, 2),
2915 case BUILT_IN_STRNCPY_CHK
:
2916 case BUILT_IN_STPNCPY_CHK
:
2917 return gimple_fold_builtin_stxncpy_chk (gsi
,
2918 gimple_call_arg (stmt
, 0),
2919 gimple_call_arg (stmt
, 1),
2920 gimple_call_arg (stmt
, 2),
2921 gimple_call_arg (stmt
, 3),
2923 case BUILT_IN_SNPRINTF_CHK
:
2924 case BUILT_IN_VSNPRINTF_CHK
:
2925 return gimple_fold_builtin_snprintf_chk (gsi
, fcode
);
2926 case BUILT_IN_SNPRINTF
:
2927 return gimple_fold_builtin_snprintf (gsi
);
2928 case BUILT_IN_SPRINTF
:
2929 return gimple_fold_builtin_sprintf (gsi
);
2930 case BUILT_IN_FPRINTF
:
2931 case BUILT_IN_FPRINTF_UNLOCKED
:
2932 case BUILT_IN_VFPRINTF
:
2933 if (n
== 2 || n
== 3)
2934 return gimple_fold_builtin_fprintf (gsi
,
2935 gimple_call_arg (stmt
, 0),
2936 gimple_call_arg (stmt
, 1),
2938 ? gimple_call_arg (stmt
, 2)
2942 case BUILT_IN_FPRINTF_CHK
:
2943 case BUILT_IN_VFPRINTF_CHK
:
2944 if (n
== 3 || n
== 4)
2945 return gimple_fold_builtin_fprintf (gsi
,
2946 gimple_call_arg (stmt
, 0),
2947 gimple_call_arg (stmt
, 2),
2949 ? gimple_call_arg (stmt
, 3)
2953 case BUILT_IN_PRINTF
:
2954 case BUILT_IN_PRINTF_UNLOCKED
:
2955 case BUILT_IN_VPRINTF
:
2956 if (n
== 1 || n
== 2)
2957 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 0),
2959 ? gimple_call_arg (stmt
, 1)
2960 : NULL_TREE
, fcode
);
2962 case BUILT_IN_PRINTF_CHK
:
2963 case BUILT_IN_VPRINTF_CHK
:
2964 if (n
== 2 || n
== 3)
2965 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 1),
2967 ? gimple_call_arg (stmt
, 2)
2968 : NULL_TREE
, fcode
);
2972 /* Try the generic builtin folder. */
2973 bool ignore
= (gimple_call_lhs (stmt
) == NULL
);
2974 tree result
= fold_call_stmt (stmt
, ignore
);
2978 STRIP_NOPS (result
);
2980 result
= fold_convert (gimple_call_return_type (stmt
), result
);
2981 if (!update_call_from_tree (gsi
, result
))
2982 gimplify_and_update_call_from_tree (gsi
, result
);
2989 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2990 doesn't fit into TYPE. The test for overflow should be regardless of
2991 -fwrapv, and even for unsigned types. */
2994 arith_overflowed_p (enum tree_code code
, const_tree type
,
2995 const_tree arg0
, const_tree arg1
)
2997 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION
* 2) widest2_int
;
2998 typedef generic_wide_int
<wi::extended_tree
<WIDE_INT_MAX_PRECISION
* 2> >
3000 widest2_int warg0
= widest2_int_cst (arg0
);
3001 widest2_int warg1
= widest2_int_cst (arg1
);
3005 case PLUS_EXPR
: wres
= wi::add (warg0
, warg1
); break;
3006 case MINUS_EXPR
: wres
= wi::sub (warg0
, warg1
); break;
3007 case MULT_EXPR
: wres
= wi::mul (warg0
, warg1
); break;
3008 default: gcc_unreachable ();
3010 signop sign
= TYPE_SIGN (type
);
3011 if (sign
== UNSIGNED
&& wi::neg_p (wres
))
3013 return wi::min_precision (wres
, sign
) > TYPE_PRECISION (type
);
3016 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3017 The statement may be replaced by another statement, e.g., if the call
3018 simplifies to a constant value. Return true if any changes were made.
3019 It is assumed that the operands have been previously folded. */
3022 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
3024 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3026 bool changed
= false;
3029 /* Fold *& in call arguments. */
3030 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3031 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
3033 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
3036 gimple_call_set_arg (stmt
, i
, tmp
);
3041 /* Check for virtual calls that became direct calls. */
3042 callee
= gimple_call_fn (stmt
);
3043 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
3045 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
3047 if (dump_file
&& virtual_method_call_p (callee
)
3048 && !possible_polymorphic_call_target_p
3049 (callee
, stmt
, cgraph_node::get (gimple_call_addr_fndecl
3050 (OBJ_TYPE_REF_EXPR (callee
)))))
3053 "Type inheritance inconsistent devirtualization of ");
3054 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
3055 fprintf (dump_file
, " to ");
3056 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
3057 fprintf (dump_file
, "\n");
3060 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
3063 else if (flag_devirtualize
&& !inplace
&& virtual_method_call_p (callee
))
3066 vec
<cgraph_node
*>targets
3067 = possible_polymorphic_call_targets (callee
, stmt
, &final
);
3068 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
3070 tree lhs
= gimple_call_lhs (stmt
);
3071 if (dump_enabled_p ())
3073 location_t loc
= gimple_location_safe (stmt
);
3074 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
3075 "folding virtual function call to %s\n",
3076 targets
.length () == 1
3077 ? targets
[0]->name ()
3078 : "__builtin_unreachable");
3080 if (targets
.length () == 1)
3082 gimple_call_set_fndecl (stmt
, targets
[0]->decl
);
3084 /* If the call becomes noreturn, remove the lhs. */
3085 if (lhs
&& (gimple_call_flags (stmt
) & ECF_NORETURN
))
3087 if (TREE_CODE (lhs
) == SSA_NAME
)
3089 tree var
= create_tmp_var (TREE_TYPE (lhs
));
3090 tree def
= get_or_create_ssa_default_def (cfun
, var
);
3091 gimple new_stmt
= gimple_build_assign (lhs
, def
);
3092 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
3094 gimple_call_set_lhs (stmt
, NULL_TREE
);
3096 maybe_remove_unused_call_args (cfun
, stmt
);
3100 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
3101 gimple new_stmt
= gimple_build_call (fndecl
, 0);
3102 gimple_set_location (new_stmt
, gimple_location (stmt
));
3103 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
3105 tree var
= create_tmp_var (TREE_TYPE (lhs
));
3106 tree def
= get_or_create_ssa_default_def (cfun
, var
);
3108 /* To satisfy condition for
3109 cgraph_update_edges_for_call_stmt_node,
3110 we need to preserve GIMPLE_CALL statement
3111 at position of GSI iterator. */
3112 update_call_from_tree (gsi
, def
);
3113 gsi_insert_before (gsi
, new_stmt
, GSI_NEW_STMT
);
3117 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
3118 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
3119 gsi_replace (gsi
, new_stmt
, false);
3127 /* Check for indirect calls that became direct calls, and then
3128 no longer require a static chain. */
3129 if (gimple_call_chain (stmt
))
3131 tree fn
= gimple_call_fndecl (stmt
);
3132 if (fn
&& !DECL_STATIC_CHAIN (fn
))
3134 gimple_call_set_chain (stmt
, NULL
);
3139 tree tmp
= maybe_fold_reference (gimple_call_chain (stmt
), false);
3142 gimple_call_set_chain (stmt
, tmp
);
3151 /* Check for builtins that CCP can handle using information not
3152 available in the generic fold routines. */
3153 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
3155 if (gimple_fold_builtin (gsi
))
3158 else if (gimple_call_builtin_p (stmt
, BUILT_IN_MD
))
3160 changed
|= targetm
.gimple_fold_builtin (gsi
);
3162 else if (gimple_call_internal_p (stmt
))
3164 enum tree_code subcode
= ERROR_MARK
;
3165 tree result
= NULL_TREE
;
3166 bool cplx_result
= false;
3167 tree overflow
= NULL_TREE
;
3168 switch (gimple_call_internal_fn (stmt
))
3170 case IFN_BUILTIN_EXPECT
:
3171 result
= fold_builtin_expect (gimple_location (stmt
),
3172 gimple_call_arg (stmt
, 0),
3173 gimple_call_arg (stmt
, 1),
3174 gimple_call_arg (stmt
, 2));
3176 case IFN_UBSAN_OBJECT_SIZE
:
3177 if (integer_all_onesp (gimple_call_arg (stmt
, 2))
3178 || (TREE_CODE (gimple_call_arg (stmt
, 1)) == INTEGER_CST
3179 && TREE_CODE (gimple_call_arg (stmt
, 2)) == INTEGER_CST
3180 && tree_int_cst_le (gimple_call_arg (stmt
, 1),
3181 gimple_call_arg (stmt
, 2))))
3183 gsi_replace (gsi
, gimple_build_nop (), true);
3184 unlink_stmt_vdef (stmt
);
3185 release_defs (stmt
);
3189 case IFN_UBSAN_CHECK_ADD
:
3190 subcode
= PLUS_EXPR
;
3192 case IFN_UBSAN_CHECK_SUB
:
3193 subcode
= MINUS_EXPR
;
3195 case IFN_UBSAN_CHECK_MUL
:
3196 subcode
= MULT_EXPR
;
3198 case IFN_ADD_OVERFLOW
:
3199 subcode
= PLUS_EXPR
;
3202 case IFN_SUB_OVERFLOW
:
3203 subcode
= MINUS_EXPR
;
3206 case IFN_MUL_OVERFLOW
:
3207 subcode
= MULT_EXPR
;
3213 if (subcode
!= ERROR_MARK
)
3215 tree arg0
= gimple_call_arg (stmt
, 0);
3216 tree arg1
= gimple_call_arg (stmt
, 1);
3217 tree type
= TREE_TYPE (arg0
);
3220 tree lhs
= gimple_call_lhs (stmt
);
3221 if (lhs
== NULL_TREE
)
3224 type
= TREE_TYPE (TREE_TYPE (lhs
));
3226 if (type
== NULL_TREE
)
3228 /* x = y + 0; x = y - 0; x = y * 0; */
3229 else if (integer_zerop (arg1
))
3230 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg0
;
3231 /* x = 0 + y; x = 0 * y; */
3232 else if (subcode
!= MINUS_EXPR
&& integer_zerop (arg0
))
3233 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg1
;
3235 else if (subcode
== MINUS_EXPR
&& operand_equal_p (arg0
, arg1
, 0))
3236 result
= integer_zero_node
;
3237 /* x = y * 1; x = 1 * y; */
3238 else if (subcode
== MULT_EXPR
&& integer_onep (arg1
))
3240 else if (subcode
== MULT_EXPR
&& integer_onep (arg0
))
3242 else if (TREE_CODE (arg0
) == INTEGER_CST
3243 && TREE_CODE (arg1
) == INTEGER_CST
)
3246 result
= int_const_binop (subcode
, fold_convert (type
, arg0
),
3247 fold_convert (type
, arg1
));
3249 result
= int_const_binop (subcode
, arg0
, arg1
);
3250 if (result
&& arith_overflowed_p (subcode
, type
, arg0
, arg1
))
3253 overflow
= build_one_cst (type
);
3260 if (result
== integer_zero_node
)
3261 result
= build_zero_cst (type
);
3262 else if (cplx_result
&& TREE_TYPE (result
) != type
)
3264 if (TREE_CODE (result
) == INTEGER_CST
)
3266 if (arith_overflowed_p (PLUS_EXPR
, type
, result
,
3268 overflow
= build_one_cst (type
);
3270 else if ((!TYPE_UNSIGNED (TREE_TYPE (result
))
3271 && TYPE_UNSIGNED (type
))
3272 || (TYPE_PRECISION (type
)
3273 < (TYPE_PRECISION (TREE_TYPE (result
))
3274 + (TYPE_UNSIGNED (TREE_TYPE (result
))
3275 && !TYPE_UNSIGNED (type
)))))
3278 result
= fold_convert (type
, result
);
3285 if (TREE_CODE (result
) == INTEGER_CST
&& TREE_OVERFLOW (result
))
3286 result
= drop_tree_overflow (result
);
3289 if (overflow
== NULL_TREE
)
3290 overflow
= build_zero_cst (TREE_TYPE (result
));
3291 tree ctype
= build_complex_type (TREE_TYPE (result
));
3292 if (TREE_CODE (result
) == INTEGER_CST
3293 && TREE_CODE (overflow
) == INTEGER_CST
)
3294 result
= build_complex (ctype
, result
, overflow
);
3296 result
= build2_loc (gimple_location (stmt
), COMPLEX_EXPR
,
3297 ctype
, result
, overflow
);
3299 if (!update_call_from_tree (gsi
, result
))
3300 gimplify_and_update_call_from_tree (gsi
, result
);
3309 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3312 Replaces *GSI with the simplification result in RCODE and OPS
3313 and the associated statements in *SEQ. Does the replacement
3314 according to INPLACE and returns true if the operation succeeded. */
3317 replace_stmt_with_simplification (gimple_stmt_iterator
*gsi
,
3318 code_helper rcode
, tree
*ops
,
3319 gimple_seq
*seq
, bool inplace
)
3321 gimple stmt
= gsi_stmt (*gsi
);
3323 /* Play safe and do not allow abnormals to be mentioned in
3324 newly created statements. See also maybe_push_res_to_seq. */
3325 if ((TREE_CODE (ops
[0]) == SSA_NAME
3326 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[0]))
3328 && TREE_CODE (ops
[1]) == SSA_NAME
3329 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[1]))
3331 && TREE_CODE (ops
[2]) == SSA_NAME
3332 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[2])))
3335 if (gcond
*cond_stmt
= dyn_cast
<gcond
*> (stmt
))
3337 gcc_assert (rcode
.is_tree_code ());
3338 if (TREE_CODE_CLASS ((enum tree_code
)rcode
) == tcc_comparison
3339 /* GIMPLE_CONDs condition may not throw. */
3340 && (!flag_exceptions
3341 || !cfun
->can_throw_non_call_exceptions
3342 || !operation_could_trap_p (rcode
,
3343 FLOAT_TYPE_P (TREE_TYPE (ops
[0])),
3345 gimple_cond_set_condition (cond_stmt
, rcode
, ops
[0], ops
[1]);
3346 else if (rcode
== SSA_NAME
)
3347 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, ops
[0],
3348 build_zero_cst (TREE_TYPE (ops
[0])));
3349 else if (rcode
== INTEGER_CST
)
3351 if (integer_zerop (ops
[0]))
3352 gimple_cond_make_false (cond_stmt
);
3354 gimple_cond_make_true (cond_stmt
);
3358 tree res
= maybe_push_res_to_seq (rcode
, boolean_type_node
,
3362 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, res
,
3363 build_zero_cst (TREE_TYPE (res
)));
3367 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3369 fprintf (dump_file
, "gimple_simplified to ");
3370 if (!gimple_seq_empty_p (*seq
))
3371 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
3372 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
3375 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
3378 else if (is_gimple_assign (stmt
)
3379 && rcode
.is_tree_code ())
3382 || gimple_num_ops (stmt
) > get_gimple_rhs_num_ops (rcode
))
3384 maybe_build_generic_op (rcode
,
3385 TREE_TYPE (gimple_assign_lhs (stmt
)),
3386 &ops
[0], ops
[1], ops
[2]);
3387 gimple_assign_set_rhs_with_ops (gsi
, rcode
, ops
[0], ops
[1], ops
[2]);
3388 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3390 fprintf (dump_file
, "gimple_simplified to ");
3391 if (!gimple_seq_empty_p (*seq
))
3392 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
3393 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
3396 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
3402 if (gimple_has_lhs (stmt
))
3404 tree lhs
= gimple_get_lhs (stmt
);
3405 if (!maybe_push_res_to_seq (rcode
, TREE_TYPE (lhs
),
3408 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3410 fprintf (dump_file
, "gimple_simplified to ");
3411 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
3413 gsi_replace_with_seq_vops (gsi
, *seq
);
3423 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3426 maybe_canonicalize_mem_ref_addr (tree
*t
)
3430 if (TREE_CODE (*t
) == ADDR_EXPR
)
3431 t
= &TREE_OPERAND (*t
, 0);
3433 while (handled_component_p (*t
))
3434 t
= &TREE_OPERAND (*t
, 0);
3436 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3437 of invariant addresses into a SSA name MEM_REF address. */
3438 if (TREE_CODE (*t
) == MEM_REF
3439 || TREE_CODE (*t
) == TARGET_MEM_REF
)
3441 tree addr
= TREE_OPERAND (*t
, 0);
3442 if (TREE_CODE (addr
) == ADDR_EXPR
3443 && (TREE_CODE (TREE_OPERAND (addr
, 0)) == MEM_REF
3444 || handled_component_p (TREE_OPERAND (addr
, 0))))
3447 HOST_WIDE_INT coffset
;
3448 base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
3453 TREE_OPERAND (*t
, 0) = build_fold_addr_expr (base
);
3454 TREE_OPERAND (*t
, 1) = int_const_binop (PLUS_EXPR
,
3455 TREE_OPERAND (*t
, 1),
3456 size_int (coffset
));
3459 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t
, 0)) == DEBUG_EXPR_DECL
3460 || is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)));
3463 /* Canonicalize back MEM_REFs to plain reference trees if the object
3464 accessed is a decl that has the same access semantics as the MEM_REF. */
3465 if (TREE_CODE (*t
) == MEM_REF
3466 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
3467 && integer_zerop (TREE_OPERAND (*t
, 1))
3468 && MR_DEPENDENCE_CLIQUE (*t
) == 0)
3470 tree decl
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
3471 tree alias_type
= TREE_TYPE (TREE_OPERAND (*t
, 1));
3472 if (/* Same volatile qualification. */
3473 TREE_THIS_VOLATILE (*t
) == TREE_THIS_VOLATILE (decl
)
3474 /* Same TBAA behavior with -fstrict-aliasing. */
3475 && !TYPE_REF_CAN_ALIAS_ALL (alias_type
)
3476 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl
))
3477 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type
)))
3478 /* Same alignment. */
3479 && TYPE_ALIGN (TREE_TYPE (decl
)) == TYPE_ALIGN (TREE_TYPE (*t
))
3480 /* We have to look out here to not drop a required conversion
3481 from the rhs to the lhs if *t appears on the lhs or vice-versa
3482 if it appears on the rhs. Thus require strict type
3484 && types_compatible_p (TREE_TYPE (*t
), TREE_TYPE (decl
)))
3486 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
3491 /* Canonicalize TARGET_MEM_REF in particular with respect to
3492 the indexes becoming constant. */
3493 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
3495 tree tem
= maybe_fold_tmr (*t
);
3506 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3507 distinguishes both cases. */
3510 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
, tree (*valueize
) (tree
))
3512 bool changed
= false;
3513 gimple stmt
= gsi_stmt (*gsi
);
3516 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3518 ??? This shouldn't be done in generic folding but in the
3519 propagation helpers which also know whether an address was
3521 switch (gimple_code (stmt
))
3524 if (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
3526 tree
*rhs
= gimple_assign_rhs1_ptr (stmt
);
3527 if ((REFERENCE_CLASS_P (*rhs
)
3528 || TREE_CODE (*rhs
) == ADDR_EXPR
)
3529 && maybe_canonicalize_mem_ref_addr (rhs
))
3531 tree
*lhs
= gimple_assign_lhs_ptr (stmt
);
3532 if (REFERENCE_CLASS_P (*lhs
)
3533 && maybe_canonicalize_mem_ref_addr (lhs
))
3539 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3541 tree
*arg
= gimple_call_arg_ptr (stmt
, i
);
3542 if (REFERENCE_CLASS_P (*arg
)
3543 && maybe_canonicalize_mem_ref_addr (arg
))
3546 tree
*lhs
= gimple_call_lhs_ptr (stmt
);
3548 && REFERENCE_CLASS_P (*lhs
)
3549 && maybe_canonicalize_mem_ref_addr (lhs
))
3555 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
3556 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
3558 tree link
= gimple_asm_output_op (asm_stmt
, i
);
3559 tree op
= TREE_VALUE (link
);
3560 if (REFERENCE_CLASS_P (op
)
3561 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
3564 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
3566 tree link
= gimple_asm_input_op (asm_stmt
, i
);
3567 tree op
= TREE_VALUE (link
);
3568 if ((REFERENCE_CLASS_P (op
)
3569 || TREE_CODE (op
) == ADDR_EXPR
)
3570 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
3576 if (gimple_debug_bind_p (stmt
))
3578 tree
*val
= gimple_debug_bind_get_value_ptr (stmt
);
3580 && (REFERENCE_CLASS_P (*val
)
3581 || TREE_CODE (*val
) == ADDR_EXPR
)
3582 && maybe_canonicalize_mem_ref_addr (val
))
3589 /* Dispatch to pattern-based folding. */
3591 || is_gimple_assign (stmt
)
3592 || gimple_code (stmt
) == GIMPLE_COND
)
3594 gimple_seq seq
= NULL
;
3597 if (gimple_simplify (stmt
, &rcode
, ops
, inplace
? NULL
: &seq
,
3598 valueize
, valueize
))
3600 if (replace_stmt_with_simplification (gsi
, rcode
, ops
, &seq
, inplace
))
3603 gimple_seq_discard (seq
);
3607 stmt
= gsi_stmt (*gsi
);
3609 /* Fold the main computation performed by the statement. */
3610 switch (gimple_code (stmt
))
3614 unsigned old_num_ops
= gimple_num_ops (stmt
);
3615 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
3616 tree lhs
= gimple_assign_lhs (stmt
);
3618 /* First canonicalize operand order. This avoids building new
3619 trees if this is the only thing fold would later do. */
3620 if ((commutative_tree_code (subcode
)
3621 || commutative_ternary_tree_code (subcode
))
3622 && tree_swap_operands_p (gimple_assign_rhs1 (stmt
),
3623 gimple_assign_rhs2 (stmt
), false))
3625 tree tem
= gimple_assign_rhs1 (stmt
);
3626 gimple_assign_set_rhs1 (stmt
, gimple_assign_rhs2 (stmt
));
3627 gimple_assign_set_rhs2 (stmt
, tem
);
3630 new_rhs
= fold_gimple_assign (gsi
);
3632 && !useless_type_conversion_p (TREE_TYPE (lhs
),
3633 TREE_TYPE (new_rhs
)))
3634 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
3637 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
3639 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
3646 changed
|= fold_gimple_cond (as_a
<gcond
*> (stmt
));
3650 changed
|= gimple_fold_call (gsi
, inplace
);
3654 /* Fold *& in asm operands. */
3656 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
3658 const char **oconstraints
;
3659 const char *constraint
;
3660 bool allows_mem
, allows_reg
;
3662 noutputs
= gimple_asm_noutputs (asm_stmt
);
3663 oconstraints
= XALLOCAVEC (const char *, noutputs
);
3665 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
3667 tree link
= gimple_asm_output_op (asm_stmt
, i
);
3668 tree op
= TREE_VALUE (link
);
3670 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
3671 if (REFERENCE_CLASS_P (op
)
3672 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
3674 TREE_VALUE (link
) = op
;
3678 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
3680 tree link
= gimple_asm_input_op (asm_stmt
, i
);
3681 tree op
= TREE_VALUE (link
);
3683 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
3684 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
3685 oconstraints
, &allows_mem
, &allows_reg
);
3686 if (REFERENCE_CLASS_P (op
)
3687 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
3690 TREE_VALUE (link
) = op
;
3698 if (gimple_debug_bind_p (stmt
))
3700 tree val
= gimple_debug_bind_get_value (stmt
);
3702 && REFERENCE_CLASS_P (val
))
3704 tree tem
= maybe_fold_reference (val
, false);
3707 gimple_debug_bind_set_value (stmt
, tem
);
3712 && TREE_CODE (val
) == ADDR_EXPR
)
3714 tree ref
= TREE_OPERAND (val
, 0);
3715 tree tem
= maybe_fold_reference (ref
, false);
3718 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
3719 gimple_debug_bind_set_value (stmt
, tem
);
3729 stmt
= gsi_stmt (*gsi
);
3731 /* Fold *& on the lhs. */
3732 if (gimple_has_lhs (stmt
))
3734 tree lhs
= gimple_get_lhs (stmt
);
3735 if (lhs
&& REFERENCE_CLASS_P (lhs
))
3737 tree new_lhs
= maybe_fold_reference (lhs
, true);
3740 gimple_set_lhs (stmt
, new_lhs
);
3749 /* Valueziation callback that ends up not following SSA edges. */
3752 no_follow_ssa_edges (tree
)
3757 /* Valueization callback that ends up following single-use SSA edges only. */
3760 follow_single_use_edges (tree val
)
3762 if (TREE_CODE (val
) == SSA_NAME
3763 && !has_single_use (val
))
3768 /* Fold the statement pointed to by GSI. In some cases, this function may
3769 replace the whole statement with a new one. Returns true iff folding
3771 The statement pointed to by GSI should be in valid gimple form but may
3772 be in unfolded state as resulting from for example constant propagation
3773 which can produce *&x = 0. */
3776 fold_stmt (gimple_stmt_iterator
*gsi
)
3778 return fold_stmt_1 (gsi
, false, no_follow_ssa_edges
);
3782 fold_stmt (gimple_stmt_iterator
*gsi
, tree (*valueize
) (tree
))
3784 return fold_stmt_1 (gsi
, false, valueize
);
3787 /* Perform the minimal folding on statement *GSI. Only operations like
3788 *&x created by constant propagation are handled. The statement cannot
3789 be replaced with a new one. Return true if the statement was
3790 changed, false otherwise.
3791 The statement *GSI should be in valid gimple form but may
3792 be in unfolded state as resulting from for example constant propagation
3793 which can produce *&x = 0. */
3796 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
3798 gimple stmt
= gsi_stmt (*gsi
);
3799 bool changed
= fold_stmt_1 (gsi
, true, no_follow_ssa_edges
);
3800 gcc_assert (gsi_stmt (*gsi
) == stmt
);
3804 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3805 if EXPR is null or we don't know how.
3806 If non-null, the result always has boolean type. */
3809 canonicalize_bool (tree expr
, bool invert
)
3815 if (integer_nonzerop (expr
))
3816 return boolean_false_node
;
3817 else if (integer_zerop (expr
))
3818 return boolean_true_node
;
3819 else if (TREE_CODE (expr
) == SSA_NAME
)
3820 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
3821 build_int_cst (TREE_TYPE (expr
), 0));
3822 else if (COMPARISON_CLASS_P (expr
))
3823 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
3825 TREE_OPERAND (expr
, 0),
3826 TREE_OPERAND (expr
, 1));
3832 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
3834 if (integer_nonzerop (expr
))
3835 return boolean_true_node
;
3836 else if (integer_zerop (expr
))
3837 return boolean_false_node
;
3838 else if (TREE_CODE (expr
) == SSA_NAME
)
3839 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
3840 build_int_cst (TREE_TYPE (expr
), 0));
3841 else if (COMPARISON_CLASS_P (expr
))
3842 return fold_build2 (TREE_CODE (expr
),
3844 TREE_OPERAND (expr
, 0),
3845 TREE_OPERAND (expr
, 1));
3851 /* Check to see if a boolean expression EXPR is logically equivalent to the
3852 comparison (OP1 CODE OP2). Check for various identities involving
3856 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
3857 const_tree op1
, const_tree op2
)
3861 /* The obvious case. */
3862 if (TREE_CODE (expr
) == code
3863 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
3864 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
3867 /* Check for comparing (name, name != 0) and the case where expr
3868 is an SSA_NAME with a definition matching the comparison. */
3869 if (TREE_CODE (expr
) == SSA_NAME
3870 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
3872 if (operand_equal_p (expr
, op1
, 0))
3873 return ((code
== NE_EXPR
&& integer_zerop (op2
))
3874 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
3875 s
= SSA_NAME_DEF_STMT (expr
);
3876 if (is_gimple_assign (s
)
3877 && gimple_assign_rhs_code (s
) == code
3878 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
3879 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
3883 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3884 of name is a comparison, recurse. */
3885 if (TREE_CODE (op1
) == SSA_NAME
3886 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
3888 s
= SSA_NAME_DEF_STMT (op1
);
3889 if (is_gimple_assign (s
)
3890 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
3892 enum tree_code c
= gimple_assign_rhs_code (s
);
3893 if ((c
== NE_EXPR
&& integer_zerop (op2
))
3894 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
3895 return same_bool_comparison_p (expr
, c
,
3896 gimple_assign_rhs1 (s
),
3897 gimple_assign_rhs2 (s
));
3898 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
3899 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
3900 return same_bool_comparison_p (expr
,
3901 invert_tree_comparison (c
, false),
3902 gimple_assign_rhs1 (s
),
3903 gimple_assign_rhs2 (s
));
3909 /* Check to see if two boolean expressions OP1 and OP2 are logically
3913 same_bool_result_p (const_tree op1
, const_tree op2
)
3915 /* Simple cases first. */
3916 if (operand_equal_p (op1
, op2
, 0))
3919 /* Check the cases where at least one of the operands is a comparison.
3920 These are a bit smarter than operand_equal_p in that they apply some
3921 identifies on SSA_NAMEs. */
3922 if (COMPARISON_CLASS_P (op2
)
3923 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
3924 TREE_OPERAND (op2
, 0),
3925 TREE_OPERAND (op2
, 1)))
3927 if (COMPARISON_CLASS_P (op1
)
3928 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
3929 TREE_OPERAND (op1
, 0),
3930 TREE_OPERAND (op1
, 1)))
3937 /* Forward declarations for some mutually recursive functions. */
3940 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
3941 enum tree_code code2
, tree op2a
, tree op2b
);
3943 and_var_with_comparison (tree var
, bool invert
,
3944 enum tree_code code2
, tree op2a
, tree op2b
);
3946 and_var_with_comparison_1 (gimple stmt
,
3947 enum tree_code code2
, tree op2a
, tree op2b
);
3949 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
3950 enum tree_code code2
, tree op2a
, tree op2b
);
3952 or_var_with_comparison (tree var
, bool invert
,
3953 enum tree_code code2
, tree op2a
, tree op2b
);
3955 or_var_with_comparison_1 (gimple stmt
,
3956 enum tree_code code2
, tree op2a
, tree op2b
);
3958 /* Helper function for and_comparisons_1: try to simplify the AND of the
3959 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3960 If INVERT is true, invert the value of the VAR before doing the AND.
3961 Return NULL_EXPR if we can't simplify this to a single expression. */
3964 and_var_with_comparison (tree var
, bool invert
,
3965 enum tree_code code2
, tree op2a
, tree op2b
)
3968 gimple stmt
= SSA_NAME_DEF_STMT (var
);
3970 /* We can only deal with variables whose definitions are assignments. */
3971 if (!is_gimple_assign (stmt
))
3974 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3975 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3976 Then we only have to consider the simpler non-inverted cases. */
3978 t
= or_var_with_comparison_1 (stmt
,
3979 invert_tree_comparison (code2
, false),
3982 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
3983 return canonicalize_bool (t
, invert
);
3986 /* Try to simplify the AND of the ssa variable defined by the assignment
3987 STMT with the comparison specified by (OP2A CODE2 OP2B).
3988 Return NULL_EXPR if we can't simplify this to a single expression. */
3991 and_var_with_comparison_1 (gimple stmt
,
3992 enum tree_code code2
, tree op2a
, tree op2b
)
3994 tree var
= gimple_assign_lhs (stmt
);
3995 tree true_test_var
= NULL_TREE
;
3996 tree false_test_var
= NULL_TREE
;
3997 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
3999 /* Check for identities like (var AND (var == 0)) => false. */
4000 if (TREE_CODE (op2a
) == SSA_NAME
4001 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
4003 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
4004 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
4006 true_test_var
= op2a
;
4007 if (var
== true_test_var
)
4010 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
4011 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
4013 false_test_var
= op2a
;
4014 if (var
== false_test_var
)
4015 return boolean_false_node
;
4019 /* If the definition is a comparison, recurse on it. */
4020 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
4022 tree t
= and_comparisons_1 (innercode
,
4023 gimple_assign_rhs1 (stmt
),
4024 gimple_assign_rhs2 (stmt
),
4032 /* If the definition is an AND or OR expression, we may be able to
4033 simplify by reassociating. */
4034 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
4035 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
4037 tree inner1
= gimple_assign_rhs1 (stmt
);
4038 tree inner2
= gimple_assign_rhs2 (stmt
);
4041 tree partial
= NULL_TREE
;
4042 bool is_and
= (innercode
== BIT_AND_EXPR
);
4044 /* Check for boolean identities that don't require recursive examination
4046 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4047 inner1 AND (inner1 OR inner2) => inner1
4048 !inner1 AND (inner1 AND inner2) => false
4049 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4050 Likewise for similar cases involving inner2. */
4051 if (inner1
== true_test_var
)
4052 return (is_and
? var
: inner1
);
4053 else if (inner2
== true_test_var
)
4054 return (is_and
? var
: inner2
);
4055 else if (inner1
== false_test_var
)
4057 ? boolean_false_node
4058 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
4059 else if (inner2
== false_test_var
)
4061 ? boolean_false_node
4062 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
4064 /* Next, redistribute/reassociate the AND across the inner tests.
4065 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4066 if (TREE_CODE (inner1
) == SSA_NAME
4067 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
4068 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4069 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
4070 gimple_assign_rhs1 (s
),
4071 gimple_assign_rhs2 (s
),
4072 code2
, op2a
, op2b
)))
4074 /* Handle the AND case, where we are reassociating:
4075 (inner1 AND inner2) AND (op2a code2 op2b)
4077 If the partial result t is a constant, we win. Otherwise
4078 continue on to try reassociating with the other inner test. */
4081 if (integer_onep (t
))
4083 else if (integer_zerop (t
))
4084 return boolean_false_node
;
4087 /* Handle the OR case, where we are redistributing:
4088 (inner1 OR inner2) AND (op2a code2 op2b)
4089 => (t OR (inner2 AND (op2a code2 op2b))) */
4090 else if (integer_onep (t
))
4091 return boolean_true_node
;
4093 /* Save partial result for later. */
4097 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4098 if (TREE_CODE (inner2
) == SSA_NAME
4099 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
4100 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4101 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
4102 gimple_assign_rhs1 (s
),
4103 gimple_assign_rhs2 (s
),
4104 code2
, op2a
, op2b
)))
4106 /* Handle the AND case, where we are reassociating:
4107 (inner1 AND inner2) AND (op2a code2 op2b)
4108 => (inner1 AND t) */
4111 if (integer_onep (t
))
4113 else if (integer_zerop (t
))
4114 return boolean_false_node
;
4115 /* If both are the same, we can apply the identity
4117 else if (partial
&& same_bool_result_p (t
, partial
))
4121 /* Handle the OR case. where we are redistributing:
4122 (inner1 OR inner2) AND (op2a code2 op2b)
4123 => (t OR (inner1 AND (op2a code2 op2b)))
4124 => (t OR partial) */
4127 if (integer_onep (t
))
4128 return boolean_true_node
;
4131 /* We already got a simplification for the other
4132 operand to the redistributed OR expression. The
4133 interesting case is when at least one is false.
4134 Or, if both are the same, we can apply the identity
4136 if (integer_zerop (partial
))
4138 else if (integer_zerop (t
))
4140 else if (same_bool_result_p (t
, partial
))
4149 /* Try to simplify the AND of two comparisons defined by
4150 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4151 If this can be done without constructing an intermediate value,
4152 return the resulting tree; otherwise NULL_TREE is returned.
4153 This function is deliberately asymmetric as it recurses on SSA_DEFs
4154 in the first comparison but not the second. */
4157 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
4158 enum tree_code code2
, tree op2a
, tree op2b
)
4160 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
4162 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4163 if (operand_equal_p (op1a
, op2a
, 0)
4164 && operand_equal_p (op1b
, op2b
, 0))
4166 /* Result will be either NULL_TREE, or a combined comparison. */
4167 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4168 TRUTH_ANDIF_EXPR
, code1
, code2
,
4169 truth_type
, op1a
, op1b
);
4174 /* Likewise the swapped case of the above. */
4175 if (operand_equal_p (op1a
, op2b
, 0)
4176 && operand_equal_p (op1b
, op2a
, 0))
4178 /* Result will be either NULL_TREE, or a combined comparison. */
4179 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4180 TRUTH_ANDIF_EXPR
, code1
,
4181 swap_tree_comparison (code2
),
4182 truth_type
, op1a
, op1b
);
4187 /* If both comparisons are of the same value against constants, we might
4188 be able to merge them. */
4189 if (operand_equal_p (op1a
, op2a
, 0)
4190 && TREE_CODE (op1b
) == INTEGER_CST
4191 && TREE_CODE (op2b
) == INTEGER_CST
)
4193 int cmp
= tree_int_cst_compare (op1b
, op2b
);
4195 /* If we have (op1a == op1b), we should either be able to
4196 return that or FALSE, depending on whether the constant op1b
4197 also satisfies the other comparison against op2b. */
4198 if (code1
== EQ_EXPR
)
4204 case EQ_EXPR
: val
= (cmp
== 0); break;
4205 case NE_EXPR
: val
= (cmp
!= 0); break;
4206 case LT_EXPR
: val
= (cmp
< 0); break;
4207 case GT_EXPR
: val
= (cmp
> 0); break;
4208 case LE_EXPR
: val
= (cmp
<= 0); break;
4209 case GE_EXPR
: val
= (cmp
>= 0); break;
4210 default: done
= false;
4215 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4217 return boolean_false_node
;
4220 /* Likewise if the second comparison is an == comparison. */
4221 else if (code2
== EQ_EXPR
)
4227 case EQ_EXPR
: val
= (cmp
== 0); break;
4228 case NE_EXPR
: val
= (cmp
!= 0); break;
4229 case LT_EXPR
: val
= (cmp
> 0); break;
4230 case GT_EXPR
: val
= (cmp
< 0); break;
4231 case LE_EXPR
: val
= (cmp
>= 0); break;
4232 case GE_EXPR
: val
= (cmp
<= 0); break;
4233 default: done
= false;
4238 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4240 return boolean_false_node
;
4244 /* Same business with inequality tests. */
4245 else if (code1
== NE_EXPR
)
4250 case EQ_EXPR
: val
= (cmp
!= 0); break;
4251 case NE_EXPR
: val
= (cmp
== 0); break;
4252 case LT_EXPR
: val
= (cmp
>= 0); break;
4253 case GT_EXPR
: val
= (cmp
<= 0); break;
4254 case LE_EXPR
: val
= (cmp
> 0); break;
4255 case GE_EXPR
: val
= (cmp
< 0); break;
4260 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4262 else if (code2
== NE_EXPR
)
4267 case EQ_EXPR
: val
= (cmp
== 0); break;
4268 case NE_EXPR
: val
= (cmp
!= 0); break;
4269 case LT_EXPR
: val
= (cmp
<= 0); break;
4270 case GT_EXPR
: val
= (cmp
>= 0); break;
4271 case LE_EXPR
: val
= (cmp
< 0); break;
4272 case GE_EXPR
: val
= (cmp
> 0); break;
4277 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4280 /* Chose the more restrictive of two < or <= comparisons. */
4281 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
4282 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4284 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
4285 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4287 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4290 /* Likewise chose the more restrictive of two > or >= comparisons. */
4291 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
4292 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4294 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
4295 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4297 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4300 /* Check for singleton ranges. */
4302 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
4303 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
4304 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
4306 /* Check for disjoint ranges. */
4308 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
4309 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4310 return boolean_false_node
;
4312 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
4313 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4314 return boolean_false_node
;
4317 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4318 NAME's definition is a truth value. See if there are any simplifications
4319 that can be done against the NAME's definition. */
4320 if (TREE_CODE (op1a
) == SSA_NAME
4321 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
4322 && (integer_zerop (op1b
) || integer_onep (op1b
)))
4324 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
4325 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
4326 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
4327 switch (gimple_code (stmt
))
4330 /* Try to simplify by copy-propagating the definition. */
4331 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
4334 /* If every argument to the PHI produces the same result when
4335 ANDed with the second comparison, we win.
4336 Do not do this unless the type is bool since we need a bool
4337 result here anyway. */
4338 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
4340 tree result
= NULL_TREE
;
4342 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
4344 tree arg
= gimple_phi_arg_def (stmt
, i
);
4346 /* If this PHI has itself as an argument, ignore it.
4347 If all the other args produce the same result,
4349 if (arg
== gimple_phi_result (stmt
))
4351 else if (TREE_CODE (arg
) == INTEGER_CST
)
4353 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
4356 result
= boolean_false_node
;
4357 else if (!integer_zerop (result
))
4361 result
= fold_build2 (code2
, boolean_type_node
,
4363 else if (!same_bool_comparison_p (result
,
4367 else if (TREE_CODE (arg
) == SSA_NAME
4368 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
4371 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
4372 /* In simple cases we can look through PHI nodes,
4373 but we have to be careful with loops.
4375 if (! dom_info_available_p (CDI_DOMINATORS
)
4376 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
4377 || dominated_by_p (CDI_DOMINATORS
,
4378 gimple_bb (def_stmt
),
4381 temp
= and_var_with_comparison (arg
, invert
, code2
,
4387 else if (!same_bool_result_p (result
, temp
))
4403 /* Try to simplify the AND of two comparisons, specified by
4404 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4405 If this can be simplified to a single expression (without requiring
4406 introducing more SSA variables to hold intermediate values),
4407 return the resulting tree. Otherwise return NULL_TREE.
4408 If the result expression is non-null, it has boolean type. */
4411 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
4412 enum tree_code code2
, tree op2a
, tree op2b
)
4414 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
4418 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
4421 /* Helper function for or_comparisons_1: try to simplify the OR of the
4422 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4423 If INVERT is true, invert the value of VAR before doing the OR.
4424 Return NULL_EXPR if we can't simplify this to a single expression. */
4427 or_var_with_comparison (tree var
, bool invert
,
4428 enum tree_code code2
, tree op2a
, tree op2b
)
4431 gimple stmt
= SSA_NAME_DEF_STMT (var
);
4433 /* We can only deal with variables whose definitions are assignments. */
4434 if (!is_gimple_assign (stmt
))
4437 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4438 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4439 Then we only have to consider the simpler non-inverted cases. */
4441 t
= and_var_with_comparison_1 (stmt
,
4442 invert_tree_comparison (code2
, false),
4445 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
4446 return canonicalize_bool (t
, invert
);
4449 /* Try to simplify the OR of the ssa variable defined by the assignment
4450 STMT with the comparison specified by (OP2A CODE2 OP2B).
4451 Return NULL_EXPR if we can't simplify this to a single expression. */
4454 or_var_with_comparison_1 (gimple stmt
,
4455 enum tree_code code2
, tree op2a
, tree op2b
)
4457 tree var
= gimple_assign_lhs (stmt
);
4458 tree true_test_var
= NULL_TREE
;
4459 tree false_test_var
= NULL_TREE
;
4460 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
4462 /* Check for identities like (var OR (var != 0)) => true . */
4463 if (TREE_CODE (op2a
) == SSA_NAME
4464 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
4466 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
4467 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
4469 true_test_var
= op2a
;
4470 if (var
== true_test_var
)
4473 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
4474 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
4476 false_test_var
= op2a
;
4477 if (var
== false_test_var
)
4478 return boolean_true_node
;
4482 /* If the definition is a comparison, recurse on it. */
4483 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
4485 tree t
= or_comparisons_1 (innercode
,
4486 gimple_assign_rhs1 (stmt
),
4487 gimple_assign_rhs2 (stmt
),
4495 /* If the definition is an AND or OR expression, we may be able to
4496 simplify by reassociating. */
4497 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
4498 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
4500 tree inner1
= gimple_assign_rhs1 (stmt
);
4501 tree inner2
= gimple_assign_rhs2 (stmt
);
4504 tree partial
= NULL_TREE
;
4505 bool is_or
= (innercode
== BIT_IOR_EXPR
);
4507 /* Check for boolean identities that don't require recursive examination
4509 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4510 inner1 OR (inner1 AND inner2) => inner1
4511 !inner1 OR (inner1 OR inner2) => true
4512 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4514 if (inner1
== true_test_var
)
4515 return (is_or
? var
: inner1
);
4516 else if (inner2
== true_test_var
)
4517 return (is_or
? var
: inner2
);
4518 else if (inner1
== false_test_var
)
4521 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
4522 else if (inner2
== false_test_var
)
4525 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
4527 /* Next, redistribute/reassociate the OR across the inner tests.
4528 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4529 if (TREE_CODE (inner1
) == SSA_NAME
4530 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
4531 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4532 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
4533 gimple_assign_rhs1 (s
),
4534 gimple_assign_rhs2 (s
),
4535 code2
, op2a
, op2b
)))
4537 /* Handle the OR case, where we are reassociating:
4538 (inner1 OR inner2) OR (op2a code2 op2b)
4540 If the partial result t is a constant, we win. Otherwise
4541 continue on to try reassociating with the other inner test. */
4544 if (integer_onep (t
))
4545 return boolean_true_node
;
4546 else if (integer_zerop (t
))
4550 /* Handle the AND case, where we are redistributing:
4551 (inner1 AND inner2) OR (op2a code2 op2b)
4552 => (t AND (inner2 OR (op2a code op2b))) */
4553 else if (integer_zerop (t
))
4554 return boolean_false_node
;
4556 /* Save partial result for later. */
4560 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4561 if (TREE_CODE (inner2
) == SSA_NAME
4562 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
4563 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4564 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
4565 gimple_assign_rhs1 (s
),
4566 gimple_assign_rhs2 (s
),
4567 code2
, op2a
, op2b
)))
4569 /* Handle the OR case, where we are reassociating:
4570 (inner1 OR inner2) OR (op2a code2 op2b)
4572 => (t OR partial) */
4575 if (integer_zerop (t
))
4577 else if (integer_onep (t
))
4578 return boolean_true_node
;
4579 /* If both are the same, we can apply the identity
4581 else if (partial
&& same_bool_result_p (t
, partial
))
4585 /* Handle the AND case, where we are redistributing:
4586 (inner1 AND inner2) OR (op2a code2 op2b)
4587 => (t AND (inner1 OR (op2a code2 op2b)))
4588 => (t AND partial) */
4591 if (integer_zerop (t
))
4592 return boolean_false_node
;
4595 /* We already got a simplification for the other
4596 operand to the redistributed AND expression. The
4597 interesting case is when at least one is true.
4598 Or, if both are the same, we can apply the identity
4600 if (integer_onep (partial
))
4602 else if (integer_onep (t
))
4604 else if (same_bool_result_p (t
, partial
))
4613 /* Try to simplify the OR of two comparisons defined by
4614 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4615 If this can be done without constructing an intermediate value,
4616 return the resulting tree; otherwise NULL_TREE is returned.
4617 This function is deliberately asymmetric as it recurses on SSA_DEFs
4618 in the first comparison but not the second. */
4621 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
4622 enum tree_code code2
, tree op2a
, tree op2b
)
4624 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
4626 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4627 if (operand_equal_p (op1a
, op2a
, 0)
4628 && operand_equal_p (op1b
, op2b
, 0))
4630 /* Result will be either NULL_TREE, or a combined comparison. */
4631 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4632 TRUTH_ORIF_EXPR
, code1
, code2
,
4633 truth_type
, op1a
, op1b
);
4638 /* Likewise the swapped case of the above. */
4639 if (operand_equal_p (op1a
, op2b
, 0)
4640 && operand_equal_p (op1b
, op2a
, 0))
4642 /* Result will be either NULL_TREE, or a combined comparison. */
4643 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4644 TRUTH_ORIF_EXPR
, code1
,
4645 swap_tree_comparison (code2
),
4646 truth_type
, op1a
, op1b
);
4651 /* If both comparisons are of the same value against constants, we might
4652 be able to merge them. */
4653 if (operand_equal_p (op1a
, op2a
, 0)
4654 && TREE_CODE (op1b
) == INTEGER_CST
4655 && TREE_CODE (op2b
) == INTEGER_CST
)
4657 int cmp
= tree_int_cst_compare (op1b
, op2b
);
4659 /* If we have (op1a != op1b), we should either be able to
4660 return that or TRUE, depending on whether the constant op1b
4661 also satisfies the other comparison against op2b. */
4662 if (code1
== NE_EXPR
)
4668 case EQ_EXPR
: val
= (cmp
== 0); break;
4669 case NE_EXPR
: val
= (cmp
!= 0); break;
4670 case LT_EXPR
: val
= (cmp
< 0); break;
4671 case GT_EXPR
: val
= (cmp
> 0); break;
4672 case LE_EXPR
: val
= (cmp
<= 0); break;
4673 case GE_EXPR
: val
= (cmp
>= 0); break;
4674 default: done
= false;
4679 return boolean_true_node
;
4681 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4684 /* Likewise if the second comparison is a != comparison. */
4685 else if (code2
== NE_EXPR
)
4691 case EQ_EXPR
: val
= (cmp
== 0); break;
4692 case NE_EXPR
: val
= (cmp
!= 0); break;
4693 case LT_EXPR
: val
= (cmp
> 0); break;
4694 case GT_EXPR
: val
= (cmp
< 0); break;
4695 case LE_EXPR
: val
= (cmp
>= 0); break;
4696 case GE_EXPR
: val
= (cmp
<= 0); break;
4697 default: done
= false;
4702 return boolean_true_node
;
4704 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4708 /* See if an equality test is redundant with the other comparison. */
4709 else if (code1
== EQ_EXPR
)
4714 case EQ_EXPR
: val
= (cmp
== 0); break;
4715 case NE_EXPR
: val
= (cmp
!= 0); break;
4716 case LT_EXPR
: val
= (cmp
< 0); break;
4717 case GT_EXPR
: val
= (cmp
> 0); break;
4718 case LE_EXPR
: val
= (cmp
<= 0); break;
4719 case GE_EXPR
: val
= (cmp
>= 0); break;
4724 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4726 else if (code2
== EQ_EXPR
)
4731 case EQ_EXPR
: val
= (cmp
== 0); break;
4732 case NE_EXPR
: val
= (cmp
!= 0); break;
4733 case LT_EXPR
: val
= (cmp
> 0); break;
4734 case GT_EXPR
: val
= (cmp
< 0); break;
4735 case LE_EXPR
: val
= (cmp
>= 0); break;
4736 case GE_EXPR
: val
= (cmp
<= 0); break;
4741 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4744 /* Chose the less restrictive of two < or <= comparisons. */
4745 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
4746 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4748 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
4749 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4751 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4754 /* Likewise chose the less restrictive of two > or >= comparisons. */
4755 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
4756 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4758 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
4759 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4761 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4764 /* Check for singleton ranges. */
4766 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
4767 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
4768 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
4770 /* Check for less/greater pairs that don't restrict the range at all. */
4772 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
4773 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4774 return boolean_true_node
;
4776 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
4777 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4778 return boolean_true_node
;
4781 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4782 NAME's definition is a truth value. See if there are any simplifications
4783 that can be done against the NAME's definition. */
4784 if (TREE_CODE (op1a
) == SSA_NAME
4785 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
4786 && (integer_zerop (op1b
) || integer_onep (op1b
)))
4788 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
4789 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
4790 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
4791 switch (gimple_code (stmt
))
4794 /* Try to simplify by copy-propagating the definition. */
4795 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
4798 /* If every argument to the PHI produces the same result when
4799 ORed with the second comparison, we win.
4800 Do not do this unless the type is bool since we need a bool
4801 result here anyway. */
4802 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
4804 tree result
= NULL_TREE
;
4806 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
4808 tree arg
= gimple_phi_arg_def (stmt
, i
);
4810 /* If this PHI has itself as an argument, ignore it.
4811 If all the other args produce the same result,
4813 if (arg
== gimple_phi_result (stmt
))
4815 else if (TREE_CODE (arg
) == INTEGER_CST
)
4817 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
4820 result
= boolean_true_node
;
4821 else if (!integer_onep (result
))
4825 result
= fold_build2 (code2
, boolean_type_node
,
4827 else if (!same_bool_comparison_p (result
,
4831 else if (TREE_CODE (arg
) == SSA_NAME
4832 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
4835 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
4836 /* In simple cases we can look through PHI nodes,
4837 but we have to be careful with loops.
4839 if (! dom_info_available_p (CDI_DOMINATORS
)
4840 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
4841 || dominated_by_p (CDI_DOMINATORS
,
4842 gimple_bb (def_stmt
),
4845 temp
= or_var_with_comparison (arg
, invert
, code2
,
4851 else if (!same_bool_result_p (result
, temp
))
4867 /* Try to simplify the OR of two comparisons, specified by
4868 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4869 If this can be simplified to a single expression (without requiring
4870 introducing more SSA variables to hold intermediate values),
4871 return the resulting tree. Otherwise return NULL_TREE.
4872 If the result expression is non-null, it has boolean type. */
/* NOTE(review): this listing is a mangled extraction -- original source
   lines are split across several physical lines and some lines are
   elided.  Code tokens below are preserved byte-for-byte.  */
4875 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
4876 enum tree_code code2
, tree op2a
, tree op2b
)
/* First attempt: fold with the comparisons in the order given.  */
4878 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
/* OR is commutative, so if the first order failed, retry with the
   two comparisons swapped before giving up.  */
4882 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
4886 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4888 Either NULL_TREE, a simplified but non-constant or a constant
4891 ??? This should go into a gimple-fold-inline.h file to be eventually
4892 privatized with the single valueize function used in the various TUs
4893 to avoid the indirect function call overhead. */
/* NOTE(review): mangled extraction -- physical lines are split and some
   elided (braces, declarations).  Tokens preserved byte-for-byte.
   VALUEIZE maps an SSA name to its known value; GVALUEIZE is the
   variant passed to gimple_simplify for following use-def edges.  */
4896 gimple_fold_stmt_to_constant_1 (gimple stmt
, tree (*valueize
) (tree
),
4897 tree (*gvalueize
) (tree
))
4901 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4902 edges if there are intermediate VARYING defs. For this reason
4903 do not follow SSA edges here even though SCCVN can technically
4904 just deal fine with that. */
/* First try the generic match-and-simplify machinery; only accept a
   result that is a leaf (zero-operand code or ADDR_EXPR) and a valid
   GIMPLE value, since we must return something usable as a constant.  */
4905 if (gimple_simplify (stmt
, &rcode
, ops
, NULL
, gvalueize
, valueize
)
4906 && rcode
.is_tree_code ()
4907 && (TREE_CODE_LENGTH ((tree_code
) rcode
) == 0
4908 || ((tree_code
) rcode
) == ADDR_EXPR
)
4909 && is_gimple_val (ops
[0]))
/* Dump what was simplified when detailed dumping is requested.  */
4912 if (dump_file
&& dump_flags
& TDF_DETAILS
)
4914 fprintf (dump_file
, "Match-and-simplified ");
4915 print_gimple_expr (dump_file
, stmt
, 0, TDF_SLIM
);
4916 fprintf (dump_file
, " to ");
4917 print_generic_expr (dump_file
, res
, 0);
4918 fprintf (dump_file
, "\n");
/* Fall back to statement-kind-specific folding.  */
4923 location_t loc
= gimple_location (stmt
);
4924 switch (gimple_code (stmt
))
4928 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
4930 switch (get_gimple_rhs_class (subcode
))
4932 case GIMPLE_SINGLE_RHS
:
4934 tree rhs
= gimple_assign_rhs1 (stmt
);
4935 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
4937 if (TREE_CODE (rhs
) == SSA_NAME
)
4939 /* If the RHS is an SSA_NAME, return its known constant value,
4941 return (*valueize
) (rhs
);
4943 /* Handle propagating invariant addresses into address
4945 else if (TREE_CODE (rhs
) == ADDR_EXPR
4946 && !is_gimple_min_invariant (rhs
))
4948 HOST_WIDE_INT offset
= 0;
4950 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
4954 && (CONSTANT_CLASS_P (base
)
4955 || decl_address_invariant_p (base
)))
4956 return build_invariant_address (TREE_TYPE (rhs
),
/* A vector CONSTRUCTOR with one value per subpart may fold to a
   VECTOR_CST if every valueized element is a constant.  */
4959 else if (TREE_CODE (rhs
) == CONSTRUCTOR
4960 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
4961 && (CONSTRUCTOR_NELTS (rhs
)
4962 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
4967 vec
= XALLOCAVEC (tree
,
4968 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
)));
4969 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
4971 val
= (*valueize
) (val
);
4972 if (TREE_CODE (val
) == INTEGER_CST
4973 || TREE_CODE (val
) == REAL_CST
4974 || TREE_CODE (val
) == FIXED_CST
)
4980 return build_vector (TREE_TYPE (rhs
), vec
);
4982 if (subcode
== OBJ_TYPE_REF
)
4984 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
4985 /* If callee is constant, we can fold away the wrapper. */
4986 if (is_gimple_min_invariant (val
))
/* References: fold through SSA operands of VIEW_CONVERT_EXPR,
   REALPART/IMAGPART, BIT_FIELD_REF and MEM_REF, else try constant
   aggregate lookup.  */
4990 if (kind
== tcc_reference
)
4992 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
4993 || TREE_CODE (rhs
) == REALPART_EXPR
4994 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
4995 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
4997 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
4998 return fold_unary_loc (EXPR_LOCATION (rhs
),
5000 TREE_TYPE (rhs
), val
);
5002 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
5003 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5005 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5006 return fold_ternary_loc (EXPR_LOCATION (rhs
),
5008 TREE_TYPE (rhs
), val
,
5009 TREE_OPERAND (rhs
, 1),
5010 TREE_OPERAND (rhs
, 2));
5012 else if (TREE_CODE (rhs
) == MEM_REF
5013 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5015 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5016 if (TREE_CODE (val
) == ADDR_EXPR
5017 && is_gimple_min_invariant (val
))
5019 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
5021 TREE_OPERAND (rhs
, 1));
5026 return fold_const_aggregate_ref_1 (rhs
, valueize
);
5028 else if (kind
== tcc_declaration
)
5029 return get_symbol_constant_value (rhs
);
5033 case GIMPLE_UNARY_RHS
:
5036 case GIMPLE_BINARY_RHS
:
5038 /* Handle binary operators that can appear in GIMPLE form. */
5039 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
5040 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
5042 /* Translate &x + CST into an invariant form suitable for
5043 further propagation. */
5044 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
5045 && TREE_CODE (op0
) == ADDR_EXPR
5046 && TREE_CODE (op1
) == INTEGER_CST
)
5048 tree off
= fold_convert (ptr_type_node
, op1
);
5049 return build_fold_addr_expr_loc
5051 fold_build2 (MEM_REF
,
5052 TREE_TYPE (TREE_TYPE (op0
)),
5053 unshare_expr (op0
), off
));
5056 return fold_binary_loc (loc
, subcode
,
5057 gimple_expr_type (stmt
), op0
, op1
);
5060 case GIMPLE_TERNARY_RHS
:
5062 /* Handle ternary operators that can appear in GIMPLE form. */
5063 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
5064 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
5065 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
5067 /* Fold embedded expressions in ternary codes. */
5068 if ((subcode
== COND_EXPR
5069 || subcode
== VEC_COND_EXPR
)
5070 && COMPARISON_CLASS_P (op0
))
5072 tree op00
= (*valueize
) (TREE_OPERAND (op0
, 0));
5073 tree op01
= (*valueize
) (TREE_OPERAND (op0
, 1));
5074 tree tem
= fold_binary_loc (loc
, TREE_CODE (op0
),
5075 TREE_TYPE (op0
), op00
, op01
);
5080 return fold_ternary_loc (loc
, subcode
,
5081 gimple_expr_type (stmt
), op0
, op1
, op2
);
/* Calls: internal UBSAN checks fold like their plain arithmetic
   counterparts when no overflow occurs; ordinary builtin calls go
   through fold_builtin_call_array on valueized arguments.  */
5092 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
5094 if (gimple_call_internal_p (stmt
))
5096 enum tree_code subcode
= ERROR_MARK
;
5097 switch (gimple_call_internal_fn (stmt
))
5099 case IFN_UBSAN_CHECK_ADD
:
5100 subcode
= PLUS_EXPR
;
5102 case IFN_UBSAN_CHECK_SUB
:
5103 subcode
= MINUS_EXPR
;
5105 case IFN_UBSAN_CHECK_MUL
:
5106 subcode
= MULT_EXPR
;
5111 tree arg0
= gimple_call_arg (stmt
, 0);
5112 tree arg1
= gimple_call_arg (stmt
, 1);
5113 tree op0
= (*valueize
) (arg0
);
5114 tree op1
= (*valueize
) (arg1
);
5116 if (TREE_CODE (op0
) != INTEGER_CST
5117 || TREE_CODE (op1
) != INTEGER_CST
)
5122 /* x * 0 = 0 * x = 0 without overflow. */
5123 if (integer_zerop (op0
) || integer_zerop (op1
))
5124 return build_zero_cst (TREE_TYPE (arg0
));
5127 /* y - y = 0 without overflow. */
5128 if (operand_equal_p (op0
, op1
, 0))
5129 return build_zero_cst (TREE_TYPE (arg0
));
5136 = fold_binary_loc (loc
, subcode
, TREE_TYPE (arg0
), op0
, op1
);
/* Only accept the folded result if it is a constant that did not
   overflow -- otherwise the UBSAN check must stay.  */
5138 && TREE_CODE (res
) == INTEGER_CST
5139 && !TREE_OVERFLOW (res
))
5144 fn
= (*valueize
) (gimple_call_fn (stmt
));
5145 if (TREE_CODE (fn
) == ADDR_EXPR
5146 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
5147 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0))
5148 && gimple_builtin_call_types_compatible_p (stmt
,
5149 TREE_OPERAND (fn
, 0)))
5151 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
5154 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
5155 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
5156 retval
= fold_builtin_call_array (loc
,
5157 gimple_call_return_type (call_stmt
),
5158 fn
, gimple_call_num_args (stmt
), args
);
5161 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5162 STRIP_NOPS (retval
);
5163 retval
= fold_convert (gimple_call_return_type (call_stmt
),
5176 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5177 Returns NULL_TREE if folding to a constant is not possible, otherwise
5178 returns a constant according to is_gimple_min_invariant. */
/* Thin wrapper around gimple_fold_stmt_to_constant_1 that filters out
   simplified-but-non-constant results.  (Mangled extraction; tokens
   preserved byte-for-byte.)  */
5181 gimple_fold_stmt_to_constant (gimple stmt
, tree (*valueize
) (tree
))
5183 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
/* Only accept results that are minimal invariants; otherwise the
   caller gets NULL_TREE (elided else-branch, per header comment).  */
5184 if (res
&& is_gimple_min_invariant (res
))
5190 /* The following set of functions are supposed to fold references using
5191 their constant initializers. */
5193 /* See if we can find constructor defining value of BASE.
5194 When we know the consructor with constant offset (such as
5195 base is array[40] and we do know constructor of array), then
5196 BIT_OFFSET is adjusted accordingly.
5198 As a special case, return error_mark_node when constructor
5199 is not explicitly available, but it is known to be zero
5200 such as 'static const int a;'. */
/* NOTE(review): mangled extraction; code tokens preserved as-is.
   VALUEIZE may be used to look through SSA names in MEM_REF bases.  */
5202 get_base_constructor (tree base
, HOST_WIDE_INT
*bit_offset
,
5203 tree (*valueize
)(tree
))
5205 HOST_WIDE_INT bit_offset2
, size
, max_size
;
/* For a MEM_REF, fold the (byte) offset operand into *BIT_OFFSET and
   strip down to the address the reference is based on.  */
5206 if (TREE_CODE (base
) == MEM_REF
)
5208 if (!integer_zerop (TREE_OPERAND (base
, 1)))
5210 if (!tree_fits_shwi_p (TREE_OPERAND (base
, 1)))
5212 *bit_offset
+= (mem_ref_offset (base
).to_short_addr ()
5217 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
5218 base
= valueize (TREE_OPERAND (base
, 0));
5219 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
5221 base
= TREE_OPERAND (base
, 0);
5224 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5225 DECL_INITIAL. If BASE is a nested reference into another
5226 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5227 the inner reference. */
5228 switch (TREE_CODE (base
))
5233 tree init
= ctor_for_folding (base
);
5235 /* Our semantic is exact opposite of ctor_for_folding;
5236 NULL means unknown, while error_mark_node is 0. */
5237 if (init
== error_mark_node
)
5240 return error_mark_node
;
/* Nested reference: resolve the inner base and accumulate its bit
   offset, then recurse.  Give up on variable-sized accesses
   (max_size == -1 or size != max_size).  */
5246 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
);
5247 if (max_size
== -1 || size
!= max_size
)
5249 *bit_offset
+= bit_offset2
;
5250 return get_base_constructor (base
, bit_offset
, valueize
);
5261 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5262 SIZE to the memory at bit OFFSET. */
/* NOTE(review): mangled extraction; tokens preserved byte-for-byte.
   Offsets are in bits; element sizes come from the array element type.  */
5265 fold_array_ctor_reference (tree type
, tree ctor
,
5266 unsigned HOST_WIDE_INT offset
,
5267 unsigned HOST_WIDE_INT size
,
5270 unsigned HOST_WIDE_INT cnt
;
5272 offset_int low_bound
;
5273 offset_int elt_size
;
5274 offset_int index
, max_index
;
5275 offset_int access_index
;
5276 tree domain_type
= NULL_TREE
, index_type
= NULL_TREE
;
5277 HOST_WIDE_INT inner_offset
;
5279 /* Compute low bound and elt size. */
5280 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
5281 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
5282 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
5284 /* Static constructors for variably sized objects makes no sense. */
5285 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
5286 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
5287 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
5291 /* Static constructors for variably sized objects makes no sense. */
5292 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))))
5294 elt_size
= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
5296 /* We can handle only constantly sized accesses that are known to not
5297 be larger than size of array element. */
5298 if (!TYPE_SIZE_UNIT (type
)
5299 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
5300 || wi::lts_p (elt_size
, wi::to_offset (TYPE_SIZE_UNIT (type
)))
5304 /* Compute the array index we look for. */
5305 access_index
= wi::udiv_trunc (offset_int (offset
/ BITS_PER_UNIT
),
5307 access_index
+= low_bound
;
/* Truncate the index to the precision/signedness of the array's
   index type so it compares correctly against constructor indices.  */
5309 access_index
= wi::ext (access_index
, TYPE_PRECISION (index_type
),
5310 TYPE_SIGN (index_type
));
5312 /* And offset within the access. */
5313 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
5315 /* See if the array field is large enough to span whole access. We do not
5316 care to fold accesses spanning multiple array indexes. */
5317 if (inner_offset
+ size
> elt_size
.to_uhwi () * BITS_PER_UNIT
)
/* Start the running index one below the low bound so the first
   elt with a NULL index lands on LOW_BOUND.  */
5320 index
= low_bound
- 1;
5322 index
= wi::ext (index
, TYPE_PRECISION (index_type
),
5323 TYPE_SIGN (index_type
));
5325 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
5327 /* Array constructor might explicitely set index, or specify range
5328 or leave index NULL meaning that it is next index after previous
5332 if (TREE_CODE (cfield
) == INTEGER_CST
)
5333 max_index
= index
= wi::to_offset (cfield
);
5336 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
5337 index
= wi::to_offset (TREE_OPERAND (cfield
, 0));
5338 max_index
= wi::to_offset (TREE_OPERAND (cfield
, 1));
5345 index
= wi::ext (index
, TYPE_PRECISION (index_type
),
5346 TYPE_SIGN (index_type
));
5350 /* Do we have match? */
5351 if (wi::cmpu (access_index
, index
) >= 0
5352 && wi::cmpu (access_index
, max_index
) <= 0)
/* Recurse into the matching element's value with the residual
   bit offset within that element.  */
5353 return fold_ctor_reference (type
, cval
, inner_offset
, size
,
5356 /* When memory is not explicitely mentioned in constructor,
5357 it is 0 (or out of range). */
5358 return build_zero_cst (type
);
5361 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5362 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
/* NOTE(review): mangled extraction; tokens preserved byte-for-byte.
   Walks record/union constructor elements, matching the accessed bit
   range against each FIELD_DECL's bit range.  */
5365 fold_nonarray_ctor_reference (tree type
, tree ctor
,
5366 unsigned HOST_WIDE_INT offset
,
5367 unsigned HOST_WIDE_INT size
,
5370 unsigned HOST_WIDE_INT cnt
;
5373 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
5376 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
5377 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
5378 tree field_size
= DECL_SIZE (cfield
);
5379 offset_int bitoffset
;
5380 offset_int bitoffset_end
, access_end
;
5382 /* Variable sized objects in static constructors makes no sense,
5383 but field_size can be NULL for flexible array members. */
5384 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
5385 && TREE_CODE (byte_offset
) == INTEGER_CST
5386 && (field_size
!= NULL_TREE
5387 ? TREE_CODE (field_size
) == INTEGER_CST
5388 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
5390 /* Compute bit offset of the field. */
5391 bitoffset
= (wi::to_offset (field_offset
)
5392 + wi::lshift (wi::to_offset (byte_offset
),
5393 LOG2_BITS_PER_UNIT
));
5394 /* Compute bit offset where the field ends. */
5395 if (field_size
!= NULL_TREE
)
5396 bitoffset_end
= bitoffset
+ wi::to_offset (field_size
);
5400 access_end
= offset_int (offset
) + size
;
5402 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5403 [BITOFFSET, BITOFFSET_END)? */
/* A NULL field_size (flexible array member) is treated as extending
   to the end, so only the lower bound is checked.  */
5404 if (wi::cmps (access_end
, bitoffset
) > 0
5405 && (field_size
== NULL_TREE
5406 || wi::lts_p (offset
, bitoffset_end
)))
5408 offset_int inner_offset
= offset_int (offset
) - bitoffset
;
5409 /* We do have overlap. Now see if field is large enough to
5410 cover the access. Give up for accesses spanning multiple
5412 if (wi::cmps (access_end
, bitoffset_end
) > 0)
5414 if (wi::lts_p (offset
, bitoffset
))
/* Fully-covered access: recurse into the field's value with the
   offset rebased to the field start.  */
5416 return fold_ctor_reference (type
, cval
,
5417 inner_offset
.to_uhwi (), size
,
5421 /* When memory is not explicitely mentioned in constructor, it is 0. */
5422 return build_zero_cst (type
);
5425 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5426 to the memory at bit OFFSET. */
/* NOTE(review): mangled extraction; tokens preserved byte-for-byte.
   Dispatcher: exact-type match, view-convert of whole value, byte-wise
   native encode/interpret, then array vs. non-array constructor walks.  */
5429 fold_ctor_reference (tree type
, tree ctor
, unsigned HOST_WIDE_INT offset
,
5430 unsigned HOST_WIDE_INT size
, tree from_decl
)
5434 /* We found the field with exact match. */
5435 if (useless_type_conversion_p (type
, TREE_TYPE (ctor
))
5437 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
)
;
5439 /* We are at the end of walk, see if we can view convert the
5441 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
5442 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5443 && !compare_tree_int (TYPE_SIZE (type
), size
)
5444 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor
)), size
))
5446 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
5447 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
5449 STRIP_USELESS_TYPE_CONVERSION (ret
);
5452 /* For constants and byte-aligned/sized reads try to go through
5453 native_encode/interpret. */
5454 if (CONSTANT_CLASS_P (ctor
)
5455 && BITS_PER_UNIT
== 8
5456 && offset
% BITS_PER_UNIT
== 0
5457 && size
% BITS_PER_UNIT
== 0
5458 && size
<= MAX_BITSIZE_MODE_ANY_MODE
)
5460 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
/* Serialize the constant to target byte representation and
   reinterpret the requested sub-range as TYPE.  */
5461 if (native_encode_expr (ctor
, buf
, size
/ BITS_PER_UNIT
,
5462 offset
/ BITS_PER_UNIT
) > 0)
5463 return native_interpret_expr (type
, buf
, size
/ BITS_PER_UNIT
);
5465 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
/* Arrays and vectors are indexed constructors; everything else
   (records, unions) is walked field by field.  */
5468 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
5469 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
5470 return fold_array_ctor_reference (type
, ctor
, offset
, size
,
5473 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
5480 /* Return the tree representing the element referenced by T if T is an
5481 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5482 names using VALUEIZE. Return NULL_TREE otherwise. */
/* NOTE(review): mangled extraction; tokens preserved byte-for-byte.  */
5485 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
5487 tree ctor
, idx
, base
;
5488 HOST_WIDE_INT offset
, size
, max_size
;
/* Never fold volatile accesses.  */
5491 if (TREE_THIS_VOLATILE (t
))
5495 return get_symbol_constant_value (t
);
5497 tem
= fold_read_from_constant_string (t
);
5501 switch (TREE_CODE (t
))
5504 case ARRAY_RANGE_REF
:
5505 /* Constant indexes are handled well by get_base_constructor.
5506 Only special case variable offsets.
5507 FIXME: This code can't handle nested references with variable indexes
5508 (they will be handled only by iteration of ccp). Perhaps we can bring
5509 get_ref_base_and_extent here and make it use a valueize callback. */
5510 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
5512 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
5513 && TREE_CODE (idx
) == INTEGER_CST
)
5515 tree low_bound
, unit_size
;
5517 /* If the resulting bit-offset is constant, track it. */
5518 if ((low_bound
= array_ref_low_bound (t
),
5519 TREE_CODE (low_bound
) == INTEGER_CST
)
5520 && (unit_size
= array_ref_element_size (t
),
5521 tree_fits_uhwi_p (unit_size
)))
5524 = wi::sext (wi::to_offset (idx
) - wi::to_offset (low_bound
),
5525 TYPE_PRECISION (TREE_TYPE (idx
)));
5527 if (wi::fits_shwi_p (woffset
))
5529 offset
= woffset
.to_shwi ();
5530 /* TODO: This code seems wrong, multiply then check
5531 to see if it fits. */
5532 offset
*= tree_to_uhwi (unit_size
);
5533 offset
*= BITS_PER_UNIT
;
5535 base
= TREE_OPERAND (t
, 0);
5536 ctor
= get_base_constructor (base
, &offset
, valueize
);
5537 /* Empty constructor. Always fold to 0. */
5538 if (ctor
== error_mark_node
)
5539 return build_zero_cst (TREE_TYPE (t
));
5540 /* Out of bound array access. Value is undefined,
5544 /* We can not determine ctor. */
5547 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
5548 tree_to_uhwi (unit_size
)
/* Plain references: compute base + constant extent and look the
   value up in the base's constructor.  */
5558 case TARGET_MEM_REF
:
5560 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
);
5561 ctor
= get_base_constructor (base
, &offset
, valueize
);
5563 /* Empty constructor. Always fold to 0. */
5564 if (ctor
== error_mark_node
)
5565 return build_zero_cst (TREE_TYPE (t
));
5566 /* We do not know precise address. */
5567 if (max_size
== -1 || max_size
!= size
)
5569 /* We can not determine ctor. */
5573 /* Out of bound array access. Value is undefined, but don't fold. */
5577 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
,
/* REALPART/IMAGPART (elided case labels): fold the underlying
   complex operand first, then extract the requested part.  */
5583 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
5584 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
5585 return fold_build1_loc (EXPR_LOCATION (t
),
5586 TREE_CODE (t
), TREE_TYPE (t
), c
);
/* Public entry point: fold reference T through constant initializers
   without any SSA valueization (NULL callback).  */
5598 fold_const_aggregate_ref (tree t
)
5600 return fold_const_aggregate_ref_1 (t
, NULL
);
5603 /* Lookup virtual method with index TOKEN in a virtual table V
5605 Set CAN_REFER if non-NULL to false if method
5606 is not referable or if the virtual table is ill-formed (such as rewriten
5607 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
/* NOTE(review): mangled extraction; tokens preserved byte-for-byte.
   OFFSET is a byte offset into the vtable before scaling (see below).  */
5610 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token
,
5612 unsigned HOST_WIDE_INT offset
,
5615 tree vtable
= v
, init
, fn
;
5616 unsigned HOST_WIDE_INT size
;
5617 unsigned HOST_WIDE_INT elt_size
, access_index
;
5623 /* First of all double check we have virtual table. */
5624 if (TREE_CODE (v
) != VAR_DECL
5625 || !DECL_VIRTUAL_P (v
))
5627 /* Pass down that we lost track of the target. */
5633 init
= ctor_for_folding (v
);
5635 /* The virtual tables should always be born with constructors
5636 and we always should assume that they are avaialble for
5637 folding. At the moment we do not stream them in all cases,
5638 but it should never happen that ctor seem unreachable. */
5640 if (init
== error_mark_node
)
5642 gcc_assert (in_lto_p
);
5643 /* Pass down that we lost track of the target. */
5648 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
/* Convert the byte OFFSET to bits and add TOKEN slots of SIZE bits
   each to address the requested vtable entry.  */
5649 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
5650 offset
*= BITS_PER_UNIT
;
5651 offset
+= token
* size
;
5653 /* Lookup the value in the constructor that is assumed to be array.
5654 This is equivalent to
5655 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5656 offset, size, NULL);
5657 but in a constant time. We expect that frontend produced a simple
5658 array without indexed initializers. */
5660 gcc_checking_assert (TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
5661 domain_type
= TYPE_DOMAIN (TREE_TYPE (init
));
5662 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type
)));
5663 elt_size
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init
))));
5665 access_index
= offset
/ BITS_PER_UNIT
/ elt_size
;
5666 gcc_checking_assert (offset
% (elt_size
* BITS_PER_UNIT
) == 0);
5668 /* This code makes an assumption that there are no
5669 indexed fileds produced by C++ FE, so we can directly index the array. */
5670 if (access_index
< CONSTRUCTOR_NELTS (init
))
5672 fn
= CONSTRUCTOR_ELT (init
, access_index
)->value
;
5673 gcc_checking_assert (!CONSTRUCTOR_ELT (init
, access_index
)->index
);
5679 /* For type inconsistent program we may end up looking up virtual method
5680 in virtual table that does not contain TOKEN entries. We may overrun
5681 the virtual table and pick up a constant or RTTI info pointer.
5682 In any case the call is undefined. */
5684 || (TREE_CODE (fn
) != ADDR_EXPR
&& TREE_CODE (fn
) != FDESC_EXPR
)
5685 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
/* Undefined call: fold it to __builtin_unreachable.  */
5686 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
5689 fn
= TREE_OPERAND (fn
, 0);
5691 /* When cgraph node is missing and function is not public, we cannot
5692 devirtualize. This can happen in WHOPR when the actual method
5693 ends up in other partition, because we found devirtualization
5694 possibility too late. */
5695 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
5706 /* Make sure we create a cgraph node for functions we'll reference.
5707 They can be non-existent if the reference comes from an entry
5708 of an external vtable for example. */
5709 cgraph_node::get_create (fn
);
5714 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5715 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5716 KNOWN_BINFO carries the binfo describing the true type of
5717 OBJ_TYPE_REF_OBJECT(REF).
5718 Set CAN_REFER if non-NULL to false if method
5719 is not referable or if the virtual table is ill-formed (such as rewriten
5720 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
/* NOTE(review): mangled extraction; tokens preserved byte-for-byte.
   Resolves KNOWN_BINFO's vtable pointer to the vtable VAR_DECL plus
   byte offset, then delegates the slot lookup.  */
5723 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
,
5726 unsigned HOST_WIDE_INT offset
;
5729 v
= BINFO_VTABLE (known_binfo
);
5730 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5734 if (!vtable_pointer_value_to_vtable (v
, &v
, &offset
))
5740 return gimple_get_virt_method_for_vtable (token
, v
, offset
, can_refer
);
5743 /* Return true iff VAL is a gimple expression that is known to be
5744 non-negative. Restricted to floating-point inputs. */
/* NOTE(review): mangled extraction; tokens preserved byte-for-byte.
   Conservative one-step analysis: generic tree predicate, then a look
   at VAL's single defining statement (assignment or builtin call).  */
5747 gimple_val_nonnegative_real_p (tree val
)
5751 gcc_assert (val
&& SCALAR_FLOAT_TYPE_P (TREE_TYPE (val
)));
5753 /* Use existing logic for non-gimple trees. */
5754 if (tree_expr_nonnegative_p (val
))
5757 if (TREE_CODE (val
) != SSA_NAME
)
5760 /* Currently we look only at the immediately defining statement
5761 to make this determination, since recursion on defining
5762 statements of operands can lead to quadratic behavior in the
5763 worst case. This is expected to catch almost all occurrences
5764 in practice. It would be possible to implement limited-depth
5765 recursion if important cases are lost. Alternatively, passes
5766 that need this information (such as the pow/powi lowering code
5767 in the cse_sincos pass) could be revised to provide it through
5768 dataflow propagation. */
5770 def_stmt
= SSA_NAME_DEF_STMT (val
);
5772 if (is_gimple_assign (def_stmt
))
5776 /* See fold-const.c:tree_expr_nonnegative_p for additional
5777 cases that could be handled with recursion. */
5779 switch (gimple_assign_rhs_code (def_stmt
))
5782 /* Always true for floating-point operands. */
5786 /* True if the two operands are identical (since we are
5787 restricted to floating-point inputs). */
5788 op0
= gimple_assign_rhs1 (def_stmt
);
5789 op1
= gimple_assign_rhs2 (def_stmt
);
5792 || operand_equal_p (op0
, op1
, 0))
5799 else if (is_gimple_call (def_stmt
))
5801 tree fndecl
= gimple_call_fndecl (def_stmt
);
5803 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
/* Math builtins whose result is nonnegative by definition
   (abs-like, exponentials, distances, ...).  */
5807 switch (DECL_FUNCTION_CODE (fndecl
))
5809 CASE_FLT_FN (BUILT_IN_ACOS
):
5810 CASE_FLT_FN (BUILT_IN_ACOSH
):
5811 CASE_FLT_FN (BUILT_IN_CABS
):
5812 CASE_FLT_FN (BUILT_IN_COSH
):
5813 CASE_FLT_FN (BUILT_IN_ERFC
):
5814 CASE_FLT_FN (BUILT_IN_EXP
):
5815 CASE_FLT_FN (BUILT_IN_EXP10
):
5816 CASE_FLT_FN (BUILT_IN_EXP2
):
5817 CASE_FLT_FN (BUILT_IN_FABS
):
5818 CASE_FLT_FN (BUILT_IN_FDIM
):
5819 CASE_FLT_FN (BUILT_IN_HYPOT
):
5820 CASE_FLT_FN (BUILT_IN_POW10
):
5823 CASE_FLT_FN (BUILT_IN_SQRT
):
5824 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5825 nonnegative inputs. */
5826 if (!HONOR_SIGNED_ZEROS (val
))
5831 CASE_FLT_FN (BUILT_IN_POWI
):
5832 /* True if the second argument is an even integer. */
5833 arg1
= gimple_call_arg (def_stmt
, 1);
5835 if (TREE_CODE (arg1
) == INTEGER_CST
5836 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
5841 CASE_FLT_FN (BUILT_IN_POW
):
5842 /* True if the second argument is an even integer-valued
5844 arg1
= gimple_call_arg (def_stmt
, 1);
5846 if (TREE_CODE (arg1
) == REAL_CST
)
/* Round-trip the real exponent through an integer and require an
   exact match, i.e. the exponent is an integral value.  */
5851 c
= TREE_REAL_CST (arg1
);
5852 n
= real_to_integer (&c
);
5856 REAL_VALUE_TYPE cint
;
5857 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
5858 if (real_identical (&c
, &cint
))
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  /* PTYPE is the pointer type of T; TYPE is what *T would have.  */
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  /* Look through no-op conversions on the pointer value.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);

      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  /* Only a constant lower bound can be folded into ARRAY_REF.  */
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val,
			   NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ... */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the element access lies within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type,
					TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  /* The offset must be exactly that of the imaginary part.  */
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type,
				TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	/* NOTE(review): the middle operand was dropped from this view;
	   reconstructed per fold_indirect_ref_1 — verify against upstream.  */
	return fold_build2 (MEM_REF, type,
			    build1 (ADDR_EXPR, ptype, sub),
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recurse on the inner dereference; fall back to a plain
	 INDIRECT_REF when it does not simplify.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    /* NOTE(review): only the POINTER_PLUS_EXPR label was visible in this
       view; the sibling labels are reconstructed from upstream — verify.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;

    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  /* Do the arithmetic in the unsigned variant of the LHS type, where
     overflow wraps and is thus well-defined.  */
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert every operand (ops 1..n-1; op 0 is the LHS) to TYPE,
     collecting any statements the conversions require.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Give STMT a fresh unsigned LHS; the original LHS is defined by the
     conversion statement appended below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer addition becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original LHS type.  */
  gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
6055 /* The valueization hook we use for the gimple_build API simplification.
6056 This makes us match fold_buildN behavior by only combining with
6057 statements in the sequence(s) we are currently building. */
6060 gimple_build_valueize (tree op
)
6062 if (gimple_bb (SSA_NAME_DEF_STMT (op
)) == NULL
)
6067 /* Build the expression CODE OP0 of type TYPE with location LOC,
6068 simplifying it first if possible. Returns the built
6069 expression value and appends statements possibly defining it
6073 gimple_build (gimple_seq
*seq
, location_t loc
,
6074 enum tree_code code
, tree type
, tree op0
)
6076 tree res
= gimple_simplify (code
, type
, op0
, seq
, gimple_build_valueize
);
6079 if (gimple_in_ssa_p (cfun
))
6080 res
= make_ssa_name (type
);
6082 res
= create_tmp_reg (type
);
6084 if (code
== REALPART_EXPR
6085 || code
== IMAGPART_EXPR
6086 || code
== VIEW_CONVERT_EXPR
)
6087 stmt
= gimple_build_assign (res
, code
, build1 (code
, type
, op0
));
6089 stmt
= gimple_build_assign (res
, code
, op0
);
6090 gimple_set_location (stmt
, loc
);
6091 gimple_seq_add_stmt_without_update (seq
, stmt
);
6096 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6097 simplifying it first if possible. Returns the built
6098 expression value and appends statements possibly defining it
6102 gimple_build (gimple_seq
*seq
, location_t loc
,
6103 enum tree_code code
, tree type
, tree op0
, tree op1
)
6105 tree res
= gimple_simplify (code
, type
, op0
, op1
, seq
, gimple_build_valueize
);
6108 if (gimple_in_ssa_p (cfun
))
6109 res
= make_ssa_name (type
);
6111 res
= create_tmp_reg (type
);
6112 gimple stmt
= gimple_build_assign (res
, code
, op0
, op1
);
6113 gimple_set_location (stmt
, loc
);
6114 gimple_seq_add_stmt_without_update (seq
, stmt
);
6119 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6120 simplifying it first if possible. Returns the built
6121 expression value and appends statements possibly defining it
6125 gimple_build (gimple_seq
*seq
, location_t loc
,
6126 enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
)
6128 tree res
= gimple_simplify (code
, type
, op0
, op1
, op2
,
6129 seq
, gimple_build_valueize
);
6132 if (gimple_in_ssa_p (cfun
))
6133 res
= make_ssa_name (type
);
6135 res
= create_tmp_reg (type
);
6137 if (code
== BIT_FIELD_REF
)
6138 stmt
= gimple_build_assign (res
, code
,
6139 build3 (code
, type
, op0
, op1
, op2
));
6141 stmt
= gimple_build_assign (res
, code
, op0
, op1
, op2
);
6142 gimple_set_location (stmt
, loc
);
6143 gimple_seq_add_stmt_without_update (seq
, stmt
);
6148 /* Build the call FN (ARG0) with a result of type TYPE
6149 (or no result if TYPE is void) with location LOC,
6150 simplifying it first if possible. Returns the built
6151 expression value (or NULL_TREE if TYPE is void) and appends
6152 statements possibly defining it to SEQ. */
6155 gimple_build (gimple_seq
*seq
, location_t loc
,
6156 enum built_in_function fn
, tree type
, tree arg0
)
6158 tree res
= gimple_simplify (fn
, type
, arg0
, seq
, gimple_build_valueize
);
6161 tree decl
= builtin_decl_implicit (fn
);
6162 gimple stmt
= gimple_build_call (decl
, 1, arg0
);
6163 if (!VOID_TYPE_P (type
))
6165 if (gimple_in_ssa_p (cfun
))
6166 res
= make_ssa_name (type
);
6168 res
= create_tmp_reg (type
);
6169 gimple_call_set_lhs (stmt
, res
);
6171 gimple_set_location (stmt
, loc
);
6172 gimple_seq_add_stmt_without_update (seq
, stmt
);
6177 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6178 (or no result if TYPE is void) with location LOC,
6179 simplifying it first if possible. Returns the built
6180 expression value (or NULL_TREE if TYPE is void) and appends
6181 statements possibly defining it to SEQ. */
6184 gimple_build (gimple_seq
*seq
, location_t loc
,
6185 enum built_in_function fn
, tree type
, tree arg0
, tree arg1
)
6187 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, seq
, gimple_build_valueize
);
6190 tree decl
= builtin_decl_implicit (fn
);
6191 gimple stmt
= gimple_build_call (decl
, 2, arg0
, arg1
);
6192 if (!VOID_TYPE_P (type
))
6194 if (gimple_in_ssa_p (cfun
))
6195 res
= make_ssa_name (type
);
6197 res
= create_tmp_reg (type
);
6198 gimple_call_set_lhs (stmt
, res
);
6200 gimple_set_location (stmt
, loc
);
6201 gimple_seq_add_stmt_without_update (seq
, stmt
);
6206 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6207 (or no result if TYPE is void) with location LOC,
6208 simplifying it first if possible. Returns the built
6209 expression value (or NULL_TREE if TYPE is void) and appends
6210 statements possibly defining it to SEQ. */
6213 gimple_build (gimple_seq
*seq
, location_t loc
,
6214 enum built_in_function fn
, tree type
,
6215 tree arg0
, tree arg1
, tree arg2
)
6217 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, arg2
,
6218 seq
, gimple_build_valueize
);
6221 tree decl
= builtin_decl_implicit (fn
);
6222 gimple stmt
= gimple_build_call (decl
, 3, arg0
, arg1
, arg2
);
6223 if (!VOID_TYPE_P (type
))
6225 if (gimple_in_ssa_p (cfun
))
6226 res
= make_ssa_name (type
);
6228 res
= create_tmp_reg (type
);
6229 gimple_call_set_lhs (stmt
, res
);
6231 gimple_set_location (stmt
, loc
);
6232 gimple_seq_add_stmt_without_update (seq
, stmt
);
6237 /* Build the conversion (TYPE) OP with a result of type TYPE
6238 with location LOC if such conversion is neccesary in GIMPLE,
6239 simplifying it first.
6240 Returns the built expression value and appends
6241 statements possibly defining it to SEQ. */
6244 gimple_convert (gimple_seq
*seq
, location_t loc
, tree type
, tree op
)
6246 if (useless_type_conversion_p (type
, TREE_TYPE (op
)))
6248 return gimple_build (seq
, loc
, NOP_EXPR
, type
, op
);