/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "expmed.h"
#include "optabs-query.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
#include "cfganal.h"
#include "optabs-tree.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

   Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

   Will get turned into:

     ptr2 = &x[index];

   Or
     ssa = (int) decl
     res = ssa & 1

   Provided that decl has known alignment >= 2, will get turned into

     res = 0;

   We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
   allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
   {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple *stmt);

static bitmap to_purge;

/* Const-and-copy lattice.  */
static vec<tree> lattice;
/* Set the lattice entry for NAME to VAL.  */

static void
fwprop_set_lattice_val (tree name, tree val)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      if (SSA_NAME_VERSION (name) >= lattice.length ())
        {
          lattice.reserve (num_ssa_names - lattice.length ());
          lattice.quick_grow_cleared (num_ssa_names);
        }
      lattice[SSA_NAME_VERSION (name)] = val;
    }
}
/* Invalidate the lattice entry for NAME, done when releasing SSA names.  */

static void
fwprop_invalidate_lattice (tree name)
{
  if (name
      && TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
}
/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL_TREE if there is no such one.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple *
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple *def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
        single_use = false;
        if (single_use_only)
          return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
        if (!single_use_only && single_use_p)
          *single_use_p = single_use;

        return def_stmt;
      }
  } while (1);
}
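
/* Illustrative example (schematic GIMPLE; the SSA names are hypothetical,
   not from the original sources): given

     a_1 = x_2 * 4;
     b_3 = a_1;
     c_4 = b_3 + 1;

   get_prop_source_stmt (b_3, false, &single_use_p) skips the trivial
   copy 'b_3 = a_1' and returns 'a_1 = x_2 * 4' as the propagation
   source, recording in *single_use_p whether the whole chain up to
   b_3 is single-use.  */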
/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple *def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can be always propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}
/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple *stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
        || SSA_NAME_IS_DEFAULT_DEF (name)
        || !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
        || gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      bitmap_set_bit (to_purge, bb->index);
    fwprop_invalidate_lattice (gimple_get_lhs (stmt));
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
/* Return the rhs of a gassign *STMT in a form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple *stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_TERNARY_RHS:
      return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt),
                              gimple_assign_rhs3 (stmt));
    case GIMPLE_BINARY_RHS:
      return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt));
    case GIMPLE_UNARY_RHS:
      return build1 (code, type, gimple_assign_rhs1 (stmt));
    case GIMPLE_SINGLE_RHS:
      return gimple_assign_rhs1 (stmt);
    default:
      gcc_unreachable ();
    }
}
/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
                        tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}
/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operand.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple *stmt,
                                     enum tree_code code, tree type,
                                     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
          bool invariant_only_p = !single_use0_p;

          rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);

          /* Always combine comparisons or conversions from booleans.  */
          if (TREE_CODE (op1) == INTEGER_CST
              && ((CONVERT_EXPR_CODE_P (def_code)
                   && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
                      == BOOLEAN_TYPE)
                  || TREE_CODE_CLASS (def_code) == tcc_comparison))
            invariant_only_p = false;

          tmp = combine_cond_expr_cond (stmt, code, type,
                                        rhs0, op1, invariant_only_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
        {
          rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
          tmp = combine_cond_expr_cond (stmt, code, type,
                                        op0, rhs1, !single_use1_p);
          if (tmp)
            return tmp;
        }
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
                                  rhs0, rhs1,
                                  !(single_use0_p && single_use1_p));

  return tmp;
}
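
/* Illustrative example (schematic GIMPLE; the SSA names are hypothetical,
   not from the original sources): for

     t_1 = a_2 < b_3;
     x_4 = t_1 != 0;

   combining the comparison 't_1 != 0' with the defining statement of
   t_1 yields the simplified condition 'a_2 < b_3'.  */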
/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
                                             gimple_assign_rhs_code (stmt),
                                             type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}
/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gcond *stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
                                             boolean_type_node,
                                             rhs1, rhs2);
  if (tmp)
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_gimple_expr (dump_file, stmt, 0);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp);
          fprintf (dump_file, "'\n");
        }

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
        cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
           && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
           && integer_zerop (rhs2))
          || (code == NE_EXPR
              && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
                                               TREE_TYPE (cond),
                                               TREE_OPERAND (cond, 0),
                                               TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple *def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return false;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
        tmp = fold_build2_loc (gimple_location (def_stmt),
                               def_code,
                               TREE_TYPE (cond),
                               gimple_assign_rhs1 (def_stmt),
                               gimple_assign_rhs2 (def_stmt));
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file && tmp)
        {
          fprintf (dump_file, "  Replaced '");
          print_generic_expr (dump_file, cond);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp);
          fprintf (dump_file, "'\n");
        }

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
                                  : integer_onep (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
        gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
        gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}
/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple *stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
    bitmap_set_bit (to_purge, gimple_bb (stmt)->index);

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}
/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation can
   be not totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match
         so we can remove a conversion re-materialize the address here
         and stop.  */
      if (single_use_p
          && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
        {
          gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
          gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
          return true;
        }

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
           || POINTER_TYPE_P (TREE_TYPE (lhs)))
          && (TYPE_PRECISION (TREE_TYPE (lhs))
              >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
        return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain from this on we only can propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
         for further propagation.  Simply rely on fold building that
         and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
                                 def_rhs,
                                 fold_convert (ptr_type_node,
                                               gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
          && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
        return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
                                                    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
         bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
          && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
        return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
                                        new_def_rhs);
      else if (is_gimple_min_invariant (new_def_rhs))
        gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
      else
        return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      poly_int64 def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          poly_offset_int off = mem_ref_offset (lhs);
          tree new_ptr;
          off += def_rhs_offset;
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (lhs, 0) = new_ptr;
          TREE_OPERAND (lhs, 1)
            = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
               && ((gimple_assign_lhs (use_stmt) == lhs
                    && useless_type_conversion_p
                         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
                          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
                   || types_compatible_p (TREE_TYPE (lhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
               /* Don't forward anything into clobber stmts if it would result
                  in the lhs no longer being a MEM_REF.  */
               && (!gimple_clobber_p (use_stmt)
                   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_lhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (lhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
          new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *lhsp = new_lhs;
          TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
          TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
          *def_rhs_basep = saved;
          tidy_after_forward_propagate_addr (use_stmt);
          /* Continue propagating into the RHS if this was not the
             only use.  */
          if (single_use_p)
            return true;
        }
      else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
           claim we did when propagating into the rhs.  */
        res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      poly_int64 def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
                                                         &def_rhs_offset)))
        {
          poly_offset_int off = mem_ref_offset (rhs);
          tree new_ptr;
          off += def_rhs_offset;
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
            new_ptr = build_fold_addr_expr (def_rhs_base);
          TREE_OPERAND (rhs, 0) = new_ptr;
          TREE_OPERAND (rhs, 1)
            = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
         dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
               && ((gimple_assign_rhs1 (use_stmt) == rhs
                    && useless_type_conversion_p
                         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
                          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
                   || types_compatible_p (TREE_TYPE (rhs),
                                          TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
        {
          tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
          tree new_offset, new_base, saved, new_rhs;
          while (handled_component_p (*def_rhs_basep))
            def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
          saved = *def_rhs_basep;
          if (TREE_CODE (*def_rhs_basep) == MEM_REF)
            {
              new_base = TREE_OPERAND (*def_rhs_basep, 0);
              new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
                                         TREE_OPERAND (*def_rhs_basep, 1));
            }
          else
            {
              new_base = build_fold_addr_expr (*def_rhs_basep);
              new_offset = TREE_OPERAND (rhs, 1);
            }
          *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
                                   new_base, new_offset);
          TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
          TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
          new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
          *rhsp = new_rhs;
          TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
          TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
          *def_rhs_basep = saved;
          fold_stmt_inplace (use_stmt_gsi);
          tidy_after_forward_propagate_addr (use_stmt);
          return res;
        }
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
                                 ADDR_EXPR, TREE_TYPE (def_rhs),
                                 fold_build2 (MEM_REF,
                                              TREE_TYPE (TREE_TYPE (def_rhs)),
                                              unshare_expr (def_rhs),
                                              fold_convert (ptr_type_node,
                                                            rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this as toplevel.

   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple *use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
        {
          if (!is_gimple_debug (use_stmt))
            all = false;
          continue;
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                              single_use_p);
      /* If the use has moved to a different statement adjust
         the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
        {
          update_stmt (use_stmt);
          use_stmt = gsi_stmt (gsi);
        }
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
          && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && TREE_CODE (use_rhs) == SSA_NAME
          && has_zero_uses (gimple_assign_lhs (use_stmt)))
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
          release_defs (use_stmt);
          gsi_remove (&gsi, true);
        }
    }

  return all && has_zero_uses (name);
}
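
/* Illustrative example (schematic GIMPLE; the SSA names are hypothetical,
   not from the original sources): for

     ptr_1 = &a[0];
     x_2 = MEM[(int *)ptr_1];

   propagating the ADDR_EXPR into the use yields

     x_2 = a[0];

   and, ptr_1 having become unused, its definition can be removed.  */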
/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  auto_vec<tree> labels (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
         out-of-range for INDEX_TYPE.  Push one label and let the
         CFG cleanups deal with this further.  */
      if (len == 0)
        {
          tree label, elt;

          label = CASE_LABEL (gimple_switch_default_label (stmt));
          elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
          labels.quick_push (elt);
          len = 1;
        }

      for (i = 0; i < labels.length (); i++)
        gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
        gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
        {
          tree elt = gimple_switch_label (stmt, i);
          basic_block target = label_to_block (cfun, CASE_LABEL (elt));
          bitmap_set_bit (target_blocks, target->index);
        }
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
        {
          if (! bitmap_bit_p (target_blocks, e->dest->index))
            {
              remove_edge (e);
              cfg_changed = true;
              free_dominance_info (CDI_DOMINATORS);
            }
          else
            ei_next (&ei);
        }
      BITMAP_FREE (target_blocks);
    }
}
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gswitch *stmt)
{
  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  tree cond = gimple_switch_index (stmt);
  if (TREE_CODE (cond) == SSA_NAME)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
      if (gimple_assign_cast_p (def_stmt))
        {
          tree def = gimple_assign_rhs1 (def_stmt);
          if (TREE_CODE (def) != SSA_NAME)
            return false;

          /* If we have an extension or sign-change that preserves the
             values we check against then we can copy the source value into
             the switch.  */
          tree ti = TREE_TYPE (def);
          if (INTEGRAL_TYPE_P (ti)
              && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
            {
              size_t n = gimple_switch_num_labels (stmt);
              tree min = NULL_TREE, max = NULL_TREE;
              if (n > 1)
                {
                  min = CASE_LOW (gimple_switch_label (stmt, 1));
                  if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
                    max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
                  else
                    max = CASE_LOW (gimple_switch_label (stmt, n - 1));
                }
              if ((!min || int_fits_type_p (min, ti))
                  && (!max || int_fits_type_p (max, ti)))
                {
                  gimple_switch_set_index (stmt, def);
                  simplify_gimple_switch_label_vec (stmt, ti);
                  update_stmt (stmt);
                  return true;
                }
            }
        }
    }

  return false;
}
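
/* Illustrative example (schematic, hypothetical names): for

     i_2 = (int) c_1;
     switch (i_2) <case 1 ... case 5 ...>

   where c_1 has a narrower integral type covering all case values,
   the cast is bypassed and the switch index becomes c_1, with the
   case labels retyped in place.  */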
/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple *stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
         twice, to handle ADDR_EXPR directly in p1/p2,
         SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
         on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
        {
          if (!POINTER_TYPE_P (TREE_TYPE (p)))
            break;
          if (TREE_CODE (p) == ADDR_EXPR)
            {
              tree q = TREE_OPERAND (p, 0);
              poly_int64 offset;
              tree base = get_addr_base_and_unit_offset (q, &offset);
              if (base)
                {
                  q = base;
                  if (maybe_ne (offset, 0))
                    off = size_binop (PLUS_EXPR, off, size_int (offset));
                }
              if (TREE_CODE (q) == MEM_REF
                  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
                {
                  p = TREE_OPERAND (q, 0);
                  off = size_binop (PLUS_EXPR, off,
                                    wide_int_to_tree (sizetype,
                                                      mem_ref_offset (q)));
                }
              else
                {
                  exps[i][j] = q;
                  offs[i][j++] = off;
                  break;
                }
            }
          if (TREE_CODE (p) != SSA_NAME)
            break;
          exps[i][j] = p;
          offs[i][j++] = off;
          if (j == CPD_ITERATIONS)
            break;
          stmt = SSA_NAME_DEF_STMT (p);
          if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
            break;
          code = gimple_assign_rhs_code (stmt);
          if (code == POINTER_PLUS_EXPR)
            {
              if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
                break;
              off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
              p = gimple_assign_rhs1 (stmt);
            }
          else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
            p = gimple_assign_rhs1 (stmt);
          else
            break;
        }
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
        return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
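
/* Illustrative example (schematic, hypothetical names): for

     p1_1 = &buf + 4;
     p2_2 = &buf + 12;

   constant_pointer_difference (p1_1, p2_2) walks both definition
   chains back to the common base &buf and returns the constant byte
   difference 8; if no common base is found within CPD_ITERATIONS
   steps it returns NULL.  */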
/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into a single
   memcpy (p, "abcd   ", 7);
   call if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
          || gimple_call_lhs (stmt2)
          || CHAR_BIT != 8
          || BITS_PER_UNIT != 8)
        break;
      else
        {
          tree callee1;
          tree ptr1, src1, str1, off1, len1, lhs1;
          tree ptr2 = gimple_call_arg (stmt2, 0);
          tree val2 = gimple_call_arg (stmt2, 1);
          tree len2 = gimple_call_arg (stmt2, 2);
          tree diff, vdef, new_str_cst;
          gimple *use_stmt;
          unsigned int ptr1_align;
          unsigned HOST_WIDE_INT src_len;
          char *src_buf;
          use_operand_p use_p;

          if (!tree_fits_shwi_p (val2)
              || !tree_fits_uhwi_p (len2)
              || compare_tree_int (len2, 1024) == 1)
            break;
          if (is_gimple_call (stmt1))
            {
              /* If first stmt is a call, it needs to be memcpy
                 or mempcpy, with string literal as second argument and
                 constant length.  */
              callee1 = gimple_call_fndecl (stmt1);
              if (callee1 == NULL_TREE
                  || !fndecl_built_in_p (callee1, BUILT_IN_NORMAL)
                  || gimple_call_num_args (stmt1) != 3)
                break;
              if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
                  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
                break;
              ptr1 = gimple_call_arg (stmt1, 0);
              src1 = gimple_call_arg (stmt1, 1);
              len1 = gimple_call_arg (stmt1, 2);
              lhs1 = gimple_call_lhs (stmt1);
              if (!tree_fits_uhwi_p (len1))
                break;
              str1 = string_constant (src1, &off1, NULL, NULL);
              if (str1 == NULL_TREE)
                break;
              if (!tree_fits_uhwi_p (off1)
                  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
                  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
                                             - tree_to_uhwi (off1)) > 0
                  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
                  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
                     != TYPE_MODE (char_type_node))
                break;
            }
          else if (gimple_assign_single_p (stmt1))
            {
              /* Otherwise look for length 1 memcpy optimized into
                 assignment.  */
              ptr1 = gimple_assign_lhs (stmt1);
              src1 = gimple_assign_rhs1 (stmt1);
              if (TREE_CODE (ptr1) != MEM_REF
                  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
                  || !tree_fits_shwi_p (src1))
                break;
              ptr1 = build_fold_addr_expr (ptr1);
              callee1 = NULL_TREE;
              len1 = size_one_node;
              lhs1 = NULL_TREE;
              off1 = size_zero_node;
              str1 = NULL_TREE;
            }
          else
            break;

          diff = constant_pointer_difference (ptr1, ptr2);
          if (diff == NULL && lhs1 != NULL)
            {
              diff = constant_pointer_difference (lhs1, ptr2);
              if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
                  && diff != NULL)
                diff = size_binop (PLUS_EXPR, diff,
                                   fold_convert (sizetype, len1));
            }
          /* If the difference between the second and first destination pointer
             is not constant, or is bigger than memcpy length, bail out.  */
          if (diff == NULL
              || !tree_fits_uhwi_p (diff)
              || tree_int_cst_lt (len1, diff)
              || compare_tree_int (diff, 1024) == 1)
            break;

          /* Use maximum of difference plus memset length and memcpy length
             as the new memcpy length, if it is too big, bail out.  */
          src_len = tree_to_uhwi (diff);
          src_len += tree_to_uhwi (len2);
          if (src_len < tree_to_uhwi (len1))
            src_len = tree_to_uhwi (len1);
          if (src_len > 1024)
            break;

          /* If mempcpy value is used elsewhere, bail out, as mempcpy
             with bigger length will return different result.  */
          if (lhs1 != NULL_TREE
              && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
              && (TREE_CODE (lhs1) != SSA_NAME
                  || !single_imm_use (lhs1, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          /* If anything reads memory in between memcpy and memset
             call, the modified memcpy call might change it.  */
          vdef = gimple_vdef (stmt1);
          if (vdef != NULL
              && (!single_imm_use (vdef, &use_p, &use_stmt)
                  || use_stmt != stmt2))
            break;

          ptr1_align = get_pointer_alignment (ptr1);
          /* Construct the new source string literal.  */
          src_buf = XALLOCAVEC (char, src_len + 1);
          if (callee1)
            memcpy (src_buf,
                    TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
                    tree_to_uhwi (len1));
          else
            src_buf[0] = tree_to_shwi (src1);
          memset (src_buf + tree_to_uhwi (diff),
                  tree_to_shwi (val2), tree_to_uhwi (len2));
          src_buf[src_len] = '\0';
          /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
             handle embedded '\0's.  */
          if (strlen (src_buf) != src_len)
            break;
          rtl_profile_for_bb (gimple_bb (stmt2));
          /* If the new memcpy wouldn't be emitted by storing the literal
             by pieces, this optimization might enlarge .rodata too much,
             as commonly used string literals couldn't be shared any
             longer.  */
          if (!can_store_by_pieces (src_len,
                                    builtin_strncpy_read_str,
                                    src_buf, ptr1_align, false))
            break;

          new_str_cst = build_string_literal (src_len, src_buf);
          if (callee1)
            {
              /* If STMT1 is a mem{,p}cpy call, adjust it and remove
                 memset call.  */
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                gimple_call_set_lhs (stmt1, NULL_TREE);
              gimple_call_set_arg (stmt1, 1, new_str_cst);
              gimple_call_set_arg (stmt1, 2,
                                   build_int_cst (TREE_TYPE (len1), src_len));
              update_stmt (stmt1);
              unlink_stmt_vdef (stmt2);
              gsi_replace (gsi_p, gimple_build_nop (), false);
              fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
              release_defs (stmt2);
              if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
                {
                  fwprop_invalidate_lattice (lhs1);
                  release_ssa_name (lhs1);
                }
              return true;
            }
          else
            {
              /* Otherwise, if STMT1 is length 1 memcpy optimized into
                 assignment, remove STMT1 and change memset call into
                 memcpy call.  */
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

              if (!is_gimple_val (ptr1))
                ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
                                                 true, GSI_SAME_STMT);
              tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
              gimple_call_set_fndecl (stmt2, fndecl);
              gimple_call_set_fntype (as_a <gcall *> (stmt2),
                                      TREE_TYPE (fndecl));
              gimple_call_set_arg (stmt2, 0, ptr1);
              gimple_call_set_arg (stmt2, 1, new_str_cst);
              gimple_call_set_arg (stmt2, 2,
                                   build_int_cst (TREE_TYPE (len2), src_len));
              unlink_stmt_vdef (stmt1);
              gsi_remove (&gsi, true);
              fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
              release_defs (stmt1);
              update_stmt (stmt2);
              return true;
            }
        }
      break;
    default:
      break;
    }
  return false;
}
/* Given a ssa_name in NAME see if it was defined by an assignment and
   set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
   to the second operand on the rhs.  */

static void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple *def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  arg31 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
          && can_propagate_from (def))
        {
          code1 = gimple_assign_rhs_code (def);
          arg11 = gimple_assign_rhs1 (def);
          arg21 = gimple_assign_rhs2 (def);
          arg31 = gimple_assign_rhs3 (def);
        }
    }
  else if (grhs_class != GIMPLE_SINGLE_RHS)
    code1 = ERROR_MARK;

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
  if (arg31)
    *code = ERROR_MARK;
}
/* Recognize rotation patterns.  Return true if a transformation
   applied, otherwise return false.

   We are looking for X with unsigned type T with bitsize B, OP being
   +, | or ^, some type T2 wider than T.  For:
   (X << CNT1) OP (X >> CNT2)				iff CNT1 + CNT2 == B
   ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))	iff CNT1 + CNT2 == B

   transform these into:
   X r<< CNT1

   Or for:
   (X << Y) OP (X >> (B - Y))
   (X << (int) Y) OP (X >> (int) (B - Y))
   ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
   ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
   (X << Y) | (X >> ((-Y) & (B - 1)))
   (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   transform these into:
   X r<< Y

   Or for:
   (X << (Y & (B - 1))) | (X >> ((-Y) & (B - 1)))
   (X << (int) (Y & (B - 1))) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << (Y & (B - 1)))) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) (Y & (B - 1)))) \
    | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   transform these into:
   X r<< (Y & (B - 1))

   Note, in the patterns with T2 type, the type of OP operands
   might be even a signed type, but should have precision B.
   Expressions with & (B - 1) should be recognized only if B is
   a power of 2.  */

static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg[2], rtype, rotcnt = NULL_TREE;
  tree def_arg1[2], def_arg2[2];
  enum tree_code def_code[2];
  tree lhs;
  int i;
  bool swapped_p = false;
  gimple *g;

  arg[0] = gimple_assign_rhs1 (stmt);
  arg[1] = gimple_assign_rhs2 (stmt);
  rtype = TREE_TYPE (arg[0]);

  /* Only create rotates in complete modes.  Other cases are not
     expanded properly.  */
  if (!INTEGRAL_TYPE_P (rtype)
      || !type_has_mode_precision_p (rtype))
    return false;

  for (i = 0; i < 2; i++)
    defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);

  /* Look through narrowing conversions.  */
  if (CONVERT_EXPR_CODE_P (def_code[0])
      && CONVERT_EXPR_CODE_P (def_code[1])
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
         == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
      && has_single_use (arg[0])
      && has_single_use (arg[1]))
    {
      for (i = 0; i < 2; i++)
        {
          arg[i] = def_arg1[i];
          defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
        }
    }

  /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
  for (i = 0; i < 2; i++)
    if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
      return false;
    else if (!has_single_use (arg[i]))
      return false;
  if (def_code[0] == def_code[1])
    return false;

  /* If we've looked through narrowing conversions before, look through
     widening conversions from unsigned type with the same precision
     as rtype here.  */
  if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    for (i = 0; i < 2; i++)
      {
        tree tem;
        enum tree_code code;
        defcodefor_name (def_arg1[i], &code, &tem, NULL);
        if (!CONVERT_EXPR_CODE_P (code)
            || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
            || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
          return false;
        def_arg1[i] = tem;
      }
  /* Both shifts have to use the same first operand.  */
  if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
      || !types_compatible_p (TREE_TYPE (def_arg1[0]),
                              TREE_TYPE (def_arg1[1])))
    return false;
  if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    return false;

  /* CNT1 + CNT2 == B case above.  */
  if (tree_fits_uhwi_p (def_arg2[0])
      && tree_fits_uhwi_p (def_arg2[1])
      && tree_to_uhwi (def_arg2[0])
         + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
    rotcnt = def_arg2[0];
  else if (TREE_CODE (def_arg2[0]) != SSA_NAME
           || TREE_CODE (def_arg2[1]) != SSA_NAME)
    return false;
  else
    {
      tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
      enum tree_code cdef_code[2];
      /* Look through conversion of the shift count argument.
         The C/C++ FE cast any shift count argument to integer_type_node.
         The only problem might be if the shift count type maximum value
         is equal or smaller than number of bits in rtype.  */
      for (i = 0; i < 2; i++)
        {
          def_arg2_alt[i] = def_arg2[i];
          defcodefor_name (def_arg2[i], &cdef_code[i],
                           &cdef_arg1[i], &cdef_arg2[i]);
          if (CONVERT_EXPR_CODE_P (cdef_code[i])
              && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
              && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
                 > floor_log2 (TYPE_PRECISION (rtype))
              && type_has_mode_precision_p (TREE_TYPE (cdef_arg1[i])))
            {
              def_arg2_alt[i] = cdef_arg1[i];
              defcodefor_name (def_arg2_alt[i], &cdef_code[i],
                               &cdef_arg1[i], &cdef_arg2[i]);
            }
        }
      for (i = 0; i < 2; i++)
        /* Check for one shift count being Y and the other B - Y,
           with optional casts.  */
        if (cdef_code[i] == MINUS_EXPR
            && tree_fits_shwi_p (cdef_arg1[i])
            && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
            && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
          {
            tree tem;
            enum tree_code code;

            if (cdef_arg2[i] == def_arg2[1 - i]
                || cdef_arg2[i] == def_arg2_alt[1 - i])
              {
                rotcnt = cdef_arg2[i];
                break;
              }
            defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && type_has_mode_precision_p (TREE_TYPE (tem))
                && (tem == def_arg2[1 - i]
                    || tem == def_arg2_alt[1 - i]))
              {
                rotcnt = tem;
                break;
              }
          }
        /* The above sequence isn't safe for Y being 0,
           because then one of the shifts triggers undefined behavior.
           This alternative is safe even for rotation count of 0.
           One shift count is Y and the other (-Y) & (B - 1).
           Or one shift count is Y & (B - 1) and the other (-Y) & (B - 1).  */
        else if (cdef_code[i] == BIT_AND_EXPR
                 && pow2p_hwi (TYPE_PRECISION (rtype))
                 && tree_fits_shwi_p (cdef_arg2[i])
                 && tree_to_shwi (cdef_arg2[i])
                    == TYPE_PRECISION (rtype) - 1
                 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
                 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
          {
            tree tem;
            enum tree_code code;

            defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
            if (CONVERT_EXPR_CODE_P (code)
                && INTEGRAL_TYPE_P (TREE_TYPE (tem))
                && TYPE_PRECISION (TREE_TYPE (tem))
                   > floor_log2 (TYPE_PRECISION (rtype))
                && type_has_mode_precision_p (TREE_TYPE (tem)))
              defcodefor_name (tem, &code, &tem, NULL);

            if (code == NEGATE_EXPR)
              {
                if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
                  {
                    rotcnt = tem;
                    break;
                  }
                tree tem2;
                defcodefor_name (tem, &code, &tem2, NULL);
                if (CONVERT_EXPR_CODE_P (code)
                    && INTEGRAL_TYPE_P (TREE_TYPE (tem2))
                    && TYPE_PRECISION (TREE_TYPE (tem2))
                       > floor_log2 (TYPE_PRECISION (rtype))
                    && type_has_mode_precision_p (TREE_TYPE (tem2)))
                  {
                    if (tem2 == def_arg2[1 - i]
                        || tem2 == def_arg2_alt[1 - i])
                      {
                        rotcnt = tem2;
                        break;
                      }
                  }
                else
                  tem2 = NULL_TREE;

                if (cdef_code[1 - i] == BIT_AND_EXPR
                    && tree_fits_shwi_p (cdef_arg2[1 - i])
                    && tree_to_shwi (cdef_arg2[1 - i])
                       == TYPE_PRECISION (rtype) - 1
                    && TREE_CODE (cdef_arg1[1 - i]) == SSA_NAME)
                  {
                    if (tem == cdef_arg1[1 - i]
                        || tem2 == cdef_arg1[1 - i])
                      {
                        rotcnt = def_arg2[1 - i];
                        break;
                      }
                    tree tem3;
                    defcodefor_name (cdef_arg1[1 - i], &code, &tem3, NULL);
                    if (CONVERT_EXPR_CODE_P (code)
                        && INTEGRAL_TYPE_P (TREE_TYPE (tem3))
                        && TYPE_PRECISION (TREE_TYPE (tem3))
                           > floor_log2 (TYPE_PRECISION (rtype))
                        && type_has_mode_precision_p (TREE_TYPE (tem3)))
                      {
                        if (tem == tem3 || tem2 == tem3)
                          {
                            rotcnt = def_arg2[1 - i];
                            break;
                          }
                      }
                  }
              }
          }
      if (rotcnt == NULL_TREE)
        return false;
      swapped_p = i != 1;
    }

  if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
                                  TREE_TYPE (rotcnt)))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
                               NOP_EXPR, rotcnt);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      rotcnt = gimple_assign_lhs (g);
    }
  lhs = gimple_assign_lhs (stmt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
  g = gimple_build_assign (lhs,
                           ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
                           ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
  if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
    {
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
    }
  gsi_replace (gsi, g, false);
  return true;
}
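
/* Illustrative example (schematic GIMPLE; the SSA names are hypothetical,
   not from the original sources): with unsigned 32-bit x_1,

     t1_2 = x_1 << n_3;
     t2_4 = x_1 >> (32 - n_3);
     r_5 = t1_2 | t2_4;

   is replaced by the single statement 'r_5 = x_1 r<< n_3' (an
   LROTATE_EXPR), provided both shift results are single-use.  */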
/* Combine an element access with a shuffle.  Returns true if there were
   any changes made, else it returns false.  */

static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op, op0, op1, op2;
  tree elem_type;
  unsigned idx = 0, size;
  enum tree_code code;

  op = gimple_assign_rhs1 (stmt);
  gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);

  op0 = TREE_OPERAND (op, 0);
  if (TREE_CODE (op0) != SSA_NAME
      || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
    return false;

  def_stmt = get_prop_source_stmt (op0, false, NULL);
  if (!def_stmt || !can_propagate_from (def_stmt))
    return false;

  op1 = TREE_OPERAND (op, 1);
  op2 = TREE_OPERAND (op, 2);
  code = gimple_assign_rhs_code (def_stmt);

  if (code == CONSTRUCTOR)
    {
      tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
                               gimple_assign_rhs1 (def_stmt), op1, op2);
      if (!tem || !valid_gimple_rhs_p (tem))
        return false;
      gimple_assign_set_rhs_from_tree (gsi, tem);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  elem_type = TREE_TYPE (TREE_TYPE (op0));
  if (TREE_TYPE (op) != elem_type)
    return false;

  size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
  if (maybe_ne (bit_field_size (op), size))
    return false;

  if (code == VEC_PERM_EXPR
      && constant_multiple_p (bit_field_offset (op), size, &idx))
    {
      tree p, m, tem;
      unsigned HOST_WIDE_INT nelts;
      m = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (m) != VECTOR_CST
          || !VECTOR_CST_NELTS (m).is_constant (&nelts))
        return false;
      idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
      idx %= 2 * nelts;
      if (idx < nelts)
        p = gimple_assign_rhs1 (def_stmt);
      else
        {
          p = gimple_assign_rhs2 (def_stmt);
          idx -= nelts;
        }
      tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
                    unshare_expr (p), op1, bitsize_int (idx * size));
      gimple_assign_set_rhs1 (stmt, tem);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));
      return true;
    }

  return false;
}
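
/* Illustrative example (schematic GIMPLE; the SSA names are hypothetical,
   not from the original sources): for four-element vectors,

     v_1 = VEC_PERM_EXPR <a_2, b_3, { 4, 5, 6, 7 }>;
     x_4 = BIT_FIELD_REF <v_1, 32, 0>;

   element 0 of the permutation selects element 4, i.e. element 0 of
   b_3, so the extraction is rewritten to

     x_4 = BIT_FIELD_REF <b_3, 32, 0>;  */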
/* Determine whether applying the 2 permutations (mask1 then mask2)
   gives back one of the input.  */

static int
is_combined_permutation_identity (tree mask1, tree mask2)
{
  tree mask;
  unsigned HOST_WIDE_INT nelts, i, j;
  bool maybe_identity1 = true;
  bool maybe_identity2 = true;

  gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
                       && TREE_CODE (mask2) == VECTOR_CST);
  mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
  if (mask == NULL_TREE || TREE_CODE (mask) != VECTOR_CST)
    return 0;

  if (!VECTOR_CST_NELTS (mask).is_constant (&nelts))
    return 0;
  for (i = 0; i < nelts; i++)
    {
      tree val = VECTOR_CST_ELT (mask, i);
      gcc_assert (TREE_CODE (val) == INTEGER_CST);
      j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
      if (j == i)
        maybe_identity2 = false;
      else if (j == i + nelts)
        maybe_identity1 = false;
      else
        return 0;
    }
  return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
}
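
/* Illustrative example (not from the original sources): with
   two-element vectors, mask1 = { 1, 0 } and mask2 = { 1, 0 } compose
   to the identity on the first input, so the function returns 1;
   applying the same element swap twice gives back the original
   vector.  */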
/* Combine a shuffle with its arguments.  Returns 1 if there were any
   changes made, 2 if cfg-cleanup needs to run.  Else it returns 0.  */

static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *def_stmt;
  tree op0, op1, op2, op3, arg0, arg1;
  enum tree_code code;
  bool single_use_op0 = false;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);

  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);
  op2 = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (op2) != VECTOR_CST)
    return 0;

  if (TREE_CODE (op0) == VECTOR_CST)
    {
      code = VECTOR_CST;
      arg0 = op0;
    }
  else if (TREE_CODE (op0) == SSA_NAME)
    {
      def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
      if (!def_stmt || !can_propagate_from (def_stmt))
        return 0;

      code = gimple_assign_rhs_code (def_stmt);
      arg0 = gimple_assign_rhs1 (def_stmt);
    }
  else
    return 0;

  /* Two consecutive shuffles.  */
  if (code == VEC_PERM_EXPR)
    {
      tree orig;
      int ident;

      if (op0 != op1)
        return 0;
      op3 = gimple_assign_rhs3 (def_stmt);
      if (TREE_CODE (op3) != VECTOR_CST)
        return 0;
      ident = is_combined_permutation_identity (op3, op2);
      if (!ident)
        return 0;
      orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
                          : gimple_assign_rhs2 (def_stmt);
      gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
      gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
      gimple_set_num_ops (stmt, 2);
      update_stmt (stmt);
      return remove_prop_source_from_use (op0) ? 2 : 1;
    }

  /* Shuffle of a constructor.  */
  else if (code == CONSTRUCTOR || code == VECTOR_CST)
    {
      tree opt;
      bool ret = false;
      if (op0 != op1)
        {
          if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
            return 0;

          if (TREE_CODE (op1) == VECTOR_CST)
            arg1 = op1;
          else if (TREE_CODE (op1) == SSA_NAME)
            {
              enum tree_code code2;

              gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
              if (!def_stmt2 || !can_propagate_from (def_stmt2))
                return 0;

              code2 = gimple_assign_rhs_code (def_stmt2);
              if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
                return 0;
              arg1 = gimple_assign_rhs1 (def_stmt2);
            }
          else
            return 0;
        }
      else
        {
          /* Already used twice in this statement.  */
          if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
            return 0;
          arg1 = arg0;
        }
      opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
      if (!opt
          || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
        return 0;
      gimple_assign_set_rhs_from_tree (gsi, opt);
      update_stmt (gsi_stmt (*gsi));
      if (TREE_CODE (op0) == SSA_NAME)
        ret = remove_prop_source_from_use (op0);
      if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
        ret |= remove_prop_source_from_use (op1);

      return ret ? 2 : 1;
    }

  return 0;
}
/* Get the BIT_FIELD_REF definition of VAL, if any, looking through
   conversions with code CONV_CODE or update it if still ERROR_MARK.
   Return NULL_TREE if no such matching def was found.  */

static tree
get_bit_field_ref_def (tree val, enum tree_code &conv_code)
{
  if (TREE_CODE (val) != SSA_NAME)
    return NULL_TREE;
  gimple *def_stmt = get_prop_source_stmt (val, false, NULL);
  if (!def_stmt)
    return NULL_TREE;
  enum tree_code code = gimple_assign_rhs_code (def_stmt);
  if (code == FLOAT_EXPR
      || code == FIX_TRUNC_EXPR)
    {
      tree op1 = gimple_assign_rhs1 (def_stmt);
      if (conv_code == ERROR_MARK)
        {
          if (maybe_ne (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (val))),
                        GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op1)))))
            return NULL_TREE;
          conv_code = code;
        }
      else if (conv_code != code)
        return NULL_TREE;
      if (TREE_CODE (op1) != SSA_NAME)
        return NULL_TREE;
      def_stmt = SSA_NAME_DEF_STMT (op1);
      if (! is_gimple_assign (def_stmt))
        return NULL_TREE;
      code = gimple_assign_rhs_code (def_stmt);
    }
  if (code != BIT_FIELD_REF)
    return NULL_TREE;
  return gimple_assign_rhs1 (def_stmt);
}
/* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */
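/* E.g. (an illustrative sketch), the element-wise extraction
     _1 = BIT_FIELD_REF <a_5, 32, 32>;
     _2 = BIT_FIELD_REF <a_5, 32, 0>;
     _3 = BIT_FIELD_REF <a_5, 32, 96>;
     _4 = BIT_FIELD_REF <a_5, 32, 64>;
     x_6 = {_1, _2, _3, _4};
   is recognized as
     x_6 = VEC_PERM_EXPR <a_5, a_5, { 1, 0, 3, 2 }>;
   provided the target supports the constant permutation.  */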
static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree op, op2, orig[2], type, elem_type;
  unsigned elem_size, i;
  unsigned HOST_WIDE_INT nelts;
  enum tree_code conv_code;
  constructor_elt *elt;
  bool maybe_ident;

  gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);

  op = gimple_assign_rhs1 (stmt);
  type = TREE_TYPE (op);
  gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);

  if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
    return false;
  elem_type = TREE_TYPE (type);
  elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));

  vec_perm_builder sel (nelts, nelts, 1);
  orig[0] = NULL;
  orig[1] = NULL;
  conv_code = ERROR_MARK;
  maybe_ident = true;
  tree one_constant = NULL_TREE;
  tree one_nonconstant = NULL_TREE;
  auto_vec<tree> constants;
  constants.safe_grow_cleared (nelts);
  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
    {
      tree ref, op1;
      unsigned int elem;

      if (i >= nelts)
	return false;
      /* Look for elements extracted and possibly converted from
	 another vector.  */
      op1 = get_bit_field_ref_def (elt->value, conv_code);
      if (op1
	  && TREE_CODE ((ref = TREE_OPERAND (op1, 0))) == SSA_NAME
	  && VECTOR_TYPE_P (TREE_TYPE (ref))
	  && useless_type_conversion_p (TREE_TYPE (op1),
					TREE_TYPE (TREE_TYPE (ref)))
	  && known_eq (bit_field_size (op1), elem_size)
	  && constant_multiple_p (bit_field_offset (op1),
				  elem_size, &elem))
	{
	  unsigned int j;
	  for (j = 0; j < 2; ++j)
	    {
	      if (!orig[j])
		{
		  if (j == 0
		      || useless_type_conversion_p (TREE_TYPE (orig[0]),
						    TREE_TYPE (ref)))
		    break;
		}
	      else if (ref == orig[j])
		break;
	    }
	  /* Found a suitable vector element.  */
	  if (j < 2)
	    {
	      orig[j] = ref;
	      if (j)
		elem += nelts;
	      if (elem != i)
		maybe_ident = false;
	      sel.quick_push (elem);
	      continue;
	    }
	  /* Else fallthru.  */
	}
      /* Handle elements not extracted from a vector.
	  1. constants by permuting with constant vector
	  2. a unique non-constant element by permuting with a splat vector */
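      /* E.g. (an illustrative sketch), {_1, _2, 5, 5} with _1 and _2
	 extracted from lanes 0 and 1 of v_3 can become
	   VEC_PERM_EXPR <v_3, { 5, 5, 5, 5 }, { 0, 1, 6, 7 }>
	 (case 1), while {_1, _2, x_4, x_4} permutes v_3 with a splat
	 of x_4 (case 2).  */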
      if (orig[1]
	  && orig[1] != error_mark_node)
	return false;
      orig[1] = error_mark_node;
      if (CONSTANT_CLASS_P (elt->value))
	{
	  if (one_nonconstant)
	    return false;
	  if (!one_constant)
	    one_constant = elt->value;
	  constants[i] = elt->value;
	}
      else
	{
	  if (!one_nonconstant)
	    one_nonconstant = elt->value;
	  else if (!operand_equal_p (one_nonconstant, elt->value, 0))
	    return false;
	}
      sel.quick_push (i + nelts);
      maybe_ident = false;
    }
  if (i < nelts)
    return false;

  if (! orig[0]
      || ! VECTOR_TYPE_P (TREE_TYPE (orig[0]))
      || maybe_ne (TYPE_VECTOR_SUBPARTS (type),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig[0]))))
    return false;

  tree tem;
  if (conv_code != ERROR_MARK
      && (! supportable_convert_operation (conv_code, type,
					   TREE_TYPE (orig[0]),
					   &tem, &conv_code)
	  || conv_code == CALL_EXPR))
    return false;

  if (maybe_ident)
    {
      if (conv_code == ERROR_MARK)
	gimple_assign_set_rhs_from_tree (gsi, orig[0]);
      else
	gimple_assign_set_rhs_with_ops (gsi, conv_code, orig[0],
					NULL_TREE, NULL_TREE);
    }
  else
    {
      tree mask_type;

      vec_perm_indices indices (sel, orig[1] ? 2 : 1, nelts);
      if (!can_vec_perm_const_p (TYPE_MODE (type), indices))
	return false;
      mask_type
	= build_vector_type (build_nonstandard_integer_type (elem_size, 1),
			     nelts);
      if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
	  || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type)),
		       GET_MODE_SIZE (TYPE_MODE (type))))
	return false;
      op2 = vec_perm_indices_to_tree (mask_type, indices);
      bool convert_orig0 = false;
      if (!orig[1])
	orig[1] = orig[0];
      else if (orig[1] == error_mark_node
	       && one_nonconstant)
	{
	  gimple_seq seq = NULL;
	  orig[1] = gimple_build_vector_from_val (&seq, UNKNOWN_LOCATION,
						  type, one_nonconstant);
	  gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
	  convert_orig0 = true;
	}
      else if (orig[1] == error_mark_node)
	{
	  tree_vector_builder vec (type, nelts, 1);
	  for (unsigned i = 0; i < nelts; ++i)
	    if (constants[i])
	      vec.quick_push (constants[i]);
	    else
	      /* ??? Push a don't-care value.  */
	      vec.quick_push (one_constant);
	  orig[1] = vec.build ();
	  convert_orig0 = true;
	}
      if (conv_code == ERROR_MARK)
	gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR, orig[0],
					orig[1], op2);
      else if (convert_orig0)
	{
	  gimple *conv
	    = gimple_build_assign (make_ssa_name (type), conv_code, orig[0]);
	  orig[0] = gimple_assign_lhs (conv);
	  gsi_insert_before (gsi, conv, GSI_SAME_STMT);
	  gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR,
					  orig[0], orig[1], op2);
	}
      else
	{
	  gimple *perm
	    = gimple_build_assign (make_ssa_name (TREE_TYPE (orig[0])),
				   VEC_PERM_EXPR, orig[0], orig[1], op2);
	  orig[0] = gimple_assign_lhs (perm);
	  gsi_insert_before (gsi, perm, GSI_SAME_STMT);
	  gimple_assign_set_rhs_with_ops (gsi, conv_code, orig[0],
					  NULL_TREE, NULL_TREE);
	}
    }
  update_stmt (gsi_stmt (*gsi));
  return true;
}
/* Primitive "lattice" function for gimple_simplify.  */

static tree
fwprop_ssa_val (tree name)
{
  /* First valueize NAME.  */
  if (TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    {
      tree val = lattice[SSA_NAME_VERSION (name)];
      if (val)
	name = val;
    }
  /* We continue matching along SSA use-def edges for SSA names
     that are not single-use.  Currently there are no patterns
     that would cause any issues with that.  */
  return name;
}
/* Main entry point for the forward propagation and statement combine
   optimizer.  */

namespace {

const pass_data pass_data_forwprop =
{
  GIMPLE_PASS, /* type */
  "forwprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FORWPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_forwprop : public gimple_opt_pass
{
public:
  pass_forwprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_forwprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_forwprop (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_forwprop; }
  virtual unsigned int execute (function *);

}; // class pass_forwprop
unsigned int
pass_forwprop::execute (function *fun)
{
  unsigned int todoflags = 0;

  cfg_changed = false;

  /* Combine stmts with the stmts defining their operands.  Do that
     in an order that guarantees visiting SSA defs before SSA uses.  */
  lattice.create (num_ssa_names);
  lattice.quick_grow_cleared (num_ssa_names);
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
  int postorder_num = pre_and_rev_post_order_compute_fn (cfun, NULL,
							 postorder, false);
  auto_vec<gimple *, 4> to_fixup;
  auto_vec<gimple *, 32> to_remove;
  to_purge = BITMAP_ALLOC (NULL);
  for (int i = 0; i < postorder_num; ++i)
    {
      gimple_stmt_iterator gsi;
      basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
      /* Record degenerate PHIs in the lattice.  */
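      /* E.g. (an illustrative sketch) x_3 = PHI <y_1, y_1> gets the
	 lattice value y_1 and, when the copy may be propagated, is
	 queued for removal.  */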
      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
	   gsi_next (&si))
	{
	  gphi *phi = si.phi ();
	  tree res = gimple_phi_result (phi);
	  if (virtual_operand_p (res))
	    continue;

	  use_operand_p use_p;
	  ssa_op_iter it;
	  tree first = NULL_TREE;
	  bool all_same = true;
	  FOR_EACH_PHI_ARG (use_p, phi, it, SSA_OP_USE)
	    {
	      tree use = USE_FROM_PTR (use_p);
	      if (! first)
		first = use;
	      else if (! operand_equal_p (first, use, 0))
		{
		  all_same = false;
		  break;
		}
	    }
	  if (all_same)
	    {
	      if (may_propagate_copy (res, first))
		to_remove.safe_push (phi);
	      fwprop_set_lattice_val (res, first);
	    }
	}
      /* Apply forward propagation to all stmts in the basic-block.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree lhs, rhs;
	  enum tree_code code;

	  if (!is_gimple_assign (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  lhs = gimple_assign_lhs (stmt);
	  rhs = gimple_assign_rhs1 (stmt);
	  code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE (lhs) != SSA_NAME
	      || has_zero_uses (lhs))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
	  if (code == ADDR_EXPR
	      /* Handle pointer conversions on invariant addresses
		 as well, as this is valid gimple.  */
	      || (CONVERT_EXPR_CODE_P (code)
		  && TREE_CODE (rhs) == ADDR_EXPR
		  && POINTER_TYPE_P (TREE_TYPE (lhs))))
	    {
	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
	      if ((!base
		   || !DECL_P (base)
		   || decl_address_invariant_p (base))
		  && !stmt_references_abnormal_ssa_name (stmt)
		  && forward_propagate_addr_expr (lhs, rhs, true))
		{
		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == POINTER_PLUS_EXPR)
	    {
	      tree off = gimple_assign_rhs2 (stmt);
	      if (TREE_CODE (off) == INTEGER_CST
		  && can_propagate_from (stmt)
		  && !simple_iv_increment_p (stmt)
		  /* ??? Better adjust the interface to that function
		     instead of building new trees here.  */
		  && forward_propagate_addr_expr
		       (lhs,
			build1_loc (gimple_location (stmt),
				    ADDR_EXPR, TREE_TYPE (rhs),
				    fold_build2 (MEM_REF,
						 TREE_TYPE (TREE_TYPE (rhs)),
						 rhs,
						 fold_convert (ptr_type_node,
							       off))), true))
		{
		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else if (is_gimple_min_invariant (rhs))
		{
		  /* Make sure to fold &a[0] + off_1 here.  */
		  fold_stmt_inplace (&gsi);
		  update_stmt (stmt);
		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
		    gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
		   && gimple_assign_load_p (stmt)
		   && !gimple_has_volatile_ops (stmt)
		   && (TREE_CODE (gimple_assign_rhs1 (stmt))
		       != TARGET_MEM_REF)
		   && !stmt_can_throw_internal (cfun, stmt))
	    {
	      /* Rewrite loads used only in real/imagpart extractions to
		 component-wise loads.  */
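	      /* E.g. (an illustrative sketch):
		   tem_1 = c;
		   r_2 = REALPART_EXPR <tem_1>;
		   i_3 = IMAGPART_EXPR <tem_1>;
		 becomes
		   r_2 = REALPART_EXPR <c>;
		   i_3 = IMAGPART_EXPR <c>;  */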
	      use_operand_p use_p;
	      imm_use_iterator iter;
	      bool rewrite = true;
	      FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
		{
		  gimple *use_stmt = USE_STMT (use_p);
		  if (is_gimple_debug (use_stmt))
		    continue;
		  if (!is_gimple_assign (use_stmt)
		      || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
			  && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR))
		    {
		      rewrite = false;
		      break;
		    }
		}
	      if (rewrite)
		{
		  gimple *use_stmt;
		  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
		    {
		      if (is_gimple_debug (use_stmt))
			{
			  if (gimple_debug_bind_p (use_stmt))
			    {
			      gimple_debug_bind_reset_value (use_stmt);
			      update_stmt (use_stmt);
			    }
			  continue;
			}

		      tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
					     TREE_TYPE (TREE_TYPE (rhs)),
					     unshare_expr (rhs));
		      gimple *new_stmt
			= gimple_build_assign (gimple_assign_lhs (use_stmt),
					       new_rhs);

		      location_t loc = gimple_location (use_stmt);
		      gimple_set_location (new_stmt, loc);
		      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		      unlink_stmt_vdef (use_stmt);
		      gsi_remove (&gsi2, true);

		      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
		    }

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE
		   && TYPE_MODE (TREE_TYPE (lhs)) == BLKmode
		   && gimple_assign_load_p (stmt)
		   && !gimple_has_volatile_ops (stmt)
		   && (TREE_CODE (gimple_assign_rhs1 (stmt))
		       != TARGET_MEM_REF)
		   && !stmt_can_throw_internal (cfun, stmt))
	    {
	      /* Rewrite loads used only in BIT_FIELD_REF extractions to
		 component-wise loads.  */
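	      /* E.g. (an illustrative sketch):
		   tem_1 = v;
		   _2 = BIT_FIELD_REF <tem_1, 32, 0>;
		 becomes
		   _2 = BIT_FIELD_REF <v, 32, 0>;  */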
	      use_operand_p use_p;
	      imm_use_iterator iter;
	      bool rewrite = true;
	      FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
		{
		  gimple *use_stmt = USE_STMT (use_p);
		  if (is_gimple_debug (use_stmt))
		    continue;
		  if (!is_gimple_assign (use_stmt)
		      || gimple_assign_rhs_code (use_stmt) != BIT_FIELD_REF)
		    {
		      rewrite = false;
		      break;
		    }
		}
	      if (rewrite)
		{
		  gimple *use_stmt;
		  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
		    {
		      if (is_gimple_debug (use_stmt))
			{
			  if (gimple_debug_bind_p (use_stmt))
			    {
			      gimple_debug_bind_reset_value (use_stmt);
			      update_stmt (use_stmt);
			    }
			  continue;
			}

		      tree bfr = gimple_assign_rhs1 (use_stmt);
		      tree new_rhs = fold_build3 (BIT_FIELD_REF,
						  TREE_TYPE (bfr),
						  unshare_expr (rhs),
						  TREE_OPERAND (bfr, 1),
						  TREE_OPERAND (bfr, 2));
		      gimple *new_stmt
			= gimple_build_assign (gimple_assign_lhs (use_stmt),
					       new_rhs);

		      location_t loc = gimple_location (use_stmt);
		      gimple_set_location (new_stmt, loc);
		      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		      unlink_stmt_vdef (use_stmt);
		      gsi_remove (&gsi2, true);

		      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
		    }

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == COMPLEX_EXPR)
	    {
	      /* Rewrite stores of a single-use complex build expression
		 to component-wise stores.  */
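	      /* E.g. (an illustrative sketch):
		   x_1 = COMPLEX_EXPR <r_2, i_3>;
		   c = x_1;
		 becomes
		   REALPART_EXPR <c> = r_2;
		   IMAGPART_EXPR <c> = i_3;  */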
	      use_operand_p use_p;
	      gimple *use_stmt;
	      if (single_imm_use (lhs, &use_p, &use_stmt)
		  && gimple_store_p (use_stmt)
		  && !gimple_has_volatile_ops (use_stmt)
		  && is_gimple_assign (use_stmt)
		  && (TREE_CODE (gimple_assign_lhs (use_stmt))
		      != TARGET_MEM_REF))
		{
		  tree use_lhs = gimple_assign_lhs (use_stmt);
		  tree new_lhs = build1 (REALPART_EXPR,
					 TREE_TYPE (TREE_TYPE (use_lhs)),
					 unshare_expr (use_lhs));
		  gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
		  location_t loc = gimple_location (use_stmt);
		  gimple_set_location (new_stmt, loc);
		  gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
		  gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (cfun)));
		  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		  gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
		  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		  gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);

		  new_lhs = build1 (IMAGPART_EXPR,
				    TREE_TYPE (TREE_TYPE (use_lhs)),
				    unshare_expr (use_lhs));
		  gimple_assign_set_lhs (use_stmt, new_lhs);
		  gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
		  update_stmt (use_stmt);

		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (code == CONSTRUCTOR
		   && VECTOR_TYPE_P (TREE_TYPE (rhs))
		   && TYPE_MODE (TREE_TYPE (rhs)) == BLKmode
		   && CONSTRUCTOR_NELTS (rhs) > 0
		   && (!VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
		       || (TYPE_MODE (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
			   != BLKmode)))
	    {
	      /* Rewrite stores of a single-use vector constructor
		 to component-wise stores if the mode isn't supported.  */
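	      /* E.g. (an illustrative sketch), for a two-element vector
		 of 32-bit elements:
		   v_1 = {a_2, b_3};
		   mem = v_1;
		 becomes
		   BIT_FIELD_REF <mem, 32, 0> = a_2;
		   BIT_FIELD_REF <mem, 32, 32> = b_3;  */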
	      use_operand_p use_p;
	      gimple *use_stmt;
	      if (single_imm_use (lhs, &use_p, &use_stmt)
		  && gimple_store_p (use_stmt)
		  && !gimple_has_volatile_ops (use_stmt)
		  && !stmt_can_throw_internal (cfun, use_stmt)
		  && is_gimple_assign (use_stmt)
		  && (TREE_CODE (gimple_assign_lhs (use_stmt))
		      != TARGET_MEM_REF))
		{
		  tree elt_t = TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value);
		  unsigned HOST_WIDE_INT elt_w
		    = tree_to_uhwi (TYPE_SIZE (elt_t));
		  unsigned HOST_WIDE_INT n
		    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs)));
		  for (unsigned HOST_WIDE_INT bi = 0; bi < n; bi += elt_w)
		    {
		      unsigned HOST_WIDE_INT ci = bi / elt_w;
		      tree new_rhs;
		      if (ci < CONSTRUCTOR_NELTS (rhs))
			new_rhs = CONSTRUCTOR_ELT (rhs, ci)->value;
		      else
			new_rhs = build_zero_cst (elt_t);
		      tree use_lhs = gimple_assign_lhs (use_stmt);
		      tree new_lhs = build3 (BIT_FIELD_REF,
					     elt_t,
					     unshare_expr (use_lhs),
					     bitsize_int (elt_w),
					     bitsize_int (bi));
		      gimple *new_stmt = gimple_build_assign (new_lhs, new_rhs);
		      location_t loc = gimple_location (use_stmt);
		      gimple_set_location (new_stmt, loc);
		      gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
		      gimple_set_vdef (new_stmt,
				       make_ssa_name (gimple_vop (cfun)));
		      SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
		      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		      gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
		    }
		  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		  unlink_stmt_vdef (use_stmt);
		  release_defs (use_stmt);
		  gsi_remove (&gsi2, true);
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	  else
	    gsi_next (&gsi);
	}
      /* Combine stmts with the stmts defining their operands.
	 Note we update GSI within the loop as necessary.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  /* Mark stmt as potentially needing revisiting.  */
	  gimple_set_plf (stmt, GF_PLF_1, false);

	  /* Substitute from our lattice.  We need to do so only once.  */
	  bool substituted_p = false;
	  use_operand_p usep;
	  ssa_op_iter iter;
	  FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_USE)
	    {
	      tree use = USE_FROM_PTR (usep);
	      tree val = fwprop_ssa_val (use);
	      if (val && val != use && may_propagate_copy (use, val))
		{
		  propagate_value (usep, val);
		  substituted_p = true;
		}
	    }
	  if (substituted_p
	      && is_gimple_assign (stmt)
	      && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));

	  bool changed;
	  do
	    {
	      gimple *orig_stmt = stmt = gsi_stmt (gsi);
	      bool was_noreturn = (is_gimple_call (stmt)
				   && gimple_call_noreturn_p (stmt));
	      changed = false;

	      if (fold_stmt (&gsi, fwprop_ssa_val))
		{
		  changed = true;
		  stmt = gsi_stmt (gsi);
		  /* Cleanup the CFG if we simplified a condition to
		     true or false.  */
		  if (gcond *cond = dyn_cast <gcond *> (stmt))
		    if (gimple_cond_true_p (cond)
			|| gimple_cond_false_p (cond))
		      cfg_changed = true;
		}

	      if (changed || substituted_p)
		{
		  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
		    bitmap_set_bit (to_purge, bb->index);
		  if (!was_noreturn
		      && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
		    to_fixup.safe_push (stmt);

		  substituted_p = false;
		}

	      switch (gimple_code (stmt))
		{
		case GIMPLE_ASSIGN:
		  {
		    tree rhs1 = gimple_assign_rhs1 (stmt);
		    enum tree_code code = gimple_assign_rhs_code (stmt);

		    if (code == COND_EXPR
			|| code == VEC_COND_EXPR)
		      {
			/* In this case the entire COND_EXPR is in rhs1.  */
			if (forward_propagate_into_cond (&gsi))
			  {
			    changed = true;
			    stmt = gsi_stmt (gsi);
			  }
		      }
		    else if (TREE_CODE_CLASS (code) == tcc_comparison)
		      {
			int did_something;
			did_something = forward_propagate_into_comparison (&gsi);
			if (maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (gsi)))
			  bitmap_set_bit (to_purge, bb->index);
			if (did_something == 2)
			  cfg_changed = true;
			changed = did_something != 0;
		      }
		    else if ((code == PLUS_EXPR
			      || code == BIT_IOR_EXPR
			      || code == BIT_XOR_EXPR)
			     && simplify_rotate (&gsi))
		      changed = true;
		    else if (code == VEC_PERM_EXPR)
		      {
			int did_something = simplify_permutation (&gsi);
			if (did_something == 2)
			  cfg_changed = true;
			changed = did_something != 0;
		      }
		    else if (code == BIT_FIELD_REF)
		      changed = simplify_bitfield_ref (&gsi);
		    else if (code == CONSTRUCTOR
			     && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
		      changed = simplify_vector_constructor (&gsi);
		    break;
		  }

		case GIMPLE_SWITCH:
		  changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
		  break;

		case GIMPLE_COND:
		  {
		    int did_something = forward_propagate_into_gimple_cond
					  (as_a <gcond *> (stmt));
		    if (did_something == 2)
		      cfg_changed = true;
		    changed = did_something != 0;
		    break;
		  }

		case GIMPLE_CALL:
		  {
		    tree callee = gimple_call_fndecl (stmt);
		    if (callee != NULL_TREE
			&& fndecl_built_in_p (callee, BUILT_IN_NORMAL))
		      changed = simplify_builtin_call (&gsi, callee);
		    break;
		  }

		default:;
		}

	      if (changed)
		{
		  /* If the stmt changed then re-visit it and the statements
		     inserted before it.  */
		  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
		    if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
		      break;
		  if (gsi_end_p (gsi))
		    gsi = gsi_start_bb (bb);
		  else
		    gsi_next (&gsi);
		}
	    }
	  while (changed);

	  /* Stmt no longer needs to be revisited.  */
	  stmt = gsi_stmt (gsi);
	  gcc_checking_assert (!gimple_plf (stmt, GF_PLF_1));
	  gimple_set_plf (stmt, GF_PLF_1, true);

	  /* Fill up the lattice.  */
	  if (gimple_assign_single_p (stmt))
	    {
	      tree lhs = gimple_assign_lhs (stmt);
	      tree rhs = gimple_assign_rhs1 (stmt);
	      if (TREE_CODE (lhs) == SSA_NAME)
		{
		  tree val = lhs;
		  if (TREE_CODE (rhs) == SSA_NAME)
		    val = fwprop_ssa_val (rhs);
		  else if (is_gimple_min_invariant (rhs))
		    val = rhs;
		  /* If we can propagate the lattice-value mark the
		     stmt for removal.  */
		  if (val != lhs
		      && may_propagate_copy (lhs, val))
		    to_remove.safe_push (stmt);
		  fwprop_set_lattice_val (lhs, val);
		}
	    }
	  else if (gimple_nop_p (stmt))
	    to_remove.safe_push (stmt);
	}
      /* Substitute in destination PHI arguments.  */
      edge_iterator ei;
      edge e;
      FOR_EACH_EDGE (e, ei, bb->succs)
	for (gphi_iterator gsi = gsi_start_phis (e->dest);
	     !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gphi *phi = gsi.phi ();
	    use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	    tree arg = USE_FROM_PTR (use_p);
	    if (TREE_CODE (arg) != SSA_NAME
		|| virtual_operand_p (arg))
	      continue;
	    tree val = fwprop_ssa_val (arg);
	    if (val != arg
		&& may_propagate_copy (arg, val))
	      propagate_value (use_p, val);
	  }
    }
  free (postorder);
  lattice.release ();
  /* Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!to_remove.is_empty ())
    {
      gimple *stmt = to_remove.pop ();
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	  release_defs (stmt);
	}
    }
  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}
      cfg_changed |= fixup_noreturn_call (stmt);
    }

  cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
  BITMAP_FREE (to_purge);

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

} // anon namespace

gimple_opt_pass *
make_pass_forwprop (gcc::context *ctxt)
{
  return new pass_forwprop (ctxt);
}