1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004-2020 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
28 #include "tree-pass.h"
31 #include "optabs-query.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
34 #include "stor-layout.h"
35 #include "gimple-fold.h"
38 #include "gimple-iterator.h"
39 #include "gimplify-me.h"
43 #include "tree-ssa-propagate.h"
44 #include "tree-ssa-dom.h"
46 #include "tree-cfgcleanup.h"
48 #include "optabs-tree.h"
49 #include "tree-vector-builder.h"
50 #include "vec-perm-indices.h"
51 #include "internal-fn.h"
55 /* This pass propagates the RHS of assignment statements into use
56 sites of the LHS of the assignment. It's basically a specialized
57 form of tree combination. It is hoped all of this can disappear
58 when we have a generalized tree combiner.
60 One class of common cases we handle is forward propagating a single use
61 variable into a COND_EXPR.
65 if (x) goto ... else goto ...
67 Will be transformed into:
70 if (a COND b) goto ... else goto ...
72 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
74 Or (assuming c1 and c2 are constants):
78 if (x EQ/NEQ c2) goto ... else goto ...
80 Will be transformed into:
83 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
85 Similarly for x = a - c1.
91 if (x) goto ... else goto ...
93 Will be transformed into:
96 if (a == 0) goto ... else goto ...
98 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
99 For these cases, we propagate A into all, possibly more than one,
100 COND_EXPRs that use X.
106 if (x) goto ... else goto ...
108 Will be transformed into:
111 if (a != 0) goto ... else goto ...
113 (Assuming a is an integral type and x is a boolean or x is an
114 integral and a is a boolean.)
116 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
117 For these cases, we propagate A into all, possibly more than one,
118 COND_EXPRs that use X.
120 In addition to eliminating the variable and the statement which assigns
121 a value to the variable, we may be able to later thread the jump without
122 adding insane complexity in the dominator optimizer.
124 Also note these transformations can cascade. We handle this by having
125 a worklist of COND_EXPR statements to examine. As we make a change to
126 a statement, we put it back on the worklist to examine on the next
127 iteration of the main loop.
129 A second class of propagation opportunities arises for ADDR_EXPR
140 ptr = (type1*)&type2var;
143 Will get turned into (if type1 and type2 are the same size
144 and neither have volatile on them):
145 res = VIEW_CONVERT_EXPR<type1>(type2var)
150 ptr2 = ptr + <constant>;
154 ptr2 = &x[constant/elementsize];
159 offset = index * element_size;
160 offset_p = (pointer) offset;
161 ptr2 = ptr + offset_p
163 Will get turned into:
171 Provided that decl has known alignment >= 2, will get turned into
175 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
176 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
179 This will (of course) be extended as other needs arise. */
/* NOTE(review): from here to the end of this region the file is a damaged
   extraction of a GCC forward-propagation pass -- physical lines are split
   mid-token and the embedded original line numbers jump where lines
   (signatures, braces, returns) were dropped.  Only comments are added
   below; the code itself is kept byte-identical.  */
181 static bool forward_propagate_addr_expr (tree
, tree
, bool);
183 /* Set to true if we delete dead edges during the optimization. */
184 static bool cfg_changed
;
/* Forward declaration; the definition appears later in this file.  */
186 static tree
rhs_to_tree (tree type
, gimple
*stmt
);
/* Indices of basic blocks in which statements were removed; presumably
   consumed by a pass-finalization edge-purge step -- TODO confirm against
   the (not visible) pass execute function.  */
188 static bitmap to_purge
;
190 /* Const-and-copy lattice. */
191 static vec
<tree
> lattice
;
/* NOTE(review): extraction dropped this definition's return-type line and
   braces (embedded line numbers jump 195->197->199); code lines below are
   byte-identical to the damaged input.  */
193 /* Set the lattice entry for NAME to VAL. */
195 fwprop_set_lattice_val (tree name
, tree val
)
197 if (TREE_CODE (name
) == SSA_NAME
)
/* Grow the lattice on demand so it covers all current SSA names.  */
199 if (SSA_NAME_VERSION (name
) >= lattice
.length ())
201 lattice
.reserve (num_ssa_names
- lattice
.length ());
202 lattice
.quick_grow_cleared (num_ssa_names
);
204 lattice
[SSA_NAME_VERSION (name
)] = val
;
/* NOTE(review): the return-type line, braces and the first operand of the
   '&&' chain (a null check on NAME, per the leading '&&' at original line
   213) were lost in extraction; comments only here.  */
208 /* Invalidate the lattice entry for NAME, done when releasing SSA names. */
210 fwprop_invalidate_lattice (tree name
)
213 && TREE_CODE (name
) == SSA_NAME
214 && SSA_NAME_VERSION (name
) < lattice
.length ())
215 lattice
[SSA_NAME_VERSION (name
)] = NULL_TREE
;
/* NOTE(review): the return type, the copy-chasing loop header, braces and
   the early returns of this function were dropped by the extraction
   (original lines 226-227, 229, 231-232, 234, 236-241, 244-245, 249-250,
   253-256 are absent).  Only comments are added.  */
219 /* Get the statement we can propagate from into NAME skipping
220 trivial copies. Returns the statement which defines the
221 propagation source or NULL_TREE if there is no such one.
222 If SINGLE_USE_ONLY is set considers only sources which have
223 a single use chain up to NAME. If SINGLE_USE_P is non-null,
224 it is set to whether the chain to NAME is a single use chain
225 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
228 get_prop_source_stmt (tree name
, bool single_use_only
, bool *single_use_p
)
230 bool single_use
= true;
233 gimple
*def_stmt
= SSA_NAME_DEF_STMT (name
);
235 if (!has_single_use (name
))
242 /* If name is defined by a PHI node or is the default def, bail out. */
243 if (!is_gimple_assign (def_stmt
))
246 /* If def_stmt is a simple copy, continue looking. */
247 if (gimple_assign_rhs_code (def_stmt
) == SSA_NAME
)
248 name
= gimple_assign_rhs1 (def_stmt
);
251 if (!single_use_only
&& single_use_p
)
252 *single_use_p
= single_use
;
/* NOTE(review): the 'static bool' line, braces and every 'return'
   statement of this predicate were dropped by the extraction -- each
   'if' below originally guarded a return.  Code kept byte-identical.  */
259 /* Checks if the destination ssa name in DEF_STMT can be used as
260 propagation source. Returns true if so, otherwise false. */
263 can_propagate_from (gimple
*def_stmt
)
265 gcc_assert (is_gimple_assign (def_stmt
));
267 /* If the rhs has side-effects we cannot propagate from it. */
268 if (gimple_has_volatile_ops (def_stmt
))
271 /* If the rhs is a load we cannot propagate from it. */
272 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt
)) == tcc_reference
273 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt
)) == tcc_declaration
)
276 /* Constants can be always propagated. */
277 if (gimple_assign_single_p (def_stmt
)
278 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt
)))
281 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
282 if (stmt_references_abnormal_ssa_name (def_stmt
))
285 /* If the definition is a conversion of a pointer to a function type,
286 then we cannot apply optimizations as some targets require
287 function pointers to be canonicalized and in this case this
288 optimization could eliminate a necessary canonicalization. */
289 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt
)))
291 tree rhs
= gimple_assign_rhs1 (def_stmt
);
292 if (POINTER_TYPE_P (TREE_TYPE (rhs
))
293 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs
))) == FUNCTION_TYPE
)
/* NOTE(review): the function's return-type line, braces, local
   declarations for 'stmt' and 'bb', the 'do {' loop header matching the
   visible '} while' and the final return were lost in extraction.
   Code kept byte-identical.  */
300 /* Remove a chain of dead statements starting at the definition of
301 NAME. The chain is linked via the first operand of the defining statements.
302 If NAME was replaced in its only use then this function can be used
303 to clean up dead stmts. The function handles already released SSA
305 Returns true if cleanup-cfg has to run. */
308 remove_prop_source_from_use (tree name
)
310 gimple_stmt_iterator gsi
;
312 bool cfg_changed
= false;
317 if (SSA_NAME_IN_FREE_LIST (name
)
318 || SSA_NAME_IS_DEFAULT_DEF (name
)
319 || !has_zero_uses (name
))
322 stmt
= SSA_NAME_DEF_STMT (name
);
323 if (gimple_code (stmt
) == GIMPLE_PHI
324 || gimple_has_side_effects (stmt
))
327 bb
= gimple_bb (stmt
);
328 gsi
= gsi_for_stmt (stmt
);
/* Record the block so dead EH edges can be purged later (see to_purge).  */
329 unlink_stmt_vdef (stmt
);
330 if (gsi_remove (&gsi
, true))
331 bitmap_set_bit (to_purge
, bb
->index
);
332 fwprop_invalidate_lattice (gimple_get_lhs (stmt
));
335 name
= is_gimple_assign (stmt
) ? gimple_assign_rhs1 (stmt
) : NULL_TREE
;
336 } while (name
&& TREE_CODE (name
) == SSA_NAME
);
/* NOTE(review): the 'static tree' line, braces and the switch's default
   arm were dropped by the extraction.  Code kept byte-identical.  */
341 /* Return the rhs of a gassign *STMT in a form of a single tree,
342 converted to type TYPE.
344 This should disappear, but is needed so we can combine expressions and use
345 the fold() interfaces. Long term, we need to develop folding and combine
346 routines that deal with gimple exclusively. */
349 rhs_to_tree (tree type
, gimple
*stmt
)
351 location_t loc
= gimple_location (stmt
);
352 enum tree_code code
= gimple_assign_rhs_code (stmt
);
/* Dispatch on the arity of the assignment's right-hand side.  */
353 switch (get_gimple_rhs_class (code
))
355 case GIMPLE_TERNARY_RHS
:
356 return fold_build3_loc (loc
, code
, type
, gimple_assign_rhs1 (stmt
),
357 gimple_assign_rhs2 (stmt
),
358 gimple_assign_rhs3 (stmt
));
359 case GIMPLE_BINARY_RHS
:
360 return fold_build2_loc (loc
, code
, type
, gimple_assign_rhs1 (stmt
),
361 gimple_assign_rhs2 (stmt
));
362 case GIMPLE_UNARY_RHS
:
363 return build1 (code
, type
, gimple_assign_rhs1 (stmt
));
364 case GIMPLE_SINGLE_RHS
:
365 return gimple_assign_rhs1 (stmt
);
/* NOTE(review): the 'static tree' line, the declaration of local 't',
   braces, the NULL_TREE early returns and the final 'return t;' were
   lost in extraction.  Code kept byte-identical.  */
371 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
372 the folded result in a form suitable for COND_EXPR_COND or
373 NULL_TREE, if there is no suitable simplified form. If
374 INVARIANT_ONLY is true only gimple_min_invariant results are
375 considered simplified. */
378 combine_cond_expr_cond (gimple
*stmt
, enum tree_code code
, tree type
,
379 tree op0
, tree op1
, bool invariant_only
)
383 gcc_assert (TREE_CODE_CLASS (code
) == tcc_comparison
);
/* Defer overflow warnings: only emit them if the fold result is used.  */
385 fold_defer_overflow_warnings ();
386 t
= fold_binary_loc (gimple_location (stmt
), code
, type
, op0
, op1
);
389 fold_undefer_overflow_warnings (false, NULL
, 0);
393 /* Require that we got a boolean type out if we put one in. */
394 gcc_assert (TREE_CODE (TREE_TYPE (t
)) == TREE_CODE (type
));
396 /* Canonicalize the combined condition for use in a COND_EXPR. */
397 t
= canonicalize_cond_expr_cond (t
);
399 /* Bail out if we required an invariant but didn't get one. */
400 if (!t
|| (invariant_only
&& !is_gimple_min_invariant (t
)))
402 fold_undefer_overflow_warnings (false, NULL
, 0);
406 fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt
), stmt
, 0);
/* NOTE(review): this definition is missing its return-type line, the
   OP0/OP1 parameter line (original line 418), braces, several interior
   lines and the final 'return tmp;' -- all dropped by the extraction.
   Code kept byte-identical.  */
411 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
412 of its operand. Return a new comparison tree or NULL_TREE if there
413 were no simplifying combines. */
416 forward_propagate_into_comparison_1 (gimple
*stmt
,
417 enum tree_code code
, tree type
,
420 tree tmp
= NULL_TREE
;
421 tree rhs0
= NULL_TREE
, rhs1
= NULL_TREE
;
422 bool single_use0_p
= false, single_use1_p
= false;
424 /* For comparisons use the first operand, that is likely to
425 simplify comparisons against constants. */
426 if (TREE_CODE (op0
) == SSA_NAME
)
428 gimple
*def_stmt
= get_prop_source_stmt (op0
, false, &single_use0_p
);
429 if (def_stmt
&& can_propagate_from (def_stmt
))
431 enum tree_code def_code
= gimple_assign_rhs_code (def_stmt
);
432 bool invariant_only_p
= !single_use0_p
;
434 rhs0
= rhs_to_tree (TREE_TYPE (op1
), def_stmt
);
436 /* Always combine comparisons or conversions from booleans. */
437 if (TREE_CODE (op1
) == INTEGER_CST
438 && ((CONVERT_EXPR_CODE_P (def_code
)
439 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0
, 0)))
441 || TREE_CODE_CLASS (def_code
) == tcc_comparison
))
442 invariant_only_p
= false;
444 tmp
= combine_cond_expr_cond (stmt
, code
, type
,
445 rhs0
, op1
, invariant_only_p
);
451 /* If that wasn't successful, try the second operand. */
452 if (TREE_CODE (op1
) == SSA_NAME
)
454 gimple
*def_stmt
= get_prop_source_stmt (op1
, false, &single_use1_p
);
455 if (def_stmt
&& can_propagate_from (def_stmt
))
457 rhs1
= rhs_to_tree (TREE_TYPE (op0
), def_stmt
);
458 tmp
= combine_cond_expr_cond (stmt
, code
, type
,
459 op0
, rhs1
, !single_use1_p
);
465 /* If that wasn't successful either, try both operands. */
466 if (rhs0
!= NULL_TREE
467 && rhs1
!= NULL_TREE
)
468 tmp
= combine_cond_expr_cond (stmt
, code
, type
,
470 !(single_use0_p
&& single_use1_p
));
/* NOTE(review): the return-type line, braces, the declaration of local
   'tmp', the trailing arguments of the combine call (original line 493)
   and the 'return 0;' fallthrough were lost in extraction.  Code kept
   byte-identical.  */
475 /* Propagate from the ssa name definition statements of the assignment
476 from a comparison at *GSI into the conditional if that simplifies it.
477 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
478 otherwise returns 0. */
481 forward_propagate_into_comparison (gimple_stmt_iterator
*gsi
)
483 gimple
*stmt
= gsi_stmt (*gsi
);
485 bool cfg_changed
= false;
486 tree type
= TREE_TYPE (gimple_assign_lhs (stmt
));
487 tree rhs1
= gimple_assign_rhs1 (stmt
);
488 tree rhs2
= gimple_assign_rhs2 (stmt
);
490 /* Combine the comparison with defining statements. */
491 tmp
= forward_propagate_into_comparison_1 (stmt
,
492 gimple_assign_rhs_code (stmt
),
494 if (tmp
&& useless_type_conversion_p (type
, TREE_TYPE (tmp
)))
496 gimple_assign_set_rhs_from_tree (gsi
, tmp
);
498 update_stmt (gsi_stmt (*gsi
));
/* Clean up now-dead defining statements of the replaced operands.  */
500 if (TREE_CODE (rhs1
) == SSA_NAME
)
501 cfg_changed
|= remove_prop_source_from_use (rhs1
);
502 if (TREE_CODE (rhs2
) == SSA_NAME
)
503 cfg_changed
|= remove_prop_source_from_use (rhs2
);
504 return cfg_changed
? 2 : 1;
/* NOTE(review): this definition lost its return-type line, braces, the
   declaration of 'tmp', the trailing arguments of the combine call, the
   dump_file guard, part of the boolean-canonicalization condition
   (original lines 559, 561-563) and the final returns.  Code kept
   byte-identical.  */
510 /* Propagate from the ssa name definition statements of COND_EXPR
511 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
512 Returns zero if no statement was changed, one if there were
513 changes and two if cfg_cleanup needs to run.
515 This must be kept in sync with forward_propagate_into_cond. */
518 forward_propagate_into_gimple_cond (gcond
*stmt
)
521 enum tree_code code
= gimple_cond_code (stmt
);
522 bool cfg_changed
= false;
523 tree rhs1
= gimple_cond_lhs (stmt
);
524 tree rhs2
= gimple_cond_rhs (stmt
);
526 /* We can do tree combining on SSA_NAME and comparison expressions. */
527 if (TREE_CODE_CLASS (gimple_cond_code (stmt
)) != tcc_comparison
)
530 tmp
= forward_propagate_into_comparison_1 (stmt
, code
,
534 && is_gimple_condexpr_for_cond (tmp
))
538 fprintf (dump_file
, "  Replaced '");
539 print_gimple_expr (dump_file
, stmt
, 0);
540 fprintf (dump_file
, "' with '");
541 print_generic_expr (dump_file
, tmp
);
542 fprintf (dump_file
, "'\n");
545 gimple_cond_set_condition_from_tree (stmt
, unshare_expr (tmp
));
548 if (TREE_CODE (rhs1
) == SSA_NAME
)
549 cfg_changed
|= remove_prop_source_from_use (rhs1
);
550 if (TREE_CODE (rhs2
) == SSA_NAME
)
551 cfg_changed
|= remove_prop_source_from_use (rhs2
);
552 return (cfg_changed
|| is_gimple_min_invariant (tmp
)) ? 2 : 1;
555 /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges. */
556 if ((TREE_CODE (TREE_TYPE (rhs1
)) == BOOLEAN_TYPE
557 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1
))
558 && TYPE_PRECISION (TREE_TYPE (rhs1
)) == 1))
560 && integer_zerop (rhs2
))
562 && integer_onep (rhs2
))))
564 basic_block bb
= gimple_bb (stmt
);
565 gimple_cond_set_code (stmt
, NE_EXPR
);
566 gimple_cond_set_rhs (stmt
, build_zero_cst (TREE_TYPE (rhs1
)));
/* Swap the true/false flags on both successor edges to match the
   inverted condition.  */
567 EDGE_SUCC (bb
, 0)->flags
^= (EDGE_TRUE_VALUE
|EDGE_FALSE_VALUE
);
568 EDGE_SUCC (bb
, 1)->flags
^= (EDGE_TRUE_VALUE
|EDGE_FALSE_VALUE
);
/* NOTE(review): this definition lost its return-type line, braces, the
   'name' local (used below at the get_prop_source_stmt call), the
   dump_file guard, intermediate lines of the fold_build2_loc call and the
   final return.  Code kept byte-identical.  */
576 /* Propagate from the ssa name definition statements of COND_EXPR
577 in the rhs of statement STMT into the conditional if that simplifies it.
578 Returns nonzero if the stmt was changed. */
581 forward_propagate_into_cond (gimple_stmt_iterator
*gsi_p
)
583 gimple
*stmt
= gsi_stmt (*gsi_p
);
584 tree tmp
= NULL_TREE
;
585 tree cond
= gimple_assign_rhs1 (stmt
);
586 enum tree_code code
= gimple_assign_rhs_code (stmt
);
588 /* We can do tree combining on SSA_NAME and comparison expressions. */
589 if (COMPARISON_CLASS_P (cond
))
590 tmp
= forward_propagate_into_comparison_1 (stmt
, TREE_CODE (cond
),
592 TREE_OPERAND (cond
, 0),
593 TREE_OPERAND (cond
, 1));
594 else if (TREE_CODE (cond
) == SSA_NAME
)
596 enum tree_code def_code
;
598 gimple
*def_stmt
= get_prop_source_stmt (name
, true, NULL
);
599 if (!def_stmt
|| !can_propagate_from (def_stmt
))
602 def_code
= gimple_assign_rhs_code (def_stmt
);
603 if (TREE_CODE_CLASS (def_code
) == tcc_comparison
)
604 tmp
= fold_build2_loc (gimple_location (def_stmt
),
607 gimple_assign_rhs1 (def_stmt
),
608 gimple_assign_rhs2 (def_stmt
));
612 && is_gimple_condexpr (tmp
))
616 fprintf (dump_file
, "  Replaced '");
617 print_generic_expr (dump_file
, cond
);
618 fprintf (dump_file
, "' with '");
619 print_generic_expr (dump_file
, tmp
);
620 fprintf (dump_file
, "'\n");
/* A constant condition selects one arm of the COND_EXPR outright;
   for VEC_COND_EXPR "true" is all-ones rather than one.  */
623 if ((code
== VEC_COND_EXPR
) ? integer_all_onesp (tmp
)
624 : integer_onep (tmp
))
625 gimple_assign_set_rhs_from_tree (gsi_p
, gimple_assign_rhs2 (stmt
));
626 else if (integer_zerop (tmp
))
627 gimple_assign_set_rhs_from_tree (gsi_p
, gimple_assign_rhs3 (stmt
));
629 gimple_assign_set_rhs1 (stmt
, unshare_expr (tmp
));
630 stmt
= gsi_stmt (*gsi_p
);
/* NOTE(review): the 'static void' line and braces were dropped by the
   extraction; code kept byte-identical.  */
639 /* We've just substituted an ADDR_EXPR into stmt. Update all the
640 relevant data structures to match. */
643 tidy_after_forward_propagate_addr (gimple
*stmt
)
645 /* We may have turned a trapping insn into a non-trapping insn. */
646 if (maybe_clean_or_replace_eh_stmt (stmt
, stmt
))
647 bitmap_set_bit (to_purge
, gimple_bb (stmt
)->index
);
/* The substituted address may now be invariant; recompute its flags.  */
649 if (TREE_CODE (gimple_assign_rhs1 (stmt
)) == ADDR_EXPR
)
650 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt
));
/* NOTE(review): this is the most heavily damaged definition in the file:
   the embedded original line numbers jump repeatedly (661->664, 665->668,
   688->689, 734->736, 740->743, 786->788, 834->840, ...), so its return
   type, the SINGLE_USE_P parameter line, braces, many 'return' statements,
   'else' arms and several call-argument lines are missing.  No rewrite is
   attempted; code kept byte-identical with comments only.  */
653 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
654 ADDR_EXPR <whatever>.
656 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
657 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
658 node or for recovery of array indexing from pointer arithmetic.
660 Return true if the propagation was successful (the propagation can
661 be not totally successful, yet things may have been changed). */
664 forward_propagate_addr_expr_1 (tree name
, tree def_rhs
,
665 gimple_stmt_iterator
*use_stmt_gsi
,
668 tree lhs
, rhs
, rhs2
, array_ref
;
669 gimple
*use_stmt
= gsi_stmt (*use_stmt_gsi
);
670 enum tree_code rhs_code
;
673 gcc_assert (TREE_CODE (def_rhs
) == ADDR_EXPR
);
675 lhs
= gimple_assign_lhs (use_stmt
);
676 rhs_code
= gimple_assign_rhs_code (use_stmt
);
677 rhs
= gimple_assign_rhs1 (use_stmt
);
679 /* Do not perform copy-propagation but recurse through copy chains. */
680 if (TREE_CODE (lhs
) == SSA_NAME
681 && rhs_code
== SSA_NAME
)
682 return forward_propagate_addr_expr (lhs
, def_rhs
, single_use_p
);
684 /* The use statement could be a conversion. Recurse to the uses of the
685 lhs as copyprop does not copy through pointer to integer to pointer
686 conversions and FRE does not catch all cases either.
687 Treat the case of a single-use name and
688 a conversion to def_rhs type separate, though. */
689 if (TREE_CODE (lhs
) == SSA_NAME
690 && CONVERT_EXPR_CODE_P (rhs_code
))
692 /* If there is a point in a conversion chain where the types match
693 so we can remove a conversion re-materialize the address here
696 && useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (def_rhs
)))
698 gimple_assign_set_rhs1 (use_stmt
, unshare_expr (def_rhs
));
699 gimple_assign_set_rhs_code (use_stmt
, TREE_CODE (def_rhs
));
703 /* Else recurse if the conversion preserves the address value. */
704 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
705 || POINTER_TYPE_P (TREE_TYPE (lhs
)))
706 && (TYPE_PRECISION (TREE_TYPE (lhs
))
707 >= TYPE_PRECISION (TREE_TYPE (def_rhs
))))
708 return forward_propagate_addr_expr (lhs
, def_rhs
, single_use_p
);
713 /* If this isn't a conversion chain from this on we only can propagate
714 into compatible pointer contexts. */
715 if (!types_compatible_p (TREE_TYPE (name
), TREE_TYPE (def_rhs
)))
718 /* Propagate through constant pointer adjustments. */
719 if (TREE_CODE (lhs
) == SSA_NAME
720 && rhs_code
== POINTER_PLUS_EXPR
722 && TREE_CODE (gimple_assign_rhs2 (use_stmt
)) == INTEGER_CST
)
725 /* As we come here with non-invariant addresses in def_rhs we need
726 to make sure we can build a valid constant offsetted address
727 for further propagation. Simply rely on fold building that
728 and check after the fact. */
729 new_def_rhs
= fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (rhs
)),
731 fold_convert (ptr_type_node
,
732 gimple_assign_rhs2 (use_stmt
)));
733 if (TREE_CODE (new_def_rhs
) == MEM_REF
734 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs
, 0)))
736 new_def_rhs
= build1 (ADDR_EXPR
, TREE_TYPE (rhs
), new_def_rhs
);
738 /* Recurse. If we could propagate into all uses of lhs do not
739 bother to replace into the current use but just pretend we did. */
740 if (forward_propagate_addr_expr (lhs
, new_def_rhs
, single_use_p
))
743 if (useless_type_conversion_p (TREE_TYPE (lhs
),
744 TREE_TYPE (new_def_rhs
)))
745 gimple_assign_set_rhs_with_ops (use_stmt_gsi
, TREE_CODE (new_def_rhs
),
747 else if (is_gimple_min_invariant (new_def_rhs
))
748 gimple_assign_set_rhs_with_ops (use_stmt_gsi
, NOP_EXPR
, new_def_rhs
);
751 gcc_assert (gsi_stmt (*use_stmt_gsi
) == use_stmt
);
752 update_stmt (use_stmt
);
756 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
757 ADDR_EXPR will not appear on the LHS. */
758 tree
*lhsp
= gimple_assign_lhs_ptr (use_stmt
);
759 while (handled_component_p (*lhsp
))
760 lhsp
= &TREE_OPERAND (*lhsp
, 0);
763 /* Now see if the LHS node is a MEM_REF using NAME. If so,
764 propagate the ADDR_EXPR into the use of NAME and fold the result. */
765 if (TREE_CODE (lhs
) == MEM_REF
766 && TREE_OPERAND (lhs
, 0) == name
)
769 poly_int64 def_rhs_offset
;
770 /* If the address is invariant we can always fold it. */
771 if ((def_rhs_base
= get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs
, 0),
774 poly_offset_int off
= mem_ref_offset (lhs
);
776 off
+= def_rhs_offset
;
777 if (TREE_CODE (def_rhs_base
) == MEM_REF
)
779 off
+= mem_ref_offset (def_rhs_base
);
780 new_ptr
= TREE_OPERAND (def_rhs_base
, 0);
783 new_ptr
= build_fold_addr_expr (def_rhs_base
);
784 TREE_OPERAND (lhs
, 0) = new_ptr
;
785 TREE_OPERAND (lhs
, 1)
786 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs
, 1)), off
);
787 tidy_after_forward_propagate_addr (use_stmt
);
788 /* Continue propagating into the RHS if this was not the only use. */
792 /* If the LHS is a plain dereference and the value type is the same as
793 that of the pointed-to type of the address we can put the
794 dereferenced address on the LHS preserving the original alias-type. */
795 else if (integer_zerop (TREE_OPERAND (lhs
, 1))
796 && ((gimple_assign_lhs (use_stmt
) == lhs
797 && useless_type_conversion_p
798 (TREE_TYPE (TREE_OPERAND (def_rhs
, 0)),
799 TREE_TYPE (gimple_assign_rhs1 (use_stmt
))))
800 || types_compatible_p (TREE_TYPE (lhs
),
801 TREE_TYPE (TREE_OPERAND (def_rhs
, 0))))
802 /* Don't forward anything into clobber stmts if it would result
803 in the lhs no longer being a MEM_REF. */
804 && (!gimple_clobber_p (use_stmt
)
805 || TREE_CODE (TREE_OPERAND (def_rhs
, 0)) == MEM_REF
))
807 tree
*def_rhs_basep
= &TREE_OPERAND (def_rhs
, 0);
808 tree new_offset
, new_base
, saved
, new_lhs
;
809 while (handled_component_p (*def_rhs_basep
))
810 def_rhs_basep
= &TREE_OPERAND (*def_rhs_basep
, 0);
811 saved
= *def_rhs_basep
;
812 if (TREE_CODE (*def_rhs_basep
) == MEM_REF
)
814 new_base
= TREE_OPERAND (*def_rhs_basep
, 0);
815 new_offset
= fold_convert (TREE_TYPE (TREE_OPERAND (lhs
, 1)),
816 TREE_OPERAND (*def_rhs_basep
, 1));
820 new_base
= build_fold_addr_expr (*def_rhs_basep
);
821 new_offset
= TREE_OPERAND (lhs
, 1);
823 *def_rhs_basep
= build2 (MEM_REF
, TREE_TYPE (*def_rhs_basep
),
824 new_base
, new_offset
);
825 TREE_THIS_VOLATILE (*def_rhs_basep
) = TREE_THIS_VOLATILE (lhs
);
826 TREE_SIDE_EFFECTS (*def_rhs_basep
) = TREE_SIDE_EFFECTS (lhs
);
827 TREE_THIS_NOTRAP (*def_rhs_basep
) = TREE_THIS_NOTRAP (lhs
);
828 new_lhs
= unshare_expr (TREE_OPERAND (def_rhs
, 0));
830 TREE_THIS_VOLATILE (new_lhs
) = TREE_THIS_VOLATILE (lhs
);
831 TREE_SIDE_EFFECTS (new_lhs
) = TREE_SIDE_EFFECTS (lhs
);
832 *def_rhs_basep
= saved
;
833 tidy_after_forward_propagate_addr (use_stmt
);
834 /* Continue propagating into the RHS if this was not the
840 /* We can have a struct assignment dereferencing our name twice.
841 Note that we didn't propagate into the lhs to not falsely
842 claim we did when propagating into the rhs. */
846 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
847 nodes from the RHS. */
848 tree
*rhsp
= gimple_assign_rhs1_ptr (use_stmt
);
849 if (TREE_CODE (*rhsp
) == ADDR_EXPR
)
850 rhsp
= &TREE_OPERAND (*rhsp
, 0);
851 while (handled_component_p (*rhsp
))
852 rhsp
= &TREE_OPERAND (*rhsp
, 0);
855 /* Now see if the RHS node is a MEM_REF using NAME. If so,
856 propagate the ADDR_EXPR into the use of NAME and fold the result. */
857 if (TREE_CODE (rhs
) == MEM_REF
858 && TREE_OPERAND (rhs
, 0) == name
)
861 poly_int64 def_rhs_offset
;
862 if ((def_rhs_base
= get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs
, 0),
865 poly_offset_int off
= mem_ref_offset (rhs
);
867 off
+= def_rhs_offset
;
868 if (TREE_CODE (def_rhs_base
) == MEM_REF
)
870 off
+= mem_ref_offset (def_rhs_base
);
871 new_ptr
= TREE_OPERAND (def_rhs_base
, 0);
874 new_ptr
= build_fold_addr_expr (def_rhs_base
);
875 TREE_OPERAND (rhs
, 0) = new_ptr
;
876 TREE_OPERAND (rhs
, 1)
877 = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs
, 1)), off
);
878 fold_stmt_inplace (use_stmt_gsi
);
879 tidy_after_forward_propagate_addr (use_stmt
);
882 /* If the RHS is a plain dereference and the value type is the same as
883 that of the pointed-to type of the address we can put the
884 dereferenced address on the RHS preserving the original alias-type. */
885 else if (integer_zerop (TREE_OPERAND (rhs
, 1))
886 && ((gimple_assign_rhs1 (use_stmt
) == rhs
887 && useless_type_conversion_p
888 (TREE_TYPE (gimple_assign_lhs (use_stmt
)),
889 TREE_TYPE (TREE_OPERAND (def_rhs
, 0))))
890 || types_compatible_p (TREE_TYPE (rhs
),
891 TREE_TYPE (TREE_OPERAND (def_rhs
, 0)))))
893 tree
*def_rhs_basep
= &TREE_OPERAND (def_rhs
, 0);
894 tree new_offset
, new_base
, saved
, new_rhs
;
895 while (handled_component_p (*def_rhs_basep
))
896 def_rhs_basep
= &TREE_OPERAND (*def_rhs_basep
, 0);
897 saved
= *def_rhs_basep
;
898 if (TREE_CODE (*def_rhs_basep
) == MEM_REF
)
900 new_base
= TREE_OPERAND (*def_rhs_basep
, 0);
901 new_offset
= fold_convert (TREE_TYPE (TREE_OPERAND (rhs
, 1)),
902 TREE_OPERAND (*def_rhs_basep
, 1));
906 new_base
= build_fold_addr_expr (*def_rhs_basep
);
907 new_offset
= TREE_OPERAND (rhs
, 1);
909 *def_rhs_basep
= build2 (MEM_REF
, TREE_TYPE (*def_rhs_basep
),
910 new_base
, new_offset
);
911 TREE_THIS_VOLATILE (*def_rhs_basep
) = TREE_THIS_VOLATILE (rhs
);
912 TREE_SIDE_EFFECTS (*def_rhs_basep
) = TREE_SIDE_EFFECTS (rhs
);
913 TREE_THIS_NOTRAP (*def_rhs_basep
) = TREE_THIS_NOTRAP (rhs
);
914 new_rhs
= unshare_expr (TREE_OPERAND (def_rhs
, 0));
916 TREE_THIS_VOLATILE (new_rhs
) = TREE_THIS_VOLATILE (rhs
);
917 TREE_SIDE_EFFECTS (new_rhs
) = TREE_SIDE_EFFECTS (rhs
);
918 *def_rhs_basep
= saved
;
919 fold_stmt_inplace (use_stmt_gsi
);
920 tidy_after_forward_propagate_addr (use_stmt
);
925 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
927 if (gimple_assign_rhs_code (use_stmt
) != POINTER_PLUS_EXPR
928 || gimple_assign_rhs1 (use_stmt
) != name
)
931 /* The remaining cases are all for turning pointer arithmetic into
932 array indexing. They only apply when we have the address of
933 element zero in an array. If that is not the case then there
935 array_ref
= TREE_OPERAND (def_rhs
, 0);
936 if ((TREE_CODE (array_ref
) != ARRAY_REF
937 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref
, 0))) != ARRAY_TYPE
938 || TREE_CODE (TREE_OPERAND (array_ref
, 1)) != INTEGER_CST
)
939 && TREE_CODE (TREE_TYPE (array_ref
)) != ARRAY_TYPE
)
942 rhs2
= gimple_assign_rhs2 (use_stmt
);
943 /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2. */
944 if (TREE_CODE (rhs2
) == INTEGER_CST
)
946 tree new_rhs
= build1_loc (gimple_location (use_stmt
),
947 ADDR_EXPR
, TREE_TYPE (def_rhs
),
948 fold_build2 (MEM_REF
,
949 TREE_TYPE (TREE_TYPE (def_rhs
)),
950 unshare_expr (def_rhs
),
951 fold_convert (ptr_type_node
,
953 gimple_assign_set_rhs_from_tree (use_stmt_gsi
, new_rhs
);
954 use_stmt
= gsi_stmt (*use_stmt_gsi
);
955 update_stmt (use_stmt
);
956 tidy_after_forward_propagate_addr (use_stmt
);
/* NOTE(review): this definition lost its return-type line, braces, the
   declarations of 'use_stmt', 'all', 'result' and 'use_rhs', loop-body
   braces, the 'continue' paths and part of the final condition chain
   (original lines jump 983->988, 992->997, 1011->1013, 1020->1024).
   Code kept byte-identical.  */
963 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
965 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
966 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
967 node or for recovery of array indexing from pointer arithmetic.
969 PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
970 the single use in the previous invocation. Pass true when calling
973 Returns true, if all uses have been propagated into. */
976 forward_propagate_addr_expr (tree name
, tree rhs
, bool parent_single_use_p
)
978 imm_use_iterator iter
;
981 bool single_use_p
= parent_single_use_p
&& has_single_use (name
);
983 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, name
)
988 /* If the use is not in a simple assignment statement, then
989 there is nothing we can do. */
990 if (!is_gimple_assign (use_stmt
))
992 if (!is_gimple_debug (use_stmt
))
997 gimple_stmt_iterator gsi
= gsi_for_stmt (use_stmt
);
998 result
= forward_propagate_addr_expr_1 (name
, rhs
, &gsi
,
1000 /* If the use has moved to a different statement adjust
1001 the update machinery for the old statement too. */
1002 if (use_stmt
!= gsi_stmt (gsi
))
1004 update_stmt (use_stmt
);
1005 use_stmt
= gsi_stmt (gsi
);
1007 update_stmt (use_stmt
);
1010 /* Remove intermediate now unused copy and conversion chains. */
1011 use_rhs
= gimple_assign_rhs1 (use_stmt
);
1013 && TREE_CODE (gimple_assign_lhs (use_stmt
)) == SSA_NAME
1014 && TREE_CODE (use_rhs
) == SSA_NAME
1015 && has_zero_uses (gimple_assign_lhs (use_stmt
)))
1017 gimple_stmt_iterator gsi
= gsi_for_stmt (use_stmt
);
1018 fwprop_invalidate_lattice (gimple_get_lhs (use_stmt
));
1019 release_defs (use_stmt
);
1020 gsi_remove (&gsi
, true);
1024 return all
&& has_zero_uses (name
);
/* NOTE(review): this definition lost its return-type line, braces, the
   declarations of 'label', 'elt', 'e' and 'ei', the edge-removal body
   inside the final loop (original lines 1086-1088 absent) and the
   surrounding conditional structure.  Code kept byte-identical.  */
1028 /* Helper function for simplify_gimple_switch. Remove case labels that
1029 have values outside the range of the new type. */
1032 simplify_gimple_switch_label_vec (gswitch
*stmt
, tree index_type
)
1034 unsigned int branch_num
= gimple_switch_num_labels (stmt
)
;
1035 auto_vec
<tree
> labels (branch_num
);
1036 unsigned int i
, len
;
1038 /* Collect the existing case labels in a VEC, and preprocess it as if
1039 we are gimplifying a GENERIC SWITCH_EXPR. */
1040 for (i
= 1; i
< branch_num
; i
++)
1041 labels
.quick_push (gimple_switch_label (stmt
, i
));
1042 preprocess_case_label_vec_for_gimple (labels
, index_type
, NULL
);
1044 /* If any labels were removed, replace the existing case labels
1045 in the GIMPLE_SWITCH statement with the correct ones.
1046 Note that the type updates were done in-place on the case labels,
1047 so we only have to replace the case labels in the GIMPLE_SWITCH
1048 if the number of labels changed. */
1049 len
= labels
.length ();
1050 if (len
< branch_num
- 1)
1052 bitmap target_blocks
;
1056 /* Corner case: *all* case labels have been removed as being
1057 out-of-range for INDEX_TYPE. Push one label and let the
1058 CFG cleanups deal with this further. */
1063 label
= CASE_LABEL (gimple_switch_default_label (stmt
));
1064 elt
= build_case_label (build_int_cst (index_type
, 0), NULL
, label
);
1065 labels
.quick_push (elt
);
1069 for (i
= 0; i
< labels
.length (); i
++)
1070 gimple_switch_set_label (stmt
, i
+ 1, labels
[i
]);
1071 for (i
++ ; i
< branch_num
; i
++)
1072 gimple_switch_set_label (stmt
, i
, NULL_TREE
);
1073 gimple_switch_set_num_labels (stmt
, len
+ 1);
1075 /* Cleanup any edges that are now dead. */
1076 target_blocks
= BITMAP_ALLOC (NULL
);
1077 for (i
= 0; i
< gimple_switch_num_labels (stmt
); i
++)
1079 tree elt
= gimple_switch_label (stmt
, i
);
1080 basic_block target
= label_to_block (cfun
, CASE_LABEL (elt
));
1081 bitmap_set_bit (target_blocks
, target
->index
);
1083 for (ei
= ei_start (gimple_bb (stmt
)->succs
); (e
= ei_safe_edge (ei
)); )
1085 if (! bitmap_bit_p (target_blocks
, e
->dest
->index
))
1089 free_dominance_info (CDI_DOMINATORS
);
1094 BITMAP_FREE (target_blocks
);
/* NOTE(review): this definition lost its return-type line, braces, the
   'return false;' early exits, the 'if (n > 1)' guard around the min/max
   extraction, the 'else' line before original line 1132 and the final
   returns.  Code kept byte-identical.  */
1098 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1099 the condition which we may be able to optimize better. */
1102 simplify_gimple_switch (gswitch
*stmt
)
1104 /* The optimization that we really care about is removing unnecessary
1105 casts. That will let us do much better in propagating the inferred
1106 constant at the switch target. */
1107 tree cond
= gimple_switch_index (stmt
);
1108 if (TREE_CODE (cond
) == SSA_NAME
)
1110 gimple
*def_stmt
= SSA_NAME_DEF_STMT (cond
);
1111 if (gimple_assign_cast_p (def_stmt
))
1113 tree def
= gimple_assign_rhs1 (def_stmt
);
1114 if (TREE_CODE (def
) != SSA_NAME
)
1117 /* If we have an extension or sign-change that preserves the
1118 values we check against then we can copy the source value into
1120 tree ti
= TREE_TYPE (def
);
1121 if (INTEGRAL_TYPE_P (ti
)
1122 && TYPE_PRECISION (ti
) <= TYPE_PRECISION (TREE_TYPE (cond
)))
1124 size_t n
= gimple_switch_num_labels (stmt
);
1125 tree min
= NULL_TREE
, max
= NULL_TREE
;
/* Labels are sorted, so the first and last non-default labels bound
   the case-value range being checked against TI below.  */
1128 min
= CASE_LOW (gimple_switch_label (stmt
, 1));
1129 if (CASE_HIGH (gimple_switch_label (stmt
, n
- 1)))
1130 max
= CASE_HIGH (gimple_switch_label (stmt
, n
- 1));
1132 max
= CASE_LOW (gimple_switch_label (stmt
, n
- 1));
1134 if ((!min
|| int_fits_type_p (min
, ti
))
1135 && (!max
|| int_fits_type_p (max
, ti
)))
1137 gimple_switch_set_index (stmt
, def
);
1138 simplify_gimple_switch_label_vec (stmt
, ti
);
1149 /* For pointers p2 and p1 return p2 - p1 if the
1150 difference is known and constant, otherwise return NULL. */
1153 constant_pointer_difference (tree p1
, tree p2
)
1156 #define CPD_ITERATIONS 5
1157 tree exps
[2][CPD_ITERATIONS
];
1158 tree offs
[2][CPD_ITERATIONS
];
1161 for (i
= 0; i
< 2; i
++)
1163 tree p
= i
? p1
: p2
;
1164 tree off
= size_zero_node
;
1166 enum tree_code code
;
1168 /* For each of p1 and p2 we need to iterate at least
1169 twice, to handle ADDR_EXPR directly in p1/p2,
1170 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1171 on definition's stmt RHS. Iterate a few extra times. */
1175 if (!POINTER_TYPE_P (TREE_TYPE (p
)))
1177 if (TREE_CODE (p
) == ADDR_EXPR
)
1179 tree q
= TREE_OPERAND (p
, 0);
1181 tree base
= get_addr_base_and_unit_offset (q
, &offset
);
1185 if (maybe_ne (offset
, 0))
1186 off
= size_binop (PLUS_EXPR
, off
, size_int (offset
));
1188 if (TREE_CODE (q
) == MEM_REF
1189 && TREE_CODE (TREE_OPERAND (q
, 0)) == SSA_NAME
)
1191 p
= TREE_OPERAND (q
, 0);
1192 off
= size_binop (PLUS_EXPR
, off
,
1193 wide_int_to_tree (sizetype
,
1194 mem_ref_offset (q
)));
1203 if (TREE_CODE (p
) != SSA_NAME
)
1207 if (j
== CPD_ITERATIONS
)
1209 stmt
= SSA_NAME_DEF_STMT (p
);
1210 if (!is_gimple_assign (stmt
) || gimple_assign_lhs (stmt
) != p
)
1212 code
= gimple_assign_rhs_code (stmt
);
1213 if (code
== POINTER_PLUS_EXPR
)
1215 if (TREE_CODE (gimple_assign_rhs2 (stmt
)) != INTEGER_CST
)
1217 off
= size_binop (PLUS_EXPR
, off
, gimple_assign_rhs2 (stmt
));
1218 p
= gimple_assign_rhs1 (stmt
);
1220 else if (code
== ADDR_EXPR
|| CONVERT_EXPR_CODE_P (code
))
1221 p
= gimple_assign_rhs1 (stmt
);
1229 for (i
= 0; i
< cnt
[0]; i
++)
1230 for (j
= 0; j
< cnt
[1]; j
++)
1231 if (exps
[0][i
] == exps
[1][j
])
1232 return size_binop (MINUS_EXPR
, offs
[0][i
], offs
[1][j
]);
1237 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1239 memcpy (p, "abcd", 4);
1240 memset (p + 4, ' ', 3);
1242 memcpy (p, "abcd ", 7);
1243 call if the latter can be stored by pieces during expansion. */
1246 simplify_builtin_call (gimple_stmt_iterator
*gsi_p
, tree callee2
)
1248 gimple
*stmt1
, *stmt2
= gsi_stmt (*gsi_p
);
1249 tree vuse
= gimple_vuse (stmt2
);
1252 stmt1
= SSA_NAME_DEF_STMT (vuse
);
1254 switch (DECL_FUNCTION_CODE (callee2
))
1256 case BUILT_IN_MEMSET
:
1257 if (gimple_call_num_args (stmt2
) != 3
1258 || gimple_call_lhs (stmt2
)
1260 || BITS_PER_UNIT
!= 8)
1265 tree ptr1
, src1
, str1
, off1
, len1
, lhs1
;
1266 tree ptr2
= gimple_call_arg (stmt2
, 0);
1267 tree val2
= gimple_call_arg (stmt2
, 1);
1268 tree len2
= gimple_call_arg (stmt2
, 2);
1269 tree diff
, vdef
, new_str_cst
;
1271 unsigned int ptr1_align
;
1272 unsigned HOST_WIDE_INT src_len
;
1274 use_operand_p use_p
;
1276 if (!tree_fits_shwi_p (val2
)
1277 || !tree_fits_uhwi_p (len2
)
1278 || compare_tree_int (len2
, 1024) == 1)
1280 if (is_gimple_call (stmt1
))
1282 /* If first stmt is a call, it needs to be memcpy
1283 or mempcpy, with string literal as second argument and
1285 callee1
= gimple_call_fndecl (stmt1
);
1286 if (callee1
== NULL_TREE
1287 || !fndecl_built_in_p (callee1
, BUILT_IN_NORMAL
)
1288 || gimple_call_num_args (stmt1
) != 3)
1290 if (DECL_FUNCTION_CODE (callee1
) != BUILT_IN_MEMCPY
1291 && DECL_FUNCTION_CODE (callee1
) != BUILT_IN_MEMPCPY
)
1293 ptr1
= gimple_call_arg (stmt1
, 0);
1294 src1
= gimple_call_arg (stmt1
, 1);
1295 len1
= gimple_call_arg (stmt1
, 2);
1296 lhs1
= gimple_call_lhs (stmt1
);
1297 if (!tree_fits_uhwi_p (len1
))
1299 str1
= string_constant (src1
, &off1
, NULL
, NULL
);
1300 if (str1
== NULL_TREE
)
1302 if (!tree_fits_uhwi_p (off1
)
1303 || compare_tree_int (off1
, TREE_STRING_LENGTH (str1
) - 1) > 0
1304 || compare_tree_int (len1
, TREE_STRING_LENGTH (str1
)
1305 - tree_to_uhwi (off1
)) > 0
1306 || TREE_CODE (TREE_TYPE (str1
)) != ARRAY_TYPE
1307 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1
)))
1308 != TYPE_MODE (char_type_node
))
1311 else if (gimple_assign_single_p (stmt1
))
1313 /* Otherwise look for length 1 memcpy optimized into
1315 ptr1
= gimple_assign_lhs (stmt1
);
1316 src1
= gimple_assign_rhs1 (stmt1
);
1317 if (TREE_CODE (ptr1
) != MEM_REF
1318 || TYPE_MODE (TREE_TYPE (ptr1
)) != TYPE_MODE (char_type_node
)
1319 || !tree_fits_shwi_p (src1
))
1321 ptr1
= build_fold_addr_expr (ptr1
);
1322 STRIP_USELESS_TYPE_CONVERSION (ptr1
);
1323 callee1
= NULL_TREE
;
1324 len1
= size_one_node
;
1326 off1
= size_zero_node
;
1332 diff
= constant_pointer_difference (ptr1
, ptr2
);
1333 if (diff
== NULL
&& lhs1
!= NULL
)
1335 diff
= constant_pointer_difference (lhs1
, ptr2
);
1336 if (DECL_FUNCTION_CODE (callee1
) == BUILT_IN_MEMPCPY
1338 diff
= size_binop (PLUS_EXPR
, diff
,
1339 fold_convert (sizetype
, len1
));
1341 /* If the difference between the second and first destination pointer
1342 is not constant, or is bigger than memcpy length, bail out. */
1344 || !tree_fits_uhwi_p (diff
)
1345 || tree_int_cst_lt (len1
, diff
)
1346 || compare_tree_int (diff
, 1024) == 1)
1349 /* Use maximum of difference plus memset length and memcpy length
1350 as the new memcpy length, if it is too big, bail out. */
1351 src_len
= tree_to_uhwi (diff
);
1352 src_len
+= tree_to_uhwi (len2
);
1353 if (src_len
< tree_to_uhwi (len1
))
1354 src_len
= tree_to_uhwi (len1
);
1358 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1359 with bigger length will return different result. */
1360 if (lhs1
!= NULL_TREE
1361 && DECL_FUNCTION_CODE (callee1
) == BUILT_IN_MEMPCPY
1362 && (TREE_CODE (lhs1
) != SSA_NAME
1363 || !single_imm_use (lhs1
, &use_p
, &use_stmt
)
1364 || use_stmt
!= stmt2
))
1367 /* If anything reads memory in between memcpy and memset
1368 call, the modified memcpy call might change it. */
1369 vdef
= gimple_vdef (stmt1
);
1371 && (!single_imm_use (vdef
, &use_p
, &use_stmt
)
1372 || use_stmt
!= stmt2
))
1375 ptr1_align
= get_pointer_alignment (ptr1
);
1376 /* Construct the new source string literal. */
1377 src_buf
= XALLOCAVEC (char, src_len
+ 1);
1380 TREE_STRING_POINTER (str1
) + tree_to_uhwi (off1
),
1381 tree_to_uhwi (len1
));
1383 src_buf
[0] = tree_to_shwi (src1
);
1384 memset (src_buf
+ tree_to_uhwi (diff
),
1385 tree_to_shwi (val2
), tree_to_uhwi (len2
));
1386 src_buf
[src_len
] = '\0';
1387 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1388 handle embedded '\0's. */
1389 if (strlen (src_buf
) != src_len
)
1391 rtl_profile_for_bb (gimple_bb (stmt2
));
1392 /* If the new memcpy wouldn't be emitted by storing the literal
1393 by pieces, this optimization might enlarge .rodata too much,
1394 as commonly used string literals couldn't be shared any
1396 if (!can_store_by_pieces (src_len
,
1397 builtin_strncpy_read_str
,
1398 src_buf
, ptr1_align
, false))
1401 new_str_cst
= build_string_literal (src_len
, src_buf
);
1404 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1406 if (lhs1
&& DECL_FUNCTION_CODE (callee1
) == BUILT_IN_MEMPCPY
)
1407 gimple_call_set_lhs (stmt1
, NULL_TREE
);
1408 gimple_call_set_arg (stmt1
, 1, new_str_cst
);
1409 gimple_call_set_arg (stmt1
, 2,
1410 build_int_cst (TREE_TYPE (len1
), src_len
));
1411 update_stmt (stmt1
);
1412 unlink_stmt_vdef (stmt2
);
1413 gsi_replace (gsi_p
, gimple_build_nop (), false);
1414 fwprop_invalidate_lattice (gimple_get_lhs (stmt2
));
1415 release_defs (stmt2
);
1416 if (lhs1
&& DECL_FUNCTION_CODE (callee1
) == BUILT_IN_MEMPCPY
)
1418 fwprop_invalidate_lattice (lhs1
);
1419 release_ssa_name (lhs1
);
1425 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1426 assignment, remove STMT1 and change memset call into
1428 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt1
);
1430 if (!is_gimple_val (ptr1
))
1431 ptr1
= force_gimple_operand_gsi (gsi_p
, ptr1
, true, NULL_TREE
,
1432 true, GSI_SAME_STMT
);
1433 tree fndecl
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
1434 gimple_call_set_fndecl (stmt2
, fndecl
);
1435 gimple_call_set_fntype (as_a
<gcall
*> (stmt2
),
1436 TREE_TYPE (fndecl
));
1437 gimple_call_set_arg (stmt2
, 0, ptr1
);
1438 gimple_call_set_arg (stmt2
, 1, new_str_cst
);
1439 gimple_call_set_arg (stmt2
, 2,
1440 build_int_cst (TREE_TYPE (len2
), src_len
));
1441 unlink_stmt_vdef (stmt1
);
1442 gsi_remove (&gsi
, true);
1443 fwprop_invalidate_lattice (gimple_get_lhs (stmt1
));
1444 release_defs (stmt1
);
1445 update_stmt (stmt2
);
1456 /* Given a ssa_name in NAME see if it was defined by an assignment and
1457 set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
1458 to the second operand on the rhs. */
1461 defcodefor_name (tree name
, enum tree_code
*code
, tree
*arg1
, tree
*arg2
)
1464 enum tree_code code1
;
1468 enum gimple_rhs_class grhs_class
;
1470 code1
= TREE_CODE (name
);
1474 grhs_class
= get_gimple_rhs_class (code1
);
1476 if (code1
== SSA_NAME
)
1478 def
= SSA_NAME_DEF_STMT (name
);
1480 if (def
&& is_gimple_assign (def
)
1481 && can_propagate_from (def
))
1483 code1
= gimple_assign_rhs_code (def
);
1484 arg11
= gimple_assign_rhs1 (def
);
1485 arg21
= gimple_assign_rhs2 (def
);
1486 arg31
= gimple_assign_rhs3 (def
);
1489 else if (grhs_class
!= GIMPLE_SINGLE_RHS
)
1501 /* Recognize rotation patterns. Return true if a transformation
1502 applied, otherwise return false.
1504 We are looking for X with unsigned type T with bitsize B, OP being
1505 +, | or ^, some type T2 wider than T. For:
1506 (X << CNT1) OP (X >> CNT2) iff CNT1 + CNT2 == B
1507 ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2)) iff CNT1 + CNT2 == B
1509 transform these into:
1513 (X << Y) OP (X >> (B - Y))
1514 (X << (int) Y) OP (X >> (int) (B - Y))
1515 ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
1516 ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
1517 (X << Y) | (X >> ((-Y) & (B - 1)))
1518 (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
1519 ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
1520 ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
1522 transform these into:
1526 (X << (Y & (B - 1))) | (X >> ((-Y) & (B - 1)))
1527 (X << (int) (Y & (B - 1))) | (X >> (int) ((-Y) & (B - 1)))
1528 ((T) ((T2) X << (Y & (B - 1)))) | ((T) ((T2) X >> ((-Y) & (B - 1))))
1529 ((T) ((T2) X << (int) (Y & (B - 1)))) \
1530 | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))
1532 transform these into:
1535 Note, in the patterns with T2 type, the type of OP operands
1536 might be even a signed type, but should have precision B.
1537 Expressions with & (B - 1) should be recognized only if B is
1541 simplify_rotate (gimple_stmt_iterator
*gsi
)
1543 gimple
*stmt
= gsi_stmt (*gsi
);
1544 tree arg
[2], rtype
, rotcnt
= NULL_TREE
;
1545 tree def_arg1
[2], def_arg2
[2];
1546 enum tree_code def_code
[2];
1549 bool swapped_p
= false;
1552 arg
[0] = gimple_assign_rhs1 (stmt
);
1553 arg
[1] = gimple_assign_rhs2 (stmt
);
1554 rtype
= TREE_TYPE (arg
[0]);
1556 /* Only create rotates in complete modes. Other cases are not
1557 expanded properly. */
1558 if (!INTEGRAL_TYPE_P (rtype
)
1559 || !type_has_mode_precision_p (rtype
))
1562 for (i
= 0; i
< 2; i
++)
1563 defcodefor_name (arg
[i
], &def_code
[i
], &def_arg1
[i
], &def_arg2
[i
]);
1565 /* Look through narrowing (or same precision) conversions. */
1566 if (CONVERT_EXPR_CODE_P (def_code
[0])
1567 && CONVERT_EXPR_CODE_P (def_code
[1])
1568 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1
[0]))
1569 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1
[1]))
1570 && TYPE_PRECISION (TREE_TYPE (def_arg1
[0]))
1571 == TYPE_PRECISION (TREE_TYPE (def_arg1
[1]))
1572 && TYPE_PRECISION (TREE_TYPE (def_arg1
[0])) >= TYPE_PRECISION (rtype
)
1573 && has_single_use (arg
[0])
1574 && has_single_use (arg
[1]))
1576 for (i
= 0; i
< 2; i
++)
1578 arg
[i
] = def_arg1
[i
];
1579 defcodefor_name (arg
[i
], &def_code
[i
], &def_arg1
[i
], &def_arg2
[i
]);
1584 /* Handle signed rotate; the RSHIFT_EXPR has to be done
1585 in unsigned type but LSHIFT_EXPR could be signed. */
1586 i
= (def_code
[0] == LSHIFT_EXPR
|| def_code
[0] == RSHIFT_EXPR
);
1587 if (CONVERT_EXPR_CODE_P (def_code
[i
])
1588 && (def_code
[1 - i
] == LSHIFT_EXPR
|| def_code
[1 - i
] == RSHIFT_EXPR
)
1589 && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1
[i
]))
1590 && TYPE_PRECISION (rtype
) == TYPE_PRECISION (TREE_TYPE (def_arg1
[i
]))
1591 && has_single_use (arg
[i
]))
1593 arg
[i
] = def_arg1
[i
];
1594 defcodefor_name (arg
[i
], &def_code
[i
], &def_arg1
[i
], &def_arg2
[i
]);
1598 /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR. */
1599 for (i
= 0; i
< 2; i
++)
1600 if (def_code
[i
] != LSHIFT_EXPR
&& def_code
[i
] != RSHIFT_EXPR
)
1602 else if (!has_single_use (arg
[i
]))
1604 if (def_code
[0] == def_code
[1])
1607 /* If we've looked through narrowing conversions before, look through
1608 widening conversions from unsigned type with the same precision
1610 if (TYPE_PRECISION (TREE_TYPE (def_arg1
[0])) != TYPE_PRECISION (rtype
))
1611 for (i
= 0; i
< 2; i
++)
1614 enum tree_code code
;
1615 defcodefor_name (def_arg1
[i
], &code
, &tem
, NULL
);
1616 if (!CONVERT_EXPR_CODE_P (code
)
1617 || !INTEGRAL_TYPE_P (TREE_TYPE (tem
))
1618 || TYPE_PRECISION (TREE_TYPE (tem
)) != TYPE_PRECISION (rtype
))
1622 /* Both shifts have to use the same first operand. */
1623 if (!operand_equal_for_phi_arg_p (def_arg1
[0], def_arg1
[1])
1624 || !types_compatible_p (TREE_TYPE (def_arg1
[0]),
1625 TREE_TYPE (def_arg1
[1])))
1627 if ((TYPE_PRECISION (TREE_TYPE (def_arg1
[0]))
1628 != TYPE_PRECISION (TREE_TYPE (def_arg1
[1])))
1629 || (TYPE_UNSIGNED (TREE_TYPE (def_arg1
[0]))
1630 == TYPE_UNSIGNED (TREE_TYPE (def_arg1
[1]))))
1633 /* Handle signed rotate; the RSHIFT_EXPR has to be done
1634 in unsigned type but LSHIFT_EXPR could be signed. */
1635 i
= def_code
[0] != RSHIFT_EXPR
;
1636 if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1
[i
])))
1640 enum tree_code code
;
1641 defcodefor_name (def_arg1
[i
], &code
, &tem
, NULL
);
1642 if (!CONVERT_EXPR_CODE_P (code
)
1643 || !INTEGRAL_TYPE_P (TREE_TYPE (tem
))
1644 || TYPE_PRECISION (TREE_TYPE (tem
)) != TYPE_PRECISION (rtype
))
1647 if (!operand_equal_for_phi_arg_p (def_arg1
[0], def_arg1
[1])
1648 || !types_compatible_p (TREE_TYPE (def_arg1
[0]),
1649 TREE_TYPE (def_arg1
[1])))
1652 else if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1
[0])))
1655 /* CNT1 + CNT2 == B case above. */
1656 if (tree_fits_uhwi_p (def_arg2
[0])
1657 && tree_fits_uhwi_p (def_arg2
[1])
1658 && tree_to_uhwi (def_arg2
[0])
1659 + tree_to_uhwi (def_arg2
[1]) == TYPE_PRECISION (rtype
))
1660 rotcnt
= def_arg2
[0];
1661 else if (TREE_CODE (def_arg2
[0]) != SSA_NAME
1662 || TREE_CODE (def_arg2
[1]) != SSA_NAME
)
1666 tree cdef_arg1
[2], cdef_arg2
[2], def_arg2_alt
[2];
1667 enum tree_code cdef_code
[2];
1668 /* Look through conversion of the shift count argument.
1669 The C/C++ FE cast any shift count argument to integer_type_node.
1670 The only problem might be if the shift count type maximum value
1671 is equal or smaller than number of bits in rtype. */
1672 for (i
= 0; i
< 2; i
++)
1674 def_arg2_alt
[i
] = def_arg2
[i
];
1675 defcodefor_name (def_arg2
[i
], &cdef_code
[i
],
1676 &cdef_arg1
[i
], &cdef_arg2
[i
]);
1677 if (CONVERT_EXPR_CODE_P (cdef_code
[i
])
1678 && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1
[i
]))
1679 && TYPE_PRECISION (TREE_TYPE (cdef_arg1
[i
]))
1680 > floor_log2 (TYPE_PRECISION (rtype
))
1681 && type_has_mode_precision_p (TREE_TYPE (cdef_arg1
[i
])))
1683 def_arg2_alt
[i
] = cdef_arg1
[i
];
1684 defcodefor_name (def_arg2_alt
[i
], &cdef_code
[i
],
1685 &cdef_arg1
[i
], &cdef_arg2
[i
]);
1688 for (i
= 0; i
< 2; i
++)
1689 /* Check for one shift count being Y and the other B - Y,
1690 with optional casts. */
1691 if (cdef_code
[i
] == MINUS_EXPR
1692 && tree_fits_shwi_p (cdef_arg1
[i
])
1693 && tree_to_shwi (cdef_arg1
[i
]) == TYPE_PRECISION (rtype
)
1694 && TREE_CODE (cdef_arg2
[i
]) == SSA_NAME
)
1697 enum tree_code code
;
1699 if (cdef_arg2
[i
] == def_arg2
[1 - i
]
1700 || cdef_arg2
[i
] == def_arg2_alt
[1 - i
])
1702 rotcnt
= cdef_arg2
[i
];
1705 defcodefor_name (cdef_arg2
[i
], &code
, &tem
, NULL
);
1706 if (CONVERT_EXPR_CODE_P (code
)
1707 && INTEGRAL_TYPE_P (TREE_TYPE (tem
))
1708 && TYPE_PRECISION (TREE_TYPE (tem
))
1709 > floor_log2 (TYPE_PRECISION (rtype
))
1710 && type_has_mode_precision_p (TREE_TYPE (tem
))
1711 && (tem
== def_arg2
[1 - i
]
1712 || tem
== def_arg2_alt
[1 - i
]))
1718 /* The above sequence isn't safe for Y being 0,
1719 because then one of the shifts triggers undefined behavior.
1720 This alternative is safe even for rotation count of 0.
1721 One shift count is Y and the other (-Y) & (B - 1).
1722 Or one shift count is Y & (B - 1) and the other (-Y) & (B - 1). */
1723 else if (cdef_code
[i
] == BIT_AND_EXPR
1724 && pow2p_hwi (TYPE_PRECISION (rtype
))
1725 && tree_fits_shwi_p (cdef_arg2
[i
])
1726 && tree_to_shwi (cdef_arg2
[i
])
1727 == TYPE_PRECISION (rtype
) - 1
1728 && TREE_CODE (cdef_arg1
[i
]) == SSA_NAME
1729 && gimple_assign_rhs_code (stmt
) == BIT_IOR_EXPR
)
1732 enum tree_code code
;
1734 defcodefor_name (cdef_arg1
[i
], &code
, &tem
, NULL
);
1735 if (CONVERT_EXPR_CODE_P (code
)
1736 && INTEGRAL_TYPE_P (TREE_TYPE (tem
))
1737 && TYPE_PRECISION (TREE_TYPE (tem
))
1738 > floor_log2 (TYPE_PRECISION (rtype
))
1739 && type_has_mode_precision_p (TREE_TYPE (tem
)))
1740 defcodefor_name (tem
, &code
, &tem
, NULL
);
1742 if (code
== NEGATE_EXPR
)
1744 if (tem
== def_arg2
[1 - i
] || tem
== def_arg2_alt
[1 - i
])
1750 defcodefor_name (tem
, &code
, &tem2
, NULL
);
1751 if (CONVERT_EXPR_CODE_P (code
)
1752 && INTEGRAL_TYPE_P (TREE_TYPE (tem2
))
1753 && TYPE_PRECISION (TREE_TYPE (tem2
))
1754 > floor_log2 (TYPE_PRECISION (rtype
))
1755 && type_has_mode_precision_p (TREE_TYPE (tem2
)))
1757 if (tem2
== def_arg2
[1 - i
]
1758 || tem2
== def_arg2_alt
[1 - i
])
1767 if (cdef_code
[1 - i
] == BIT_AND_EXPR
1768 && tree_fits_shwi_p (cdef_arg2
[1 - i
])
1769 && tree_to_shwi (cdef_arg2
[1 - i
])
1770 == TYPE_PRECISION (rtype
) - 1
1771 && TREE_CODE (cdef_arg1
[1 - i
]) == SSA_NAME
)
1773 if (tem
== cdef_arg1
[1 - i
]
1774 || tem2
== cdef_arg1
[1 - i
])
1776 rotcnt
= def_arg2
[1 - i
];
1780 defcodefor_name (cdef_arg1
[1 - i
], &code
, &tem3
, NULL
);
1781 if (CONVERT_EXPR_CODE_P (code
)
1782 && INTEGRAL_TYPE_P (TREE_TYPE (tem3
))
1783 && TYPE_PRECISION (TREE_TYPE (tem3
))
1784 > floor_log2 (TYPE_PRECISION (rtype
))
1785 && type_has_mode_precision_p (TREE_TYPE (tem3
)))
1787 if (tem
== tem3
|| tem2
== tem3
)
1789 rotcnt
= def_arg2
[1 - i
];
1796 if (rotcnt
== NULL_TREE
)
1801 if (!useless_type_conversion_p (TREE_TYPE (def_arg2
[0]),
1802 TREE_TYPE (rotcnt
)))
1804 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2
[0])),
1806 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
1807 rotcnt
= gimple_assign_lhs (g
);
1809 lhs
= gimple_assign_lhs (stmt
);
1810 if (!useless_type_conversion_p (rtype
, TREE_TYPE (def_arg1
[0])))
1811 lhs
= make_ssa_name (TREE_TYPE (def_arg1
[0]));
1812 g
= gimple_build_assign (lhs
,
1813 ((def_code
[0] == LSHIFT_EXPR
) ^ swapped_p
)
1814 ? LROTATE_EXPR
: RROTATE_EXPR
, def_arg1
[0], rotcnt
);
1815 if (!useless_type_conversion_p (rtype
, TREE_TYPE (def_arg1
[0])))
1817 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
1818 g
= gimple_build_assign (gimple_assign_lhs (stmt
), NOP_EXPR
, lhs
);
1820 gsi_replace (gsi
, g
, false);
1825 /* Check whether an array contains a valid ctz table. */
1827 check_ctz_array (tree ctor
, unsigned HOST_WIDE_INT mulc
,
1828 HOST_WIDE_INT
&zero_val
, unsigned shift
, unsigned bits
)
1831 unsigned HOST_WIDE_INT i
, mask
;
1832 unsigned matched
= 0;
1834 mask
= ((HOST_WIDE_INT_1U
<< (bits
- shift
)) - 1) << shift
;
1838 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), i
, idx
, elt
)
1840 if (TREE_CODE (idx
) != INTEGER_CST
|| TREE_CODE (elt
) != INTEGER_CST
)
1845 unsigned HOST_WIDE_INT index
= tree_to_shwi (idx
);
1846 HOST_WIDE_INT val
= tree_to_shwi (elt
);
1854 if (val
>= 0 && val
< bits
&& (((mulc
<< val
) & mask
) >> shift
) == index
)
1864 /* Check whether a string contains a valid ctz table. */
1866 check_ctz_string (tree string
, unsigned HOST_WIDE_INT mulc
,
1867 HOST_WIDE_INT
&zero_val
, unsigned shift
, unsigned bits
)
1869 unsigned HOST_WIDE_INT len
= TREE_STRING_LENGTH (string
);
1870 unsigned HOST_WIDE_INT mask
;
1871 unsigned matched
= 0;
1872 const unsigned char *p
= (const unsigned char *) TREE_STRING_POINTER (string
);
1874 if (len
< bits
|| len
> bits
* 2)
1877 mask
= ((HOST_WIDE_INT_1U
<< (bits
- shift
)) - 1) << shift
;
1881 for (unsigned i
= 0; i
< len
; i
++)
1882 if (p
[i
] < bits
&& (((mulc
<< p
[i
]) & mask
) >> shift
) == i
)
1885 return matched
== bits
;
1888 /* Recognize count trailing zeroes idiom.
1889 The canonical form is array[((x & -x) * C) >> SHIFT] where C is a magic
1890 constant which when multiplied by a power of 2 creates a unique value
1891 in the top 5 or 6 bits. This is then indexed into a table which maps it
1892 to the number of trailing zeroes. Array[0] is returned so the caller can
1893 emit an appropriate sequence depending on whether ctz (0) is defined on
1896 optimize_count_trailing_zeroes (tree array_ref
, tree x
, tree mulc
,
1897 tree tshift
, HOST_WIDE_INT
&zero_val
)
1899 tree type
= TREE_TYPE (array_ref
);
1900 tree array
= TREE_OPERAND (array_ref
, 0);
1902 gcc_assert (TREE_CODE (mulc
) == INTEGER_CST
);
1903 gcc_assert (TREE_CODE (tshift
) == INTEGER_CST
);
1905 tree input_type
= TREE_TYPE (x
);
1906 unsigned input_bits
= tree_to_shwi (TYPE_SIZE (input_type
));
1908 /* Check the array element type is not wider than 32 bits and the input is
1909 an unsigned 32-bit or 64-bit type. */
1910 if (TYPE_PRECISION (type
) > 32 || !TYPE_UNSIGNED (input_type
))
1912 if (input_bits
!= 32 && input_bits
!= 64)
1915 if (!direct_internal_fn_supported_p (IFN_CTZ
, input_type
, OPTIMIZE_FOR_BOTH
))
1918 /* Check the lower bound of the array is zero. */
1919 tree low
= array_ref_low_bound (array_ref
);
1920 if (!low
|| !integer_zerop (low
))
1923 unsigned shiftval
= tree_to_shwi (tshift
);
1925 /* Check the shift extracts the top 5..7 bits. */
1926 if (shiftval
< input_bits
- 7 || shiftval
> input_bits
- 5)
1929 tree ctor
= ctor_for_folding (array
);
1933 unsigned HOST_WIDE_INT val
= tree_to_uhwi (mulc
);
1935 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
1936 return check_ctz_array (ctor
, val
, zero_val
, shiftval
, input_bits
);
1938 if (TREE_CODE (ctor
) == STRING_CST
1939 && TYPE_PRECISION (type
) == CHAR_TYPE_SIZE
)
1940 return check_ctz_string (ctor
, val
, zero_val
, shiftval
, input_bits
);
1945 /* Match.pd function to match the ctz expression. */
1946 extern bool gimple_ctz_table_index (tree
, tree
*, tree (*)(tree
));
1949 simplify_count_trailing_zeroes (gimple_stmt_iterator
*gsi
)
1951 gimple
*stmt
= gsi_stmt (*gsi
);
1952 tree array_ref
= gimple_assign_rhs1 (stmt
);
1954 HOST_WIDE_INT zero_val
;
1956 gcc_checking_assert (TREE_CODE (array_ref
) == ARRAY_REF
);
1958 if (!gimple_ctz_table_index (TREE_OPERAND (array_ref
, 1), &res_ops
[0], NULL
))
1961 if (optimize_count_trailing_zeroes (array_ref
, res_ops
[0],
1962 res_ops
[1], res_ops
[2], zero_val
))
1964 tree type
= TREE_TYPE (res_ops
[0]);
1965 HOST_WIDE_INT ctz_val
= 0;
1966 HOST_WIDE_INT type_size
= tree_to_shwi (TYPE_SIZE (type
));
1968 = CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_INT_TYPE_MODE (type
), ctz_val
) == 2;
1970 /* If the input value can't be zero, don't special case ctz (0). */
1971 if (tree_expr_nonzero_p (res_ops
[0]))
1978 /* Skip if there is no value defined at zero, or if we can't easily
1979 return the correct value for zero. */
1982 if (zero_val
!= ctz_val
&& !(zero_val
== 0 && ctz_val
== type_size
))
1985 gimple_seq seq
= NULL
;
1987 gcall
*call
= gimple_build_call_internal (IFN_CTZ
, 1, res_ops
[0]);
1988 gimple_set_location (call
, gimple_location (stmt
));
1989 gimple_set_lhs (call
, make_ssa_name (integer_type_node
));
1990 gimple_seq_add_stmt (&seq
, call
);
1992 tree prev_lhs
= gimple_call_lhs (call
);
1994 /* Emit ctz (x) & 31 if ctz (0) is 32 but we need to return 0. */
1995 if (zero_val
== 0 && ctz_val
== type_size
)
1997 g
= gimple_build_assign (make_ssa_name (integer_type_node
),
1998 BIT_AND_EXPR
, prev_lhs
,
1999 build_int_cst (integer_type_node
,
2001 gimple_set_location (g
, gimple_location (stmt
));
2002 gimple_seq_add_stmt (&seq
, g
);
2003 prev_lhs
= gimple_assign_lhs (g
);
2006 g
= gimple_build_assign (gimple_assign_lhs (stmt
), NOP_EXPR
, prev_lhs
);
2007 gimple_seq_add_stmt (&seq
, g
);
2008 gsi_replace_with_seq (gsi
, seq
, true);
2016 /* Combine an element access with a shuffle. Returns true if there were
2017 any changes made, else it returns false. */
2020 simplify_bitfield_ref (gimple_stmt_iterator
*gsi
)
2022 gimple
*stmt
= gsi_stmt (*gsi
);
2027 enum tree_code code
;
2029 op
= gimple_assign_rhs1 (stmt
);
2030 gcc_checking_assert (TREE_CODE (op
) == BIT_FIELD_REF
);
2032 op0
= TREE_OPERAND (op
, 0);
2033 if (TREE_CODE (op0
) != SSA_NAME
2034 || TREE_CODE (TREE_TYPE (op0
)) != VECTOR_TYPE
)
2037 def_stmt
= get_prop_source_stmt (op0
, false, NULL
);
2038 if (!def_stmt
|| !can_propagate_from (def_stmt
))
2041 op1
= TREE_OPERAND (op
, 1);
2042 code
= gimple_assign_rhs_code (def_stmt
);
2043 elem_type
= TREE_TYPE (TREE_TYPE (op0
));
2044 if (TREE_TYPE (op
) != elem_type
)
2047 size
= TREE_INT_CST_LOW (TYPE_SIZE (elem_type
));
2048 if (maybe_ne (bit_field_size (op
), size
))
2051 if (code
== VEC_PERM_EXPR
2052 && constant_multiple_p (bit_field_offset (op
), size
, &idx
))
2055 unsigned HOST_WIDE_INT nelts
;
2056 m
= gimple_assign_rhs3 (def_stmt
);
2057 if (TREE_CODE (m
) != VECTOR_CST
2058 || !VECTOR_CST_NELTS (m
).is_constant (&nelts
))
2060 idx
= TREE_INT_CST_LOW (VECTOR_CST_ELT (m
, idx
));
2064 p
= gimple_assign_rhs1 (def_stmt
);
2068 p
= gimple_assign_rhs2 (def_stmt
);
2071 tem
= build3 (BIT_FIELD_REF
, TREE_TYPE (op
),
2072 unshare_expr (p
), op1
, bitsize_int (idx
* size
));
2073 gimple_assign_set_rhs1 (stmt
, tem
);
2075 update_stmt (gsi_stmt (*gsi
));
2082 /* Determine whether applying the 2 permutations (mask1 then mask2)
2083 gives back one of the input. */
2086 is_combined_permutation_identity (tree mask1
, tree mask2
)
2089 unsigned HOST_WIDE_INT nelts
, i
, j
;
2090 bool maybe_identity1
= true;
2091 bool maybe_identity2
= true;
2093 gcc_checking_assert (TREE_CODE (mask1
) == VECTOR_CST
2094 && TREE_CODE (mask2
) == VECTOR_CST
);
2095 mask
= fold_ternary (VEC_PERM_EXPR
, TREE_TYPE (mask1
), mask1
, mask1
, mask2
);
2096 if (mask
== NULL_TREE
|| TREE_CODE (mask
) != VECTOR_CST
)
2099 if (!VECTOR_CST_NELTS (mask
).is_constant (&nelts
))
2101 for (i
= 0; i
< nelts
; i
++)
2103 tree val
= VECTOR_CST_ELT (mask
, i
);
2104 gcc_assert (TREE_CODE (val
) == INTEGER_CST
);
2105 j
= TREE_INT_CST_LOW (val
) & (2 * nelts
- 1);
2107 maybe_identity2
= false;
2108 else if (j
== i
+ nelts
)
2109 maybe_identity1
= false;
2113 return maybe_identity1
? 1 : maybe_identity2
? 2 : 0;
2116 /* Combine a shuffle with its arguments. Returns 1 if there were any
2117 changes made, 2 if cfg-cleanup needs to run. Else it returns 0. */
2120 simplify_permutation (gimple_stmt_iterator
*gsi
)
2122 gimple
*stmt
= gsi_stmt (*gsi
);
2124 tree op0
, op1
, op2
, op3
, arg0
, arg1
;
2125 enum tree_code code
;
2126 bool single_use_op0
= false;
2128 gcc_checking_assert (gimple_assign_rhs_code (stmt
) == VEC_PERM_EXPR
);
2130 op0
= gimple_assign_rhs1 (stmt
);
2131 op1
= gimple_assign_rhs2 (stmt
);
2132 op2
= gimple_assign_rhs3 (stmt
);
2134 if (TREE_CODE (op2
) != VECTOR_CST
)
2137 if (TREE_CODE (op0
) == VECTOR_CST
)
2142 else if (TREE_CODE (op0
) == SSA_NAME
)
2144 def_stmt
= get_prop_source_stmt (op0
, false, &single_use_op0
);
2145 if (!def_stmt
|| !can_propagate_from (def_stmt
))
2148 code
= gimple_assign_rhs_code (def_stmt
);
2149 arg0
= gimple_assign_rhs1 (def_stmt
);
2154 /* Two consecutive shuffles. */
2155 if (code
== VEC_PERM_EXPR
)
2162 op3
= gimple_assign_rhs3 (def_stmt
);
2163 if (TREE_CODE (op3
) != VECTOR_CST
)
2165 ident
= is_combined_permutation_identity (op3
, op2
);
2168 orig
= (ident
== 1) ? gimple_assign_rhs1 (def_stmt
)
2169 : gimple_assign_rhs2 (def_stmt
);
2170 gimple_assign_set_rhs1 (stmt
, unshare_expr (orig
));
2171 gimple_assign_set_rhs_code (stmt
, TREE_CODE (orig
));
2172 gimple_set_num_ops (stmt
, 2);
2174 return remove_prop_source_from_use (op0
) ? 2 : 1;
2177 /* Shuffle of a constructor. */
2178 else if (code
== CONSTRUCTOR
|| code
== VECTOR_CST
)
2184 if (TREE_CODE (op0
) == SSA_NAME
&& !single_use_op0
)
2187 if (TREE_CODE (op1
) == VECTOR_CST
)
2189 else if (TREE_CODE (op1
) == SSA_NAME
)
2191 enum tree_code code2
;
2193 gimple
*def_stmt2
= get_prop_source_stmt (op1
, true, NULL
);
2194 if (!def_stmt2
|| !can_propagate_from (def_stmt2
))
2197 code2
= gimple_assign_rhs_code (def_stmt2
);
2198 if (code2
!= CONSTRUCTOR
&& code2
!= VECTOR_CST
)
2200 arg1
= gimple_assign_rhs1 (def_stmt2
);
2207 /* Already used twice in this statement. */
2208 if (TREE_CODE (op0
) == SSA_NAME
&& num_imm_uses (op0
) > 2)
2212 opt
= fold_ternary (VEC_PERM_EXPR
, TREE_TYPE (op0
), arg0
, arg1
, op2
);
2214 || (TREE_CODE (opt
) != CONSTRUCTOR
&& TREE_CODE (opt
) != VECTOR_CST
))
2216 gimple_assign_set_rhs_from_tree (gsi
, opt
);
2217 update_stmt (gsi_stmt (*gsi
));
2218 if (TREE_CODE (op0
) == SSA_NAME
)
2219 ret
= remove_prop_source_from_use (op0
);
2220 if (op0
!= op1
&& TREE_CODE (op1
) == SSA_NAME
)
2221 ret
|= remove_prop_source_from_use (op1
);
2228 /* Get the BIT_FIELD_REF definition of VAL, if any, looking through
2229 conversions with code CONV_CODE or update it if still ERROR_MARK.
2230 Return NULL_TREE if no such matching def was found. */
2233 get_bit_field_ref_def (tree val
, enum tree_code
&conv_code
)
2235 if (TREE_CODE (val
) != SSA_NAME
)
2237 gimple
*def_stmt
= get_prop_source_stmt (val
, false, NULL
);
2240 enum tree_code code
= gimple_assign_rhs_code (def_stmt
);
2241 if (code
== FLOAT_EXPR
2242 || code
== FIX_TRUNC_EXPR
2243 || CONVERT_EXPR_CODE_P (code
))
2245 tree op1
= gimple_assign_rhs1 (def_stmt
);
2246 if (conv_code
== ERROR_MARK
)
2248 else if (conv_code
!= code
)
2250 if (TREE_CODE (op1
) != SSA_NAME
)
2252 def_stmt
= SSA_NAME_DEF_STMT (op1
);
2253 if (! is_gimple_assign (def_stmt
))
2255 code
= gimple_assign_rhs_code (def_stmt
);
2257 if (code
!= BIT_FIELD_REF
)
2259 return gimple_assign_rhs1 (def_stmt
);
2262 /* Recognize a VEC_PERM_EXPR. Returns true if there were any changes. */
2265 simplify_vector_constructor (gimple_stmt_iterator
*gsi
)
2267 gimple
*stmt
= gsi_stmt (*gsi
);
2268 tree op
, orig
[2], type
, elem_type
;
2269 unsigned elem_size
, i
;
2270 unsigned HOST_WIDE_INT nelts
;
2271 unsigned HOST_WIDE_INT refnelts
;
2272 enum tree_code conv_code
;
2273 constructor_elt
*elt
;
2275 op
= gimple_assign_rhs1 (stmt
);
2276 type
= TREE_TYPE (op
);
2277 gcc_checking_assert (TREE_CODE (op
) == CONSTRUCTOR
2278 && TREE_CODE (type
) == VECTOR_TYPE
);
2280 if (!TYPE_VECTOR_SUBPARTS (type
).is_constant (&nelts
))
2282 elem_type
= TREE_TYPE (type
);
2283 elem_size
= TREE_INT_CST_LOW (TYPE_SIZE (elem_type
));
2287 conv_code
= ERROR_MARK
;
2288 bool maybe_ident
= true;
2289 bool maybe_blend
[2] = { true, true };
2290 tree one_constant
= NULL_TREE
;
2291 tree one_nonconstant
= NULL_TREE
;
2292 auto_vec
<tree
> constants
;
2293 constants
.safe_grow_cleared (nelts
);
2294 auto_vec
<std::pair
<unsigned, unsigned>, 64> elts
;
2295 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op
), i
, elt
)
2303 /* Look for elements extracted and possibly converted from
2305 op1
= get_bit_field_ref_def (elt
->value
, conv_code
);
2307 && TREE_CODE ((ref
= TREE_OPERAND (op1
, 0))) == SSA_NAME
2308 && VECTOR_TYPE_P (TREE_TYPE (ref
))
2309 && useless_type_conversion_p (TREE_TYPE (op1
),
2310 TREE_TYPE (TREE_TYPE (ref
)))
2311 && constant_multiple_p (bit_field_offset (op1
),
2312 bit_field_size (op1
), &elem
)
2313 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (ref
)).is_constant (&refnelts
))
2316 for (j
= 0; j
< 2; ++j
)
2321 || useless_type_conversion_p (TREE_TYPE (orig
[0]),
2325 else if (ref
== orig
[j
])
2328 /* Found a suitable vector element. */
2332 if (elem
!= i
|| j
!= 0)
2333 maybe_ident
= false;
2335 maybe_blend
[j
] = false;
2336 elts
.safe_push (std::make_pair (j
, elem
));
2339 /* Else fallthru. */
2341 /* Handle elements not extracted from a vector.
2342 1. constants by permuting with constant vector
2343 2. a unique non-constant element by permuting with a splat vector */
2345 && orig
[1] != error_mark_node
)
2347 orig
[1] = error_mark_node
;
2348 if (CONSTANT_CLASS_P (elt
->value
))
2350 if (one_nonconstant
)
2353 one_constant
= elt
->value
;
2354 constants
[i
] = elt
->value
;
2360 if (!one_nonconstant
)
2361 one_nonconstant
= elt
->value
;
2362 else if (!operand_equal_p (one_nonconstant
, elt
->value
, 0))
2365 elts
.safe_push (std::make_pair (1, i
));
2366 maybe_ident
= false;
2372 || ! VECTOR_TYPE_P (TREE_TYPE (orig
[0])))
2374 refnelts
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig
[0])).to_constant ();
2375 /* We currently do not handle larger destination vectors. */
2376 if (refnelts
< nelts
)
2382 = (nelts
!= refnelts
2383 ? (conv_code
!= ERROR_MARK
2384 ? build_vector_type (TREE_TYPE (TREE_TYPE (orig
[0])), nelts
)
2386 : TREE_TYPE (orig
[0]));
2387 if (conv_code
!= ERROR_MARK
2388 && !supportable_convert_operation (conv_code
, type
, conv_src_type
,
2391 /* Only few targets implement direct conversion patterns so try
2392 some simple special cases via VEC_[UN]PACK[_FLOAT]_LO_EXPR. */
2394 tree halfvectype
, dblvectype
;
2395 if (CONVERT_EXPR_CODE_P (conv_code
)
2396 && (2 * TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig
[0])))
2397 == TYPE_PRECISION (TREE_TYPE (type
)))
2398 && mode_for_vector (as_a
<scalar_mode
>
2399 (TYPE_MODE (TREE_TYPE (TREE_TYPE (orig
[0])))),
2400 nelts
* 2).exists ()
2402 = build_vector_type (TREE_TYPE (TREE_TYPE (orig
[0])),
2404 && (optab
= optab_for_tree_code (FLOAT_TYPE_P (TREE_TYPE (type
))
2405 ? VEC_UNPACK_FLOAT_LO_EXPR
2406 : VEC_UNPACK_LO_EXPR
,
2409 && (optab_handler (optab
, TYPE_MODE (dblvectype
))
2410 != CODE_FOR_nothing
))
2412 gimple_seq stmts
= NULL
;
2414 if (refnelts
== nelts
)
2416 /* ??? Paradoxical subregs don't exist, so insert into
2417 the lower half of a wider zero vector. */
2418 dbl
= gimple_build (&stmts
, BIT_INSERT_EXPR
, dblvectype
,
2419 build_zero_cst (dblvectype
), orig
[0],
2422 else if (refnelts
== 2 * nelts
)
2425 dbl
= gimple_build (&stmts
, BIT_FIELD_REF
, dblvectype
,
2426 orig
[0], TYPE_SIZE (dblvectype
),
2428 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2429 gimple_assign_set_rhs_with_ops (gsi
,
2430 FLOAT_TYPE_P (TREE_TYPE (type
))
2431 ? VEC_UNPACK_FLOAT_LO_EXPR
2432 : VEC_UNPACK_LO_EXPR
,
2435 else if (CONVERT_EXPR_CODE_P (conv_code
)
2436 && (TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig
[0])))
2437 == 2 * TYPE_PRECISION (TREE_TYPE (type
)))
2438 && mode_for_vector (as_a
<scalar_mode
>
2440 (TREE_TYPE (TREE_TYPE (orig
[0])))),
2441 nelts
/ 2).exists ()
2443 = build_vector_type (TREE_TYPE (TREE_TYPE (orig
[0])),
2445 && (optab
= optab_for_tree_code (VEC_PACK_TRUNC_EXPR
,
2448 && (optab_handler (optab
, TYPE_MODE (halfvectype
))
2449 != CODE_FOR_nothing
))
2451 gimple_seq stmts
= NULL
;
2452 tree low
= gimple_build (&stmts
, BIT_FIELD_REF
, halfvectype
,
2453 orig
[0], TYPE_SIZE (halfvectype
),
2455 tree hig
= gimple_build (&stmts
, BIT_FIELD_REF
, halfvectype
,
2456 orig
[0], TYPE_SIZE (halfvectype
),
2457 TYPE_SIZE (halfvectype
));
2458 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2459 gimple_assign_set_rhs_with_ops (gsi
, VEC_PACK_TRUNC_EXPR
,
2464 update_stmt (gsi_stmt (*gsi
));
2467 if (nelts
!= refnelts
)
2470 = gimple_build_assign (make_ssa_name (conv_src_type
),
2471 build3 (BIT_FIELD_REF
, conv_src_type
,
2472 orig
[0], TYPE_SIZE (conv_src_type
),
2473 bitsize_zero_node
));
2474 gsi_insert_before (gsi
, lowpart
, GSI_SAME_STMT
);
2475 orig
[0] = gimple_assign_lhs (lowpart
);
2477 if (conv_code
== ERROR_MARK
)
2479 tree src_type
= TREE_TYPE (orig
[0]);
2480 if (!useless_type_conversion_p (type
, src_type
))
2482 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type
),
2483 TYPE_VECTOR_SUBPARTS (src_type
))
2484 && useless_type_conversion_p (TREE_TYPE (type
),
2485 TREE_TYPE (src_type
)));
2486 tree rhs
= build1 (VIEW_CONVERT_EXPR
, type
, orig
[0]);
2487 orig
[0] = make_ssa_name (type
);
2488 gassign
*assign
= gimple_build_assign (orig
[0], rhs
);
2489 gsi_insert_before (gsi
, assign
, GSI_SAME_STMT
);
2491 gimple_assign_set_rhs_from_tree (gsi
, orig
[0]);
2494 gimple_assign_set_rhs_with_ops (gsi
, conv_code
, orig
[0],
2495 NULL_TREE
, NULL_TREE
);
2499 /* If we combine a vector with a non-vector avoid cases where
2500 we'll obviously end up with more GIMPLE stmts which is when
2501 we'll later not fold this to a single insert into the vector
2502 and we had a single extract originally. See PR92819. */
2505 && orig
[1] == error_mark_node
2508 tree mask_type
, perm_type
, conv_src_type
;
2509 perm_type
= TREE_TYPE (orig
[0]);
2510 conv_src_type
= (nelts
== refnelts
2512 : build_vector_type (TREE_TYPE (perm_type
), nelts
));
2513 if (conv_code
!= ERROR_MARK
2514 && !supportable_convert_operation (conv_code
, type
, conv_src_type
,
2518 /* Now that we know the number of elements of the source build the
2520 ??? When the second vector has constant values we can shuffle
2521 it and its source indexes to make the permutation supported.
2522 For now it mimics a blend. */
2523 vec_perm_builder
sel (refnelts
, refnelts
, 1);
2524 bool all_same_p
= true;
2525 for (i
= 0; i
< elts
.length (); ++i
)
2527 sel
.quick_push (elts
[i
].second
+ elts
[i
].first
* refnelts
);
2528 all_same_p
&= known_eq (sel
[i
], sel
[0]);
2530 /* And fill the tail with "something". It's really don't care,
2531 and ideally we'd allow VEC_PERM to have a smaller destination
2532 vector. As a heuristic:
2534 (a) if what we have so far duplicates a single element, make the
2537 (b) otherwise preserve a uniform orig[0]. This facilitates
2538 later pattern-matching of VEC_PERM_EXPR to a BIT_INSERT_EXPR. */
2539 for (; i
< refnelts
; ++i
)
2540 sel
.quick_push (all_same_p
2542 : (elts
[0].second
== 0 && elts
[0].first
== 0
2543 ? 0 : refnelts
) + i
);
2544 vec_perm_indices
indices (sel
, orig
[1] ? 2 : 1, refnelts
);
2545 if (!can_vec_perm_const_p (TYPE_MODE (perm_type
), indices
))
2548 = build_vector_type (build_nonstandard_integer_type (elem_size
, 1),
2550 if (GET_MODE_CLASS (TYPE_MODE (mask_type
)) != MODE_VECTOR_INT
2551 || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type
)),
2552 GET_MODE_SIZE (TYPE_MODE (perm_type
))))
2554 tree op2
= vec_perm_indices_to_tree (mask_type
, indices
);
2555 bool converted_orig1
= false;
2556 gimple_seq stmts
= NULL
;
2559 else if (orig
[1] == error_mark_node
2562 /* ??? We can see if we can safely convert to the original
2564 converted_orig1
= conv_code
!= ERROR_MARK
;
2565 orig
[1] = gimple_build_vector_from_val (&stmts
, UNKNOWN_LOCATION
,
2570 else if (orig
[1] == error_mark_node
)
2572 /* ??? See if we can convert the vector to the original type. */
2573 converted_orig1
= conv_code
!= ERROR_MARK
;
2574 unsigned n
= converted_orig1
? nelts
: refnelts
;
2575 tree_vector_builder
vec (converted_orig1
2576 ? type
: perm_type
, n
, 1);
2577 for (unsigned i
= 0; i
< n
; ++i
)
2578 if (i
< nelts
&& constants
[i
])
2579 vec
.quick_push (constants
[i
]);
2581 /* ??? Push a don't-care value. */
2582 vec
.quick_push (one_constant
);
2583 orig
[1] = vec
.build ();
2585 tree blend_op2
= NULL_TREE
;
2586 if (converted_orig1
)
2588 /* Make sure we can do a blend in the target type. */
2589 vec_perm_builder
sel (nelts
, nelts
, 1);
2590 for (i
= 0; i
< elts
.length (); ++i
)
2591 sel
.quick_push (elts
[i
].first
2592 ? elts
[i
].second
+ nelts
: i
);
2593 vec_perm_indices
indices (sel
, 2, nelts
);
2594 if (!can_vec_perm_const_p (TYPE_MODE (type
), indices
))
2597 = build_vector_type (build_nonstandard_integer_type (elem_size
, 1),
2599 if (GET_MODE_CLASS (TYPE_MODE (mask_type
)) != MODE_VECTOR_INT
2600 || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type
)),
2601 GET_MODE_SIZE (TYPE_MODE (type
))))
2603 blend_op2
= vec_perm_indices_to_tree (mask_type
, indices
);
2606 = converted_orig1
? build_zero_cst (perm_type
) : orig
[1];
2607 tree res
= gimple_build (&stmts
, VEC_PERM_EXPR
, perm_type
,
2608 orig
[0], orig1_for_perm
, op2
);
2609 if (nelts
!= refnelts
)
2610 res
= gimple_build (&stmts
, BIT_FIELD_REF
,
2611 conv_code
!= ERROR_MARK
? conv_src_type
: type
,
2612 res
, TYPE_SIZE (type
), bitsize_zero_node
);
2613 if (conv_code
!= ERROR_MARK
)
2614 res
= gimple_build (&stmts
, conv_code
, type
, res
);
2615 else if (!useless_type_conversion_p (type
, TREE_TYPE (res
)))
2617 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type
),
2618 TYPE_VECTOR_SUBPARTS (perm_type
))
2619 && useless_type_conversion_p (TREE_TYPE (type
),
2620 TREE_TYPE (perm_type
)));
2621 res
= gimple_build (&stmts
, VIEW_CONVERT_EXPR
, type
, res
);
2623 /* Blend in the actual constant. */
2624 if (converted_orig1
)
2625 res
= gimple_build (&stmts
, VEC_PERM_EXPR
, type
,
2626 res
, orig
[1], blend_op2
);
2627 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2628 gimple_assign_set_rhs_with_ops (gsi
, SSA_NAME
, res
);
2630 update_stmt (gsi_stmt (*gsi
));
2635 /* Primitive "lattice" function for gimple_simplify. */
2638 fwprop_ssa_val (tree name
)
2640 /* First valueize NAME. */
2641 if (TREE_CODE (name
) == SSA_NAME
2642 && SSA_NAME_VERSION (name
) < lattice
.length ())
2644 tree val
= lattice
[SSA_NAME_VERSION (name
)];
2648 /* We continue matching along SSA use-def edges for SSA names
2649 that are not single-use. Currently there are no patterns
2650 that would cause any issues with that. */
2654 /* Main entry point for the forward propagation and statement combine
2659 const pass_data pass_data_forwprop
=
2661 GIMPLE_PASS
, /* type */
2662 "forwprop", /* name */
2663 OPTGROUP_NONE
, /* optinfo_flags */
2664 TV_TREE_FORWPROP
, /* tv_id */
2665 ( PROP_cfg
| PROP_ssa
), /* properties_required */
2666 0, /* properties_provided */
2667 0, /* properties_destroyed */
2668 0, /* todo_flags_start */
2669 TODO_update_ssa
, /* todo_flags_finish */
2672 class pass_forwprop
: public gimple_opt_pass
2675 pass_forwprop (gcc::context
*ctxt
)
2676 : gimple_opt_pass (pass_data_forwprop
, ctxt
)
2679 /* opt_pass methods: */
2680 opt_pass
* clone () { return new pass_forwprop (m_ctxt
); }
2681 virtual bool gate (function
*) { return flag_tree_forwprop
; }
2682 virtual unsigned int execute (function
*);
2684 }; // class pass_forwprop
2687 pass_forwprop::execute (function
*fun
)
2689 unsigned int todoflags
= 0;
2691 cfg_changed
= false;
2693 /* Combine stmts with the stmts defining their operands. Do that
2694 in an order that guarantees visiting SSA defs before SSA uses. */
2695 lattice
.create (num_ssa_names
);
2696 lattice
.quick_grow_cleared (num_ssa_names
);
2697 int *postorder
= XNEWVEC (int, n_basic_blocks_for_fn (fun
));
2698 int postorder_num
= pre_and_rev_post_order_compute_fn (cfun
, NULL
,
2700 auto_vec
<gimple
*, 4> to_fixup
;
2701 auto_vec
<gimple
*, 32> to_remove
;
2702 to_purge
= BITMAP_ALLOC (NULL
);
2703 for (int i
= 0; i
< postorder_num
; ++i
)
2705 gimple_stmt_iterator gsi
;
2706 basic_block bb
= BASIC_BLOCK_FOR_FN (fun
, postorder
[i
]);
2708 /* Record degenerate PHIs in the lattice. */
2709 for (gphi_iterator si
= gsi_start_phis (bb
); !gsi_end_p (si
);
2712 gphi
*phi
= si
.phi ();
2713 tree res
= gimple_phi_result (phi
);
2714 if (virtual_operand_p (res
))
2717 use_operand_p use_p
;
2719 tree first
= NULL_TREE
;
2720 bool all_same
= true;
2721 FOR_EACH_PHI_ARG (use_p
, phi
, it
, SSA_OP_USE
)
2723 tree use
= USE_FROM_PTR (use_p
);
2726 else if (! operand_equal_p (first
, use
, 0))
2734 if (may_propagate_copy (res
, first
))
2735 to_remove
.safe_push (phi
);
2736 fwprop_set_lattice_val (res
, first
);
2740 /* Apply forward propagation to all stmts in the basic-block.
2741 Note we update GSI within the loop as necessary. */
2742 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); )
2744 gimple
*stmt
= gsi_stmt (gsi
);
2746 enum tree_code code
;
2748 if (!is_gimple_assign (stmt
))
2754 lhs
= gimple_assign_lhs (stmt
);
2755 rhs
= gimple_assign_rhs1 (stmt
);
2756 code
= gimple_assign_rhs_code (stmt
);
2757 if (TREE_CODE (lhs
) != SSA_NAME
2758 || has_zero_uses (lhs
))
2764 /* If this statement sets an SSA_NAME to an address,
2765 try to propagate the address into the uses of the SSA_NAME. */
2766 if (code
== ADDR_EXPR
2767 /* Handle pointer conversions on invariant addresses
2768 as well, as this is valid gimple. */
2769 || (CONVERT_EXPR_CODE_P (code
)
2770 && TREE_CODE (rhs
) == ADDR_EXPR
2771 && POINTER_TYPE_P (TREE_TYPE (lhs
))))
2773 tree base
= get_base_address (TREE_OPERAND (rhs
, 0));
2776 || decl_address_invariant_p (base
))
2777 && TREE_CODE (base
) != TARGET_MEM_REF
2778 && !stmt_references_abnormal_ssa_name (stmt
)
2779 && forward_propagate_addr_expr (lhs
, rhs
, true))
2781 fwprop_invalidate_lattice (gimple_get_lhs (stmt
));
2782 release_defs (stmt
);
2783 gsi_remove (&gsi
, true);
2788 else if (code
== POINTER_PLUS_EXPR
)
2790 tree off
= gimple_assign_rhs2 (stmt
);
2791 if (TREE_CODE (off
) == INTEGER_CST
2792 && can_propagate_from (stmt
)
2793 && !simple_iv_increment_p (stmt
)
2794 /* ??? Better adjust the interface to that function
2795 instead of building new trees here. */
2796 && forward_propagate_addr_expr
2798 build1_loc (gimple_location (stmt
),
2799 ADDR_EXPR
, TREE_TYPE (rhs
),
2800 fold_build2 (MEM_REF
,
2801 TREE_TYPE (TREE_TYPE (rhs
)),
2803 fold_convert (ptr_type_node
,
2806 fwprop_invalidate_lattice (gimple_get_lhs (stmt
));
2807 release_defs (stmt
);
2808 gsi_remove (&gsi
, true);
2810 else if (is_gimple_min_invariant (rhs
))
2812 /* Make sure to fold &a[0] + off_1 here. */
2813 fold_stmt_inplace (&gsi
);
2815 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
2821 else if (TREE_CODE (TREE_TYPE (lhs
)) == COMPLEX_TYPE
2822 && gimple_assign_load_p (stmt
)
2823 && !gimple_has_volatile_ops (stmt
)
2824 && (TREE_CODE (gimple_assign_rhs1 (stmt
))
2826 && !stmt_can_throw_internal (cfun
, stmt
))
2828 /* Rewrite loads used only in real/imagpart extractions to
2829 component-wise loads. */
2830 use_operand_p use_p
;
2831 imm_use_iterator iter
;
2832 bool rewrite
= true;
2833 FOR_EACH_IMM_USE_FAST (use_p
, iter
, lhs
)
2835 gimple
*use_stmt
= USE_STMT (use_p
);
2836 if (is_gimple_debug (use_stmt
))
2838 if (!is_gimple_assign (use_stmt
)
2839 || (gimple_assign_rhs_code (use_stmt
) != REALPART_EXPR
2840 && gimple_assign_rhs_code (use_stmt
) != IMAGPART_EXPR
)
2841 || TREE_OPERAND (gimple_assign_rhs1 (use_stmt
), 0) != lhs
)
2850 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, lhs
)
2852 if (is_gimple_debug (use_stmt
))
2854 if (gimple_debug_bind_p (use_stmt
))
2856 gimple_debug_bind_reset_value (use_stmt
);
2857 update_stmt (use_stmt
);
2862 tree new_rhs
= build1 (gimple_assign_rhs_code (use_stmt
),
2863 TREE_TYPE (TREE_TYPE (rhs
)),
2864 unshare_expr (rhs
));
2866 = gimple_build_assign (gimple_assign_lhs (use_stmt
),
2869 location_t loc
= gimple_location (use_stmt
);
2870 gimple_set_location (new_stmt
, loc
);
2871 gimple_stmt_iterator gsi2
= gsi_for_stmt (use_stmt
);
2872 unlink_stmt_vdef (use_stmt
);
2873 gsi_remove (&gsi2
, true);
2875 gsi_insert_before (&gsi
, new_stmt
, GSI_SAME_STMT
);
2878 release_defs (stmt
);
2879 gsi_remove (&gsi
, true);
2884 else if (TREE_CODE (TREE_TYPE (lhs
)) == VECTOR_TYPE
2885 && TYPE_MODE (TREE_TYPE (lhs
)) == BLKmode
2886 && gimple_assign_load_p (stmt
)
2887 && !gimple_has_volatile_ops (stmt
)
2888 && (TREE_CODE (gimple_assign_rhs1 (stmt
))
2890 && !stmt_can_throw_internal (cfun
, stmt
))
2892 /* Rewrite loads used only in BIT_FIELD_REF extractions to
2893 component-wise loads. */
2894 use_operand_p use_p
;
2895 imm_use_iterator iter
;
2896 bool rewrite
= true;
2897 FOR_EACH_IMM_USE_FAST (use_p
, iter
, lhs
)
2899 gimple
*use_stmt
= USE_STMT (use_p
);
2900 if (is_gimple_debug (use_stmt
))
2902 if (!is_gimple_assign (use_stmt
)
2903 || gimple_assign_rhs_code (use_stmt
) != BIT_FIELD_REF
2904 || TREE_OPERAND (gimple_assign_rhs1 (use_stmt
), 0) != lhs
)
2913 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, lhs
)
2915 if (is_gimple_debug (use_stmt
))
2917 if (gimple_debug_bind_p (use_stmt
))
2919 gimple_debug_bind_reset_value (use_stmt
);
2920 update_stmt (use_stmt
);
2925 tree bfr
= gimple_assign_rhs1 (use_stmt
);
2926 tree new_rhs
= fold_build3 (BIT_FIELD_REF
,
2929 TREE_OPERAND (bfr
, 1),
2930 TREE_OPERAND (bfr
, 2));
2932 = gimple_build_assign (gimple_assign_lhs (use_stmt
),
2935 location_t loc
= gimple_location (use_stmt
);
2936 gimple_set_location (new_stmt
, loc
);
2937 gimple_stmt_iterator gsi2
= gsi_for_stmt (use_stmt
);
2938 unlink_stmt_vdef (use_stmt
);
2939 gsi_remove (&gsi2
, true);
2941 gsi_insert_before (&gsi
, new_stmt
, GSI_SAME_STMT
);
2944 release_defs (stmt
);
2945 gsi_remove (&gsi
, true);
2951 else if (code
== COMPLEX_EXPR
)
2953 /* Rewrite stores of a single-use complex build expression
2954 to component-wise stores. */
2955 use_operand_p use_p
;
2957 if (single_imm_use (lhs
, &use_p
, &use_stmt
)
2958 && gimple_store_p (use_stmt
)
2959 && !gimple_has_volatile_ops (use_stmt
)
2960 && is_gimple_assign (use_stmt
)
2961 && (TREE_CODE (gimple_assign_lhs (use_stmt
))
2964 tree use_lhs
= gimple_assign_lhs (use_stmt
);
2965 if (auto_var_p (use_lhs
))
2966 DECL_NOT_GIMPLE_REG_P (use_lhs
) = 1;
2967 tree new_lhs
= build1 (REALPART_EXPR
,
2968 TREE_TYPE (TREE_TYPE (use_lhs
)),
2969 unshare_expr (use_lhs
));
2970 gimple
*new_stmt
= gimple_build_assign (new_lhs
, rhs
);
2971 location_t loc
= gimple_location (use_stmt
);
2972 gimple_set_location (new_stmt
, loc
);
2973 gimple_set_vuse (new_stmt
, gimple_vuse (use_stmt
));
2974 gimple_set_vdef (new_stmt
, make_ssa_name (gimple_vop (cfun
)));
2975 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
2976 gimple_set_vuse (use_stmt
, gimple_vdef (new_stmt
));
2977 gimple_stmt_iterator gsi2
= gsi_for_stmt (use_stmt
);
2978 gsi_insert_before (&gsi2
, new_stmt
, GSI_SAME_STMT
);
2980 new_lhs
= build1 (IMAGPART_EXPR
,
2981 TREE_TYPE (TREE_TYPE (use_lhs
)),
2982 unshare_expr (use_lhs
));
2983 gimple_assign_set_lhs (use_stmt
, new_lhs
);
2984 gimple_assign_set_rhs1 (use_stmt
, gimple_assign_rhs2 (stmt
));
2985 update_stmt (use_stmt
);
2987 release_defs (stmt
);
2988 gsi_remove (&gsi
, true);
2993 else if (code
== CONSTRUCTOR
2994 && VECTOR_TYPE_P (TREE_TYPE (rhs
))
2995 && TYPE_MODE (TREE_TYPE (rhs
)) == BLKmode
2996 && CONSTRUCTOR_NELTS (rhs
) > 0
2997 && (!VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (rhs
, 0)->value
))
2998 || (TYPE_MODE (TREE_TYPE (CONSTRUCTOR_ELT (rhs
, 0)->value
))
3001 /* Rewrite stores of a single-use vector constructors
3002 to component-wise stores if the mode isn't supported. */
3003 use_operand_p use_p
;
3005 if (single_imm_use (lhs
, &use_p
, &use_stmt
)
3006 && gimple_store_p (use_stmt
)
3007 && !gimple_has_volatile_ops (use_stmt
)
3008 && !stmt_can_throw_internal (cfun
, use_stmt
)
3009 && is_gimple_assign (use_stmt
)
3010 && (TREE_CODE (gimple_assign_lhs (use_stmt
))
3013 tree elt_t
= TREE_TYPE (CONSTRUCTOR_ELT (rhs
, 0)->value
);
3014 unsigned HOST_WIDE_INT elt_w
3015 = tree_to_uhwi (TYPE_SIZE (elt_t
));
3016 unsigned HOST_WIDE_INT n
3017 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs
)));
3018 tree use_lhs
= gimple_assign_lhs (use_stmt
);
3019 if (auto_var_p (use_lhs
))
3020 DECL_NOT_GIMPLE_REG_P (use_lhs
) = 1;
3021 for (unsigned HOST_WIDE_INT bi
= 0; bi
< n
; bi
+= elt_w
)
3023 unsigned HOST_WIDE_INT ci
= bi
/ elt_w
;
3025 if (ci
< CONSTRUCTOR_NELTS (rhs
))
3026 new_rhs
= CONSTRUCTOR_ELT (rhs
, ci
)->value
;
3028 new_rhs
= build_zero_cst (elt_t
);
3029 tree new_lhs
= build3 (BIT_FIELD_REF
,
3031 unshare_expr (use_lhs
),
3032 bitsize_int (elt_w
),
3034 gimple
*new_stmt
= gimple_build_assign (new_lhs
, new_rhs
);
3035 location_t loc
= gimple_location (use_stmt
);
3036 gimple_set_location (new_stmt
, loc
);
3037 gimple_set_vuse (new_stmt
, gimple_vuse (use_stmt
));
3038 gimple_set_vdef (new_stmt
,
3039 make_ssa_name (gimple_vop (cfun
)));
3040 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
3041 gimple_set_vuse (use_stmt
, gimple_vdef (new_stmt
));
3042 gimple_stmt_iterator gsi2
= gsi_for_stmt (use_stmt
);
3043 gsi_insert_before (&gsi2
, new_stmt
, GSI_SAME_STMT
);
3045 gimple_stmt_iterator gsi2
= gsi_for_stmt (use_stmt
);
3046 unlink_stmt_vdef (use_stmt
);
3047 release_defs (use_stmt
);
3048 gsi_remove (&gsi2
, true);
3049 release_defs (stmt
);
3050 gsi_remove (&gsi
, true);
3059 /* Combine stmts with the stmts defining their operands.
3060 Note we update GSI within the loop as necessary. */
3061 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3063 gimple
*stmt
= gsi_stmt (gsi
);
3065 /* Mark stmt as potentially needing revisiting. */
3066 gimple_set_plf (stmt
, GF_PLF_1
, false);
3068 /* Substitute from our lattice. We need to do so only once. */
3069 bool substituted_p
= false;
3072 FOR_EACH_SSA_USE_OPERAND (usep
, stmt
, iter
, SSA_OP_USE
)
3074 tree use
= USE_FROM_PTR (usep
);
3075 tree val
= fwprop_ssa_val (use
);
3076 if (val
&& val
!= use
&& may_propagate_copy (use
, val
))
3078 propagate_value (usep
, val
);
3079 substituted_p
= true;
3083 && is_gimple_assign (stmt
)
3084 && gimple_assign_rhs_code (stmt
) == ADDR_EXPR
)
3085 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt
));
3090 gimple
*orig_stmt
= stmt
= gsi_stmt (gsi
);
3091 bool was_noreturn
= (is_gimple_call (stmt
)
3092 && gimple_call_noreturn_p (stmt
));
3095 if (fold_stmt (&gsi
, fwprop_ssa_val
))
3098 stmt
= gsi_stmt (gsi
);
3099 /* Cleanup the CFG if we simplified a condition to
3101 if (gcond
*cond
= dyn_cast
<gcond
*> (stmt
))
3102 if (gimple_cond_true_p (cond
)
3103 || gimple_cond_false_p (cond
))
3107 if (changed
|| substituted_p
)
3109 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
3110 bitmap_set_bit (to_purge
, bb
->index
);
3112 && is_gimple_call (stmt
) && gimple_call_noreturn_p (stmt
))
3113 to_fixup
.safe_push (stmt
);
3115 substituted_p
= false;
3118 switch (gimple_code (stmt
))
3122 tree rhs1
= gimple_assign_rhs1 (stmt
);
3123 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3125 if (code
== COND_EXPR
3126 || code
== VEC_COND_EXPR
)
3128 /* In this case the entire COND_EXPR is in rhs1. */
3129 if (forward_propagate_into_cond (&gsi
))
3132 stmt
= gsi_stmt (gsi
);
3135 else if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3138 did_something
= forward_propagate_into_comparison (&gsi
);
3139 if (maybe_clean_or_replace_eh_stmt (stmt
, gsi_stmt (gsi
)))
3140 bitmap_set_bit (to_purge
, bb
->index
);
3141 if (did_something
== 2)
3143 changed
= did_something
!= 0;
3145 else if ((code
== PLUS_EXPR
3146 || code
== BIT_IOR_EXPR
3147 || code
== BIT_XOR_EXPR
)
3148 && simplify_rotate (&gsi
))
3150 else if (code
== VEC_PERM_EXPR
)
3152 int did_something
= simplify_permutation (&gsi
);
3153 if (did_something
== 2)
3155 changed
= did_something
!= 0;
3157 else if (code
== BIT_FIELD_REF
)
3158 changed
= simplify_bitfield_ref (&gsi
);
3159 else if (code
== CONSTRUCTOR
3160 && TREE_CODE (TREE_TYPE (rhs1
)) == VECTOR_TYPE
)
3161 changed
= simplify_vector_constructor (&gsi
);
3162 else if (code
== ARRAY_REF
)
3163 changed
= simplify_count_trailing_zeroes (&gsi
);
3168 changed
= simplify_gimple_switch (as_a
<gswitch
*> (stmt
));
3173 int did_something
= forward_propagate_into_gimple_cond
3174 (as_a
<gcond
*> (stmt
));
3175 if (did_something
== 2)
3177 changed
= did_something
!= 0;
3183 tree callee
= gimple_call_fndecl (stmt
);
3184 if (callee
!= NULL_TREE
3185 && fndecl_built_in_p (callee
, BUILT_IN_NORMAL
))
3186 changed
= simplify_builtin_call (&gsi
, callee
);
3195 /* If the stmt changed then re-visit it and the statements
3196 inserted before it. */
3197 for (; !gsi_end_p (gsi
); gsi_prev (&gsi
))
3198 if (gimple_plf (gsi_stmt (gsi
), GF_PLF_1
))
3200 if (gsi_end_p (gsi
))
3201 gsi
= gsi_start_bb (bb
);
3208 /* Stmt no longer needs to be revisited. */
3209 stmt
= gsi_stmt (gsi
);
3210 gcc_checking_assert (!gimple_plf (stmt
, GF_PLF_1
));
3211 gimple_set_plf (stmt
, GF_PLF_1
, true);
3213 /* Fill up the lattice. */
3214 if (gimple_assign_single_p (stmt
))
3216 tree lhs
= gimple_assign_lhs (stmt
);
3217 tree rhs
= gimple_assign_rhs1 (stmt
);
3218 if (TREE_CODE (lhs
) == SSA_NAME
)
3221 if (TREE_CODE (rhs
) == SSA_NAME
)
3222 val
= fwprop_ssa_val (rhs
);
3223 else if (is_gimple_min_invariant (rhs
))
3225 /* If we can propagate the lattice-value mark the
3226 stmt for removal. */
3228 && may_propagate_copy (lhs
, val
))
3229 to_remove
.safe_push (stmt
);
3230 fwprop_set_lattice_val (lhs
, val
);
3233 else if (gimple_nop_p (stmt
))
3234 to_remove
.safe_push (stmt
);
3237 /* Substitute in destination PHI arguments. */
3240 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3241 for (gphi_iterator gsi
= gsi_start_phis (e
->dest
);
3242 !gsi_end_p (gsi
); gsi_next (&gsi
))
3244 gphi
*phi
= gsi
.phi ();
3245 use_operand_p use_p
= PHI_ARG_DEF_PTR_FROM_EDGE (phi
, e
);
3246 tree arg
= USE_FROM_PTR (use_p
);
3247 if (TREE_CODE (arg
) != SSA_NAME
3248 || virtual_operand_p (arg
))
3250 tree val
= fwprop_ssa_val (arg
);
3252 && may_propagate_copy (arg
, val
))
3253 propagate_value (use_p
, val
);
3259 /* Remove stmts in reverse order to make debug stmt creation possible. */
3260 while (!to_remove
.is_empty())
3262 gimple
*stmt
= to_remove
.pop ();
3263 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3265 fprintf (dump_file
, "Removing dead stmt ");
3266 print_gimple_stmt (dump_file
, stmt
, 0);
3267 fprintf (dump_file
, "\n");
3269 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
3270 if (gimple_code (stmt
) == GIMPLE_PHI
)
3271 remove_phi_node (&gsi
, true);
3274 unlink_stmt_vdef (stmt
);
3275 gsi_remove (&gsi
, true);
3276 release_defs (stmt
);
3280 /* Fixup stmts that became noreturn calls. This may require splitting
3281 blocks and thus isn't possible during the walk. Do this
3282 in reverse order so we don't inadvertedly remove a stmt we want to
3283 fixup by visiting a dominating now noreturn call first. */
3284 while (!to_fixup
.is_empty ())
3286 gimple
*stmt
= to_fixup
.pop ();
3287 if (dump_file
&& dump_flags
& TDF_DETAILS
)
3289 fprintf (dump_file
, "Fixing up noreturn call ");
3290 print_gimple_stmt (dump_file
, stmt
, 0);
3291 fprintf (dump_file
, "\n");
3293 cfg_changed
|= fixup_noreturn_call (stmt
);
3296 cfg_changed
|= gimple_purge_all_dead_eh_edges (to_purge
);
3297 BITMAP_FREE (to_purge
);
3300 todoflags
|= TODO_cleanup_cfg
;
3308 make_pass_forwprop (gcc::context
*ctxt
)
3310 return new pass_forwprop (ctxt
);