/* Miscellaneous SSA utility functions.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
25 #include "stor-layout.h"
30 #include "langhooks.h"
31 #include "basic-block.h"
33 #include "gimple-pretty-print.h"
34 #include "pointer-set.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "gimple-ssa.h"
40 #include "tree-phinodes.h"
41 #include "ssa-iterators.h"
42 #include "stringpool.h"
43 #include "tree-ssanames.h"
44 #include "tree-ssa-loop-manip.h"
45 #include "tree-into-ssa.h"
47 #include "tree-inline.h"
49 #include "tree-pass.h"
50 #include "diagnostic-core.h"
52 #include "cfgexpand.h"
54 /* Pointer map of variable mappings, keyed by edge. */
55 static struct pointer_map_t
*edge_var_maps
;
58 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
61 redirect_edge_var_map_add (edge e
, tree result
, tree def
, source_location locus
)
64 edge_var_map_vector
*head
;
65 edge_var_map new_node
;
67 if (edge_var_maps
== NULL
)
68 edge_var_maps
= pointer_map_create ();
70 slot
= pointer_map_insert (edge_var_maps
, e
);
71 head
= (edge_var_map_vector
*) *slot
;
73 vec_safe_reserve (head
, 5);
75 new_node
.result
= result
;
76 new_node
.locus
= locus
;
78 vec_safe_push (head
, new_node
);
83 /* Clear the var mappings in edge E. */
86 redirect_edge_var_map_clear (edge e
)
89 edge_var_map_vector
*head
;
94 slot
= pointer_map_contains (edge_var_maps
, e
);
98 head
= (edge_var_map_vector
*) *slot
;
105 /* Duplicate the redirected var mappings in OLDE in NEWE.
107 Since we can't remove a mapping, let's just duplicate it. This assumes a
108 pointer_map can have multiple edges mapping to the same var_map (many to
109 one mapping), since we don't remove the previous mappings. */
112 redirect_edge_var_map_dup (edge newe
, edge olde
)
114 void **new_slot
, **old_slot
;
115 edge_var_map_vector
*head
;
120 new_slot
= pointer_map_insert (edge_var_maps
, newe
);
121 old_slot
= pointer_map_contains (edge_var_maps
, olde
);
124 head
= (edge_var_map_vector
*) *old_slot
;
126 edge_var_map_vector
*new_head
= NULL
;
128 new_head
= vec_safe_copy (head
);
130 vec_safe_reserve (new_head
, 5);
131 *new_slot
= new_head
;
135 /* Return the variable mappings for a given edge. If there is none, return
138 edge_var_map_vector
*
139 redirect_edge_var_map_vector (edge e
)
143 /* Hey, what kind of idiot would... you'd be surprised. */
147 slot
= pointer_map_contains (edge_var_maps
, e
);
151 return (edge_var_map_vector
*) *slot
;
154 /* Used by redirect_edge_var_map_destroy to free all memory. */
157 free_var_map_entry (const void *key ATTRIBUTE_UNUSED
,
159 void *data ATTRIBUTE_UNUSED
)
161 edge_var_map_vector
*head
= (edge_var_map_vector
*) *value
;
166 /* Clear the edge variable mappings. */
169 redirect_edge_var_map_destroy (void)
173 pointer_map_traverse (edge_var_maps
, free_var_map_entry
, NULL
);
174 pointer_map_destroy (edge_var_maps
);
175 edge_var_maps
= NULL
;
180 /* Remove the corresponding arguments from the PHI nodes in E's
181 destination block and redirect it to DEST. Return redirected edge.
182 The list of removed arguments is stored in a vector accessed
183 through edge_var_maps. */
186 ssa_redirect_edge (edge e
, basic_block dest
)
188 gimple_stmt_iterator gsi
;
191 redirect_edge_var_map_clear (e
);
193 /* Remove the appropriate PHI arguments in E's destination block. */
194 for (gsi
= gsi_start_phis (e
->dest
); !gsi_end_p (gsi
); gsi_next (&gsi
))
197 source_location locus
;
199 phi
= gsi_stmt (gsi
);
200 def
= gimple_phi_arg_def (phi
, e
->dest_idx
);
201 locus
= gimple_phi_arg_location (phi
, e
->dest_idx
);
203 if (def
== NULL_TREE
)
206 redirect_edge_var_map_add (e
, gimple_phi_result (phi
), def
, locus
);
209 e
= redirect_edge_succ_nodup (e
, dest
);
215 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
219 flush_pending_stmts (edge e
)
222 edge_var_map_vector
*v
;
225 gimple_stmt_iterator gsi
;
227 v
= redirect_edge_var_map_vector (e
);
231 for (gsi
= gsi_start_phis (e
->dest
), i
= 0;
232 !gsi_end_p (gsi
) && v
->iterate (i
, &vm
);
233 gsi_next (&gsi
), i
++)
237 phi
= gsi_stmt (gsi
);
238 def
= redirect_edge_var_map_def (vm
);
239 add_phi_arg (phi
, def
, e
, redirect_edge_var_map_location (vm
));
242 redirect_edge_var_map_clear (e
);
245 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
246 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
247 expression with a different value.
249 This will update any annotations (say debug bind stmts) referring
250 to the original LHS, so that they use the RHS instead. This is
251 done even if NLHS and LHS are the same, for it is understood that
252 the RHS will be modified afterwards, and NLHS will not be assigned
255 Adjusting any non-annotation uses of the LHS, if needed, is a
256 responsibility of the caller.
258 The effect of this call should be pretty much the same as that of
259 inserting a copy of STMT before STMT, and then removing the
260 original stmt, at which time gsi_remove() would have update
261 annotations, but using this function saves all the inserting,
262 copying and removing. */
265 gimple_replace_ssa_lhs (gimple stmt
, tree nlhs
)
267 if (MAY_HAVE_DEBUG_STMTS
)
269 tree lhs
= gimple_get_lhs (stmt
);
271 gcc_assert (SSA_NAME_DEF_STMT (lhs
) == stmt
);
273 insert_debug_temp_for_var_def (NULL
, lhs
);
276 gimple_set_lhs (stmt
, nlhs
);
280 /* Given a tree for an expression for which we might want to emit
281 locations or values in debug information (generally a variable, but
282 we might deal with other kinds of trees in the future), return the
283 tree that should be used as the variable of a DEBUG_BIND STMT or
284 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
287 target_for_debug_bind (tree var
)
289 if (!MAY_HAVE_DEBUG_STMTS
)
292 if (TREE_CODE (var
) == SSA_NAME
)
294 var
= SSA_NAME_VAR (var
);
295 if (var
== NULL_TREE
)
299 if ((TREE_CODE (var
) != VAR_DECL
300 || VAR_DECL_IS_VIRTUAL_OPERAND (var
))
301 && TREE_CODE (var
) != PARM_DECL
)
304 if (DECL_HAS_VALUE_EXPR_P (var
))
305 return target_for_debug_bind (DECL_VALUE_EXPR (var
));
307 if (DECL_IGNORED_P (var
))
310 /* var-tracking only tracks registers. */
311 if (!is_gimple_reg_type (TREE_TYPE (var
)))
317 /* Called via walk_tree, look for SSA_NAMEs that have already been
321 find_released_ssa_name (tree
*tp
, int *walk_subtrees
, void *data_
)
323 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data_
;
325 if (wi
&& wi
->is_lhs
)
328 if (TREE_CODE (*tp
) == SSA_NAME
)
330 if (SSA_NAME_IN_FREE_LIST (*tp
))
335 else if (IS_TYPE_OR_DECL_P (*tp
))
341 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
342 by other DEBUG stmts, and replace uses of the DEF with the
343 newly-created debug temp. */
346 insert_debug_temp_for_var_def (gimple_stmt_iterator
*gsi
, tree var
)
348 imm_use_iterator imm_iter
;
351 gimple def_stmt
= NULL
;
355 if (!MAY_HAVE_DEBUG_STMTS
)
358 /* If this name has already been registered for replacement, do nothing
359 as anything that uses this name isn't in SSA form. */
360 if (name_registered_for_update_p (var
))
363 /* Check whether there are debug stmts that reference this variable and,
364 if there are, decide whether we should use a debug temp. */
365 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, var
)
367 stmt
= USE_STMT (use_p
);
369 if (!gimple_debug_bind_p (stmt
))
375 if (gimple_debug_bind_get_value (stmt
) != var
)
377 /* Count this as an additional use, so as to make sure we
378 use a temp unless VAR's definition has a SINGLE_RHS that
389 def_stmt
= gsi_stmt (*gsi
);
391 def_stmt
= SSA_NAME_DEF_STMT (var
);
393 /* If we didn't get an insertion point, and the stmt has already
394 been removed, we won't be able to insert the debug bind stmt, so
395 we'll have to drop debug information. */
396 if (gimple_code (def_stmt
) == GIMPLE_PHI
)
398 value
= degenerate_phi_result (def_stmt
);
399 if (value
&& walk_tree (&value
, find_released_ssa_name
, NULL
, NULL
))
401 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
403 else if (value
== error_mark_node
)
406 else if (is_gimple_assign (def_stmt
))
408 bool no_value
= false;
410 if (!dom_info_available_p (CDI_DOMINATORS
))
412 struct walk_stmt_info wi
;
414 memset (&wi
, 0, sizeof (wi
));
416 /* When removing blocks without following reverse dominance
417 order, we may sometimes encounter SSA_NAMEs that have
418 already been released, referenced in other SSA_DEFs that
419 we're about to release. Consider:
428 If we deleted BB X first, propagating the value of w_2
429 won't do us any good. It's too late to recover their
430 original definition of v_1: when it was deleted, it was
431 only referenced in other DEFs, it couldn't possibly know
432 it should have been retained, and propagating every
433 single DEF just in case it might have to be propagated
434 into a DEBUG STMT would probably be too wasteful.
436 When dominator information is not readily available, we
437 check for and accept some loss of debug information. But
438 if it is available, there's no excuse for us to remove
439 blocks in the wrong order, so we don't even check for
440 dead SSA NAMEs. SSA verification shall catch any
442 if ((!gsi
&& !gimple_bb (def_stmt
))
443 || walk_gimple_op (def_stmt
, find_released_ssa_name
, &wi
))
448 value
= gimple_assign_rhs_to_tree (def_stmt
);
453 /* If there's a single use of VAR, and VAR is the entire debug
454 expression (usecount would have been incremented again
455 otherwise), and the definition involves only constants and
456 SSA names, then we can propagate VALUE into this single use,
459 We can also avoid using a temp if VALUE can be shared and
460 propagated into all uses, without generating expressions that
461 wouldn't be valid gimple RHSs.
463 Other cases that would require unsharing or non-gimple RHSs
464 are deferred to a debug temp, although we could avoid temps
465 at the expense of duplication of expressions. */
467 if (CONSTANT_CLASS_P (value
)
468 || gimple_code (def_stmt
) == GIMPLE_PHI
470 && (!gimple_assign_single_p (def_stmt
)
471 || is_gimple_min_invariant (value
)))
472 || is_gimple_reg (value
))
477 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
479 def_temp
= gimple_build_debug_bind (vexpr
,
480 unshare_expr (value
),
483 DECL_ARTIFICIAL (vexpr
) = 1;
484 TREE_TYPE (vexpr
) = TREE_TYPE (value
);
486 DECL_MODE (vexpr
) = DECL_MODE (value
);
488 DECL_MODE (vexpr
) = TYPE_MODE (TREE_TYPE (value
));
491 gsi_insert_before (gsi
, def_temp
, GSI_SAME_STMT
);
494 gimple_stmt_iterator ngsi
= gsi_for_stmt (def_stmt
);
495 gsi_insert_before (&ngsi
, def_temp
, GSI_SAME_STMT
);
502 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, var
)
504 if (!gimple_debug_bind_p (stmt
))
509 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
510 /* unshare_expr is not needed here. vexpr is either a
511 SINGLE_RHS, that can be safely shared, some other RHS
512 that was unshared when we found it had a single debug
513 use, or a DEBUG_EXPR_DECL, that can be safely
515 SET_USE (use_p
, unshare_expr (value
));
516 /* If we didn't replace uses with a debug decl fold the
517 resulting expression. Otherwise we end up with invalid IL. */
518 if (TREE_CODE (value
) != DEBUG_EXPR_DECL
)
520 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
521 fold_stmt_inplace (&gsi
);
525 gimple_debug_bind_reset_value (stmt
);
532 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
533 other DEBUG stmts, and replace uses of the DEF with the
534 newly-created debug temp. */
537 insert_debug_temps_for_defs (gimple_stmt_iterator
*gsi
)
543 if (!MAY_HAVE_DEBUG_STMTS
)
546 stmt
= gsi_stmt (*gsi
);
548 FOR_EACH_PHI_OR_STMT_DEF (def_p
, stmt
, op_iter
, SSA_OP_DEF
)
550 tree var
= DEF_FROM_PTR (def_p
);
552 if (TREE_CODE (var
) != SSA_NAME
)
555 insert_debug_temp_for_var_def (gsi
, var
);
559 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
562 reset_debug_uses (gimple stmt
)
566 imm_use_iterator imm_iter
;
569 if (!MAY_HAVE_DEBUG_STMTS
)
572 FOR_EACH_PHI_OR_STMT_DEF (def_p
, stmt
, op_iter
, SSA_OP_DEF
)
574 tree var
= DEF_FROM_PTR (def_p
);
576 if (TREE_CODE (var
) != SSA_NAME
)
579 FOR_EACH_IMM_USE_STMT (use_stmt
, imm_iter
, var
)
581 if (!gimple_debug_bind_p (use_stmt
))
584 gimple_debug_bind_reset_value (use_stmt
);
585 update_stmt (use_stmt
);
590 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
591 dominated stmts before their dominators, so that release_ssa_defs
592 stands a chance of propagating DEFs into debug bind stmts. */
595 release_defs_bitset (bitmap toremove
)
600 /* Performing a topological sort is probably overkill, this will
601 most likely run in slightly superlinear time, rather than the
602 pathological quadratic worst case. */
603 while (!bitmap_empty_p (toremove
))
604 EXECUTE_IF_SET_IN_BITMAP (toremove
, 0, j
, bi
)
606 bool remove_now
= true;
607 tree var
= ssa_name (j
);
609 imm_use_iterator uit
;
611 FOR_EACH_IMM_USE_STMT (stmt
, uit
, var
)
616 /* We can't propagate PHI nodes into debug stmts. */
617 if (gimple_code (stmt
) == GIMPLE_PHI
618 || is_gimple_debug (stmt
))
621 /* If we find another definition to remove that uses
622 the one we're looking at, defer the removal of this
623 one, so that it can be propagated into debug stmts
624 after the other is. */
625 FOR_EACH_SSA_DEF_OPERAND (def_p
, stmt
, dit
, SSA_OP_DEF
)
627 tree odef
= DEF_FROM_PTR (def_p
);
629 if (bitmap_bit_p (toremove
, SSA_NAME_VERSION (odef
)))
637 BREAK_FROM_IMM_USE_STMT (uit
);
642 gimple def
= SSA_NAME_DEF_STMT (var
);
643 gimple_stmt_iterator gsi
= gsi_for_stmt (def
);
645 if (gimple_code (def
) == GIMPLE_PHI
)
646 remove_phi_node (&gsi
, true);
649 gsi_remove (&gsi
, true);
653 bitmap_clear_bit (toremove
, j
);
658 /* Return true if SSA_NAME is malformed and mark it visited.
660 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
664 verify_ssa_name (tree ssa_name
, bool is_virtual
)
666 if (TREE_CODE (ssa_name
) != SSA_NAME
)
668 error ("expected an SSA_NAME object");
672 if (SSA_NAME_IN_FREE_LIST (ssa_name
))
674 error ("found an SSA_NAME that had been released into the free pool");
678 if (SSA_NAME_VAR (ssa_name
) != NULL_TREE
679 && TREE_TYPE (ssa_name
) != TREE_TYPE (SSA_NAME_VAR (ssa_name
)))
681 error ("type mismatch between an SSA_NAME and its symbol");
685 if (is_virtual
&& !virtual_operand_p (ssa_name
))
687 error ("found a virtual definition for a GIMPLE register");
691 if (is_virtual
&& SSA_NAME_VAR (ssa_name
) != gimple_vop (cfun
))
693 error ("virtual SSA name for non-VOP decl");
697 if (!is_virtual
&& virtual_operand_p (ssa_name
))
699 error ("found a real definition for a non-register");
703 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name
)
704 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name
)))
706 error ("found a default name with a non-empty defining statement");
714 /* Return true if the definition of SSA_NAME at block BB is malformed.
716 STMT is the statement where SSA_NAME is created.
718 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
719 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
720 it means that the block in that array slot contains the
721 definition of SSA_NAME.
723 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
726 verify_def (basic_block bb
, basic_block
*definition_block
, tree ssa_name
,
727 gimple stmt
, bool is_virtual
)
729 if (verify_ssa_name (ssa_name
, is_virtual
))
732 if (SSA_NAME_VAR (ssa_name
)
733 && TREE_CODE (SSA_NAME_VAR (ssa_name
)) == RESULT_DECL
734 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name
)))
736 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
740 if (definition_block
[SSA_NAME_VERSION (ssa_name
)])
742 error ("SSA_NAME created in two different blocks %i and %i",
743 definition_block
[SSA_NAME_VERSION (ssa_name
)]->index
, bb
->index
);
747 definition_block
[SSA_NAME_VERSION (ssa_name
)] = bb
;
749 if (SSA_NAME_DEF_STMT (ssa_name
) != stmt
)
751 error ("SSA_NAME_DEF_STMT is wrong");
752 fprintf (stderr
, "Expected definition statement:\n");
753 print_gimple_stmt (stderr
, SSA_NAME_DEF_STMT (ssa_name
), 4, TDF_VOPS
);
754 fprintf (stderr
, "\nActual definition statement:\n");
755 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
762 fprintf (stderr
, "while verifying SSA_NAME ");
763 print_generic_expr (stderr
, ssa_name
, 0);
764 fprintf (stderr
, " in statement\n");
765 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
771 /* Return true if the use of SSA_NAME at statement STMT in block BB is
774 DEF_BB is the block where SSA_NAME was found to be created.
776 IDOM contains immediate dominator information for the flowgraph.
778 CHECK_ABNORMAL is true if the caller wants to check whether this use
779 is flowing through an abnormal edge (only used when checking PHI
782 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
783 that are defined before STMT in basic block BB. */
786 verify_use (basic_block bb
, basic_block def_bb
, use_operand_p use_p
,
787 gimple stmt
, bool check_abnormal
, bitmap names_defined_in_bb
)
790 tree ssa_name
= USE_FROM_PTR (use_p
);
792 if (!TREE_VISITED (ssa_name
))
793 if (verify_imm_links (stderr
, ssa_name
))
796 TREE_VISITED (ssa_name
) = 1;
798 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name
))
799 && SSA_NAME_IS_DEFAULT_DEF (ssa_name
))
800 ; /* Default definitions have empty statements. Nothing to do. */
803 error ("missing definition");
806 else if (bb
!= def_bb
807 && !dominated_by_p (CDI_DOMINATORS
, bb
, def_bb
))
809 error ("definition in block %i does not dominate use in block %i",
810 def_bb
->index
, bb
->index
);
813 else if (bb
== def_bb
814 && names_defined_in_bb
!= NULL
815 && !bitmap_bit_p (names_defined_in_bb
, SSA_NAME_VERSION (ssa_name
)))
817 error ("definition in block %i follows the use", def_bb
->index
);
822 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name
))
824 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
828 /* Make sure the use is in an appropriate list by checking the previous
829 element to make sure it's the same. */
830 if (use_p
->prev
== NULL
)
832 error ("no immediate_use list");
838 if (use_p
->prev
->use
== NULL
)
839 listvar
= use_p
->prev
->loc
.ssa_name
;
841 listvar
= USE_FROM_PTR (use_p
->prev
);
842 if (listvar
!= ssa_name
)
844 error ("wrong immediate use list");
851 fprintf (stderr
, "for SSA_NAME: ");
852 print_generic_expr (stderr
, ssa_name
, TDF_VOPS
);
853 fprintf (stderr
, " in statement:\n");
854 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
861 /* Return true if any of the arguments for PHI node PHI at block BB is
864 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
865 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
866 it means that the block in that array slot contains the
867 definition of SSA_NAME. */
870 verify_phi_args (gimple phi
, basic_block bb
, basic_block
*definition_block
)
874 size_t i
, phi_num_args
= gimple_phi_num_args (phi
);
876 if (EDGE_COUNT (bb
->preds
) != phi_num_args
)
878 error ("incoming edge count does not match number of PHI arguments");
883 for (i
= 0; i
< phi_num_args
; i
++)
885 use_operand_p op_p
= gimple_phi_arg_imm_use_ptr (phi
, i
);
886 tree op
= USE_FROM_PTR (op_p
);
888 e
= EDGE_PRED (bb
, i
);
892 error ("PHI argument is missing for edge %d->%d",
899 if (TREE_CODE (op
) != SSA_NAME
&& !is_gimple_min_invariant (op
))
901 error ("PHI argument is not SSA_NAME, or invariant");
905 if (TREE_CODE (op
) == SSA_NAME
)
907 err
= verify_ssa_name (op
, virtual_operand_p (gimple_phi_result (phi
)));
908 err
|= verify_use (e
->src
, definition_block
[SSA_NAME_VERSION (op
)],
909 op_p
, phi
, e
->flags
& EDGE_ABNORMAL
, NULL
);
912 if (TREE_CODE (op
) == ADDR_EXPR
)
914 tree base
= TREE_OPERAND (op
, 0);
915 while (handled_component_p (base
))
916 base
= TREE_OPERAND (base
, 0);
917 if ((TREE_CODE (base
) == VAR_DECL
918 || TREE_CODE (base
) == PARM_DECL
919 || TREE_CODE (base
) == RESULT_DECL
)
920 && !TREE_ADDRESSABLE (base
))
922 error ("address taken, but ADDRESSABLE bit not set");
929 error ("wrong edge %d->%d for PHI argument",
930 e
->src
->index
, e
->dest
->index
);
936 fprintf (stderr
, "PHI argument\n");
937 print_generic_stmt (stderr
, op
, TDF_VOPS
);
945 fprintf (stderr
, "for PHI node\n");
946 print_gimple_stmt (stderr
, phi
, 0, TDF_VOPS
|TDF_MEMSYMS
);
954 /* Verify common invariants in the SSA web.
955 TODO: verify the variable annotations. */
958 verify_ssa (bool check_modified_stmt
)
962 basic_block
*definition_block
= XCNEWVEC (basic_block
, num_ssa_names
);
965 enum dom_state orig_dom_state
= dom_info_state (CDI_DOMINATORS
);
966 bitmap names_defined_in_bb
= BITMAP_ALLOC (NULL
);
968 gcc_assert (!need_ssa_update_p (cfun
));
970 timevar_push (TV_TREE_SSA_VERIFY
);
972 /* Keep track of SSA names present in the IL. */
973 for (i
= 1; i
< num_ssa_names
; i
++)
975 tree name
= ssa_name (i
);
979 TREE_VISITED (name
) = 0;
981 verify_ssa_name (name
, virtual_operand_p (name
));
983 stmt
= SSA_NAME_DEF_STMT (name
);
984 if (!gimple_nop_p (stmt
))
986 basic_block bb
= gimple_bb (stmt
);
987 verify_def (bb
, definition_block
,
988 name
, stmt
, virtual_operand_p (name
));
994 calculate_dominance_info (CDI_DOMINATORS
);
996 /* Now verify all the uses and make sure they agree with the definitions
997 found in the previous pass. */
1003 gimple_stmt_iterator gsi
;
1005 /* Make sure that all edges have a clear 'aux' field. */
1006 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1010 error ("AUX pointer initialized for edge %d->%d", e
->src
->index
,
1016 /* Verify the arguments for every PHI node in the block. */
1017 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1019 phi
= gsi_stmt (gsi
);
1020 if (verify_phi_args (phi
, bb
, definition_block
))
1023 bitmap_set_bit (names_defined_in_bb
,
1024 SSA_NAME_VERSION (gimple_phi_result (phi
)));
1027 /* Now verify all the uses and vuses in every statement of the block. */
1028 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1030 gimple stmt
= gsi_stmt (gsi
);
1031 use_operand_p use_p
;
1033 if (check_modified_stmt
&& gimple_modified_p (stmt
))
1035 error ("stmt (%p) marked modified after optimization pass: ",
1037 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
1041 if (verify_ssa_operands (stmt
))
1043 print_gimple_stmt (stderr
, stmt
, 0, TDF_VOPS
);
1047 if (gimple_debug_bind_p (stmt
)
1048 && !gimple_debug_bind_has_value_p (stmt
))
1051 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
|SSA_OP_VUSE
)
1053 op
= USE_FROM_PTR (use_p
);
1054 if (verify_use (bb
, definition_block
[SSA_NAME_VERSION (op
)],
1055 use_p
, stmt
, false, names_defined_in_bb
))
1059 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_ALL_DEFS
)
1061 if (SSA_NAME_DEF_STMT (op
) != stmt
)
1063 error ("SSA_NAME_DEF_STMT is wrong");
1064 fprintf (stderr
, "Expected definition statement:\n");
1065 print_gimple_stmt (stderr
, stmt
, 4, TDF_VOPS
);
1066 fprintf (stderr
, "\nActual definition statement:\n");
1067 print_gimple_stmt (stderr
, SSA_NAME_DEF_STMT (op
),
1071 bitmap_set_bit (names_defined_in_bb
, SSA_NAME_VERSION (op
));
1075 bitmap_clear (names_defined_in_bb
);
1078 free (definition_block
);
1080 /* Restore the dominance information to its prior known state, so
1081 that we do not perturb the compiler's subsequent behavior. */
1082 if (orig_dom_state
== DOM_NONE
)
1083 free_dominance_info (CDI_DOMINATORS
);
1085 set_dom_info_availability (CDI_DOMINATORS
, orig_dom_state
);
1087 BITMAP_FREE (names_defined_in_bb
);
1088 timevar_pop (TV_TREE_SSA_VERIFY
);
1092 internal_error ("verify_ssa failed");
1095 /* Return true if the DECL_UID in both trees are equal. */
1098 uid_ssaname_map_eq (const void *va
, const void *vb
)
1100 const_tree a
= (const_tree
) va
;
1101 const_tree b
= (const_tree
) vb
;
1102 return (a
->ssa_name
.var
->decl_minimal
.uid
== b
->ssa_name
.var
->decl_minimal
.uid
);
1105 /* Hash a tree in a uid_decl_map. */
1108 uid_ssaname_map_hash (const void *item
)
1110 return ((const_tree
)item
)->ssa_name
.var
->decl_minimal
.uid
;
1114 /* Initialize global DFA and SSA structures. */
1117 init_tree_ssa (struct function
*fn
)
1119 fn
->gimple_df
= ggc_alloc_cleared_gimple_df ();
1120 fn
->gimple_df
->default_defs
= htab_create_ggc (20, uid_ssaname_map_hash
,
1121 uid_ssaname_map_eq
, NULL
);
1122 pt_solution_reset (&fn
->gimple_df
->escaped
);
1123 init_ssanames (fn
, 0);
1126 /* Do the actions required to initialize internal data structures used
1127 in tree-ssa optimization passes. */
1130 execute_init_datastructures (void)
1132 /* Allocate hash tables, arrays and other structures. */
1133 gcc_assert (!cfun
->gimple_df
);
1134 init_tree_ssa (cfun
);
1138 /* Gate for IPCP optimization. */
1141 gate_init_datastructures (void)
1143 /* Do nothing for funcions that was produced already in SSA form. */
1144 return !(cfun
->curr_properties
& PROP_ssa
);
1149 const pass_data pass_data_init_datastructures
=
1151 GIMPLE_PASS
, /* type */
1152 "*init_datastructures", /* name */
1153 OPTGROUP_NONE
, /* optinfo_flags */
1154 true, /* has_gate */
1155 true, /* has_execute */
1156 TV_NONE
, /* tv_id */
1157 PROP_cfg
, /* properties_required */
1158 0, /* properties_provided */
1159 0, /* properties_destroyed */
1160 0, /* todo_flags_start */
1161 0, /* todo_flags_finish */
1164 class pass_init_datastructures
: public gimple_opt_pass
1167 pass_init_datastructures (gcc::context
*ctxt
)
1168 : gimple_opt_pass (pass_data_init_datastructures
, ctxt
)
1171 /* opt_pass methods: */
1172 bool gate () { return gate_init_datastructures (); }
1173 unsigned int execute () { return execute_init_datastructures (); }
1175 }; // class pass_init_datastructures
1180 make_pass_init_datastructures (gcc::context
*ctxt
)
1182 return new pass_init_datastructures (ctxt
);
1185 /* Deallocate memory associated with SSA data structures for FNDECL. */
1188 delete_tree_ssa (void)
1192 /* We no longer maintain the SSA operand cache at this point. */
1193 if (ssa_operands_active (cfun
))
1194 fini_ssa_operands ();
1196 htab_delete (cfun
->gimple_df
->default_defs
);
1197 cfun
->gimple_df
->default_defs
= NULL
;
1198 pt_solution_reset (&cfun
->gimple_df
->escaped
);
1199 if (cfun
->gimple_df
->decls_to_pointers
!= NULL
)
1200 pointer_map_destroy (cfun
->gimple_df
->decls_to_pointers
);
1201 cfun
->gimple_df
->decls_to_pointers
= NULL
;
1202 cfun
->gimple_df
->modified_noreturn_calls
= NULL
;
1203 cfun
->gimple_df
= NULL
;
1205 /* We no longer need the edge variable maps. */
1206 redirect_edge_var_map_destroy ();
1209 /* Return true if EXPR is a useless type conversion, otherwise return
1213 tree_ssa_useless_type_conversion (tree expr
)
1215 /* If we have an assignment that merely uses a NOP_EXPR to change
1216 the top of the RHS to the type of the LHS and the type conversion
1217 is "safe", then strip away the type conversion so that we can
1218 enter LHS = RHS into the const_and_copies table. */
1219 if (CONVERT_EXPR_P (expr
)
1220 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
1221 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
1222 return useless_type_conversion_p
1224 TREE_TYPE (TREE_OPERAND (expr
, 0)));
1229 /* Strip conversions from EXP according to
1230 tree_ssa_useless_type_conversion and return the resulting
1234 tree_ssa_strip_useless_type_conversions (tree exp
)
1236 while (tree_ssa_useless_type_conversion (exp
))
1237 exp
= TREE_OPERAND (exp
, 0);
1242 /* Return true if T, an SSA_NAME, has an undefined value. */
1245 ssa_undefined_value_p (tree t
)
1247 tree var
= SSA_NAME_VAR (t
);
1251 /* Parameters get their initial value from the function entry. */
1252 else if (TREE_CODE (var
) == PARM_DECL
)
1254 /* When returning by reference the return address is actually a hidden
1256 else if (TREE_CODE (var
) == RESULT_DECL
&& DECL_BY_REFERENCE (var
))
1258 /* Hard register variables get their initial value from the ether. */
1259 else if (TREE_CODE (var
) == VAR_DECL
&& DECL_HARD_REGISTER (var
))
1262 /* The value is undefined iff its definition statement is empty. */
1263 return gimple_nop_p (SSA_NAME_DEF_STMT (t
));
1267 /* If necessary, rewrite the base of the reference tree *TP from
1268 a MEM_REF to a plain or converted symbol. */
1271 maybe_rewrite_mem_ref_base (tree
*tp
, bitmap suitable_for_renaming
)
1275 while (handled_component_p (*tp
))
1276 tp
= &TREE_OPERAND (*tp
, 0);
1277 if (TREE_CODE (*tp
) == MEM_REF
1278 && TREE_CODE (TREE_OPERAND (*tp
, 0)) == ADDR_EXPR
1279 && (sym
= TREE_OPERAND (TREE_OPERAND (*tp
, 0), 0))
1281 && !TREE_ADDRESSABLE (sym
)
1282 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (sym
)))
1284 if (TREE_CODE (TREE_TYPE (sym
)) == VECTOR_TYPE
1285 && useless_type_conversion_p (TREE_TYPE (*tp
),
1286 TREE_TYPE (TREE_TYPE (sym
)))
1287 && multiple_of_p (sizetype
, TREE_OPERAND (*tp
, 1),
1288 TYPE_SIZE_UNIT (TREE_TYPE (*tp
))))
1290 *tp
= build3 (BIT_FIELD_REF
, TREE_TYPE (*tp
), sym
,
1291 TYPE_SIZE (TREE_TYPE (*tp
)),
1292 int_const_binop (MULT_EXPR
,
1293 bitsize_int (BITS_PER_UNIT
),
1294 TREE_OPERAND (*tp
, 1)));
1296 else if (TREE_CODE (TREE_TYPE (sym
)) == COMPLEX_TYPE
1297 && useless_type_conversion_p (TREE_TYPE (*tp
),
1298 TREE_TYPE (TREE_TYPE (sym
))))
1300 *tp
= build1 (integer_zerop (TREE_OPERAND (*tp
, 1))
1301 ? REALPART_EXPR
: IMAGPART_EXPR
,
1302 TREE_TYPE (*tp
), sym
);
1304 else if (integer_zerop (TREE_OPERAND (*tp
, 1)))
1306 if (!useless_type_conversion_p (TREE_TYPE (*tp
),
1308 *tp
= build1 (VIEW_CONVERT_EXPR
,
1309 TREE_TYPE (*tp
), sym
);
1316 /* For a tree REF return its base if it is the base of a MEM_REF
1317 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1320 non_rewritable_mem_ref_base (tree ref
)
1324 /* A plain decl does not need it set. */
1328 while (handled_component_p (base
))
1329 base
= TREE_OPERAND (base
, 0);
1331 /* But watch out for MEM_REFs we cannot lower to a
1332 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1333 if (TREE_CODE (base
) == MEM_REF
1334 && TREE_CODE (TREE_OPERAND (base
, 0)) == ADDR_EXPR
)
1336 tree decl
= TREE_OPERAND (TREE_OPERAND (base
, 0), 0);
1337 if ((TREE_CODE (TREE_TYPE (decl
)) == VECTOR_TYPE
1338 || TREE_CODE (TREE_TYPE (decl
)) == COMPLEX_TYPE
)
1339 && useless_type_conversion_p (TREE_TYPE (base
),
1340 TREE_TYPE (TREE_TYPE (decl
)))
1341 && wi::fits_uhwi_p (mem_ref_offset (base
))
1342 && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl
))),
1343 mem_ref_offset (base
))
1344 && multiple_of_p (sizetype
, TREE_OPERAND (base
, 1),
1345 TYPE_SIZE_UNIT (TREE_TYPE (base
))))
1348 && (!integer_zerop (TREE_OPERAND (base
, 1))
1349 || (DECL_SIZE (decl
)
1350 != TYPE_SIZE (TREE_TYPE (base
)))
1351 || TREE_THIS_VOLATILE (decl
) != TREE_THIS_VOLATILE (base
)))
1358 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1359 Otherwise return true. */
1362 non_rewritable_lvalue_p (tree lhs
)
1364 /* A plain decl is always rewritable. */
1368 /* A decl that is wrapped inside a MEM-REF that covers
1369 it full is also rewritable.
1370 ??? The following could be relaxed allowing component
1371 references that do not change the access size. */
1372 if (TREE_CODE (lhs
) == MEM_REF
1373 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == ADDR_EXPR
1374 && integer_zerop (TREE_OPERAND (lhs
, 1)))
1376 tree decl
= TREE_OPERAND (TREE_OPERAND (lhs
, 0), 0);
1378 && DECL_SIZE (decl
) == TYPE_SIZE (TREE_TYPE (lhs
))
1379 && (TREE_THIS_VOLATILE (decl
) == TREE_THIS_VOLATILE (lhs
)))
1386 /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
1387 mark the variable VAR for conversion into SSA. Return true when updating
1388 stmts is required. */
1391 maybe_optimize_var (tree var
, bitmap addresses_taken
, bitmap not_reg_needs
,
1392 bitmap suitable_for_renaming
)
1394 /* Global Variables, result decls cannot be changed. */
1395 if (is_global_var (var
)
1396 || TREE_CODE (var
) == RESULT_DECL
1397 || bitmap_bit_p (addresses_taken
, DECL_UID (var
)))
1400 if (TREE_ADDRESSABLE (var
)
1401 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1402 a non-register. Otherwise we are confused and forget to
1403 add virtual operands for it. */
1404 && (!is_gimple_reg_type (TREE_TYPE (var
))
1405 || TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
1406 || TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
1407 || !bitmap_bit_p (not_reg_needs
, DECL_UID (var
))))
1409 TREE_ADDRESSABLE (var
) = 0;
1410 if (is_gimple_reg (var
))
1411 bitmap_set_bit (suitable_for_renaming
, DECL_UID (var
));
1414 fprintf (dump_file
, "No longer having address taken: ");
1415 print_generic_expr (dump_file
, var
, 0);
1416 fprintf (dump_file
, "\n");
1420 if (!DECL_GIMPLE_REG_P (var
)
1421 && !bitmap_bit_p (not_reg_needs
, DECL_UID (var
))
1422 && (TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
1423 || TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
)
1424 && !TREE_THIS_VOLATILE (var
)
1425 && (TREE_CODE (var
) != VAR_DECL
|| !DECL_HARD_REGISTER (var
)))
1427 DECL_GIMPLE_REG_P (var
) = 1;
1428 bitmap_set_bit (suitable_for_renaming
, DECL_UID (var
));
1431 fprintf (dump_file
, "Now a gimple register: ");
1432 print_generic_expr (dump_file
, var
, 0);
1433 fprintf (dump_file
, "\n");
1438 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
1441 execute_update_addresses_taken (void)
1443 gimple_stmt_iterator gsi
;
1445 bitmap addresses_taken
= BITMAP_ALLOC (NULL
);
1446 bitmap not_reg_needs
= BITMAP_ALLOC (NULL
);
1447 bitmap suitable_for_renaming
= BITMAP_ALLOC (NULL
);
1451 timevar_push (TV_ADDRESS_TAKEN
);
1453 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1454 the function body. */
1457 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1459 gimple stmt
= gsi_stmt (gsi
);
1460 enum gimple_code code
= gimple_code (stmt
);
1463 /* Note all addresses taken by the stmt. */
1464 gimple_ior_addresses_taken (addresses_taken
, stmt
);
1466 /* If we have a call or an assignment, see if the lhs contains
1467 a local decl that requires not to be a gimple register. */
1468 if (code
== GIMPLE_ASSIGN
|| code
== GIMPLE_CALL
)
1470 tree lhs
= gimple_get_lhs (stmt
);
1472 && TREE_CODE (lhs
) != SSA_NAME
1473 && non_rewritable_lvalue_p (lhs
))
1475 decl
= get_base_address (lhs
);
1477 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1481 if (gimple_assign_single_p (stmt
))
1483 tree rhs
= gimple_assign_rhs1 (stmt
);
1484 if ((decl
= non_rewritable_mem_ref_base (rhs
)))
1485 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1488 else if (code
== GIMPLE_CALL
)
1490 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1492 tree arg
= gimple_call_arg (stmt
, i
);
1493 if ((decl
= non_rewritable_mem_ref_base (arg
)))
1494 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1498 else if (code
== GIMPLE_ASM
)
1500 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
1502 tree link
= gimple_asm_output_op (stmt
, i
);
1503 tree lhs
= TREE_VALUE (link
);
1504 if (TREE_CODE (lhs
) != SSA_NAME
)
1506 decl
= get_base_address (lhs
);
1508 && (non_rewritable_lvalue_p (lhs
)
1509 /* We cannot move required conversions from
1510 the lhs to the rhs in asm statements, so
1511 require we do not need any. */
1512 || !useless_type_conversion_p
1513 (TREE_TYPE (lhs
), TREE_TYPE (decl
))))
1514 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1517 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
1519 tree link
= gimple_asm_input_op (stmt
, i
);
1520 if ((decl
= non_rewritable_mem_ref_base (TREE_VALUE (link
))))
1521 bitmap_set_bit (not_reg_needs
, DECL_UID (decl
));
1526 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1529 gimple phi
= gsi_stmt (gsi
);
1531 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
1533 tree op
= PHI_ARG_DEF (phi
, i
), var
;
1534 if (TREE_CODE (op
) == ADDR_EXPR
1535 && (var
= get_base_address (TREE_OPERAND (op
, 0))) != NULL
1537 bitmap_set_bit (addresses_taken
, DECL_UID (var
));
1542 /* We cannot iterate over all referenced vars because that can contain
1543 unused vars from BLOCK trees, which causes code generation differences
1545 for (var
= DECL_ARGUMENTS (cfun
->decl
); var
; var
= DECL_CHAIN (var
))
1546 maybe_optimize_var (var
, addresses_taken
, not_reg_needs
,
1547 suitable_for_renaming
);
1549 FOR_EACH_VEC_SAFE_ELT (cfun
->local_decls
, i
, var
)
1550 maybe_optimize_var (var
, addresses_taken
, not_reg_needs
,
1551 suitable_for_renaming
);
1553 /* Operand caches need to be recomputed for operands referencing the updated
1554 variables and operands need to be rewritten to expose bare symbols. */
1555 if (!bitmap_empty_p (suitable_for_renaming
))
1558 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
1560 gimple stmt
= gsi_stmt (gsi
);
1562 /* Re-write TARGET_MEM_REFs of symbols we want to
1563 rewrite into SSA form. */
1564 if (gimple_assign_single_p (stmt
))
1566 tree lhs
= gimple_assign_lhs (stmt
);
1567 tree rhs
, *rhsp
= gimple_assign_rhs1_ptr (stmt
);
1570 /* We shouldn't have any fancy wrapping of
1571 component-refs on the LHS, but look through
1572 VIEW_CONVERT_EXPRs as that is easy. */
1573 while (TREE_CODE (lhs
) == VIEW_CONVERT_EXPR
)
1574 lhs
= TREE_OPERAND (lhs
, 0);
1575 if (TREE_CODE (lhs
) == MEM_REF
1576 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == ADDR_EXPR
1577 && integer_zerop (TREE_OPERAND (lhs
, 1))
1578 && (sym
= TREE_OPERAND (TREE_OPERAND (lhs
, 0), 0))
1580 && !TREE_ADDRESSABLE (sym
)
1581 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (sym
)))
1584 lhs
= gimple_assign_lhs (stmt
);
1586 /* Rewrite the RHS and make sure the resulting assignment
1587 is validly typed. */
1588 maybe_rewrite_mem_ref_base (rhsp
, suitable_for_renaming
);
1589 rhs
= gimple_assign_rhs1 (stmt
);
1590 if (gimple_assign_lhs (stmt
) != lhs
1591 && !useless_type_conversion_p (TREE_TYPE (lhs
),
1593 rhs
= fold_build1 (VIEW_CONVERT_EXPR
,
1594 TREE_TYPE (lhs
), rhs
);
1596 if (gimple_assign_lhs (stmt
) != lhs
)
1597 gimple_assign_set_lhs (stmt
, lhs
);
1599 /* For var ={v} {CLOBBER}; where var lost
1600 TREE_ADDRESSABLE just remove the stmt. */
1602 && TREE_CLOBBER_P (rhs
)
1603 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (lhs
)))
1605 unlink_stmt_vdef (stmt
);
1606 gsi_remove (&gsi
, true);
1607 release_defs (stmt
);
1611 if (gimple_assign_rhs1 (stmt
) != rhs
)
1613 gimple_stmt_iterator gsi
= gsi_for_stmt (stmt
);
1614 gimple_assign_set_rhs_from_tree (&gsi
, rhs
);
1618 else if (gimple_code (stmt
) == GIMPLE_CALL
)
1621 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
1623 tree
*argp
= gimple_call_arg_ptr (stmt
, i
);
1624 maybe_rewrite_mem_ref_base (argp
, suitable_for_renaming
);
1628 else if (gimple_code (stmt
) == GIMPLE_ASM
)
1631 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
1633 tree link
= gimple_asm_output_op (stmt
, i
);
1634 maybe_rewrite_mem_ref_base (&TREE_VALUE (link
),
1635 suitable_for_renaming
);
1637 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
1639 tree link
= gimple_asm_input_op (stmt
, i
);
1640 maybe_rewrite_mem_ref_base (&TREE_VALUE (link
),
1641 suitable_for_renaming
);
1645 else if (gimple_debug_bind_p (stmt
)
1646 && gimple_debug_bind_has_value_p (stmt
))
1648 tree
*valuep
= gimple_debug_bind_get_value_ptr (stmt
);
1650 maybe_rewrite_mem_ref_base (valuep
, suitable_for_renaming
);
1651 decl
= non_rewritable_mem_ref_base (*valuep
);
1653 && bitmap_bit_p (suitable_for_renaming
, DECL_UID (decl
)))
1654 gimple_debug_bind_reset_value (stmt
);
1657 if (gimple_references_memory_p (stmt
)
1658 || is_gimple_debug (stmt
))
1664 /* Update SSA form here, we are called as non-pass as well. */
1665 if (number_of_loops (cfun
) > 1
1666 && loops_state_satisfies_p (LOOP_CLOSED_SSA
))
1667 rewrite_into_loop_closed_ssa (NULL
, TODO_update_ssa
);
1669 update_ssa (TODO_update_ssa
);
1672 BITMAP_FREE (not_reg_needs
);
1673 BITMAP_FREE (addresses_taken
);
1674 BITMAP_FREE (suitable_for_renaming
);
1675 timevar_pop (TV_ADDRESS_TAKEN
);
1680 const pass_data pass_data_update_address_taken
=
1682 GIMPLE_PASS
, /* type */
1683 "addressables", /* name */
1684 OPTGROUP_NONE
, /* optinfo_flags */
1685 false, /* has_gate */
1686 false, /* has_execute */
1687 TV_ADDRESS_TAKEN
, /* tv_id */
1688 PROP_ssa
, /* properties_required */
1689 0, /* properties_provided */
1690 0, /* properties_destroyed */
1691 0, /* todo_flags_start */
1692 TODO_update_address_taken
, /* todo_flags_finish */
1695 class pass_update_address_taken
: public gimple_opt_pass
1698 pass_update_address_taken (gcc::context
*ctxt
)
1699 : gimple_opt_pass (pass_data_update_address_taken
, ctxt
)
1702 /* opt_pass methods: */
1704 }; // class pass_update_address_taken
1709 make_pass_update_address_taken (gcc::context
*ctxt
)
1711 return new pass_update_address_taken (ctxt
);