1 /* Convert a program in SSA form into Normal form.
2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
3 Contributed by Andrew Macleod <amacleod@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
36 #include "stor-layout.h"
40 #include "gimple-iterator.h"
43 #include "tree-ssa-live.h"
44 #include "tree-ssa-ter.h"
45 #include "tree-ssa-coalesce.h"
46 #include "tree-outof-ssa.h"
49 /* FIXME: A lot of code here deals with expanding to RTL. All that code
50 should be in cfgexpand.c. */
54 /* Return TRUE if expression STMT is suitable for replacement. */
/* NOTE(review): the embedded upstream line numbers jump (54 -> 57 -> 63 ...),
   so this extraction has dropped lines (return type, braces, local
   declarations, and the `return` statements after each rejecting check);
   restore from upstream tree-outof-ssa.c before compiling.  Each surviving
   `if` below is a TER suitability check that rejects STMT. */
57 ssa_is_replaceable_p (gimple
*stmt
)
63 /* Only consider modify stmts. */
64 if (!is_gimple_assign (stmt
))
67 /* If the statement may throw an exception, it cannot be replaced. */
68 if (stmt_could_throw_p (cfun
, stmt
))
71 /* Punt if there is more than 1 def. */
72 def
= SINGLE_SSA_TREE_OPERAND (stmt
, SSA_OP_DEF
)
76 /* Only consider definitions which have a single use. */
77 if (!single_imm_use (def
, &use_p
, &use_stmt
))
80 /* Used in this block, but at the TOP of the block, not the end. */
81 if (gimple_code (use_stmt
) == GIMPLE_PHI
)
84 /* There must be no VDEFs. */
85 if (gimple_vdef (stmt
))
88 /* Float expressions must go through memory if float-store is on. */
90 && FLOAT_TYPE_P (gimple_expr_type (stmt
)))
93 /* An assignment with a register variable on the RHS is not
95 if (gimple_assign_rhs_code (stmt
) == VAR_DECL
96 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt
)))
99 /* No function calls can be replaced. */
100 if (is_gimple_call (stmt
))
103 /* Leave any stmt with volatile operands alone as well. */
104 if (gimple_has_volatile_ops (stmt
))
111 /* Used to hold all the components required to do SSA PHI elimination.
112 The node and pred/succ list is a simple linear list of nodes and
113 edges represented as pairs of nodes.
115 The predecessor and successor list: Nodes are entered in pairs, where
116 [0] ->PRED, [1]->SUCC. All the even indexes in the array represent
117 predecessors, all the odd elements are successors.
120 When implemented as bitmaps, very large programs SSA->Normal times were
121 being dominated by clearing the interference graph.
123 Typically this list of edges is extremely small since it only includes
124 PHI results and uses from a single edge which have not coalesced with
125 each other. This means that no virtual PHI nodes are included, and
126 empirical evidence suggests that the number of edges rarely exceed
127 3, and in a bootstrap of GCC, the maximum size encountered was 7.
128 This also limits the number of possible nodes that are involved to
129 rarely more than 6, and in the bootstrap of gcc, the maximum number
130 of nodes encountered was 12. */
/* NOTE(review): the `class elim_graph` header line and several member
   declarations (the constructor lists nodes/stack/map below, which have no
   declarations here) were dropped by the extraction — upstream line numbers
   skip 131-133, 135, 137-141, 150-159; verify against upstream. */
134 elim_graph (var_map map
);
136 /* Size of the elimination vectors. */
139 /* List of nodes in the elimination graph. */
142 /* The predecessor and successor edge list. */
143 auto_vec
<int> edge_list
;
145 /* Source locus on each edge */
146 auto_vec
<location_t
> edge_locus
;
148 /* Visited vector. */
149 auto_sbitmap visited
;
151 /* Stack for visited nodes. */
154 /* The variable partition map. */
157 /* Edge being eliminated by this graph. */
160 /* List of constant copies to emit. These are pushed on in pairs. */
161 auto_vec
<int> const_dests
;
162 auto_vec
<tree
> const_copies
;
164 /* Source locations for any constant copies. */
165 auto_vec
<location_t
> copy_locus
;
169 /* For an edge E find out a good source location to associate with
170 instructions inserted on edge E. If E has an implicit goto set,
171 use its location. Otherwise search instructions in predecessors
172 of E for a location, and use that one. That makes sense because
173 we insert on edges for PHI nodes, and effects of PHIs happen on
174 the end of the predecessor conceptually. */
/* NOTE(review): upstream line numbers jump here (177 -> 181, 186 -> 190,
   204 -> 208), so the return type, braces, the goto_locus guard, the inner
   walk-backwards loop body and the outer do-while skeleton are partially
   missing; restore from upstream before use. */
177 set_location_for_edge (edge e
)
181 set_curr_insn_location (e
->goto_locus
);
185 basic_block bb
= e
->src
;
186 gimple_stmt_iterator gsi
;
/* Walk statements of BB in reverse, skipping debug stmts, and take the
   location of the last real statement that has one. */
190 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
); gsi_prev (&gsi
))
192 gimple
*stmt
= gsi_stmt (gsi
);
193 if (is_gimple_debug (stmt
))
195 if (gimple_has_location (stmt
) || gimple_block (stmt
))
197 set_curr_insn_location (gimple_location (stmt
));
201 /* Nothing found in this basic block. Make a half-assed attempt
202 to continue with another block. */
203 if (single_pred_p (bb
))
204 bb
= single_pred (bb
);
208 while (bb
!= e
->src
);
212 /* Emit insns to copy SRC into DEST converting SRC if necessary. As
213 SRC/DEST might be BLKmode memory locations SIZEEXP is a tree from
214 which we deduce the size to copy in that case. */
/* NOTE(review): upstream line numbers skip 218-220, 224, 227-228, 231,
   233-236 — the start_sequence/end_sequence bracketing, braces and the
   final `return seq;` were dropped by extraction; verify upstream. */
216 static inline rtx_insn
*
217 emit_partition_copy (rtx dest
, rtx src
, int unsignedsrcp
, tree sizeexp
)
/* Convert SRC to DEST's mode first when both are non-BLK and differ. */
221 if (GET_MODE (src
) != VOIDmode
&& GET_MODE (src
) != GET_MODE (dest
))
222 src
= convert_to_mode (GET_MODE (dest
), src
, unsignedsrcp
);
223 if (GET_MODE (src
) == BLKmode
)
225 gcc_assert (GET_MODE (dest
) == BLKmode
);
226 emit_block_move (dest
, src
, expr_size (sizeexp
), BLOCK_OP_NORMAL
);
229 emit_move_insn (dest
, src
);
230 do_pending_stack_adjust ();
232 rtx_insn
*seq
= get_insns ();
238 /* Insert a copy instruction from partition SRC to DEST onto edge E. */
/* NOTE(review): upstream line numbers skip (241 -> 244, 265 -> 268), so the
   return type, braces, the dump fprintf header, the locus guard condition and
   the sizeexp argument of the emit_partition_copy call were dropped by
   extraction; verify upstream. */
241 insert_partition_copy_on_edge (edge e
, int dest
, int src
, location_t locus
)
244 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
247 "Inserting a partition copy on edge BB%d->BB%d : "
250 e
->dest
->index
, dest
, src
);
251 fprintf (dump_file
, "\n");
254 gcc_assert (SA
.partition_to_pseudo
[dest
]);
255 gcc_assert (SA
.partition_to_pseudo
[src
]);
257 set_location_for_edge (e
);
258 /* If a locus is provided, override the default. */
260 set_curr_insn_location (locus
);
262 var
= partition_to_var (SA
.map
, src
);
263 rtx_insn
*seq
= emit_partition_copy (copy_rtx (SA
.partition_to_pseudo
[dest
]),
264 copy_rtx (SA
.partition_to_pseudo
[src
]),
265 TYPE_UNSIGNED (TREE_TYPE (var
)),
268 insert_insn_on_edge (seq
, e
);
271 /* Insert a copy instruction from expression SRC to partition DEST
/* NOTE(review): this extraction is missing lines (upstream numbers skip
   272-274, 279-280, 298-300, 307, 309, 312, 314-315, 317-318, 320-321,
   324-327): the tail of the header comment, return type, braces,
   `int unsignedp;`, the sequence bracketing and the BLKmode temporary
   setup are gone; restore from upstream before compiling. */
275 insert_value_copy_on_edge (edge e
, int dest
, tree src
, location_t locus
)
277 rtx dest_rtx
, seq
, x
;
278 machine_mode dest_mode
, src_mode
;
281 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
284 "Inserting a value copy on edge BB%d->BB%d : PART.%d = ",
286 e
->dest
->index
, dest
);
287 print_generic_expr (dump_file
, src
, TDF_SLIM
);
288 fprintf (dump_file
, "\n");
291 dest_rtx
= copy_rtx (SA
.partition_to_pseudo
[dest
]);
292 gcc_assert (dest_rtx
);
294 set_location_for_edge (e
);
295 /* If a locus is provided, override the default. */
297 set_curr_insn_location (locus
);
301 tree name
= partition_to_var (SA
.map
, dest
);
302 src_mode
= TYPE_MODE (TREE_TYPE (src
));
303 dest_mode
= GET_MODE (dest_rtx
);
304 gcc_assert (src_mode
== TYPE_MODE (TREE_TYPE (name
)));
305 gcc_assert (!REG_P (dest_rtx
)
306 || dest_mode
== promote_ssa_mode (name
, &unsignedp
));
/* Three expansion strategies: mode conversion, BLKmode store, or direct
   expansion into DEST_RTX. */
308 if (src_mode
!= dest_mode
)
310 x
= expand_expr (src
, NULL
, src_mode
, EXPAND_NORMAL
);
311 x
= convert_modes (dest_mode
, src_mode
, x
, unsignedp
);
313 else if (src_mode
== BLKmode
)
316 store_expr (src
, x
, 0, false, false);
319 x
= expand_expr (src
, dest_rtx
, dest_mode
, EXPAND_NORMAL
);
322 emit_move_insn (dest_rtx
, x
);
323 do_pending_stack_adjust ();
328 insert_insn_on_edge (seq
, e
);
331 /* Insert a copy instruction from RTL expression SRC to partition DEST
/* NOTE(review): two functions follow; both are missing lines per the
   upstream numbering gaps (332-334, 336-337, 339-340, 342, 346-347, 349,
   352, 354, 360, 362, 364-365, 367-369, ...): return types, braces, the
   `location_t locus` tail of the first signature, dump fprintf headers,
   the src/unsignedsrcp arguments of emit_partition_copy, and the sizeexp
   argument of the second call.  Restore from upstream. */
335 insert_rtx_to_part_on_edge (edge e
, int dest
, rtx src
, int unsignedsrcp
,
338 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
341 "Inserting a temp copy on edge BB%d->BB%d : PART.%d = ",
343 e
->dest
->index
, dest
);
344 print_simple_rtl (dump_file
, src
);
345 fprintf (dump_file
, "\n");
348 gcc_assert (SA
.partition_to_pseudo
[dest
]);
350 set_location_for_edge (e
);
351 /* If a locus is provided, override the default. */
353 set_curr_insn_location (locus
);
355 /* We give the destination as sizeexp in case src/dest are BLKmode
356 mems. Usually we give the source. As we result from SSA names
357 the left and right size should be the same (and no WITH_SIZE_EXPR
358 involved), so it doesn't matter. */
359 rtx_insn
*seq
= emit_partition_copy (copy_rtx (SA
.partition_to_pseudo
[dest
]),
361 partition_to_var (SA
.map
, dest
));
363 insert_insn_on_edge (seq
, e
);
366 /* Insert a copy instruction from partition SRC to RTL lvalue DEST
370 insert_part_to_rtx_on_edge (edge e
, rtx dest
, int src
, location_t locus
)
373 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
376 "Inserting a temp copy on edge BB%d->BB%d : ",
379 print_simple_rtl (dump_file
, dest
);
380 fprintf (dump_file
, "= PART.%d\n", src
);
383 gcc_assert (SA
.partition_to_pseudo
[src
]);
385 set_location_for_edge (e
);
386 /* If a locus is provided, override the default. */
388 set_curr_insn_location (locus
);
390 var
= partition_to_var (SA
.map
, src
);
391 rtx_insn
*seq
= emit_partition_copy (dest
,
392 copy_rtx (SA
.partition_to_pseudo
[src
]),
393 TYPE_UNSIGNED (TREE_TYPE (var
)),
396 insert_insn_on_edge (seq
, e
);
400 /* Create an elimination graph for map. */
/* NOTE(review): constructor body, `static inline` return-type lines, braces
   and function-local declarations are missing throughout this group of
   helpers (upstream numbers skip 405-408, 410-411, 413, 417-419, ...);
   restore from upstream before compiling. */
402 elim_graph::elim_graph (var_map map
) :
403 nodes (30), edge_list (20), edge_locus (10), visited (map
->num_partitions
),
404 stack (30), map (map
), const_dests (20), const_copies (20), copy_locus (10)
409 /* Empty elimination graph G. */
412 clear_elim_graph (elim_graph
*g
)
414 g
->nodes
.truncate (0);
415 g
->edge_list
.truncate (0);
416 g
->edge_locus
.truncate (0);
420 /* Return the number of nodes in graph G. */
423 elim_graph_size (elim_graph
*g
)
425 return g
->nodes
.length ();
429 /* Add NODE to graph G, if it doesn't exist already. */
432 elim_graph_add_node (elim_graph
*g
, int node
)
/* Linear scan for a duplicate before pushing — the node list is tiny
   (see the size comments on elim_graph above). */
437 FOR_EACH_VEC_ELT (g
->nodes
, x
, t
)
440 g
->nodes
.safe_push (node
);
444 /* Add the edge PRED->SUCC to graph G. */
447 elim_graph_add_edge (elim_graph
*g
, int pred
, int succ
, location_t locus
)
449 g
->edge_list
.safe_push (pred
);
450 g
->edge_list
.safe_push (succ
);
451 g
->edge_locus
.safe_push (locus
);
455 /* Remove an edge from graph G for which NODE is the predecessor, and
456 return the successor node. -1 is returned if there is no such edge. */
459 elim_graph_remove_succ_edge (elim_graph
*g
, int node
, location_t
*locus
)
/* Even indices are predecessors, odd ones successors (see the elim_graph
   comment); removal is done by overwriting both slots with -1. */
463 for (x
= 0; x
< g
->edge_list
.length (); x
+= 2)
464 if (g
->edge_list
[x
] == node
)
466 g
->edge_list
[x
] = -1;
467 y
= g
->edge_list
[x
+ 1];
468 g
->edge_list
[x
+ 1] = -1;
469 *locus
= g
->edge_locus
[x
/ 2];
470 g
->edge_locus
[x
/ 2] = UNKNOWN_LOCATION
;
473 *locus
= UNKNOWN_LOCATION
;
/* NOTE(review): both macro definitions below are missing continuation lines
   (the `do {` / `} while (0)` wrappers, local x_/y_ declarations, the NODE
   comparison and the CODE expansion — upstream numbers skip 483-485, 487,
   489-490, 493-497, 503-505, 507, 509-510, 513-517); do not add comments
   inside them, as every surviving line ends in a backslash continuation. */
478 /* Find all the nodes in GRAPH which are successors to NODE in the
479 edge list. VAR will hold the partition number found. CODE is the
480 code fragment executed for every node found. */
482 #define FOR_EACH_ELIM_GRAPH_SUCC(GRAPH, NODE, VAR, LOCUS, CODE) \
486 for (x_ = 0; x_ < (GRAPH)->edge_list.length (); x_ += 2) \
488 y_ = (GRAPH)->edge_list[x_]; \
491 (void) ((VAR) = (GRAPH)->edge_list[x_ + 1]); \
492 (void) ((LOCUS) = (GRAPH)->edge_locus[x_ / 2]); \
498 /* Find all the nodes which are predecessors of NODE in the edge list for
499 GRAPH. VAR will hold the partition number found. CODE is the
500 code fragment executed for every node found. */
502 #define FOR_EACH_ELIM_GRAPH_PRED(GRAPH, NODE, VAR, LOCUS, CODE) \
506 for (x_ = 0; x_ < (GRAPH)->edge_list.length (); x_ += 2) \
508 y_ = (GRAPH)->edge_list[x_ + 1]; \
511 (void) ((VAR) = (GRAPH)->edge_list[x_]); \
512 (void) ((LOCUS) = (GRAPH)->edge_locus[x_ / 2]); \
518 /* Add T to elimination graph G. */
/* NOTE(review): return-type lines, braces and the `return` statements of
   queue_phi_copy_p are missing (upstream numbers skip 519-520, 522,
   524-525, 530-531, 533, 535, 537-539, 541-542); verify upstream. */
521 eliminate_name (elim_graph
*g
, int T
)
523 elim_graph_add_node (g
, T
)
526 /* Return true if this phi argument T should have a copy queued when using
527 var_map MAP. PHI nodes should contain only ssa_names and invariants. A
528 test for ssa_name is definitely simpler, but don't let invalid contents
529 slip through in the meantime. */
532 queue_phi_copy_p (var_map map
, tree t
)
534 if (TREE_CODE (t
) == SSA_NAME
)
/* SSA_NAMEs with no partition stay in SSA form and therefore need a
   queued copy; anything non-SSA must be a minimal invariant. */
536 if (var_to_partition (map
, t
) == NO_PARTITION
)
540 gcc_checking_assert (is_gimple_min_invariant (t
));
544 /* Build elimination graph G for basic block BB on incoming PHI edge
/* NOTE(review): the tail of this header comment, the return type, braces,
   and the local declarations (p0, pi, Ti, locus, gsi) were dropped by the
   extraction (upstream numbers skip 545-547, 549-553, 555, 557, 559-560,
   564-565, 568, 571, 573, 575, 579-581, 583-584, 588-593); verify
   upstream before compiling. */
548 eliminate_build (elim_graph
*g
)
554 clear_elim_graph (g
);
/* Walk every PHI in the destination block of G->e. */
556 for (gsi
= gsi_start_phis (g
->e
->dest
); !gsi_end_p (gsi
); gsi_next (&gsi
))
558 gphi
*phi
= gsi
.phi ();
561 p0
= var_to_partition (g
->map
, gimple_phi_result (phi
));
562 /* Ignore results which are not in partitions. */
563 if (p0
== NO_PARTITION
)
566 Ti
= PHI_ARG_DEF (phi
, g
->e
->dest_idx
);
567 locus
= gimple_phi_arg_location_from_edge (phi
, g
->e
);
569 /* If this argument is a constant, or a SSA_NAME which is being
570 left in SSA form, just queue a copy to be emitted on this
572 if (queue_phi_copy_p (g
->map
, Ti
))
574 /* Save constant copies until all other copies have been emitted
576 g
->const_dests
.safe_push (p0
);
577 g
->const_copies
.safe_push (Ti
);
578 g
->copy_locus
.safe_push (locus
);
/* Otherwise both result and argument live in partitions: record the
   p0 <- pi copy as a graph edge for ordered emission. */
582 pi
= var_to_partition (g
->map
, Ti
);
585 eliminate_name (g
, p0
);
586 eliminate_name (g
, pi
);
587 elim_graph_add_edge (g
, p0
, pi
, locus
);
594 /* Push successors of T onto the elimination stack for G. */
/* NOTE(review): three traversal helpers follow; return types, braces, the
   CODE bodies of the FOR_EACH_ELIM_GRAPH_* invocations and the `return`
   statements are missing (upstream numbers skip 595-596, 598-601, 604,
   606-607, 609-611, 613-614, 616-619, 621, 623-627, 629-630, 632-635,
   638, 640, 643-646); verify upstream. */
597 elim_forward (elim_graph
*g
, int T
)
602 bitmap_set_bit (g
->visited
, T
);
603 FOR_EACH_ELIM_GRAPH_SUCC (g
, T
, S
, locus
,
605 if (!bitmap_bit_p (g
->visited
, S
))
608 g
->stack
.safe_push (T
);
612 /* Return 1 if there unvisited predecessors of T in graph G. */
615 elim_unvisited_predecessor (elim_graph
*g
, int T
)
620 FOR_EACH_ELIM_GRAPH_PRED (g
, T
, P
, locus
,
622 if (!bitmap_bit_p (g
->visited
, P
))
628 /* Process predecessors first, and insert a copy. */
631 elim_backward (elim_graph
*g
, int T
)
636 bitmap_set_bit (g
->visited
, T
);
/* Recurse into unvisited predecessors before emitting the copy so
   predecessors' values are saved first. */
637 FOR_EACH_ELIM_GRAPH_PRED (g
, T
, P
, locus
,
639 if (!bitmap_bit_p (g
->visited
, P
))
641 elim_backward (g
, P
);
642 insert_partition_copy_on_edge (g
->e
, P
, T
, locus
);
647 /* Allocate a new pseudo register usable for storing values sitting
648 in NAME (a decl or SSA name), i.e. with matching mode and attributes. */
/* NOTE(review): return types, braces, `int unsignedp;`, `return x;`, the
   else-branch skeleton and the `if (S != -1)` guard around the final copy
   are missing (upstream numbers skip 649-650, 652, 654, 659-661, 664-665,
   667-670, 672, 676, 679, 681, 684-688, 690-691, 694-698); verify
   upstream. */
651 get_temp_reg (tree name
)
653 tree type
= TREE_TYPE (name
);
655 machine_mode reg_mode
= promote_ssa_mode (name
, &unsignedp
);
656 rtx x
= gen_reg_rtx (reg_mode
);
657 if (POINTER_TYPE_P (type
))
658 mark_reg_pointer (x
, TYPE_ALIGN (TREE_TYPE (type
)));
662 /* Insert required copies for T in graph G. Check for a strongly connected
663 region, and create a temporary to break the cycle if one is found. */
666 elim_create (elim_graph
*g
, int T
)
671 if (elim_unvisited_predecessor (g
, T
))
/* Cycle: park T's current value in a fresh pseudo U, drain the
   predecessors, then reload them from U. */
673 tree var
= partition_to_var (g
->map
, T
);
674 rtx U
= get_temp_reg (var
);
675 int unsignedsrcp
= TYPE_UNSIGNED (TREE_TYPE (var
));
677 insert_part_to_rtx_on_edge (g
->e
, U
, T
, UNKNOWN_LOCATION
);
678 FOR_EACH_ELIM_GRAPH_PRED (g
, T
, P
, locus
,
680 if (!bitmap_bit_p (g
->visited
, P
))
682 elim_backward (g
, P
);
683 insert_rtx_to_part_on_edge (g
->e
, P
, U
, unsignedsrcp
, locus
);
689 S
= elim_graph_remove_succ_edge (g
, T
, &locus
);
692 bitmap_set_bit (g
->visited
, T
);
693 insert_partition_copy_on_edge (g
->e
, T
, S
, locus
);
699 /* Eliminate all the phi nodes on edge E in graph G. */
/* NOTE(review): the return type, braces, local declarations (x, part, S,
   src, dest, locus), the eliminate_build call, the elim_create call in the
   stack-draining loop, and the early `return` for abnormal edges are
   missing (upstream numbers skip 700-701, 703-705, 708, 711-716, 718-720,
   723, 725, 728-729, 732-733, 735-738, 741-745, 750-753); verify
   upstream. */
702 eliminate_phi (edge e
, elim_graph
*g
)
706 gcc_assert (g
->const_copies
.length () == 0);
707 gcc_assert (g
->copy_locus
.length () == 0);
709 /* Abnormal edges already have everything coalesced. */
710 if (e
->flags
& EDGE_ABNORMAL
)
717 if (elim_graph_size (g
) != 0)
/* Two passes over the graph: forward DFS to order nodes on the stack,
   then pop and emit copies (elim_create) in that order. */
721 bitmap_clear (g
->visited
);
722 g
->stack
.truncate (0);
724 FOR_EACH_VEC_ELT (g
->nodes
, x
, part
)
726 if (!bitmap_bit_p (g
->visited
, part
))
727 elim_forward (g
, part
);
730 bitmap_clear (g
->visited
);
731 while (g
->stack
.length () > 0)
734 if (!bitmap_bit_p (g
->visited
, x
))
739 /* If there are any pending constant copies, issue them now. */
740 while (g
->const_copies
.length () > 0)
746 src
= g
->const_copies
.pop ();
747 dest
= g
->const_dests
.pop ();
748 locus
= g
->copy_locus
.pop ();
749 insert_value_copy_on_edge (e
, dest
, src
, locus
);
754 /* Remove each argument from PHI. If an arg was the last use of an SSA_NAME,
755 check to see if this allows another PHI node to be removed. */
/* NOTE(review): return types, braces, local declarations (arg_p, iter,
   stmt, bb, gsi, result) and several statements (including the
   gsi_next (&gsi) in the else path of eliminate_useless_phis) are missing
   (upstream numbers skip 756-757, 759-762, 764, 767-768, 770, 773, 777-778,
   780, 782, 785, 789-795, 797-798, 800-804, 806, 808, 813-814, 817,
   820-828); verify upstream. */
758 remove_gimple_phi_args (gphi
*phi
)
763 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
765 fprintf (dump_file
, "Removing Dead PHI definition: ");
766 print_gimple_stmt (dump_file
, phi
, 0, TDF_SLIM
);
769 FOR_EACH_PHI_ARG (arg_p
, phi
, iter
, SSA_OP_USE
)
771 tree arg
= USE_FROM_PTR (arg_p
);
772 if (TREE_CODE (arg
) == SSA_NAME
)
774 /* Remove the reference to the existing argument. */
775 SET_USE (arg_p
, NULL_TREE
);
776 if (has_zero_uses (arg
))
779 gimple_stmt_iterator gsi
;
781 stmt
= SSA_NAME_DEF_STMT (arg
);
783 /* Also remove the def if it is a PHI node. */
/* Note this recursion: dropping the last use of ARG may cascade into
   removing ARG's defining PHI as well. */
784 if (gimple_code (stmt
) == GIMPLE_PHI
)
786 remove_gimple_phi_args (as_a
<gphi
*> (stmt
));
787 gsi
= gsi_for_stmt (stmt
);
788 remove_phi_node (&gsi
, true);
796 /* Remove any PHI node which is a virtual PHI, or a PHI with no uses. */
799 eliminate_useless_phis (void)
805 FOR_EACH_BB_FN (bb
, cfun
)
807 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); )
809 gphi
*phi
= gsi
.phi ();
810 result
= gimple_phi_result (phi
);
811 if (virtual_operand_p (result
))
812 remove_phi_node (&gsi
, true);
815 /* Also remove real PHIs with no uses. */
816 if (has_zero_uses (result
))
818 remove_gimple_phi_args (phi
);
819 remove_phi_node (&gsi
, true);
829 /* This function will rewrite the current program using the variable mapping
830 found in MAP. If the replacement vector VALUES is provided, any
831 occurrences of partitions with non-null entries in the vector will be
832 replaced with the expression in the vector instead of its mapped
/* NOTE(review): this block only survives as the flag_checking consistency
   scan; the tail of the header comment, the `if (!flag_checking) return;`
   guard, return type, braces, local declarations (bb, gsi, i) and the
   `if (T0 == NULL_TREE)` condition guarding the inner loop are missing
   (upstream numbers skip 833-835, 837-841, 846-847, 849, 852-854, 856,
   858, 861, 867-873); verify upstream. */
836 rewrite_trees (var_map map
)
842 /* Search for PHIs where the destination has no partition, but one
843 or more arguments has a partition. This should not happen and can
844 create incorrect code. */
845 FOR_EACH_BB_FN (bb
, cfun
)
848 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
850 gphi
*phi
= gsi
.phi ();
851 tree T0
= var_to_partition_to_var (map
, gimple_phi_result (phi
));
855 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
857 tree arg
= PHI_ARG_DEF (phi
, i
);
859 if (TREE_CODE (arg
) == SSA_NAME
860 && var_to_partition (map
, arg
) != NO_PARTITION
)
862 fprintf (stderr
, "Argument of PHI is in a partition :(");
863 print_generic_expr (stderr
, arg
, TDF_SLIM
);
864 fprintf (stderr
, "), but the result is not :");
865 print_gimple_stmt (stderr
, phi
, 0, TDF_SLIM
);
866 internal_error ("SSA corruption");
874 /* Create a default def for VAR. */
/* NOTE(review): return types, braces, early `return`s and the loop over
   DECL_ARGUMENTS' body (the per-parameter callback invocation) are missing
   (upstream numbers skip 875-876, 878, 880-881, 883-885, 888-889, 891,
   894, 899-900, 903, 905); verify upstream. */
877 create_default_def (tree var
, void *arg ATTRIBUTE_UNUSED
)
879 if (!is_gimple_reg (var
))
882 tree ssa
= get_or_create_ssa_default_def (cfun
, var
);
886 /* Call CALLBACK for all PARM_DECLs and RESULT_DECLs for which
887 assign_parms may ask for a default partition. */
890 for_all_parms (void (*callback
)(tree var
, void *arg
), void *arg
)
892 for (tree var
= DECL_ARGUMENTS (current_function_decl
); var
;
893 var
= DECL_CHAIN (var
))
/* Besides parameters: the function's result decl (unless void) and the
   static chain decl each also get the callback. */
895 if (!VOID_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl
))))
896 callback (DECL_RESULT (current_function_decl
), arg
);
897 if (cfun
->static_chain_decl
)
898 callback (cfun
->static_chain_decl
, arg
);
901 /* We need to pass two arguments to set_parm_default_def_partition,
902 but for_all_parms only supports one. Use a pair. */
904 typedef std::pair
<var_map
, bitmap
> parm_default_def_partition_arg
;
906 /* Set in ARG's PARTS bitmap the bit corresponding to the partition in
907 ARG's MAP containing VAR's default def. */
/* NOTE(review): return types, braces, the `if (!ssa) return;` style guards
   and the ssa_name validity test opening the loop in
   get_undefined_value_partitions are missing (upstream numbers skip
   908-909, 911, 915, 917-918, 920-921, 924, 927-928, 931-932, 934, 936,
   939, 941, 943-944, 947-948, 950, 952, 954, 956, 960, 964-966);
   verify upstream. */
910 set_parm_default_def_partition (tree var
, void *arg_
)
912 parm_default_def_partition_arg
*arg
= (parm_default_def_partition_arg
*)arg_
;
913 var_map map
= arg
->first
;
914 bitmap parts
= arg
->second
;
916 if (!is_gimple_reg (var
))
919 tree ssa
= ssa_default_def (cfun
, var
);
922 int version
= var_to_partition (map
, ssa
);
923 gcc_assert (version
!= NO_PARTITION
);
925 bool changed
= bitmap_set_bit (parts
, version
);
926 gcc_assert (changed
);
929 /* Allocate and return a bitmap that has a bit set for each partition
930 that contains a default def for a parameter. */
933 get_parm_default_def_partitions (var_map map
)
935 bitmap parm_default_def_parts
= BITMAP_ALLOC (NULL
);
937 parm_default_def_partition_arg
938 arg
= std::make_pair (map
, parm_default_def_parts
);
940 for_all_parms (set_parm_default_def_partition
, &arg
);
942 return parm_default_def_parts
;
945 /* Allocate and return a bitmap that has a bit set for each partition
946 that contains an undefined value. */
949 get_undefined_value_partitions (var_map map
)
951 bitmap undefined_value_parts
= BITMAP_ALLOC (NULL
);
/* Scan all SSA names; record the partition of each real, used,
   undefined value. */
953 for (unsigned int i
= 1; i
< num_ssa_names
; i
++)
955 tree var
= ssa_name (i
);
957 && !virtual_operand_p (var
)
958 && !has_zero_uses (var
)
959 && ssa_undefined_value_p (var
))
961 const int p
= var_to_partition (map
, var
);
962 if (p
!= NO_PARTITION
)
963 bitmap_set_bit (undefined_value_parts
, p
);
967 return undefined_value_parts
;
970 /* Given the out-of-ssa info object SA (with prepared partitions)
971 eliminate all phi nodes in all basic blocks. Afterwards no
972 basic block will have phi nodes anymore and there are possibly
973 some RTL instructions inserted on edges. */
/* NOTE(review): the return type, braces, local declarations (bb, e, ei),
   the edge-iterator advance in the EH loop, and the final
   `commit_edge_insertions`-era cleanup lines are missing (upstream numbers
   skip 974-975, 977-978, 980, 984-986, 1000, 1003, 1005-1006, 1009-1016);
   verify upstream. */
976 expand_phi_nodes (struct ssaexpand
*sa
)
979 elim_graph
g (sa
->map
);
981 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
,
982 EXIT_BLOCK_PTR_FOR_FN (cfun
), next_bb
)
983 if (!gimple_seq_empty_p (phi_nodes (bb
)))
987 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
988 eliminate_phi (e
, &g
);
989 set_phi_nodes (bb
, NULL
);
990 /* We can't redirect EH edges in RTL land, so we need to do this
991 here. Redirection happens only when splitting is necessary,
992 which it is only for critical edges, normally. For EH edges
993 it might also be necessary when the successor has more than
994 one predecessor. In that case the edge is either required to
995 be fallthru (which EH edges aren't), or the predecessor needs
996 to end with a jump (which again, isn't the case with EH edges).
997 Hence, split all EH edges on which we inserted instructions
998 and whose successor has multiple predecessors. */
999 for (ei
= ei_start (bb
->preds
); (e
= ei_safe_edge (ei
)); )
1001 if (e
->insns
.r
&& (e
->flags
& EDGE_EH
)
1002 && !single_pred_p (e
->dest
))
/* Move the pending insns onto the single new edge created by
   splitting E. */
1004 rtx_insn
*insns
= e
->insns
.r
;
1007 bb
= split_edge (e
);
1008 single_pred_edge (bb
)->insns
.r
= insns
;
1017 /* Remove the ssa-names in the current function and translate them into normal
1018 compiler variables. PERFORM_TER is true if Temporary Expression Replacement
1019 should also be used. */
/* NOTE(review): the return type, braces, the `var_map map;` declaration,
   the `if (perform_ter)` guard around find_replaceable_exprs and the final
   `sa->map = map;` style assignments are missing (upstream numbers skip
   1020-1021, 1023, 1025-1026, 1030, 1034, 1036, 1039-1042, 1046-1047,
   1049-1050, 1054-1056); verify upstream. */
1022 remove_ssa_form (bool perform_ter
, struct ssaexpand
*sa
)
1024 bitmap values
= NULL
;
/* Parameters/result need default defs before partitioning so assign_parms
   can find them (see for_all_parms). */
1027 for_all_parms (create_default_def
, NULL
);
1028 map
= init_var_map (num_ssa_names
);
1029 coalesce_ssa_name (map
);
1031 /* Return to viewing the variable list as just all reference variables after
1032 coalescing has been performed. */
1033 partition_view_normal (map
);
1035 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1037 fprintf (dump_file
, "After Coalescing:\n");
1038 dump_var_map (dump_file
, map
);
1043 values
= find_replaceable_exprs (map
);
1044 if (values
&& dump_file
&& (dump_flags
& TDF_DETAILS
))
1045 dump_replaceable_exprs (dump_file
, values
);
1048 rewrite_trees (map
);
1051 sa
->values
= values
;
1052 sa
->partitions_for_parm_default_defs
= get_parm_default_def_partitions (map
);
1053 sa
->partitions_for_undefined_values
= get_undefined_value_partitions (map
);
1057 /* If not already done so for basic block BB, assign increasing uids
1058 to each of its instructions. */
/* NOTE(review): return types, braces, the already-renumbered early-exit
   test, the uid counter initialization/increment, debug-stmt `continue`,
   the `return true` / `return false` statements and the use_operand_p
   declaration are missing (upstream numbers skip 1059-1060, 1062-1063,
   1065-1068, 1070, 1073-1077, 1081-1082, 1084-1085, 1088, 1090, 1092-1093,
   1095, 1098, 1102, 1104, 1108, 1113-1119); verify upstream. */
1061 maybe_renumber_stmts_bb (basic_block bb
)
1064 gimple_stmt_iterator gsi
;
1069 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1071 gimple
*stmt
= gsi_stmt (gsi
);
1072 gimple_set_uid (stmt
, i
);
1078 /* Return true if we can determine that the SSA_NAMEs RESULT (a result
1079 of a PHI node) and ARG (one of its arguments) conflict. Return false
1080 otherwise, also when we simply aren't sure. */
1083 trivially_conflicts_p (basic_block bb
, tree result
, tree arg
)
1086 imm_use_iterator imm_iter
;
1087 gimple
*defa
= SSA_NAME_DEF_STMT (arg
);
1089 /* If ARG isn't defined in the same block it's too complicated for
1091 if (gimple_bb (defa
) != bb
)
1094 FOR_EACH_IMM_USE_FAST (use
, imm_iter
, result
)
1096 gimple
*use_stmt
= USE_STMT (use
);
1097 if (is_gimple_debug (use_stmt
))
1099 /* Now, if there's a use of RESULT that lies outside this basic block,
1100 then there surely is a conflict with ARG. */
1101 if (gimple_bb (use_stmt
) != bb
)
1103 if (gimple_code (use_stmt
) == GIMPLE_PHI
)
1105 /* The use now is in a real stmt of BB, so if ARG was defined
1106 in a PHI node (like RESULT) both conflict. */
1107 if (gimple_code (defa
) == GIMPLE_PHI
)
/* UIDs give a cheap intra-block "comes after" ordering for the
   comparison below. */
1109 maybe_renumber_stmts_bb (bb
);
1110 /* If the use of RESULT occurs after the definition of ARG,
1111 the two conflict too. */
1112 if (gimple_uid (defa
) < gimple_uid (use_stmt
))
1120 /* Search every PHI node for arguments associated with backedges which
1121 we can trivially determine will need a copy (the argument is either
1122 not an SSA_NAME or the argument has a different underlying variable
1123 than the PHI result).
1125 Insert a copy from the PHI argument to a new destination at the
1126 end of the block with the backedge to the top of the loop. Update
1127 the PHI argument to reference this new destination. */
/* NOTE(review): the return type, braces, local declarations (bb, gsi, i,
   name, stmt, use_stmt, use), the GF_PLF_1 mark/unmark statements hinted at
   by the "Mark block" / "Unmark this block" comments, `continue`s, and
   various closing statements are missing throughout (upstream numbers skip
   e.g. 1131-1134, 1136, 1138, 1140-1141, 1143, 1146-1147, 1149-1150, 1152,
   1156, 1159-1160, 1168-1170, 1173, 1177, 1186, 1192-1194, 1196, 1200,
   1205, 1207, 1210, 1213, 1217, 1221, 1225, 1228, 1233-1234, 1237-1238,
   1241-1244, 1246-1249); verify upstream before compiling. */
1130 insert_backedge_copies (void)
1135 mark_dfs_back_edges ();
1137 FOR_EACH_BB_FN (bb
, cfun
)
1139 /* Mark block as possibly needing calculation of UIDs. */
1142 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1144 gphi
*phi
= gsi
.phi ();
1145 tree result
= gimple_phi_result (phi
);
1148 if (virtual_operand_p (result
))
1151 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
1153 tree arg
= gimple_phi_arg_def (phi
, i
);
1154 edge e
= gimple_phi_arg_edge (phi
, i
);
1155 /* We are only interested in copies emitted on critical
1157 if (!(e
->flags
& EDGE_DFS_BACK
)
1158 || !EDGE_CRITICAL_P (e
))
1161 /* If the argument is not an SSA_NAME, then we will need a
1162 constant initialization. If the argument is an SSA_NAME then
1163 a copy statement may be needed. First handle the case
1164 where we cannot insert before the argument definition. */
1165 if (TREE_CODE (arg
) != SSA_NAME
1166 || (gimple_code (SSA_NAME_DEF_STMT (arg
)) == GIMPLE_PHI
1167 && trivially_conflicts_p (bb
, result
, arg
)))
1171 gimple
*last
= NULL
;
1172 gimple_stmt_iterator gsi2
;
1174 gsi2
= gsi_last_bb (gimple_phi_arg_edge (phi
, i
)->src
);
1175 if (!gsi_end_p (gsi2
))
1176 last
= gsi_stmt (gsi2
);
1178 /* In theory the only way we ought to get back to the
1179 start of a loop should be with a COND_EXPR or GOTO_EXPR.
1180 However, better safe than sorry.
1181 If the block ends with a control statement or
1182 something that might throw, then we have to
1183 insert this assignment before the last
1184 statement. Else insert it after the last statement. */
1185 if (last
&& stmt_ends_bb_p (last
))
1187 /* If the last statement in the block is the definition
1188 site of the PHI argument, then we can't insert
1189 anything after it. */
1190 if (TREE_CODE (arg
) == SSA_NAME
1191 && SSA_NAME_DEF_STMT (arg
) == last
)
1195 /* Create a new instance of the underlying variable of the
1197 name
= copy_ssa_name (result
);
1198 stmt
= gimple_build_assign (name
,
1199 gimple_phi_arg_def (phi
, i
));
1201 /* copy location if present. */
1202 if (gimple_phi_arg_has_location (phi
, i
))
1203 gimple_set_location (stmt
,
1204 gimple_phi_arg_location (phi
, i
));
1206 /* Insert the new statement into the block and update
1208 if (last
&& stmt_ends_bb_p (last
))
1209 gsi_insert_before (&gsi2
, stmt
, GSI_NEW_STMT
);
1211 gsi_insert_after (&gsi2
, stmt
, GSI_NEW_STMT
);
1212 SET_PHI_ARG_DEF (phi
, i
, name
);
1214 /* Insert a copy before the definition of the backedge value
1215 and adjust all conflicting uses. */
1216 else if (trivially_conflicts_p (bb
, result
, arg
))
1218 gimple
*def
= SSA_NAME_DEF_STMT (arg
);
1219 if (gimple_nop_p (def
)
1220 || gimple_code (def
) == GIMPLE_PHI
)
1222 tree name
= copy_ssa_name (result
);
1223 gimple
*stmt
= gimple_build_assign (name
, result
);
1224 imm_use_iterator imm_iter
;
1226 /* The following matches trivially_conflicts_p. */
1227 FOR_EACH_IMM_USE_STMT (use_stmt
, imm_iter
, result
)
1229 if (gimple_bb (use_stmt
) != bb
1230 || (gimple_code (use_stmt
) != GIMPLE_PHI
1231 && (maybe_renumber_stmts_bb (bb
), true)
1232 && gimple_uid (use_stmt
) > gimple_uid (def
)))
1235 FOR_EACH_IMM_USE_ON_STMT (use
, imm_iter
)
1236 SET_USE (use
, name
);
1239 gimple_stmt_iterator gsi
= gsi_for_stmt (def
);
1240 gsi_insert_before (&gsi
, stmt
, GSI_SAME_STMT
);
1245 /* Unmark this block again. */
1250 /* Free all memory associated with going out of SSA form. SA is
1251 the outof-SSA info object. */
/* NOTE(review): return types, braces, the dump of the function before
   out-of-ssa, and surrounding statements are missing (upstream numbers skip
   1252-1253, 1255, 1257, 1263-1264, 1268-1269, 1271, 1275, 1279-1280,
   1283, 1286, 1288); verify upstream. */
1254 finish_out_of_ssa (struct ssaexpand
*sa
)
1256 free (sa
->partition_to_pseudo
);
1258 BITMAP_FREE (sa
->values
);
1259 delete_var_map (sa
->map
);
1260 BITMAP_FREE (sa
->partitions_for_parm_default_defs
);
1261 BITMAP_FREE (sa
->partitions_for_undefined_values
);
/* Zero SA so stale pointers in it cannot be reused after the frees
   above. */
1262 memset (sa
, 0, sizeof *sa
);
1265 /* Take the current function out of SSA form, translating PHIs as described in
1266 R. Morgan, ``Building an Optimizing Compiler'',
1267 Butterworth-Heinemann, Boston, MA, 1998. pp 176-186. */
1270 rewrite_out_of_ssa (struct ssaexpand
*sa
)
1272 /* If elimination of a PHI requires inserting a copy on a backedge,
1273 then we will have to split the backedge which has numerous
1274 undesirable performance effects.
1276 A significant number of such cases can be handled here by inserting
1277 copies into the loop itself. */
1278 insert_backedge_copies ();
1281 /* Eliminate PHIs which are of no use, such as virtual or dead phis. */
1282 eliminate_useless_phis ();
1284 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1285 gimple_dump_cfg (dump_file
, dump_flags
& ~TDF_DETAILS
);
1287 remove_ssa_form (flag_tree_ter
, sa
);
1289 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1290 gimple_dump_cfg (dump_file
, dump_flags
& ~TDF_DETAILS
);