1 /* Convert a program in SSA form into Normal form.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
3 Contributed by Andrew Macleod <amacleod@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "ggc.h"
27 #include "basic-block.h"
28 #include "diagnostic.h"
29 #include "bitmap.h"
30 #include "tree-flow.h"
31 #include "timevar.h"
32 #include "tree-dump.h"
33 #include "tree-ssa-live.h"
34 #include "tree-pass.h"
35 #include "toplev.h"
36
37
38 /* Used to hold all the components required to do SSA PHI elimination.
39 The node and pred/succ list is a simple linear list of nodes and
40 edges represented as pairs of nodes.
41
42 The predecessor and successor list: Nodes are entered in pairs, where
43 [0]->PRED, [1]->SUCC. All the even indexes in the array represent
44 predecessors, all the odd elements are successors.
45
46 Rationale:
47 When this was implemented with bitmaps, the SSA->Normal times for very
48 large programs were dominated by clearing the interference graph.
49
50 Typically this list of edges is extremely small since it only includes
51 PHI results and uses from a single edge which have not coalesced with
52 each other. This means that no virtual PHI nodes are included, and
53 empirical evidence suggests that the number of edges rarely exceeds
54 3, and in a bootstrap of GCC, the maximum size encountered was 7.
55 This also limits the number of possible nodes that are involved to
56 rarely more than 6, and in the bootstrap of gcc, the maximum number
57 of nodes encountered was 12. */
58
59 typedef struct _elim_graph {
60 /* Size of the elimination vectors. */
61 int size;
62
63 /* List of nodes in the elimination graph. */
64 VEC(tree,heap) *nodes;
65
66 /* The predecessor and successor edge list. */
67 VEC(int,heap) *edge_list;
68
69 /* Visited vector. */
70 sbitmap visited;
71
72 /* Stack for visited nodes. */
73 VEC(int,heap) *stack;
74
75 /* The variable partition map. */
76 var_map map;
77
78 /* Edge being eliminated by this graph. */
79 edge e;
80
81 /* List of constant copies to emit. These are pushed on in pairs. */
82 VEC(tree,heap) *const_copies;
83 } *elim_graph;
84
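/* Illustrative example (not part of the original sources): suppose a PHI
   edge requires the copies P3 = P7 and P7 = P3, a swap between partitions
   3 and 7.  eliminate_build records these as the edges 3->7 and 7->3, so
   edge_list holds { 3, 7, 7, 3 }: even indexes are predecessors (copy
   destinations), odd indexes the matching successors (copy sources).  Such
   a cycle is later broken by elim_create using a temporary.  */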
85
86 /* Create a temporary variable based on the type of variable T. Use T's name
87 as the prefix. */
88
89 static tree
90 create_temp (tree t)
91 {
92 tree tmp;
93 const char *name = NULL;
94 tree type;
95
96 if (TREE_CODE (t) == SSA_NAME)
97 t = SSA_NAME_VAR (t);
98
99 gcc_assert (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL);
100
101 type = TREE_TYPE (t);
102 tmp = DECL_NAME (t);
103 if (tmp)
104 name = IDENTIFIER_POINTER (tmp);
105
106 if (name == NULL)
107 name = "temp";
108 tmp = create_tmp_var (type, name);
109
110 if (DECL_DEBUG_EXPR_IS_FROM (t) && DECL_DEBUG_EXPR (t))
111 {
112 SET_DECL_DEBUG_EXPR (tmp, DECL_DEBUG_EXPR (t));
113 DECL_DEBUG_EXPR_IS_FROM (tmp) = 1;
114 }
115 else if (!DECL_IGNORED_P (t))
116 {
117 SET_DECL_DEBUG_EXPR (tmp, t);
118 DECL_DEBUG_EXPR_IS_FROM (tmp) = 1;
119 }
120 DECL_ARTIFICIAL (tmp) = DECL_ARTIFICIAL (t);
121 DECL_IGNORED_P (tmp) = DECL_IGNORED_P (t);
122 DECL_GIMPLE_REG_P (tmp) = DECL_GIMPLE_REG_P (t);
123 add_referenced_var (tmp);
124
125 /* add_referenced_var will create the annotation and set up some
126 of the flags in the annotation. However, some flags need to be
127 inherited from the original variable. */
128 set_symbol_mem_tag (tmp, symbol_mem_tag (t));
129 if (is_call_clobbered (t))
130 mark_call_clobbered (tmp, var_ann (t)->escape_mask);
131 if (bitmap_bit_p (gimple_call_used_vars (cfun), DECL_UID (t)))
132 bitmap_set_bit (gimple_call_used_vars (cfun), DECL_UID (tmp));
133
134 return tmp;
135 }
136
137
138 /* This helper function will insert a copy from a constant or variable SRC to
139 variable DEST on edge E. */
140
141 static void
142 insert_copy_on_edge (edge e, tree dest, tree src)
143 {
144 gimple copy;
145
146 copy = gimple_build_assign (dest, src);
147 set_is_used (dest);
148
149 if (TREE_CODE (src) == ADDR_EXPR)
150 src = TREE_OPERAND (src, 0);
151 if (TREE_CODE (src) == VAR_DECL || TREE_CODE (src) == PARM_DECL)
152 set_is_used (src);
153
154 if (dump_file && (dump_flags & TDF_DETAILS))
155 {
156 fprintf (dump_file,
157 "Inserting a copy on edge BB%d->BB%d :",
158 e->src->index,
159 e->dest->index);
160 print_gimple_stmt (dump_file, copy, 0, dump_flags);
161 fprintf (dump_file, "\n");
162 }
163
164 gsi_insert_on_edge (e, copy);
165 }
166
167
168 /* Create an elimination graph with SIZE nodes and associated data
169 structures. */
170
171 static elim_graph
172 new_elim_graph (int size)
173 {
174 elim_graph g = (elim_graph) xmalloc (sizeof (struct _elim_graph));
175
176 g->nodes = VEC_alloc (tree, heap, 30);
177 g->const_copies = VEC_alloc (tree, heap, 20);
178 g->edge_list = VEC_alloc (int, heap, 20);
179 g->stack = VEC_alloc (int, heap, 30);
180
181 g->visited = sbitmap_alloc (size);
182
183 return g;
184 }
185
186
187 /* Empty elimination graph G. */
188
189 static inline void
190 clear_elim_graph (elim_graph g)
191 {
192 VEC_truncate (tree, g->nodes, 0);
193 VEC_truncate (int, g->edge_list, 0);
194 }
195
196
197 /* Delete elimination graph G. */
198
199 static inline void
200 delete_elim_graph (elim_graph g)
201 {
202 sbitmap_free (g->visited);
203 VEC_free (int, heap, g->stack);
204 VEC_free (int, heap, g->edge_list);
205 VEC_free (tree, heap, g->const_copies);
206 VEC_free (tree, heap, g->nodes);
207 free (g);
208 }
209
210
211 /* Return the number of nodes in graph G. */
212
213 static inline int
214 elim_graph_size (elim_graph g)
215 {
216 return VEC_length (tree, g->nodes);
217 }
218
219
220 /* Add NODE to graph G, if it doesn't exist already. */
221
222 static inline void
223 elim_graph_add_node (elim_graph g, tree node)
224 {
225 int x;
226 tree t;
227
228 for (x = 0; VEC_iterate (tree, g->nodes, x, t); x++)
229 if (t == node)
230 return;
231 VEC_safe_push (tree, heap, g->nodes, node);
232 }
233
234
235 /* Add the edge PRED->SUCC to graph G. */
236
237 static inline void
238 elim_graph_add_edge (elim_graph g, int pred, int succ)
239 {
240 VEC_safe_push (int, heap, g->edge_list, pred);
241 VEC_safe_push (int, heap, g->edge_list, succ);
242 }
243
244
245 /* Remove an edge from graph G for which NODE is the predecessor, and
246 return the successor node. -1 is returned if there is no such edge. */
247
248 static inline int
249 elim_graph_remove_succ_edge (elim_graph g, int node)
250 {
251 int y;
252 unsigned x;
253 for (x = 0; x < VEC_length (int, g->edge_list); x += 2)
254 if (VEC_index (int, g->edge_list, x) == node)
255 {
256 VEC_replace (int, g->edge_list, x, -1);
257 y = VEC_index (int, g->edge_list, x + 1);
258 VEC_replace (int, g->edge_list, x + 1, -1);
259 return y;
260 }
261 return -1;
262 }
263
264
265 /* Find all the nodes in GRAPH which are successors to NODE in the
266 edge list. VAR will hold the partition number found. CODE is the
267 code fragment executed for every node found. */
268
269 #define FOR_EACH_ELIM_GRAPH_SUCC(GRAPH, NODE, VAR, CODE) \
270 do { \
271 unsigned x_; \
272 int y_; \
273 for (x_ = 0; x_ < VEC_length (int, (GRAPH)->edge_list); x_ += 2) \
274 { \
275 y_ = VEC_index (int, (GRAPH)->edge_list, x_); \
276 if (y_ != (NODE)) \
277 continue; \
278 (VAR) = VEC_index (int, (GRAPH)->edge_list, x_ + 1); \
279 CODE; \
280 } \
281 } while (0)
282
283
284 /* Find all the nodes which are predecessors of NODE in the edge list for
285 GRAPH. VAR will hold the partition number found. CODE is the
286 code fragment executed for every node found. */
287
288 #define FOR_EACH_ELIM_GRAPH_PRED(GRAPH, NODE, VAR, CODE) \
289 do { \
290 unsigned x_; \
291 int y_; \
292 for (x_ = 0; x_ < VEC_length (int, (GRAPH)->edge_list); x_ += 2) \
293 { \
294 y_ = VEC_index (int, (GRAPH)->edge_list, x_ + 1); \
295 if (y_ != (NODE)) \
296 continue; \
297 (VAR) = VEC_index (int, (GRAPH)->edge_list, x_); \
298 CODE; \
299 } \
300 } while (0)
301
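/* A minimal usage sketch (illustrative only; it mirrors elim_forward below):
   visit every not-yet-visited successor partition S of node T.

     FOR_EACH_ELIM_GRAPH_SUCC (g, T, S,
       {
         if (!TEST_BIT (g->visited, S))
           elim_forward (g, S);
       });
*/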
302
303 /* Add T to elimination graph G. */
304
305 static inline void
306 eliminate_name (elim_graph g, tree T)
307 {
308 elim_graph_add_node (g, T);
309 }
310
311
312 /* Build elimination graph G for basic block BB on incoming PHI edge
313 G->e. */
314
315 static void
316 eliminate_build (elim_graph g, basic_block B)
317 {
318 tree T0, Ti;
319 int p0, pi;
320 gimple_stmt_iterator gsi;
321
322 clear_elim_graph (g);
323
324 for (gsi = gsi_start_phis (B); !gsi_end_p (gsi); gsi_next (&gsi))
325 {
326 gimple phi = gsi_stmt (gsi);
327
328 T0 = var_to_partition_to_var (g->map, gimple_phi_result (phi));
329
330 /* Ignore results which are not in partitions. */
331 if (T0 == NULL_TREE)
332 continue;
333
334 Ti = PHI_ARG_DEF (phi, g->e->dest_idx);
335
336 /* If this argument is a constant, or an SSA_NAME which is being
337 left in SSA form, just queue a copy to be emitted on this
338 edge. */
339 if (!phi_ssa_name_p (Ti)
340 || (TREE_CODE (Ti) == SSA_NAME
341 && var_to_partition (g->map, Ti) == NO_PARTITION))
342 {
343 /* Save constant copies until all other copies have been emitted
344 on this edge. */
345 VEC_safe_push (tree, heap, g->const_copies, T0);
346 VEC_safe_push (tree, heap, g->const_copies, Ti);
347 }
348 else
349 {
350 Ti = var_to_partition_to_var (g->map, Ti);
351 if (T0 != Ti)
352 {
353 eliminate_name (g, T0);
354 eliminate_name (g, Ti);
355 p0 = var_to_partition (g->map, T0);
356 pi = var_to_partition (g->map, Ti);
357 elim_graph_add_edge (g, p0, pi);
358 }
359 }
360 }
361 }
362
363
364 /* Push the successors of T, and then T itself, onto the elimination stack of G. */
365
366 static void
367 elim_forward (elim_graph g, int T)
368 {
369 int S;
370 SET_BIT (g->visited, T);
371 FOR_EACH_ELIM_GRAPH_SUCC (g, T, S,
372 {
373 if (!TEST_BIT (g->visited, S))
374 elim_forward (g, S);
375 });
376 VEC_safe_push (int, heap, g->stack, T);
377 }
378
379
380 /* Return 1 if there are unvisited predecessors of T in graph G. */
381
382 static int
383 elim_unvisited_predecessor (elim_graph g, int T)
384 {
385 int P;
386 FOR_EACH_ELIM_GRAPH_PRED (g, T, P,
387 {
388 if (!TEST_BIT (g->visited, P))
389 return 1;
390 });
391 return 0;
392 }
393
394 /* Process predecessors first, and insert a copy. */
395
396 static void
397 elim_backward (elim_graph g, int T)
398 {
399 int P;
400 SET_BIT (g->visited, T);
401 FOR_EACH_ELIM_GRAPH_PRED (g, T, P,
402 {
403 if (!TEST_BIT (g->visited, P))
404 {
405 elim_backward (g, P);
406 insert_copy_on_edge (g->e,
407 partition_to_var (g->map, P),
408 partition_to_var (g->map, T));
409 }
410 });
411 }
412
413 /* Insert required copies for T in graph G. Check for a strongly connected
414 region, and create a temporary to break the cycle if one is found. */
415
416 static void
417 elim_create (elim_graph g, int T)
418 {
419 tree U;
420 int P, S;
421
422 if (elim_unvisited_predecessor (g, T))
423 {
424 U = create_temp (partition_to_var (g->map, T));
425 insert_copy_on_edge (g->e, U, partition_to_var (g->map, T));
426 FOR_EACH_ELIM_GRAPH_PRED (g, T, P,
427 {
428 if (!TEST_BIT (g->visited, P))
429 {
430 elim_backward (g, P);
431 insert_copy_on_edge (g->e, partition_to_var (g->map, P), U);
432 }
433 });
434 }
435 else
436 {
437 S = elim_graph_remove_succ_edge (g, T);
438 if (S != -1)
439 {
440 SET_BIT (g->visited, T);
441 insert_copy_on_edge (g->e,
442 partition_to_var (g->map, T),
443 partition_to_var (g->map, S));
444 }
445 }
446
447 }
448
449
450 /* Eliminate all the phi nodes on edge E in graph G. */
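/* A rough outline of the elimination below (after Morgan's algorithm, cited
   at the end of this file): eliminate_build collects the required copies as
   graph edges; elim_forward performs a depth-first walk that pushes nodes on
   G->stack so each node is popped before the partitions it reads from; and
   elim_create then emits the copies, introducing a temporary via create_temp
   whenever an unvisited predecessor reveals a cycle that must be broken.
   Constant arguments and uncoalesced SSA_NAMEs are queued on G->const_copies
   and emitted last.  */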
451
452 static void
453 eliminate_phi (edge e, elim_graph g)
454 {
455 int x;
456 basic_block B = e->dest;
457
458 gcc_assert (VEC_length (tree, g->const_copies) == 0);
459
460 /* Abnormal edges already have everything coalesced. */
461 if (e->flags & EDGE_ABNORMAL)
462 return;
463
464 g->e = e;
465
466 eliminate_build (g, B);
467
468 if (elim_graph_size (g) != 0)
469 {
470 tree var;
471
472 sbitmap_zero (g->visited);
473 VEC_truncate (int, g->stack, 0);
474
475 for (x = 0; VEC_iterate (tree, g->nodes, x, var); x++)
476 {
477 int p = var_to_partition (g->map, var);
478 if (!TEST_BIT (g->visited, p))
479 elim_forward (g, p);
480 }
481
482 sbitmap_zero (g->visited);
483 while (VEC_length (int, g->stack) > 0)
484 {
485 x = VEC_pop (int, g->stack);
486 if (!TEST_BIT (g->visited, x))
487 elim_create (g, x);
488 }
489 }
490
491 /* If there are any pending constant copies, issue them now. */
492 while (VEC_length (tree, g->const_copies) > 0)
493 {
494 tree src, dest;
495 src = VEC_pop (tree, g->const_copies);
496 dest = VEC_pop (tree, g->const_copies);
497 insert_copy_on_edge (e, dest, src);
498 }
499 }
500
501
502 /* Take the ssa-name var_map MAP, and assign real variables to each
503 partition. */
504
505 static void
506 assign_vars (var_map map)
507 {
508 int x, num;
509 tree var, root;
510 var_ann_t ann;
511
512 num = num_var_partitions (map);
513 for (x = 0; x < num; x++)
514 {
515 var = partition_to_var (map, x);
516 if (TREE_CODE (var) != SSA_NAME)
517 {
518 ann = var_ann (var);
519 /* It must already be coalesced. */
520 gcc_assert (ann->out_of_ssa_tag == 1);
521 if (dump_file && (dump_flags & TDF_DETAILS))
522 {
523 fprintf (dump_file, "partition %d already has variable ", x);
524 print_generic_expr (dump_file, var, TDF_SLIM);
525 fprintf (dump_file, " assigned to it.\n");
526 }
527 }
528 else
529 {
530 root = SSA_NAME_VAR (var);
531 ann = var_ann (root);
532 /* If ROOT is already associated with a partition, create a new temporary. */
533 if (ann->out_of_ssa_tag)
534 {
535 root = create_temp (root);
536 ann = var_ann (root);
537 }
538 /* ROOT has not been coalesced yet, so use it. */
539 if (dump_file && (dump_flags & TDF_DETAILS))
540 {
541 fprintf (dump_file, "Partition %d is assigned to var ", x);
542 print_generic_stmt (dump_file, root, TDF_SLIM);
543 }
544 change_partition_var (map, root, x);
545 }
546 }
547 }
548
549
550 /* Replace use operand P with whatever variable it has been rewritten to based
551 on the partitions in MAP. EXPR is an optional expression vector over SSA
552 versions which is used to replace P with an expression instead of a variable.
553 If the stmt is changed, return true. */
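/* For example (hypothetical names): if EXPR maps the version of a_2 to the
   statement "a_2 = b_1 + c_3", then a use of a_2 is replaced by the
   expression "b_1 + c_3" rather than by the variable assigned to a_2's
   partition.  */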
554
555 static inline bool
556 replace_use_variable (var_map map, use_operand_p p, gimple *expr)
557 {
558 tree new_var;
559 tree var = USE_FROM_PTR (p);
560
561 /* Check if we are replacing this variable with an expression. */
562 if (expr)
563 {
564 int version = SSA_NAME_VERSION (var);
565 if (expr[version])
566 {
567 SET_USE (p, gimple_assign_rhs_to_tree (expr[version]));
568 return true;
569 }
570 }
571
572 new_var = var_to_partition_to_var (map, var);
573 if (new_var)
574 {
575 SET_USE (p, new_var);
576 set_is_used (new_var);
577 return true;
578 }
579 return false;
580 }
581
582
583 /* Replace def operand DEF_P with whatever variable it has been rewritten to
584 based on the partitions in MAP. EXPR is an optional expression vector over
585 SSA versions which is used to replace DEF_P with an expression instead of a
586 variable. If the stmt is changed, return true. */
587
588 static inline bool
589 replace_def_variable (var_map map, def_operand_p def_p, tree *expr)
590 {
591 tree new_var;
592 tree var = DEF_FROM_PTR (def_p);
593
594 /* Do nothing if we are replacing this variable with an expression. */
595 if (expr && expr[SSA_NAME_VERSION (var)])
596 return true;
597
598 new_var = var_to_partition_to_var (map, var);
599 if (new_var)
600 {
601 SET_DEF (def_p, new_var);
602 set_is_used (new_var);
603 return true;
604 }
605 return false;
606 }
607
608
609 /* Remove any PHI node which is a virtual PHI. */
610
611 static void
612 eliminate_virtual_phis (void)
613 {
614 basic_block bb;
615 gimple_stmt_iterator gsi;
616
617 FOR_EACH_BB (bb)
618 {
619 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
620 {
621 gimple phi = gsi_stmt (gsi);
622 if (!is_gimple_reg (SSA_NAME_VAR (gimple_phi_result (phi))))
623 {
624 #ifdef ENABLE_CHECKING
625 size_t i;
626 /* There should be no arguments of this PHI which are in
627 the partition list, or we get incorrect results. */
628 for (i = 0; i < gimple_phi_num_args (phi); i++)
629 {
630 tree arg = PHI_ARG_DEF (phi, i);
631 if (TREE_CODE (arg) == SSA_NAME
632 && is_gimple_reg (SSA_NAME_VAR (arg)))
633 {
634 fprintf (stderr, "Argument of PHI is not virtual (");
635 print_generic_expr (stderr, arg, TDF_SLIM);
636 fprintf (stderr, "), but the result is :");
637 print_gimple_stmt (stderr, phi, 0, TDF_SLIM);
638 internal_error ("SSA corruption");
639 }
640 }
641 #endif
642 remove_phi_node (&gsi, true);
643 }
644 else
645 gsi_next (&gsi);
646 }
647 }
648 }
649
650
651 /* This function will rewrite the current program using the variable mapping
652 found in MAP. If the replacement vector VALUES is provided, any
653 occurrences of partitions with non-null entries in the vector will be
654 replaced with the expression in the vector instead of its mapped
655 variable. */
656
657 static void
658 rewrite_trees (var_map map, gimple *values)
659 {
660 elim_graph g;
661 basic_block bb;
662 gimple_stmt_iterator gsi;
663 edge e;
664 gimple_seq phi;
665 bool changed;
666
667 #ifdef ENABLE_CHECKING
668 /* Search for PHIs where the destination has no partition, but one
669 or more arguments have a partition. This should not happen and can
670 create incorrect code. */
671 FOR_EACH_BB (bb)
672 {
673 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
674 {
675 gimple phi = gsi_stmt (gsi);
676 tree T0 = var_to_partition_to_var (map, gimple_phi_result (phi));
677 if (T0 == NULL_TREE)
678 {
679 size_t i;
680 for (i = 0; i < gimple_phi_num_args (phi); i++)
681 {
682 tree arg = PHI_ARG_DEF (phi, i);
683
684 if (TREE_CODE (arg) == SSA_NAME
685 && var_to_partition (map, arg) != NO_PARTITION)
686 {
687 fprintf (stderr, "Argument of PHI is in a partition :(");
688 print_generic_expr (stderr, arg, TDF_SLIM);
689 fprintf (stderr, "), but the result is not :");
690 print_gimple_stmt (stderr, phi, 0, TDF_SLIM);
691 internal_error ("SSA corruption");
692 }
693 }
694 }
695 }
696 }
697 #endif
698
699 /* Replace PHI nodes with any required copies. */
700 g = new_elim_graph (map->num_partitions);
701 g->map = map;
702 FOR_EACH_BB (bb)
703 {
704 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
705 {
706 gimple stmt = gsi_stmt (gsi);
707 use_operand_p use_p, copy_use_p;
708 def_operand_p def_p;
709 bool remove = false, is_copy = false;
710 int num_uses = 0;
711 ssa_op_iter iter;
712
713 changed = false;
714
715 if (gimple_assign_copy_p (stmt))
716 is_copy = true;
717
718 copy_use_p = NULL_USE_OPERAND_P;
719 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
720 {
721 if (replace_use_variable (map, use_p, values))
722 changed = true;
723 copy_use_p = use_p;
724 num_uses++;
725 }
726
727 if (num_uses != 1)
728 is_copy = false;
729
730 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
731
732 if (def_p != NULL)
733 {
734 /* Mark this stmt for removal if it is in the list of replaceable
735 expressions. */
736 if (values && values[SSA_NAME_VERSION (DEF_FROM_PTR (def_p))])
737 remove = true;
738 else
739 {
740 if (replace_def_variable (map, def_p, NULL))
741 changed = true;
742 /* If both SSA_NAMEs coalesce to the same variable,
743 mark the now redundant copy for removal. */
744 if (is_copy)
745 {
746 gcc_assert (copy_use_p != NULL_USE_OPERAND_P);
747 if (DEF_FROM_PTR (def_p) == USE_FROM_PTR (copy_use_p))
748 remove = true;
749 }
750 }
751 }
752 else
753 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
754 if (replace_def_variable (map, def_p, NULL))
755 changed = true;
756
757 /* Remove any stmts marked for removal. */
758 if (remove)
759 gsi_remove (&gsi, true);
760 else
761 {
762 if (changed)
763 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
764 gimple_purge_dead_eh_edges (bb);
765 gsi_next (&gsi);
766 }
767 }
768
769 phi = phi_nodes (bb);
770 if (phi)
771 {
772 edge_iterator ei;
773 FOR_EACH_EDGE (e, ei, bb->preds)
774 eliminate_phi (e, g);
775 }
776 }
777
778 delete_elim_graph (g);
779 }
780
781 /* These are the local work structures used to determine the best place to
782 insert the copies that were placed on edges by the SSA->normal pass. */
783 static VEC(edge,heap) *edge_leader;
784 static VEC(gimple_seq,heap) *stmt_list;
785 static bitmap leader_has_match = NULL;
786 static edge leader_match = NULL;
787
788
789 /* Pass this function to make_forwarder_block so that all the edges whose
790 stmt list matches the current leader's list get redirected to the new
791 forwarder block. E is the edge to test for a match. */
792
793 static inline bool
794 same_stmt_list_p (edge e)
795 {
796 return (e->aux == (PTR) leader_match) ? true : false;
797 }
798
799
800 /* Return TRUE if S1 and S2 are equivalent copies. */
801
802 static inline bool
803 identical_copies_p (const_gimple s1, const_gimple s2)
804 {
805 #ifdef ENABLE_CHECKING
806 gcc_assert (is_gimple_assign (s1));
807 gcc_assert (is_gimple_assign (s2));
808 gcc_assert (DECL_P (gimple_assign_lhs (s1)));
809 gcc_assert (DECL_P (gimple_assign_lhs (s2)));
810 #endif
811
812 if (gimple_assign_lhs (s1) != gimple_assign_lhs (s2))
813 return false;
814
815 if (gimple_assign_rhs1 (s1) != gimple_assign_rhs1 (s2))
816 return false;
817
818 return true;
819 }
820
821
822 /* Compare the PENDING_STMT list for edges E1 and E2. Return true if the lists
823 contain the same sequence of copies. */
824
825 static inline bool
826 identical_stmt_lists_p (const_edge e1, const_edge e2)
827 {
828 gimple_seq t1 = PENDING_STMT (e1);
829 gimple_seq t2 = PENDING_STMT (e2);
830 gimple_stmt_iterator gsi1, gsi2;
831
832 for (gsi1 = gsi_start (t1), gsi2 = gsi_start (t2);
833 !gsi_end_p (gsi1) && !gsi_end_p (gsi2);
834 gsi_next (&gsi1), gsi_next (&gsi2))
835 {
836 if (!identical_copies_p (gsi_stmt (gsi1), gsi_stmt (gsi2)))
837 break;
838 }
839
840 if (!gsi_end_p (gsi1) || !gsi_end_p (gsi2))
841 return false;
842
843 return true;
844 }
845
846
847 /* Allocate data structures used in analyze_edges_for_bb. */
848
849 static void
850 init_analyze_edges_for_bb (void)
851 {
852 edge_leader = VEC_alloc (edge, heap, 25);
853 stmt_list = VEC_alloc (gimple_seq, heap, 25);
854 leader_has_match = BITMAP_ALLOC (NULL);
855 }
856
857
858 /* Free data structures used in analyze_edges_for_bb. */
859
860 static void
861 fini_analyze_edges_for_bb (void)
862 {
863 VEC_free (edge, heap, edge_leader);
864 VEC_free (gimple_seq, heap, stmt_list);
865 BITMAP_FREE (leader_has_match);
866 }
867
868 /* A helper function to be called via walk_tree. Return DATA if it is
869 contained in subtree TP. */
870
871 static tree
872 contains_tree_r (tree * tp, int *walk_subtrees, void *data)
873 {
874 if (*tp == data)
875 {
876 *walk_subtrees = 0;
877 return (tree) data;
878 }
879 else
880 return NULL_TREE;
881 }
882
883 /* A threshold for the number of insns contained in the latch block.
884 It is used to prevent bloating the loop with too many copies from
885 the latch. */
886 #define MAX_STMTS_IN_LATCH 2
887
888 /* Return TRUE if the stmts on SINGLE_EDGE can be moved to the
889 body of the loop. This should be permitted only if SINGLE_EDGE is a
890 single-basic-block latch edge and thus cleaning the latch will help
891 to create a single-basic-block loop. Otherwise return FALSE. */
892
893 static bool
894 process_single_block_loop_latch (edge single_edge)
895 {
896 gimple_seq stmts;
897 basic_block b_exit, b_pheader, b_loop = single_edge->src;
898 edge_iterator ei;
899 edge e;
900 gimple_stmt_iterator gsi, gsi_exit;
901 gimple_stmt_iterator tsi;
902 tree expr;
903 gimple stmt;
904 unsigned int count = 0;
905
906 if (single_edge == NULL || (single_edge->dest != single_edge->src)
907 || (EDGE_COUNT (b_loop->succs) != 2)
908 || (EDGE_COUNT (b_loop->preds) != 2))
909 return false;
910
911 /* Get the stmts on the latch edge. */
912 stmts = PENDING_STMT (single_edge);
913
914 /* Find the successor edge which is not the latch edge. */
915 FOR_EACH_EDGE (e, ei, b_loop->succs)
916 if (e->dest != b_loop)
917 break;
918
919 b_exit = e->dest;
920
921 /* Check that the exit block has only the loop as a predecessor,
922 and that there are no pending stmts on that edge as well. */
923 if (EDGE_COUNT (b_exit->preds) != 1 || PENDING_STMT (e))
924 return false;
925
926 /* Find the predecessor edge which is not the latch edge. */
927 FOR_EACH_EDGE (e, ei, b_loop->preds)
928 if (e->src != b_loop)
929 break;
930
931 b_pheader = e->src;
932
933 if (b_exit == b_pheader || b_exit == b_loop || b_pheader == b_loop)
934 return false;
935
936 gsi_exit = gsi_after_labels (b_exit);
937
938 /* Get the last stmt in the loop body. */
939 gsi = gsi_last_bb (single_edge->src);
940 stmt = gsi_stmt (gsi);
941
942 if (gimple_code (stmt) != GIMPLE_COND)
943 return false;
944
945
946 expr = build2 (gimple_cond_code (stmt), boolean_type_node,
947 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
948 /* Iterate over the insns on the latch and count them. */
949 for (tsi = gsi_start (stmts); !gsi_end_p (tsi); gsi_next (&tsi))
950 {
951 gimple stmt1 = gsi_stmt (tsi);
952 tree var;
953
954 count++;
955 /* Check that the condition does not contain any new definition
956 created in the latch, since the stmts from the latch are intended
957 to precede it. */
958 if (gimple_code (stmt1) != GIMPLE_ASSIGN)
959 return false;
960 var = gimple_assign_lhs (stmt1);
961 if (TREE_THIS_VOLATILE (var)
962 || TYPE_VOLATILE (TREE_TYPE (var))
963 || walk_tree (&expr, contains_tree_r, var, NULL))
964 return false;
965 }
966 /* Check that the latch does not contain more than MAX_STMTS_IN_LATCH
967 insns. The purpose of this restriction is to prevent bloating the
968 loop with too many copies from the latch. */
969 if (count > MAX_STMTS_IN_LATCH)
970 return false;
971
972 /* Apply the transformation - clean up the latch block:
973
974 var = something;
975 L1:
976 x1 = expr;
977 if (cond) goto L2 else goto L3;
978 L2:
979 var = x1;
980 goto L1
981 L3:
982 ...
983
984 ==>
985
986 var = something;
987 L1:
988 x1 = expr;
989 tmp_var = var;
990 var = x1;
991 if (cond) goto L1 else goto L2;
992 L2:
993 var = tmp_var;
994 ...
995 */
996 for (tsi = gsi_start (stmts); !gsi_end_p (tsi); gsi_next (&tsi))
997 {
998 gimple stmt1 = gsi_stmt (tsi);
999 tree var, tmp_var;
1000 gimple copy;
1001
1002 /* Create a new variable to load back the value of var in case
1003 we exit the loop. */
1004 var = gimple_assign_lhs (stmt1);
1005 tmp_var = create_temp (var);
1006 copy = gimple_build_assign (tmp_var, var);
1007 set_is_used (tmp_var);
1008 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
1009 copy = gimple_build_assign (var, tmp_var);
1010 gsi_insert_before (&gsi_exit, copy, GSI_SAME_STMT);
1011 }
1012
1013 PENDING_STMT (single_edge) = 0;
1014 /* Insert the new stmts to the loop body. */
1015 gsi_insert_seq_before (&gsi, stmts, GSI_NEW_STMT);
1016
1017 if (dump_file)
1018 fprintf (dump_file,
1019 "\nCleaned-up latch block of loop with single BB: %d\n\n",
1020 single_edge->dest->index);
1021
1022 return true;
1023 }
1024
1025 /* Look at all the incoming edges to block BB, and decide where the best place
1026 to insert the stmts on each edge is, and perform those insertions. */
1027
1028 static void
1029 analyze_edges_for_bb (basic_block bb)
1030 {
1031 edge e;
1032 edge_iterator ei;
1033 int count;
1034 unsigned int x;
1035 bool have_opportunity;
1036 gimple_stmt_iterator gsi;
1037 gimple stmt;
1038 edge single_edge = NULL;
1039 bool is_label;
1040 edge leader;
1041
1042 count = 0;
1043
1044 /* Blocks which contain at least one abnormal edge cannot use
1045 make_forwarder_block. Look for these blocks, and commit any PENDING_STMTs
1046 found on edges in these blocks. */
1047 have_opportunity = true;
1048 FOR_EACH_EDGE (e, ei, bb->preds)
1049 if (e->flags & EDGE_ABNORMAL)
1050 {
1051 have_opportunity = false;
1052 break;
1053 }
1054
1055 if (!have_opportunity)
1056 {
1057 FOR_EACH_EDGE (e, ei, bb->preds)
1058 if (PENDING_STMT (e))
1059 gsi_commit_one_edge_insert (e, NULL);
1060 return;
1061 }
1062
1063 /* Find out how many edges there are with interesting pending stmts on them.
1064 Commit the stmts on edges we are not interested in. */
1065 FOR_EACH_EDGE (e, ei, bb->preds)
1066 {
1067 if (PENDING_STMT (e))
1068 {
1069 gcc_assert (!(e->flags & EDGE_ABNORMAL));
1070 if (e->flags & EDGE_FALLTHRU)
1071 {
1072 gsi = gsi_start_bb (e->src);
1073 if (!gsi_end_p (gsi))
1074 {
1075 stmt = gsi_stmt (gsi);
1076 gsi_next (&gsi);
1077 gcc_assert (stmt != NULL);
1078 is_label = (gimple_code (stmt) == GIMPLE_LABEL);
1079 /* Punt if it has non-label stmts, or isn't local. */
1080 if (!is_label
1081 || DECL_NONLOCAL (gimple_label_label (stmt))
1082 || !gsi_end_p (gsi))
1083 {
1084 gsi_commit_one_edge_insert (e, NULL);
1085 continue;
1086 }
1087 }
1088 }
1089 single_edge = e;
1090 count++;
1091 }
1092 }
1093
1094 /* If there aren't at least 2 edges, no sharing will happen. */
1095 if (count < 2)
1096 {
1097 if (single_edge)
1098 {
1099 /* Add stmts to the edge unless processed specially as a
1100 single-block loop latch edge. */
1101 if (!process_single_block_loop_latch (single_edge))
1102 gsi_commit_one_edge_insert (single_edge, NULL);
1103 }
1104 return;
1105 }
1106
1107 /* Ensure that we have empty worklists. */
1108 #ifdef ENABLE_CHECKING
1109 gcc_assert (VEC_length (edge, edge_leader) == 0);
1110 gcc_assert (VEC_length (gimple_seq, stmt_list) == 0);
1111 gcc_assert (bitmap_empty_p (leader_has_match));
1112 #endif
1113
1114 /* Find the "leader" block for each set of unique stmt lists. Preference is
1115 given to FALLTHRU blocks since they would need a GOTO to arrive at another
1116 block. The leader edge destination is the block which all the other edges
1117 with the same stmt list will be redirected to. */
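  /* For example (hypothetical lists): if three incoming edges all carry the
     identical pending sequence "x = y_1; z = w_2;", the first such edge seen
     becomes the leader, the other two drop their own copy of the list and
     point their aux field at the leader, and a single forwarder block
     holding the sequence is created for all of them further below.  */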
1118 have_opportunity = false;
1119 FOR_EACH_EDGE (e, ei, bb->preds)
1120 {
1121 if (PENDING_STMT (e))
1122 {
1123 bool found = false;
1124
1125 /* Look for the same stmt list in edge leaders list. */
1126 for (x = 0; VEC_iterate (edge, edge_leader, x, leader); x++)
1127 {
1128 if (identical_stmt_lists_p (leader, e))
1129 {
1130 /* Give this edge the same stmt list pointer. */
1131 PENDING_STMT (e) = NULL;
1132 e->aux = leader;
1133 bitmap_set_bit (leader_has_match, x);
1134 have_opportunity = found = true;
1135 break;
1136 }
1137 }
1138
1139 /* If no similar stmt list, add this edge to the leader list. */
1140 if (!found)
1141 {
1142 VEC_safe_push (edge, heap, edge_leader, e);
1143 VEC_safe_push (gimple_seq, heap, stmt_list, PENDING_STMT (e));
1144 }
1145 }
1146 }
1147
1148 /* If there are no similar lists, just issue the stmts. */
1149 if (!have_opportunity)
1150 {
1151 for (x = 0; VEC_iterate (edge, edge_leader, x, leader); x++)
1152 gsi_commit_one_edge_insert (leader, NULL);
1153 VEC_truncate (edge, edge_leader, 0);
1154 VEC_truncate (gimple_seq, stmt_list, 0);
1155 bitmap_clear (leader_has_match);
1156 return;
1157 }
1158
1159 if (dump_file)
1160 fprintf (dump_file, "\nOpportunities in BB %d for stmt/block reduction:\n",
1161 bb->index);
1162
1163 /* For each common list, create a forwarding block and issue the stmts
1164 in that block. */
1165 for (x = 0; VEC_iterate (edge, edge_leader, x, leader); x++)
1166 if (bitmap_bit_p (leader_has_match, x))
1167 {
1168 edge new_edge;
1169 gimple_stmt_iterator gsi;
1170 gimple_seq curr_stmt_list;
1171
1172 leader_match = leader;
1173
1174 /* The tree_* cfg manipulation routines use the PENDING_STMT field
1175 for various PHI manipulations, so it gets cleared when calls are
1176 made to make_forwarder_block(). So make sure the edge is clear,
1177 and use the saved stmt list. */
1178 PENDING_STMT (leader) = NULL;
1179 leader->aux = leader;
1180 curr_stmt_list = VEC_index (gimple_seq, stmt_list, x);
1181
1182 new_edge = make_forwarder_block (leader->dest, same_stmt_list_p,
1183 NULL);
1184 bb = new_edge->dest;
1185 if (dump_file)
1186 {
1187 fprintf (dump_file, "Splitting BB %d for Common stmt list. ",
1188 leader->dest->index);
1189 fprintf (dump_file, "Original block is now BB%d.\n", bb->index);
1190 print_gimple_seq (dump_file, curr_stmt_list, 0, TDF_VOPS);
1191 }
1192
1193 FOR_EACH_EDGE (e, ei, new_edge->src->preds)
1194 {
1195 e->aux = NULL;
1196 if (dump_file)
1197 fprintf (dump_file, " Edge (%d->%d) lands here.\n",
1198 e->src->index, e->dest->index);
1199 }
1200
1201 gsi = gsi_last_bb (leader->dest);
1202 gsi_insert_seq_after (&gsi, curr_stmt_list, GSI_NEW_STMT);
1203
1204 leader_match = NULL;
1205 /* We should never get a new block now. */
1206 }
1207 else
1208 {
1209 PENDING_STMT (leader) = VEC_index (gimple_seq, stmt_list, x);
1210 gsi_commit_one_edge_insert (leader, NULL);
1211 }
1212
1213
1214 /* Clear the working data structures. */
1215 VEC_truncate (edge, edge_leader, 0);
1216 VEC_truncate (gimple_seq, stmt_list, 0);
1217 bitmap_clear (leader_has_match);
1218 }
1219
1220
1221 /* This function will analyze the insertions which were performed on edges,
1222 and decide whether they should be left on that edge, or whether it is more
1223 efficient to emit some subset of them in a single block. All stmts are
1224 inserted somewhere. */
1225
1226 static void
1227 perform_edge_inserts (void)
1228 {
1229 basic_block bb;
1230
1231 if (dump_file)
1232 fprintf(dump_file, "Analyzing Edge Insertions.\n");
1233
1234 /* analyze_edges_for_bb calls make_forwarder_block, which tries to
1235 incrementally update the dominator information. Since we don't
1236 need dominator information after this pass, go ahead and free the
1237 dominator information. */
1238 free_dominance_info (CDI_DOMINATORS);
1239 free_dominance_info (CDI_POST_DOMINATORS);
1240
1241 /* Allocate data structures used in analyze_edges_for_bb. */
1242 init_analyze_edges_for_bb ();
1243
1244 FOR_EACH_BB (bb)
1245 analyze_edges_for_bb (bb);
1246
1247 analyze_edges_for_bb (EXIT_BLOCK_PTR);
1248
1249 /* Free data structures used in analyze_edges_for_bb. */
1250 fini_analyze_edges_for_bb ();
1251
1252 #ifdef ENABLE_CHECKING
1253 {
1254 edge_iterator ei;
1255 edge e;
1256 FOR_EACH_BB (bb)
1257 {
1258 FOR_EACH_EDGE (e, ei, bb->preds)
1259 {
1260 if (PENDING_STMT (e))
1261 error (" Pending stmts not issued on PRED edge (%d, %d)\n",
1262 e->src->index, e->dest->index);
1263 }
1264 FOR_EACH_EDGE (e, ei, bb->succs)
1265 {
1266 if (PENDING_STMT (e))
1267 error (" Pending stmts not issued on SUCC edge (%d, %d)\n",
1268 e->src->index, e->dest->index);
1269 }
1270 }
1271 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
1272 {
1273 if (PENDING_STMT (e))
1274 error (" Pending stmts not issued on ENTRY edge (%d, %d)\n",
1275 e->src->index, e->dest->index);
1276 }
1277 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
1278 {
1279 if (PENDING_STMT (e))
1280 error (" Pending stmts not issued on EXIT edge (%d, %d)\n",
1281 e->src->index, e->dest->index);
1282 }
1283 }
1284 #endif
1285 }
1286
1287
1288 /* Remove the ssa-names in the current function and translate them into normal
1289 compiler variables. PERFORM_TER is true if Temporary Expression Replacement
1290 should also be used. */
1291
1292 static void
1293 remove_ssa_form (bool perform_ter)
1294 {
1295 basic_block bb;
1296 gimple *values = NULL;
1297 var_map map;
1298 gimple_stmt_iterator gsi;
1299
1300 map = coalesce_ssa_name ();
1301
1302 /* Return to viewing the variable list as just all reference variables after
1303 coalescing has been performed. */
1304 partition_view_normal (map, false);
1305
1306 if (dump_file && (dump_flags & TDF_DETAILS))
1307 {
1308 fprintf (dump_file, "After Coalescing:\n");
1309 dump_var_map (dump_file, map);
1310 }
1311
1312 if (perform_ter)
1313 {
1314 values = find_replaceable_exprs (map);
1315 if (values && dump_file && (dump_flags & TDF_DETAILS))
1316 dump_replaceable_exprs (dump_file, values);
1317 }
1318
1319 /* Assign real variables to the partitions now. */
1320 assign_vars (map);
1321
1322 if (dump_file && (dump_flags & TDF_DETAILS))
1323 {
1324 fprintf (dump_file, "After Base variable replacement:\n");
1325 dump_var_map (dump_file, map);
1326 }
1327
1328 rewrite_trees (map, values);
1329
1330 if (values)
1331 free (values);
1332
1333 /* Remove PHI nodes which have been translated back to real variables. */
1334 FOR_EACH_BB (bb)
1335 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
1336 remove_phi_node (&gsi, true);
1337
1338 /* If any copies were inserted on edges, analyze and insert them now. */
1339 perform_edge_inserts ();
1340
1341 delete_var_map (map);
1342 }
1343
1344
1345 /* Search every PHI node for arguments associated with backedges which
1346 we can trivially determine will need a copy (the argument is either
1347 not an SSA_NAME or the argument has a different underlying variable
1348 than the PHI result).
1349
1350 Insert a copy from the PHI argument to a new destination at the
1351 end of the block with the backedge to the top of the loop. Update
1352 the PHI argument to reference this new destination. */
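/* A hypothetical example: given the loop-header PHI
   "x_1 = PHI <x_0 (preheader), 7 (latch)>", the constant 7 on the back edge
   would otherwise require a copy on that edge, and hence a split of the back
   edge, during PHI elimination.  Instead, the assignment "x_2 = 7" is
   inserted at the end of the latch block and the PHI argument is rewritten
   to x_2.  */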
1353
1354 static void
1355 insert_backedge_copies (void)
1356 {
1357 basic_block bb;
1358 gimple_stmt_iterator gsi;
1359
1360 FOR_EACH_BB (bb)
1361 {
1362 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1363 {
1364 gimple phi = gsi_stmt (gsi);
1365 tree result = gimple_phi_result (phi);
1366 tree result_var;
1367 size_t i;
1368
1369 if (!is_gimple_reg (result))
1370 continue;
1371
1372 result_var = SSA_NAME_VAR (result);
1373 for (i = 0; i < gimple_phi_num_args (phi); i++)
1374 {
1375 tree arg = gimple_phi_arg_def (phi, i);
1376 edge e = gimple_phi_arg_edge (phi, i);
1377
1378 /* If the argument is not an SSA_NAME, then we will need a
1379 constant initialization. If the argument is an SSA_NAME with
1380 a different underlying variable, then a copy statement will be
1381 needed. */
1382 if ((e->flags & EDGE_DFS_BACK)
1383 && (TREE_CODE (arg) != SSA_NAME
1384 || SSA_NAME_VAR (arg) != result_var))
1385 {
1386 tree name;
1387 gimple stmt, last = NULL;
1388 gimple_stmt_iterator gsi2;
1389
1390 gsi2 = gsi_last_bb (gimple_phi_arg_edge (phi, i)->src);
1391 if (!gsi_end_p (gsi2))
1392 last = gsi_stmt (gsi2);
1393
1394 /* In theory the only way we ought to get back to the
1395 start of a loop should be with a COND_EXPR or GOTO_EXPR.
1396 However, better safe than sorry.
1397 If the block ends with a control statement or
1398 something that might throw, then we have to
1399 insert this assignment before the last
1400 statement. Else insert it after the last statement. */
1401 if (last && stmt_ends_bb_p (last))
1402 {
1403 /* If the last statement in the block is the definition
1404 site of the PHI argument, then we can't insert
1405 anything after it. */
1406 if (TREE_CODE (arg) == SSA_NAME
1407 && SSA_NAME_DEF_STMT (arg) == last)
1408 continue;
1409 }
1410
1411 /* Create a new instance of the underlying variable of the
1412 PHI result. */
1413 stmt = gimple_build_assign (result_var,
1414 gimple_phi_arg_def (phi, i));
1415 name = make_ssa_name (result_var, stmt);
1416 gimple_assign_set_lhs (stmt, name);
1417
1418 /* Insert the new statement into the block and update
1419 the PHI node. */
1420 if (last && stmt_ends_bb_p (last))
1421 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
1422 else
1423 gsi_insert_after (&gsi2, stmt, GSI_NEW_STMT);
1424 SET_PHI_ARG_DEF (phi, i, name);
1425 }
1426 }
1427 }
1428 }
1429 }
1430
1431 /* Take the current function out of SSA form, translating PHIs as described in
1432 R. Morgan, ``Building an Optimizing Compiler'',
1433 Butterworth-Heinemann, Boston, MA, 1998. pp 176-186. */
1434
1435 static unsigned int
1436 rewrite_out_of_ssa (void)
1437 {
1438 /* If elimination of a PHI requires inserting a copy on a backedge,
1439 then we will have to split the backedge which has numerous
1440 undesirable performance effects.
1441
1442 A significant number of such cases can be handled here by inserting
1443 copies into the loop itself. */
1444 insert_backedge_copies ();
1445
1446 eliminate_virtual_phis ();
1447
1448 if (dump_file && (dump_flags & TDF_DETAILS))
1449 gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);
1450
1451 remove_ssa_form (flag_tree_ter && !flag_mudflap);
1452
1453 if (dump_file && (dump_flags & TDF_DETAILS))
1454 gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);
1455
1456 cfun->gimple_df->in_ssa_p = false;
1457 return 0;
1458 }
1459
1460
1461 /* Define the parameters of the out of SSA pass. */
1462
1463 struct gimple_opt_pass pass_del_ssa =
1464 {
1465 {
1466 GIMPLE_PASS,
1467 "optimized", /* name */
1468 NULL, /* gate */
1469 rewrite_out_of_ssa, /* execute */
1470 NULL, /* sub */
1471 NULL, /* next */
1472 0, /* static_pass_number */
1473 TV_TREE_SSA_TO_NORMAL, /* tv_id */
1474 PROP_cfg | PROP_ssa, /* properties_required */
1475 0, /* properties_provided */
1476 /* ??? If TER is enabled, we also kill gimple. */
1477 PROP_ssa, /* properties_destroyed */
1478 TODO_verify_ssa | TODO_verify_flow
1479 | TODO_verify_stmts, /* todo_flags_start */
1480 TODO_dump_func
1481 | TODO_ggc_collect
1482 | TODO_remove_unused_locals /* todo_flags_finish */
1483 }
1484 };