gcc/tree-outof-ssa.c
1 /* Convert a program in SSA form into Normal form.
2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
3 Contributed by Andrew Macleod <amacleod@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "ssa.h"
30 #include "tree-ssa.h"
31 #include "memmodel.h"
32 #include "emit-rtl.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "tree-dfa.h"
36 #include "stor-layout.h"
37 #include "cfgrtl.h"
38 #include "cfganal.h"
39 #include "tree-eh.h"
40 #include "gimple-iterator.h"
41 #include "tree-cfg.h"
42 #include "dumpfile.h"
43 #include "tree-ssa-live.h"
44 #include "tree-ssa-ter.h"
45 #include "tree-ssa-coalesce.h"
46 #include "tree-outof-ssa.h"
47 #include "dojump.h"
48
49 /* FIXME: A lot of code here deals with expanding to RTL. All that code
50 should be in cfgexpand.c. */
51 #include "explow.h"
52 #include "expr.h"
53
   54 /* Return TRUE if the expression computed by STMT is suitable for replacement.  */
55
56 bool
57 ssa_is_replaceable_p (gimple *stmt)
58 {
59 use_operand_p use_p;
60 tree def;
61 gimple *use_stmt;
62
63 /* Only consider modify stmts. */
64 if (!is_gimple_assign (stmt))
65 return false;
66
67 /* If the statement may throw an exception, it cannot be replaced. */
68 if (stmt_could_throw_p (cfun, stmt))
69 return false;
70
71 /* Punt if there is more than 1 def. */
72 def = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_DEF);
73 if (!def)
74 return false;
75
76 /* Only consider definitions which have a single use. */
77 if (!single_imm_use (def, &use_p, &use_stmt))
78 return false;
79
80 /* Used in this block, but at the TOP of the block, not the end. */
81 if (gimple_code (use_stmt) == GIMPLE_PHI)
82 return false;
83
84 /* There must be no VDEFs. */
85 if (gimple_vdef (stmt))
86 return false;
87
88 /* Float expressions must go through memory if float-store is on. */
89 if (flag_float_store
90 && FLOAT_TYPE_P (gimple_expr_type (stmt)))
91 return false;
92
93 /* An assignment with a register variable on the RHS is not
94 replaceable. */
95 if (gimple_assign_rhs_code (stmt) == VAR_DECL
96 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt)))
97 return false;
98
99 /* No function calls can be replaced. */
100 if (is_gimple_call (stmt))
101 return false;
102
103 /* Leave any stmt with volatile operands alone as well. */
104 if (gimple_has_volatile_ops (stmt))
105 return false;
106
107 return true;
108 }
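/* An illustrative example (hypothetical GIMPLE, not taken from the
   sources): given

       _3 = a_1 + b_2;
       c_4 = _3 * 5;

   the definition of _3 passes all of the checks above -- it is a
   single-def, single-use assignment with no virtual definition, no
   volatile operands and no call -- so TER may substitute the
   expression a_1 + b_2 directly into the use in c_4.  */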
109
110
111 /* Used to hold all the components required to do SSA PHI elimination.
112 The node and pred/succ list is a simple linear list of nodes and
113 edges represented as pairs of nodes.
114
115 The predecessor and successor list: Nodes are entered in pairs, where
116 [0] ->PRED, [1]->SUCC. All the even indexes in the array represent
117 predecessors, all the odd elements are successors.
118
119 Rationale:
  120      When implemented as bitmaps, the SSA->Normal conversion times for
  121      very large programs were dominated by clearing the interference graph.
122
123 Typically this list of edges is extremely small since it only includes
124 PHI results and uses from a single edge which have not coalesced with
125 each other. This means that no virtual PHI nodes are included, and
  126      empirical evidence suggests that the number of edges rarely exceeds
127 3, and in a bootstrap of GCC, the maximum size encountered was 7.
128 This also limits the number of possible nodes that are involved to
  129      rarely more than 6, and in a bootstrap of GCC, the maximum number
130 of nodes encountered was 12. */
131
132 struct elim_graph
133 {
134 elim_graph (var_map map);
135
136 /* Size of the elimination vectors. */
137 int size;
138
139 /* List of nodes in the elimination graph. */
140 auto_vec<int> nodes;
141
142 /* The predecessor and successor edge list. */
143 auto_vec<int> edge_list;
144
  145   /* Source locus on each edge.  */
146 auto_vec<location_t> edge_locus;
147
148 /* Visited vector. */
149 auto_sbitmap visited;
150
151 /* Stack for visited nodes. */
152 auto_vec<int> stack;
153
154 /* The variable partition map. */
155 var_map map;
156
157 /* Edge being eliminated by this graph. */
158 edge e;
159
160 /* List of constant copies to emit. These are pushed on in pairs. */
161 auto_vec<int> const_dests;
162 auto_vec<tree> const_copies;
163
164 /* Source locations for any constant copies. */
165 auto_vec<location_t> copy_locus;
166 };
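/* A sketch of the pair encoding described above, with hypothetical
   partition numbers: if the PHI results living in partitions 2 and 7
   both take their value from partition 5 on the edge being eliminated,
   the vectors contain

       nodes      = { 2, 5, 7 }
       edge_list  = { 2, 5,   7, 5 }      ([0] = PRED, [1] = SUCC, ...)
       edge_locus = { locus1, locus2 }

   where each PRED->SUCC edge records a pending copy
   PART.PRED = PART.SUCC, here PART.2 = PART.5 and PART.7 = PART.5.  */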
167
168
169 /* For an edge E find out a good source location to associate with
170 instructions inserted on edge E. If E has an implicit goto set,
171 use its location. Otherwise search instructions in predecessors
172 of E for a location, and use that one. That makes sense because
  173    we insert on edges for PHI nodes, and the effects of PHIs conceptually
  174    happen at the end of the predecessor block.  */
175
176 static void
177 set_location_for_edge (edge e)
178 {
179 if (e->goto_locus)
180 {
181 set_curr_insn_location (e->goto_locus);
182 }
183 else
184 {
185 basic_block bb = e->src;
186 gimple_stmt_iterator gsi;
187
188 do
189 {
190 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
191 {
192 gimple *stmt = gsi_stmt (gsi);
193 if (is_gimple_debug (stmt))
194 continue;
195 if (gimple_has_location (stmt) || gimple_block (stmt))
196 {
197 set_curr_insn_location (gimple_location (stmt));
198 return;
199 }
200 }
201 /* Nothing found in this basic block. Make a half-assed attempt
202 to continue with another block. */
203 if (single_pred_p (bb))
204 bb = single_pred (bb);
205 else
206 bb = e->src;
207 }
208 while (bb != e->src);
209 }
210 }
211
212 /* Emit insns to copy SRC into DEST converting SRC if necessary. As
213 SRC/DEST might be BLKmode memory locations SIZEEXP is a tree from
214 which we deduce the size to copy in that case. */
215
216 static inline rtx_insn *
217 emit_partition_copy (rtx dest, rtx src, int unsignedsrcp, tree sizeexp)
218 {
219 start_sequence ();
220
221 if (GET_MODE (src) != VOIDmode && GET_MODE (src) != GET_MODE (dest))
222 src = convert_to_mode (GET_MODE (dest), src, unsignedsrcp);
223 if (GET_MODE (src) == BLKmode)
224 {
225 gcc_assert (GET_MODE (dest) == BLKmode);
226 emit_block_move (dest, src, expr_size (sizeexp), BLOCK_OP_NORMAL);
227 }
228 else
229 emit_move_insn (dest, src);
230 do_pending_stack_adjust ();
231
232 rtx_insn *seq = get_insns ();
233 end_sequence ();
234
235 return seq;
236 }
237
238 /* Insert a copy instruction from partition SRC to DEST onto edge E. */
239
240 static void
241 insert_partition_copy_on_edge (edge e, int dest, int src, location_t locus)
242 {
243 tree var;
244 if (dump_file && (dump_flags & TDF_DETAILS))
245 {
246 fprintf (dump_file,
247 "Inserting a partition copy on edge BB%d->BB%d : "
248 "PART.%d = PART.%d",
249 e->src->index,
250 e->dest->index, dest, src);
251 fprintf (dump_file, "\n");
252 }
253
254 gcc_assert (SA.partition_to_pseudo[dest]);
255 gcc_assert (SA.partition_to_pseudo[src]);
256
257 set_location_for_edge (e);
258 /* If a locus is provided, override the default. */
259 if (locus)
260 set_curr_insn_location (locus);
261
262 var = partition_to_var (SA.map, src);
263 rtx_insn *seq = emit_partition_copy (copy_rtx (SA.partition_to_pseudo[dest]),
264 copy_rtx (SA.partition_to_pseudo[src]),
265 TYPE_UNSIGNED (TREE_TYPE (var)),
266 var);
267
268 insert_insn_on_edge (seq, e);
269 }
270
271 /* Insert a copy instruction from expression SRC to partition DEST
272 onto edge E. */
273
274 static void
275 insert_value_copy_on_edge (edge e, int dest, tree src, location_t locus)
276 {
277 rtx dest_rtx, seq, x;
278 machine_mode dest_mode, src_mode;
279 int unsignedp;
280
281 if (dump_file && (dump_flags & TDF_DETAILS))
282 {
283 fprintf (dump_file,
284 "Inserting a value copy on edge BB%d->BB%d : PART.%d = ",
285 e->src->index,
286 e->dest->index, dest);
287 print_generic_expr (dump_file, src, TDF_SLIM);
288 fprintf (dump_file, "\n");
289 }
290
291 dest_rtx = copy_rtx (SA.partition_to_pseudo[dest]);
292 gcc_assert (dest_rtx);
293
294 set_location_for_edge (e);
295 /* If a locus is provided, override the default. */
296 if (locus)
297 set_curr_insn_location (locus);
298
299 start_sequence ();
300
301 tree name = partition_to_var (SA.map, dest);
302 src_mode = TYPE_MODE (TREE_TYPE (src));
303 dest_mode = GET_MODE (dest_rtx);
304 gcc_assert (src_mode == TYPE_MODE (TREE_TYPE (name)));
305 gcc_assert (!REG_P (dest_rtx)
306 || dest_mode == promote_ssa_mode (name, &unsignedp));
307
308 if (src_mode != dest_mode)
309 {
310 x = expand_expr (src, NULL, src_mode, EXPAND_NORMAL);
311 x = convert_modes (dest_mode, src_mode, x, unsignedp);
312 }
313 else if (src_mode == BLKmode)
314 {
315 x = dest_rtx;
316 store_expr (src, x, 0, false, false);
317 }
318 else
319 x = expand_expr (src, dest_rtx, dest_mode, EXPAND_NORMAL);
320
321 if (x != dest_rtx)
322 emit_move_insn (dest_rtx, x);
323 do_pending_stack_adjust ();
324
325 seq = get_insns ();
326 end_sequence ();
327
328 insert_insn_on_edge (seq, e);
329 }
330
331 /* Insert a copy instruction from RTL expression SRC to partition DEST
332 onto edge E. */
333
334 static void
335 insert_rtx_to_part_on_edge (edge e, int dest, rtx src, int unsignedsrcp,
336 location_t locus)
337 {
338 if (dump_file && (dump_flags & TDF_DETAILS))
339 {
340 fprintf (dump_file,
341 "Inserting a temp copy on edge BB%d->BB%d : PART.%d = ",
342 e->src->index,
343 e->dest->index, dest);
344 print_simple_rtl (dump_file, src);
345 fprintf (dump_file, "\n");
346 }
347
348 gcc_assert (SA.partition_to_pseudo[dest]);
349
350 set_location_for_edge (e);
351 /* If a locus is provided, override the default. */
352 if (locus)
353 set_curr_insn_location (locus);
354
355 /* We give the destination as sizeexp in case src/dest are BLKmode
  356    mems.  Usually we give the source.  Since the values originate from
  357    SSA names, the left and right sizes should be the same (and there is
  358    no WITH_SIZE_EXPR involved), so it doesn't matter.  */
359 rtx_insn *seq = emit_partition_copy (copy_rtx (SA.partition_to_pseudo[dest]),
360 src, unsignedsrcp,
361 partition_to_var (SA.map, dest));
362
363 insert_insn_on_edge (seq, e);
364 }
365
366 /* Insert a copy instruction from partition SRC to RTL lvalue DEST
367 onto edge E. */
368
369 static void
370 insert_part_to_rtx_on_edge (edge e, rtx dest, int src, location_t locus)
371 {
372 tree var;
373 if (dump_file && (dump_flags & TDF_DETAILS))
374 {
375 fprintf (dump_file,
376 "Inserting a temp copy on edge BB%d->BB%d : ",
377 e->src->index,
378 e->dest->index);
379 print_simple_rtl (dump_file, dest);
380 fprintf (dump_file, "= PART.%d\n", src);
381 }
382
383 gcc_assert (SA.partition_to_pseudo[src]);
384
385 set_location_for_edge (e);
386 /* If a locus is provided, override the default. */
387 if (locus)
388 set_curr_insn_location (locus);
389
390 var = partition_to_var (SA.map, src);
391 rtx_insn *seq = emit_partition_copy (dest,
392 copy_rtx (SA.partition_to_pseudo[src]),
393 TYPE_UNSIGNED (TREE_TYPE (var)),
394 var);
395
396 insert_insn_on_edge (seq, e);
397 }
398
399
400 /* Create an elimination graph for map. */
401
402 elim_graph::elim_graph (var_map map) :
403 nodes (30), edge_list (20), edge_locus (10), visited (map->num_partitions),
404 stack (30), map (map), const_dests (20), const_copies (20), copy_locus (10)
405 {
406 }
407
408
409 /* Empty elimination graph G. */
410
411 static inline void
412 clear_elim_graph (elim_graph *g)
413 {
414 g->nodes.truncate (0);
415 g->edge_list.truncate (0);
416 g->edge_locus.truncate (0);
417 }
418
419
420 /* Return the number of nodes in graph G. */
421
422 static inline int
423 elim_graph_size (elim_graph *g)
424 {
425 return g->nodes.length ();
426 }
427
428
429 /* Add NODE to graph G, if it doesn't exist already. */
430
431 static inline void
432 elim_graph_add_node (elim_graph *g, int node)
433 {
434 int x;
435 int t;
436
437 FOR_EACH_VEC_ELT (g->nodes, x, t)
438 if (t == node)
439 return;
440 g->nodes.safe_push (node);
441 }
442
443
444 /* Add the edge PRED->SUCC to graph G. */
445
446 static inline void
447 elim_graph_add_edge (elim_graph *g, int pred, int succ, location_t locus)
448 {
449 g->edge_list.safe_push (pred);
450 g->edge_list.safe_push (succ);
451 g->edge_locus.safe_push (locus);
452 }
453
454
455 /* Remove an edge from graph G for which NODE is the predecessor, and
456 return the successor node. -1 is returned if there is no such edge. */
457
458 static inline int
459 elim_graph_remove_succ_edge (elim_graph *g, int node, location_t *locus)
460 {
461 int y;
462 unsigned x;
463 for (x = 0; x < g->edge_list.length (); x += 2)
464 if (g->edge_list[x] == node)
465 {
466 g->edge_list[x] = -1;
467 y = g->edge_list[x + 1];
468 g->edge_list[x + 1] = -1;
469 *locus = g->edge_locus[x / 2];
470 g->edge_locus[x / 2] = UNKNOWN_LOCATION;
471 return y;
472 }
473 *locus = UNKNOWN_LOCATION;
474 return -1;
475 }
476
477
478 /* Find all the nodes in GRAPH which are successors to NODE in the
479 edge list. VAR will hold the partition number found. CODE is the
480 code fragment executed for every node found. */
481
482 #define FOR_EACH_ELIM_GRAPH_SUCC(GRAPH, NODE, VAR, LOCUS, CODE) \
483 do { \
484 unsigned x_; \
485 int y_; \
486 for (x_ = 0; x_ < (GRAPH)->edge_list.length (); x_ += 2) \
487 { \
488 y_ = (GRAPH)->edge_list[x_]; \
489 if (y_ != (NODE)) \
490 continue; \
491 (void) ((VAR) = (GRAPH)->edge_list[x_ + 1]); \
492 (void) ((LOCUS) = (GRAPH)->edge_locus[x_ / 2]); \
493 CODE; \
494 } \
495 } while (0)
496
497
498 /* Find all the nodes which are predecessors of NODE in the edge list for
499 GRAPH. VAR will hold the partition number found. CODE is the
500 code fragment executed for every node found. */
501
502 #define FOR_EACH_ELIM_GRAPH_PRED(GRAPH, NODE, VAR, LOCUS, CODE) \
503 do { \
504 unsigned x_; \
505 int y_; \
506 for (x_ = 0; x_ < (GRAPH)->edge_list.length (); x_ += 2) \
507 { \
508 y_ = (GRAPH)->edge_list[x_ + 1]; \
509 if (y_ != (NODE)) \
510 continue; \
511 (void) ((VAR) = (GRAPH)->edge_list[x_]); \
512 (void) ((LOCUS) = (GRAPH)->edge_locus[x_ / 2]); \
513 CODE; \
514 } \
515 } while (0)
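/* A minimal usage sketch of the iterators above (hypothetical; the
   real uses are in elim_forward and related functions below):

       int succ;
       location_t locus;
       FOR_EACH_ELIM_GRAPH_SUCC (g, node, succ, locus,
         {
           fprintf (stderr, "pending copy PART.%d = PART.%d\n",
                    node, succ);
         });

   The CODE fragment runs once for every edge whose predecessor (or,
   for the _PRED variant, whose successor) equals NODE.  */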
516
517
518 /* Add T to elimination graph G. */
519
520 static inline void
521 eliminate_name (elim_graph *g, int T)
522 {
523 elim_graph_add_node (g, T);
524 }
525
526 /* Return true if this phi argument T should have a copy queued when using
527 var_map MAP. PHI nodes should contain only ssa_names and invariants. A
528 test for ssa_name is definitely simpler, but don't let invalid contents
529 slip through in the meantime. */
530
531 static inline bool
532 queue_phi_copy_p (var_map map, tree t)
533 {
534 if (TREE_CODE (t) == SSA_NAME)
535 {
536 if (var_to_partition (map, t) == NO_PARTITION)
537 return true;
538 return false;
539 }
540 gcc_checking_assert (is_gimple_min_invariant (t));
541 return true;
542 }
543
544 /* Build elimination graph G for basic block BB on incoming PHI edge
545 G->e. */
546
547 static void
548 eliminate_build (elim_graph *g)
549 {
550 tree Ti;
551 int p0, pi;
552 gphi_iterator gsi;
553
554 clear_elim_graph (g);
555
556 for (gsi = gsi_start_phis (g->e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
557 {
558 gphi *phi = gsi.phi ();
559 location_t locus;
560
561 p0 = var_to_partition (g->map, gimple_phi_result (phi));
562 /* Ignore results which are not in partitions. */
563 if (p0 == NO_PARTITION)
564 continue;
565
566 Ti = PHI_ARG_DEF (phi, g->e->dest_idx);
567 locus = gimple_phi_arg_location_from_edge (phi, g->e);
568
569 /* If this argument is a constant, or a SSA_NAME which is being
570 left in SSA form, just queue a copy to be emitted on this
571 edge. */
572 if (queue_phi_copy_p (g->map, Ti))
573 {
574 /* Save constant copies until all other copies have been emitted
575 on this edge. */
576 g->const_dests.safe_push (p0);
577 g->const_copies.safe_push (Ti);
578 g->copy_locus.safe_push (locus);
579 }
580 else
581 {
582 pi = var_to_partition (g->map, Ti);
583 if (p0 != pi)
584 {
585 eliminate_name (g, p0);
586 eliminate_name (g, pi);
587 elim_graph_add_edge (g, p0, pi, locus);
588 }
589 }
590 }
591 }
592
593
594 /* Push successors of T onto the elimination stack for G. */
595
596 static void
597 elim_forward (elim_graph *g, int T)
598 {
599 int S;
600 location_t locus;
601
602 bitmap_set_bit (g->visited, T);
603 FOR_EACH_ELIM_GRAPH_SUCC (g, T, S, locus,
604 {
605 if (!bitmap_bit_p (g->visited, S))
606 elim_forward (g, S);
607 });
608 g->stack.safe_push (T);
609 }
610
611
  612 /* Return 1 if there are unvisited predecessors of T in graph G.  */
613
614 static int
615 elim_unvisited_predecessor (elim_graph *g, int T)
616 {
617 int P;
618 location_t locus;
619
620 FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
621 {
622 if (!bitmap_bit_p (g->visited, P))
623 return 1;
624 });
625 return 0;
626 }
627
628 /* Process predecessors first, and insert a copy. */
629
630 static void
631 elim_backward (elim_graph *g, int T)
632 {
633 int P;
634 location_t locus;
635
636 bitmap_set_bit (g->visited, T);
637 FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
638 {
639 if (!bitmap_bit_p (g->visited, P))
640 {
641 elim_backward (g, P);
642 insert_partition_copy_on_edge (g->e, P, T, locus);
643 }
644 });
645 }
646
647 /* Allocate a new pseudo register usable for storing values sitting
648 in NAME (a decl or SSA name), i.e. with matching mode and attributes. */
649
650 static rtx
651 get_temp_reg (tree name)
652 {
653 tree type = TREE_TYPE (name);
654 int unsignedp;
655 machine_mode reg_mode = promote_ssa_mode (name, &unsignedp);
656 rtx x = gen_reg_rtx (reg_mode);
657 if (POINTER_TYPE_P (type))
658 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
659 return x;
660 }
661
662 /* Insert required copies for T in graph G. Check for a strongly connected
663 region, and create a temporary to break the cycle if one is found. */
664
665 static void
666 elim_create (elim_graph *g, int T)
667 {
668 int P, S;
669 location_t locus;
670
671 if (elim_unvisited_predecessor (g, T))
672 {
673 tree var = partition_to_var (g->map, T);
674 rtx U = get_temp_reg (var);
675 int unsignedsrcp = TYPE_UNSIGNED (TREE_TYPE (var));
676
677 insert_part_to_rtx_on_edge (g->e, U, T, UNKNOWN_LOCATION);
678 FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
679 {
680 if (!bitmap_bit_p (g->visited, P))
681 {
682 elim_backward (g, P);
683 insert_rtx_to_part_on_edge (g->e, P, U, unsignedsrcp, locus);
684 }
685 });
686 }
687 else
688 {
689 S = elim_graph_remove_succ_edge (g, T, &locus);
690 if (S != -1)
691 {
692 bitmap_set_bit (g->visited, T);
693 insert_partition_copy_on_edge (g->e, T, S, locus);
694 }
695 }
696 }
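/* An illustrative case for the temporary above (hypothetical GIMPLE):
   a "swap" pair of PHIs whose result and argument partitions form the
   cycle 1->2, 2->1, e.g.

       x_1 = PHI <..., y_2(backedge)>
       y_2 = PHI <..., x_1(backedge)>

   cannot be resolved by plain copies in either order.  elim_create
   breaks the cycle by first saving the old value of one partition into
   a fresh pseudo U (insert_part_to_rtx_on_edge), letting the remaining
   copies be emitted, and finally copying U into the partitions that
   needed the saved value (insert_rtx_to_part_on_edge).  */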
697
698
699 /* Eliminate all the phi nodes on edge E in graph G. */
700
701 static void
702 eliminate_phi (edge e, elim_graph *g)
703 {
704 int x;
705
706 gcc_assert (g->const_copies.length () == 0);
707 gcc_assert (g->copy_locus.length () == 0);
708
709 /* Abnormal edges already have everything coalesced. */
710 if (e->flags & EDGE_ABNORMAL)
711 return;
712
713 g->e = e;
714
715 eliminate_build (g);
716
717 if (elim_graph_size (g) != 0)
718 {
719 int part;
720
721 bitmap_clear (g->visited);
722 g->stack.truncate (0);
723
724 FOR_EACH_VEC_ELT (g->nodes, x, part)
725 {
726 if (!bitmap_bit_p (g->visited, part))
727 elim_forward (g, part);
728 }
729
730 bitmap_clear (g->visited);
731 while (g->stack.length () > 0)
732 {
733 x = g->stack.pop ();
734 if (!bitmap_bit_p (g->visited, x))
735 elim_create (g, x);
736 }
737 }
738
739 /* If there are any pending constant copies, issue them now. */
740 while (g->const_copies.length () > 0)
741 {
742 int dest;
743 tree src;
744 location_t locus;
745
746 src = g->const_copies.pop ();
747 dest = g->const_dests.pop ();
748 locus = g->copy_locus.pop ();
749 insert_value_copy_on_edge (e, dest, src, locus);
750 }
751 }
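/* A hedged end-to-end illustration (hypothetical GIMPLE and partition
   numbers): for an incoming edge E into a block containing

       a_5 = PHI <b_7(E), ...>
       c_9 = PHI <42(E), ...>

   eliminate_build records one graph edge for the a_5/b_7 pair (assuming
   the two names ended up in different partitions) and queues the
   constant 42 as a deferred value copy; eliminate_phi then emits, on E,
   code equivalent to PART(a_5) = PART(b_7) followed by PART(c_9) = 42.  */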
752
753
754 /* Remove each argument from PHI. If an arg was the last use of an SSA_NAME,
755 check to see if this allows another PHI node to be removed. */
756
757 static void
758 remove_gimple_phi_args (gphi *phi)
759 {
760 use_operand_p arg_p;
761 ssa_op_iter iter;
762
763 if (dump_file && (dump_flags & TDF_DETAILS))
764 {
765 fprintf (dump_file, "Removing Dead PHI definition: ");
766 print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
767 }
768
769 FOR_EACH_PHI_ARG (arg_p, phi, iter, SSA_OP_USE)
770 {
771 tree arg = USE_FROM_PTR (arg_p);
772 if (TREE_CODE (arg) == SSA_NAME)
773 {
774 /* Remove the reference to the existing argument. */
775 SET_USE (arg_p, NULL_TREE);
776 if (has_zero_uses (arg))
777 {
778 gimple *stmt;
779 gimple_stmt_iterator gsi;
780
781 stmt = SSA_NAME_DEF_STMT (arg);
782
783 /* Also remove the def if it is a PHI node. */
784 if (gimple_code (stmt) == GIMPLE_PHI)
785 {
786 remove_gimple_phi_args (as_a <gphi *> (stmt));
787 gsi = gsi_for_stmt (stmt);
788 remove_phi_node (&gsi, true);
789 }
790
791 }
792 }
793 }
794 }
795
796 /* Remove any PHI node which is a virtual PHI, or a PHI with no uses. */
797
798 static void
799 eliminate_useless_phis (void)
800 {
801 basic_block bb;
802 gphi_iterator gsi;
803 tree result;
804
805 FOR_EACH_BB_FN (bb, cfun)
806 {
807 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
808 {
809 gphi *phi = gsi.phi ();
810 result = gimple_phi_result (phi);
811 if (virtual_operand_p (result))
812 remove_phi_node (&gsi, true);
813 else
814 {
815 /* Also remove real PHIs with no uses. */
816 if (has_zero_uses (result))
817 {
818 remove_gimple_phi_args (phi);
819 remove_phi_node (&gsi, true);
820 }
821 else
822 gsi_next (&gsi);
823 }
824 }
825 }
826 }
827
828
  829 /* Given the variable partition map MAP, verify (when checking is
  830    enabled) that the partitioning is consistent: look for PHI nodes
  831    whose result is not in any partition while one or more of their
  832    arguments are.  That should not happen and would lead to incorrect
  833    code, so it is reported as SSA corruption.  */
834
835 static void
836 rewrite_trees (var_map map)
837 {
838 if (!flag_checking)
839 return;
840
841 basic_block bb;
842 /* Search for PHIs where the destination has no partition, but one
  843      or more arguments have a partition.  This should not happen and can
844 create incorrect code. */
845 FOR_EACH_BB_FN (bb, cfun)
846 {
847 gphi_iterator gsi;
848 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
849 {
850 gphi *phi = gsi.phi ();
851 tree T0 = var_to_partition_to_var (map, gimple_phi_result (phi));
852 if (T0 == NULL_TREE)
853 {
854 size_t i;
855 for (i = 0; i < gimple_phi_num_args (phi); i++)
856 {
857 tree arg = PHI_ARG_DEF (phi, i);
858
859 if (TREE_CODE (arg) == SSA_NAME
860 && var_to_partition (map, arg) != NO_PARTITION)
861 {
862 fprintf (stderr, "Argument of PHI is in a partition :(");
863 print_generic_expr (stderr, arg, TDF_SLIM);
864 fprintf (stderr, "), but the result is not :");
865 print_gimple_stmt (stderr, phi, 0, TDF_SLIM);
866 internal_error ("SSA corruption");
867 }
868 }
869 }
870 }
871 }
872 }
873
874 /* Create a default def for VAR. */
875
876 static void
877 create_default_def (tree var, void *arg ATTRIBUTE_UNUSED)
878 {
879 if (!is_gimple_reg (var))
880 return;
881
882 tree ssa = get_or_create_ssa_default_def (cfun, var);
883 gcc_assert (ssa);
884 }
885
886 /* Call CALLBACK for all PARM_DECLs and RESULT_DECLs for which
887 assign_parms may ask for a default partition. */
888
889 static void
890 for_all_parms (void (*callback)(tree var, void *arg), void *arg)
891 {
892 for (tree var = DECL_ARGUMENTS (current_function_decl); var;
893 var = DECL_CHAIN (var))
894 callback (var, arg);
895 if (!VOID_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
896 callback (DECL_RESULT (current_function_decl), arg);
897 if (cfun->static_chain_decl)
898 callback (cfun->static_chain_decl, arg);
899 }
900
901 /* We need to pass two arguments to set_parm_default_def_partition,
902 but for_all_parms only supports one. Use a pair. */
903
904 typedef std::pair<var_map, bitmap> parm_default_def_partition_arg;
905
906 /* Set in ARG's PARTS bitmap the bit corresponding to the partition in
907 ARG's MAP containing VAR's default def. */
908
909 static void
910 set_parm_default_def_partition (tree var, void *arg_)
911 {
912 parm_default_def_partition_arg *arg = (parm_default_def_partition_arg *)arg_;
913 var_map map = arg->first;
914 bitmap parts = arg->second;
915
916 if (!is_gimple_reg (var))
917 return;
918
919 tree ssa = ssa_default_def (cfun, var);
920 gcc_assert (ssa);
921
922 int version = var_to_partition (map, ssa);
923 gcc_assert (version != NO_PARTITION);
924
925 bool changed = bitmap_set_bit (parts, version);
926 gcc_assert (changed);
927 }
928
929 /* Allocate and return a bitmap that has a bit set for each partition
930 that contains a default def for a parameter. */
931
932 static bitmap
933 get_parm_default_def_partitions (var_map map)
934 {
935 bitmap parm_default_def_parts = BITMAP_ALLOC (NULL);
936
937 parm_default_def_partition_arg
938 arg = std::make_pair (map, parm_default_def_parts);
939
940 for_all_parms (set_parm_default_def_partition, &arg);
941
942 return parm_default_def_parts;
943 }
944
945 /* Allocate and return a bitmap that has a bit set for each partition
946 that contains an undefined value. */
947
948 static bitmap
949 get_undefined_value_partitions (var_map map)
950 {
951 bitmap undefined_value_parts = BITMAP_ALLOC (NULL);
952
953 for (unsigned int i = 1; i < num_ssa_names; i++)
954 {
955 tree var = ssa_name (i);
956 if (var
957 && !virtual_operand_p (var)
958 && !has_zero_uses (var)
959 && ssa_undefined_value_p (var))
960 {
961 const int p = var_to_partition (map, var);
962 if (p != NO_PARTITION)
963 bitmap_set_bit (undefined_value_parts, p);
964 }
965 }
966
967 return undefined_value_parts;
968 }
969
970 /* Given the out-of-ssa info object SA (with prepared partitions)
  971    eliminate all PHI nodes in all basic blocks.  Afterwards no
  972    basic block will have PHI nodes any more, and there may be some
  973    RTL instructions inserted on edges.  */
974
975 void
976 expand_phi_nodes (struct ssaexpand *sa)
977 {
978 basic_block bb;
979 elim_graph g (sa->map);
980
981 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb,
982 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
983 if (!gimple_seq_empty_p (phi_nodes (bb)))
984 {
985 edge e;
986 edge_iterator ei;
987 FOR_EACH_EDGE (e, ei, bb->preds)
988 eliminate_phi (e, &g);
989 set_phi_nodes (bb, NULL);
990 /* We can't redirect EH edges in RTL land, so we need to do this
991 here. Redirection happens only when splitting is necessary,
992 which it is only for critical edges, normally. For EH edges
993 it might also be necessary when the successor has more than
994 one predecessor. In that case the edge is either required to
995 be fallthru (which EH edges aren't), or the predecessor needs
996 to end with a jump (which again, isn't the case with EH edges).
997 Hence, split all EH edges on which we inserted instructions
998 and whose successor has multiple predecessors. */
999 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
1000 {
1001 if (e->insns.r && (e->flags & EDGE_EH)
1002 && !single_pred_p (e->dest))
1003 {
1004 rtx_insn *insns = e->insns.r;
1005 basic_block bb;
1006 e->insns.r = NULL;
1007 bb = split_edge (e);
1008 single_pred_edge (bb)->insns.r = insns;
1009 }
1010 else
1011 ei_next (&ei);
1012 }
1013 }
1014 }
1015
1016
1017 /* Remove the ssa-names in the current function and translate them into normal
1018 compiler variables. PERFORM_TER is true if Temporary Expression Replacement
1019 should also be used. */
1020
1021 static void
1022 remove_ssa_form (bool perform_ter, struct ssaexpand *sa)
1023 {
1024 bitmap values = NULL;
1025 var_map map;
1026
1027 for_all_parms (create_default_def, NULL);
1028 map = init_var_map (num_ssa_names);
1029 coalesce_ssa_name (map);
1030
1031 /* Return to viewing the variable list as just all reference variables after
1032 coalescing has been performed. */
1033 partition_view_normal (map);
1034
1035 if (dump_file && (dump_flags & TDF_DETAILS))
1036 {
1037 fprintf (dump_file, "After Coalescing:\n");
1038 dump_var_map (dump_file, map);
1039 }
1040
1041 if (perform_ter)
1042 {
1043 values = find_replaceable_exprs (map);
1044 if (values && dump_file && (dump_flags & TDF_DETAILS))
1045 dump_replaceable_exprs (dump_file, values);
1046 }
1047
1048 rewrite_trees (map);
1049
1050 sa->map = map;
1051 sa->values = values;
1052 sa->partitions_for_parm_default_defs = get_parm_default_def_partitions (map);
1053 sa->partitions_for_undefined_values = get_undefined_value_partitions (map);
1054 }
1055
1056
1057 /* If not already done so for basic block BB, assign increasing uids
1058 to each of its instructions. */
1059
1060 static void
1061 maybe_renumber_stmts_bb (basic_block bb)
1062 {
1063 unsigned i = 0;
1064 gimple_stmt_iterator gsi;
1065
1066 if (!bb->aux)
1067 return;
1068 bb->aux = NULL;
1069 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1070 {
1071 gimple *stmt = gsi_stmt (gsi);
1072 gimple_set_uid (stmt, i);
1073 i++;
1074 }
1075 }
1076
1077
1078 /* Return true if we can determine that the SSA_NAMEs RESULT (a result
1079 of a PHI node) and ARG (one of its arguments) conflict. Return false
1080 otherwise, also when we simply aren't sure. */
1081
1082 static bool
1083 trivially_conflicts_p (basic_block bb, tree result, tree arg)
1084 {
1085 use_operand_p use;
1086 imm_use_iterator imm_iter;
1087 gimple *defa = SSA_NAME_DEF_STMT (arg);
1088
1089 /* If ARG isn't defined in the same block it's too complicated for
1090 our little mind. */
1091 if (gimple_bb (defa) != bb)
1092 return false;
1093
1094 FOR_EACH_IMM_USE_FAST (use, imm_iter, result)
1095 {
1096 gimple *use_stmt = USE_STMT (use);
1097 if (is_gimple_debug (use_stmt))
1098 continue;
1099 /* Now, if there's a use of RESULT that lies outside this basic block,
1100 then there surely is a conflict with ARG. */
1101 if (gimple_bb (use_stmt) != bb)
1102 return true;
1103 if (gimple_code (use_stmt) == GIMPLE_PHI)
1104 continue;
1105 /* The use now is in a real stmt of BB, so if ARG was defined
1106 in a PHI node (like RESULT) both conflict. */
1107 if (gimple_code (defa) == GIMPLE_PHI)
1108 return true;
1109 maybe_renumber_stmts_bb (bb);
1110 /* If the use of RESULT occurs after the definition of ARG,
1111 the two conflict too. */
1112 if (gimple_uid (defa) < gimple_uid (use_stmt))
1113 return true;
1114 }
1115
1116 return false;
1117 }
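/* An illustrative conflict (hypothetical GIMPLE, single-block loop):

       i_1 = PHI <0(preheader), i_7(backedge)>
       i_7 = i_1 + 1;
       foo (i_1);

   RESULT (i_1) is still used after ARG (i_7) has been defined in the
   same block, so the two names are live simultaneously and cannot be
   coalesced; the function detects this from the statement UIDs.  */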
1118
1119
1120 /* Search every PHI node for arguments associated with backedges which
1121 we can trivially determine will need a copy (the argument is either
 1122    not an SSA_NAME, or the argument can be shown to conflict with
 1123    the PHI result).
1124
1125 Insert a copy from the PHI argument to a new destination at the
1126 end of the block with the backedge to the top of the loop. Update
1127 the PHI argument to reference this new destination. */
1128
1129 static void
1130 insert_backedge_copies (void)
1131 {
1132 basic_block bb;
1133 gphi_iterator gsi;
1134
1135 mark_dfs_back_edges ();
1136
1137 FOR_EACH_BB_FN (bb, cfun)
1138 {
1139 /* Mark block as possibly needing calculation of UIDs. */
1140 bb->aux = &bb->aux;
1141
1142 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1143 {
1144 gphi *phi = gsi.phi ();
1145 tree result = gimple_phi_result (phi);
1146 size_t i;
1147
1148 if (virtual_operand_p (result))
1149 continue;
1150
1151 for (i = 0; i < gimple_phi_num_args (phi); i++)
1152 {
1153 tree arg = gimple_phi_arg_def (phi, i);
1154 edge e = gimple_phi_arg_edge (phi, i);
1155 /* We are only interested in copies emitted on critical
1156 backedges. */
1157 if (!(e->flags & EDGE_DFS_BACK)
1158 || !EDGE_CRITICAL_P (e))
1159 continue;
1160
1161 /* If the argument is not an SSA_NAME, then we will need a
1162 constant initialization. If the argument is an SSA_NAME then
1163 a copy statement may be needed. First handle the case
1164 where we cannot insert before the argument definition. */
1165 if (TREE_CODE (arg) != SSA_NAME
1166 || (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_PHI
1167 && trivially_conflicts_p (bb, result, arg)))
1168 {
1169 tree name;
1170 gassign *stmt;
1171 gimple *last = NULL;
1172 gimple_stmt_iterator gsi2;
1173
1174 gsi2 = gsi_last_bb (gimple_phi_arg_edge (phi, i)->src);
1175 if (!gsi_end_p (gsi2))
1176 last = gsi_stmt (gsi2);
1177
1178 /* In theory the only way we ought to get back to the
1179 start of a loop should be with a COND_EXPR or GOTO_EXPR.
1180 However, better safe than sorry.
1181 If the block ends with a control statement or
1182 something that might throw, then we have to
1183 insert this assignment before the last
1184 statement. Else insert it after the last statement. */
1185 if (last && stmt_ends_bb_p (last))
1186 {
1187 /* If the last statement in the block is the definition
1188 site of the PHI argument, then we can't insert
1189 anything after it. */
1190 if (TREE_CODE (arg) == SSA_NAME
1191 && SSA_NAME_DEF_STMT (arg) == last)
1192 continue;
1193 }
1194
1195 /* Create a new instance of the underlying variable of the
1196 PHI result. */
1197 name = copy_ssa_name (result);
1198 stmt = gimple_build_assign (name,
1199 gimple_phi_arg_def (phi, i));
1200
1201 /* copy location if present. */
1202 if (gimple_phi_arg_has_location (phi, i))
1203 gimple_set_location (stmt,
1204 gimple_phi_arg_location (phi, i));
1205
1206 /* Insert the new statement into the block and update
1207 the PHI node. */
1208 if (last && stmt_ends_bb_p (last))
1209 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
1210 else
1211 gsi_insert_after (&gsi2, stmt, GSI_NEW_STMT);
1212 SET_PHI_ARG_DEF (phi, i, name);
1213 }
1214 /* Insert a copy before the definition of the backedge value
1215 and adjust all conflicting uses. */
1216 else if (trivially_conflicts_p (bb, result, arg))
1217 {
1218 gimple *def = SSA_NAME_DEF_STMT (arg);
1219 if (gimple_nop_p (def)
1220 || gimple_code (def) == GIMPLE_PHI)
1221 continue;
1222 tree name = copy_ssa_name (result);
1223 gimple *stmt = gimple_build_assign (name, result);
1224 imm_use_iterator imm_iter;
1225 gimple *use_stmt;
1226 /* The following matches trivially_conflicts_p. */
1227 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, result)
1228 {
1229 if (gimple_bb (use_stmt) != bb
1230 || (gimple_code (use_stmt) != GIMPLE_PHI
1231 && (maybe_renumber_stmts_bb (bb), true)
1232 && gimple_uid (use_stmt) > gimple_uid (def)))
1233 {
1234 use_operand_p use;
1235 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1236 SET_USE (use, name);
1237 }
1238 }
1239 gimple_stmt_iterator gsi = gsi_for_stmt (def);
1240 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
1241 }
1242 }
1243 }
1244
1245 /* Unmark this block again. */
1246 bb->aux = NULL;
1247 }
1248 }
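/* A hedged sketch of the transformation above (hypothetical GIMPLE): a
   constant argument on a critical backedge, as in

       i_1 = PHI <0(entry), 5(backedge)>

   would otherwise force a copy onto the backedge itself, requiring the
   edge to be split.  Instead a fresh name is created, the assignment
   i_8 = 5 is placed at the end of the source block of the backedge
   (before its final control statement), and the PHI becomes

       i_1 = PHI <0(entry), i_8(backedge)>

   so the copy lands inside the loop and the backedge stays unsplit.  */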
1249
1250 /* Free all memory associated with going out of SSA form. SA is
 1251    the out-of-SSA info object.  */
1252
1253 void
1254 finish_out_of_ssa (struct ssaexpand *sa)
1255 {
1256 free (sa->partition_to_pseudo);
1257 if (sa->values)
1258 BITMAP_FREE (sa->values);
1259 delete_var_map (sa->map);
1260 BITMAP_FREE (sa->partitions_for_parm_default_defs);
1261 BITMAP_FREE (sa->partitions_for_undefined_values);
1262 memset (sa, 0, sizeof *sa);
1263 }
1264
1265 /* Take the current function out of SSA form, translating PHIs as described in
1266 R. Morgan, ``Building an Optimizing Compiler'',
1267 Butterworth-Heinemann, Boston, MA, 1998. pp 176-186. */
1268
1269 unsigned int
1270 rewrite_out_of_ssa (struct ssaexpand *sa)
1271 {
1272 /* If elimination of a PHI requires inserting a copy on a backedge,
1273 then we will have to split the backedge which has numerous
1274 undesirable performance effects.
1275
1276 A significant number of such cases can be handled here by inserting
1277 copies into the loop itself. */
1278 insert_backedge_copies ();
1279
1280
1281 /* Eliminate PHIs which are of no use, such as virtual or dead phis. */
1282 eliminate_useless_phis ();
1283
1284 if (dump_file && (dump_flags & TDF_DETAILS))
1285 gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);
1286
1287 remove_ssa_form (flag_tree_ter, sa);
1288
1289 if (dump_file && (dump_flags & TDF_DETAILS))
1290 gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);
1291
1292 return 0;
1293 }