]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cfganal.c
In PR70010, a function is marked with target(no-vsx) to disable VSX code
[thirdparty/gcc.git] / gcc / cfganal.c
CommitLineData
65f34de5 1/* Control flow graph analysis code for GNU compiler.
fbd26352 2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
65f34de5 3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
65f34de5 9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
65f34de5 19
20/* This file contains various simple utilities to analyze the CFG. */
4a020a8c 21
65f34de5 22#include "config.h"
23#include "system.h"
805e22b2 24#include "coretypes.h"
9ef16211 25#include "backend.h"
7c29e30e 26#include "cfghooks.h"
9858d888 27#include "timevar.h"
7c29e30e 28#include "cfganal.h"
f52a3ef6 29#include "cfgloop.h"
65f34de5 30
namespace {
/* Store the data structures necessary for depth-first search.
   The member functions are defined later in this file; see
   connect_infinite_loops_to_exit for the intended usage pattern
   (add_bb to seed, execute to traverse).  */
class depth_first_search
{
public:
  depth_first_search ();

  /* NOTE(review): definitions are out of view here; from the caller in
     connect_infinite_loops_to_exit, execute appears to return the next
     block not reached by the search (or NULL), and add_bb seeds the
     search — confirm against the definitions.  */
  basic_block execute (basic_block);
  void add_bb (basic_block);

private:
  /* stack for backtracking during the algorithm */
  auto_vec<basic_block, 20> m_stack;

  /* record of basic blocks already seen by depth-first search */
  auto_sbitmap m_visited_blocks;
};
}
65f34de5 49\f
/* Mark the back edges in DFS traversal.
   Return nonzero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from pre_and_rev_post_order_compute.  */

bool
mark_dfs_back_edges (void)
{
  int *pre;
  int *post;
  int prenum = 1;
  int postnum = 1;
  bool found = false;

  /* Allocate the preorder and postorder number arrays, zero-initialized
     so that a postorder number of 0 means "not yet finished".  */
  pre = XCNEWVEC (int, last_basic_block_for_fn (cfun));
  post = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Allocate stack for back-tracking up CFG.  Each entry is an edge
     iterator positioned in some block's successor vector.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  Clear any stale
	 back-edge marking; it is re-established below if warranted.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && ! bitmap_bit_p (visited,
								  dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (EDGE_COUNT (dest->succs) > 0)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack.quick_push (ei_start (dest->succs));
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  /* DEST was already visited.  The edge is a back edge when DEST
	     is an ancestor of SRC in the DFS tree: it was entered before
	     SRC (pre number not larger) and is not yet finished (post
	     number still 0).  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    ei_edge (ei)->flags |= EDGE_DFS_BACK, found = true;

	  /* When leaving the last successor edge of SRC, SRC is done.  */
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    post[src->index] = postnum++;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  free (pre);
  free (post);

  return found;
}
137
65f34de5 138/* Find unreachable blocks. An unreachable block will have 0 in
d10cfa8d 139 the reachable bit in block->flags. A nonzero value indicates the
65f34de5 140 block is reachable. */
141
142void
4c9e08a4 143find_unreachable_blocks (void)
65f34de5 144{
145 edge e;
cd665a06 146 edge_iterator ei;
4c26117a 147 basic_block *tos, *worklist, bb;
65f34de5 148
a28770e1 149 tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
65f34de5 150
151 /* Clear all the reachability flags. */
152
fc00614f 153 FOR_EACH_BB_FN (bb, cfun)
4c26117a 154 bb->flags &= ~BB_REACHABLE;
65f34de5 155
156 /* Add our starting points to the worklist. Almost always there will
4a82352a 157 be only one. It isn't inconceivable that we might one day directly
65f34de5 158 support Fortran alternate entry points. */
159
34154e27 160 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
65f34de5 161 {
162 *tos++ = e->dest;
163
164 /* Mark the block reachable. */
165 e->dest->flags |= BB_REACHABLE;
166 }
167
168 /* Iterate: find everything reachable from what we've already seen. */
169
170 while (tos != worklist)
171 {
172 basic_block b = *--tos;
173
cd665a06 174 FOR_EACH_EDGE (e, ei, b->succs)
045fbdee 175 {
176 basic_block dest = e->dest;
177
178 if (!(dest->flags & BB_REACHABLE))
179 {
180 *tos++ = dest;
181 dest->flags |= BB_REACHABLE;
182 }
183 }
65f34de5 184 }
185
186 free (worklist);
187}
5f80a2b0 188
189/* Verify that there are no unreachable blocks in the current function. */
190
191void
192verify_no_unreachable_blocks (void)
193{
194 find_unreachable_blocks ();
195
196 basic_block bb;
197 FOR_EACH_BB_FN (bb, cfun)
198 gcc_assert ((bb->flags & BB_REACHABLE) != 0);
199}
200
65f34de5 201\f
202/* Functions to access an edge list with a vector representation.
203 Enough data is kept such that given an index number, the
204 pred and succ that edge represents can be determined, or
205 given a pred and a succ, its index number can be returned.
206 This allows algorithms which consume a lot of memory to
207 represent the normally full matrix of edge (pred,succ) with a
208 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
209 wasted space in the client code due to sparse flow graphs. */
210
211/* This functions initializes the edge list. Basically the entire
212 flowgraph is processed, and all edges are assigned a number,
213 and the data structure is filled in. */
214
215struct edge_list *
4c9e08a4 216create_edge_list (void)
65f34de5 217{
218 struct edge_list *elist;
219 edge e;
220 int num_edges;
4c26117a 221 basic_block bb;
cd665a06 222 edge_iterator ei;
65f34de5 223
65f34de5 224 /* Determine the number of edges in the flow graph by counting successor
225 edges on each basic block. */
4a020a8c 226 num_edges = 0;
34154e27 227 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
228 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
65f34de5 229 {
cd665a06 230 num_edges += EDGE_COUNT (bb->succs);
65f34de5 231 }
e4fc8aad 232
4c36ffe6 233 elist = XNEW (struct edge_list);
65f34de5 234 elist->num_edges = num_edges;
4c36ffe6 235 elist->index_to_edge = XNEWVEC (edge, num_edges);
65f34de5 236
237 num_edges = 0;
238
4c26117a 239 /* Follow successors of blocks, and register these edges. */
34154e27 240 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
241 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
cd665a06 242 FOR_EACH_EDGE (e, ei, bb->succs)
4c26117a 243 elist->index_to_edge[num_edges++] = e;
e4fc8aad 244
65f34de5 245 return elist;
246}
247
248/* This function free's memory associated with an edge list. */
249
250void
4c9e08a4 251free_edge_list (struct edge_list *elist)
65f34de5 252{
253 if (elist)
254 {
255 free (elist->index_to_edge);
256 free (elist);
257 }
258}
259
260/* This function provides debug output showing an edge list. */
261
4b987fac 262DEBUG_FUNCTION void
4c9e08a4 263print_edge_list (FILE *f, struct edge_list *elist)
65f34de5 264{
265 int x;
e4fc8aad 266
65f34de5 267 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
a28770e1 268 n_basic_blocks_for_fn (cfun), elist->num_edges);
65f34de5 269
270 for (x = 0; x < elist->num_edges; x++)
271 {
272 fprintf (f, " %-4d - edge(", x);
34154e27 273 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
65f34de5 274 fprintf (f, "entry,");
275 else
b3d6de89 276 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
65f34de5 277
34154e27 278 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR_FOR_FN (cfun))
65f34de5 279 fprintf (f, "exit)\n");
280 else
b3d6de89 281 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
65f34de5 282 }
283}
284
/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

DEBUG_FUNCTION void
verify_edge_list (FILE *f, struct edge_list *elist)
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;
  edge_iterator ei;

  /* Pass 1: every CFG edge must have an index, and the list's idea of
     the edge's endpoints must match the edge itself.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* We've verified that all the edges are in the list, now lets make sure
     there are no spurious edges in the list.  This is an expensive check!
     Pass 2: for every (p, s) block pair, the list records an index iff
     an actual edge connects them.  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
      {
	int found_edge = 0;

	FOR_EACH_EDGE (e, ei, p->succs)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	FOR_EACH_EDGE (e, ei, s->preds)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, p, s)
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   p->index, s->index);
	if (EDGE_INDEX (elist, p, s)
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}
353
7ee4e8e8 354
355/* Functions to compute control dependences. */
356
357/* Indicate block BB is control dependent on an edge with index EDGE_INDEX. */
358void
359control_dependences::set_control_dependence_map_bit (basic_block bb,
360 int edge_index)
361{
34154e27 362 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
7ee4e8e8 363 return;
34154e27 364 gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
7ee4e8e8 365 bitmap_set_bit (control_dependence_map[bb->index], edge_index);
366}
367
368/* Clear all control dependences for block BB. */
369void
370control_dependences::clear_control_dependence_bitmap (basic_block bb)
371{
372 bitmap_clear (control_dependence_map[bb->index]);
373}
374
375/* Find the immediate postdominator PDOM of the specified basic block BLOCK.
376 This function is necessary because some blocks have negative numbers. */
377
378static inline basic_block
379find_pdom (basic_block block)
380{
34154e27 381 gcc_assert (block != ENTRY_BLOCK_PTR_FOR_FN (cfun));
7ee4e8e8 382
34154e27 383 if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
384 return EXIT_BLOCK_PTR_FOR_FN (cfun);
7ee4e8e8 385 else
386 {
387 basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
388 if (! bb)
34154e27 389 return EXIT_BLOCK_PTR_FOR_FN (cfun);
7ee4e8e8 390 return bb;
391 }
392}
393
394/* Determine all blocks' control dependences on the given edge with edge_list
395 EL index EDGE_INDEX, ala Morgan, Section 3.6. */
396
397void
398control_dependences::find_control_dependence (int edge_index)
399{
400 basic_block current_block;
401 basic_block ending_block;
402
ce143ff0 403 gcc_assert (get_edge_src (edge_index) != EXIT_BLOCK_PTR_FOR_FN (cfun));
7ee4e8e8 404
ce143ff0 405 /* For abnormal edges, we don't make current_block control
406 dependent because instructions that throw are always necessary
407 anyway. */
408 edge e = find_edge (get_edge_src (edge_index), get_edge_dest (edge_index));
409 if (e->flags & EDGE_ABNORMAL)
410 return;
411
412 if (get_edge_src (edge_index) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
34154e27 413 ending_block = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
7ee4e8e8 414 else
ce143ff0 415 ending_block = find_pdom (get_edge_src (edge_index));
7ee4e8e8 416
ce143ff0 417 for (current_block = get_edge_dest (edge_index);
34154e27 418 current_block != ending_block
419 && current_block != EXIT_BLOCK_PTR_FOR_FN (cfun);
7ee4e8e8 420 current_block = find_pdom (current_block))
ce143ff0 421 set_control_dependence_map_bit (current_block, edge_index);
7ee4e8e8 422}
423
/* Record all blocks' control dependences on all edges in the edge
   list EL, ala Morgan, Section 3.6.  */

control_dependences::control_dependences ()
{
  timevar_push (TV_CONTROL_DEPENDENCES);

  /* Initialize the edge list.  First count the edges to size the
     vector exactly, then record each edge as a (src index, dest index)
     pair in the same block/successor order.  */
  int num_edges = 0;
  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    num_edges += EDGE_COUNT (bb->succs);
  m_el.create (num_edges);
  edge e;
  edge_iterator ei;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      m_el.quick_push (std::make_pair (e->src->index, e->dest->index));

  /* One bitmap of controlling edge indices per basic block, then fill
     them in one edge at a time.  */
  control_dependence_map.create (last_basic_block_for_fn (cfun));
  for (int i = 0; i < last_basic_block_for_fn (cfun); ++i)
    control_dependence_map.quick_push (BITMAP_ALLOC (NULL));
  for (int i = 0; i < num_edges; ++i)
    find_control_dependence (i);

  timevar_pop (TV_CONTROL_DEPENDENCES);
}
453
454/* Free control dependences and the associated edge list. */
455
456control_dependences::~control_dependences ()
457{
20f31620 458 for (unsigned i = 0; i < control_dependence_map.length (); ++i)
7ee4e8e8 459 BITMAP_FREE (control_dependence_map[i]);
460 control_dependence_map.release ();
ce143ff0 461 m_el.release ();
7ee4e8e8 462}
463
464/* Returns the bitmap of edges the basic-block I is dependent on. */
465
466bitmap
467control_dependences::get_edges_dependent_on (int i)
468{
469 return control_dependence_map[i];
470}
471
ce143ff0 472/* Returns the edge source with index I from the edge list. */
7ee4e8e8 473
ce143ff0 474basic_block
475control_dependences::get_edge_src (int i)
476{
477 return BASIC_BLOCK_FOR_FN (cfun, m_el[i].first);
478}
479
480/* Returns the edge destination with index I from the edge list. */
481
482basic_block
483control_dependences::get_edge_dest (int i)
7ee4e8e8 484{
ce143ff0 485 return BASIC_BLOCK_FOR_FN (cfun, m_el[i].second);
7ee4e8e8 486}
487
488
4ee9c684 489/* Given PRED and SUCC blocks, return the edge which connects the blocks.
490 If no such edge exists, return NULL. */
491
492edge
493find_edge (basic_block pred, basic_block succ)
494{
495 edge e;
cd665a06 496 edge_iterator ei;
4ee9c684 497
bd98a418 498 if (EDGE_COUNT (pred->succs) <= EDGE_COUNT (succ->preds))
499 {
500 FOR_EACH_EDGE (e, ei, pred->succs)
501 if (e->dest == succ)
502 return e;
503 }
504 else
505 {
506 FOR_EACH_EDGE (e, ei, succ->preds)
507 if (e->src == pred)
508 return e;
509 }
4ee9c684 510
511 return NULL;
512}
513
65f34de5 514/* This routine will determine what, if any, edge there is between
515 a specified predecessor and successor. */
516
517int
4c9e08a4 518find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ)
65f34de5 519{
520 int x;
e4fc8aad 521
65f34de5 522 for (x = 0; x < NUM_EDGES (edge_list); x++)
e4fc8aad 523 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
524 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
525 return x;
526
65f34de5 527 return (EDGE_INDEX_NO_EDGE);
528}
65f34de5 529\f
41d24834 530/* This routine will remove any fake predecessor edges for a basic block.
531 When the edge is removed, it is also removed from whatever successor
65f34de5 532 list it is in. */
533
534static void
41d24834 535remove_fake_predecessors (basic_block bb)
65f34de5 536{
537 edge e;
cd665a06 538 edge_iterator ei;
e4fc8aad 539
cd665a06 540 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
65f34de5 541 {
cd665a06 542 if ((e->flags & EDGE_FAKE) == EDGE_FAKE)
543 remove_edge (e);
544 else
545 ei_next (&ei);
65f34de5 546 }
547}
548
549/* This routine will remove all fake edges from the flow graph. If
550 we remove all fake successors, it will automatically remove all
551 fake predecessors. */
552
553void
4c9e08a4 554remove_fake_edges (void)
65f34de5 555{
4c26117a 556 basic_block bb;
65f34de5 557
34154e27 558 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
41d24834 559 remove_fake_predecessors (bb);
65f34de5 560}
561
41d24834 562/* This routine will remove all fake edges to the EXIT_BLOCK. */
563
564void
565remove_fake_exit_edges (void)
566{
34154e27 567 remove_fake_predecessors (EXIT_BLOCK_PTR_FOR_FN (cfun));
41d24834 568}
569
570
65f34de5 571/* This function will add a fake edge between any block which has no
572 successors, and the exit block. Some data flow equations require these
573 edges to exist. */
574
575void
4c9e08a4 576add_noreturn_fake_exit_edges (void)
65f34de5 577{
4c26117a 578 basic_block bb;
65f34de5 579
fc00614f 580 FOR_EACH_BB_FN (bb, cfun)
cd665a06 581 if (EDGE_COUNT (bb->succs) == 0)
34154e27 582 make_single_succ_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
65f34de5 583}
584
585/* This function adds a fake edge between any infinite loops to the
586 exit block. Some optimizations require a path from each node to
587 the exit node.
588
589 See also Morgan, Figure 3.10, pp. 82-83.
590
591 The current implementation is ugly, not attempting to minimize the
592 number of inserted fake edges. To reduce the number of fake edges
593 to insert, add fake edges from _innermost_ loops containing only
594 nodes not reachable from the exit block. */
595
596void
4c9e08a4 597connect_infinite_loops_to_exit (void)
65f34de5 598{
65f34de5 599 /* Perform depth-first search in the reverse graph to find nodes
600 reachable from the exit block. */
6a4bbca7 601 depth_first_search dfs;
602 dfs.add_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
65f34de5 603
604 /* Repeatedly add fake edges, updating the unreachable nodes. */
6a4bbca7 605 basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
65f34de5 606 while (1)
607 {
6a4bbca7 608 unvisited_block = dfs.execute (unvisited_block);
65f34de5 609 if (!unvisited_block)
610 break;
e4fc8aad 611
6a4bbca7 612 basic_block deadend_block = dfs_find_deadend (unvisited_block);
720cfc43 613 edge e = make_edge (deadend_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
614 EDGE_FAKE);
720cfc43 615 e->probability = profile_probability::never ();
6a4bbca7 616 dfs.add_bb (deadend_block);
65f34de5 617 }
65f34de5 618}
619\f
/* Compute reverse top sort order.  This is computing a post order
   numbering of the graph.  If INCLUDE_ENTRY_EXIT is true, then
   ENTRY_BLOCK and EXIT_BLOCK are included.  If DELETE_UNREACHABLE is
   true, unreachable blocks are deleted.  Returns the number of entries
   written to POST_ORDER.  */

int
post_order_compute (int *post_order, bool include_entry_exit,
		    bool delete_unreachable)
{
  int post_order_num = 0;
  int count;

  /* EXIT comes first in a post order numbering.  */
  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && ! bitmap_bit_p (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack.quick_push (ei_start (dest->succs));
	  else
	    /* Leaf block: it is finished as soon as it is entered.  */
	    post_order[post_order_num++] = dest->index;
	}
      else
	{
	  /* SRC is finished once its last successor edge is processed.  */
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    post_order[post_order_num++] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  /* COUNT is the total number of blocks accounted for, including the
     two fixed blocks even when they were not written out.  */
  if (include_entry_exit)
    {
      post_order[post_order_num++] = ENTRY_BLOCK;
      count = post_order_num;
    }
  else
    count = post_order_num + 2;

  /* Delete the unreachable blocks if some were found and we are
     supposed to do it.  A visit count below n_basic_blocks means some
     blocks were never reached.  */
  if (delete_unreachable && (count != n_basic_blocks_for_fn (cfun)))
    {
      basic_block b;
      basic_block next_bb;
      for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
	   != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
	{
	  /* Remember the successor before B is possibly deleted.  */
	  next_bb = b->next_bb;

	  if (!(bitmap_bit_p (visited, b->index)))
	    delete_basic_block (b);
	}

      tidy_fallthru_edges ();
    }

  return post_order_num;
}
712
713
/* Helper routine for inverted_post_order_compute
   flow_dfs_compute_reverse_execute, and the reverse-CFG
   deapth first search in dominance.c.
   BB has to belong to a region of CFG
   unreachable by inverted traversal from the exit.
   i.e. there's no control flow path from ENTRY to EXIT
   that contains this BB.
   This can happen in two cases - if there's an infinite loop
   or if there's a block that has no successor
   (call to a function with no return).
   Some RTL passes deal with this condition by
   calling connect_infinite_loops_to_exit () and/or
   add_noreturn_fake_exit_edges ().
   However, those methods involve modifying the CFG itself
   which may not be desirable.
   Hence, we deal with the infinite loop/no return cases
   by identifying a unique basic block that can reach all blocks
   in such a region by inverted traversal.
   This function returns a basic block that guarantees
   that all blocks in the region are reachable
   by starting an inverted traversal from the returned block.  */

basic_block
dfs_find_deadend (basic_block bb)
{
  auto_bitmap visited;
  basic_block next = bb;

  for (;;)
    {
      /* A block without successors is a dead end by definition.  */
      if (EDGE_COUNT (next->succs) == 0)
	return next;

      /* bitmap_set_bit returns false when the bit was already set:
	 we have walked into a cycle, so settle for the original BB.  */
      if (! bitmap_set_bit (visited, next->index))
	return bb;

      bb = next;
      /* If we are in an analyzed cycle make sure to try exiting it.
	 Note this is a heuristic only and expected to work when loop
	 fixup is needed as well.  */
      if (! bb->loop_father
	  || ! loop_outer (bb->loop_father))
	next = EDGE_SUCC (bb, 0)->dest;
      else
	{
	  edge_iterator ei;
	  edge e;
	  /* E is NULL after the loop when no exit edge was found.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (loop_exit_edge_p (bb->loop_father, e))
	      break;
	  next = e ? e->dest : EDGE_SUCC (bb, 0)->dest;
	}
    }

  gcc_unreachable ();
}
770
771
/* Compute the reverse top sort order of the inverted CFG
   i.e. starting from the exit block and following the edges backward
   (from successors to predecessors).
   This ordering can be used for forward dataflow problems among others.

   Optionally if START_POINTS is specified, start from exit block and all
   basic blocks in START_POINTS.  This is used by CD-DCE.

   This function assumes that all blocks in the CFG are reachable
   from the ENTRY (but not necessarily from EXIT).

   If there's an infinite loop,
   a simple inverted traversal starting from the blocks
   with no successors can't visit all blocks.
   To solve this problem, we first do inverted traversal
   starting from the blocks with no successor.
   And if there's any block left that's not visited by the regular
   inverted traversal from EXIT,
   those blocks are in such problematic region.
   Among those, we find one block that has
   any visited predecessor (which is an entry into such a region),
   and start looking for a "dead end" from that block
   and do another inverted traversal from that block.  */

void
inverted_post_order_compute (vec<int> *post_order,
			     sbitmap *start_points)
{
  basic_block bb;
  post_order->reserve_exact (n_basic_blocks_for_fn (cfun));

  if (flag_checking)
    verify_no_unreachable_blocks ();

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  if (start_points)
    {
      /* Seed from the requested start points (those that have
	 predecessors to walk), plus EXIT itself.  */
      FOR_ALL_BB_FN (bb, cfun)
	if (bitmap_bit_p (*start_points, bb->index)
	    && EDGE_COUNT (bb->preds) > 0)
	  {
	    stack.quick_push (ei_start (bb->preds));
	    bitmap_set_bit (visited, bb->index);
	  }
      if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
	{
	  stack.quick_push (ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds));
	  bitmap_set_bit (visited, EXIT_BLOCK_PTR_FOR_FN (cfun)->index);
	}
    }
  else
    /* Put all blocks that have no successor into the initial work list.  */
    FOR_ALL_BB_FN (bb, cfun)
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  /* Push the initial edge on to the stack.  */
	  if (EDGE_COUNT (bb->preds) > 0)
	    {
	      stack.quick_push (ei_start (bb->preds));
	      bitmap_set_bit (visited, bb->index);
	    }
	}

  do
    {
      bool has_unvisited_bb = false;

      /* The inverted traversal loop.  */
      while (!stack.is_empty ())
	{
	  edge_iterator ei;
	  basic_block pred;

	  /* Look at the edge on the top of the stack.  Note the roles
	     are swapped relative to a forward DFS: we descend through
	     predecessors.  */
	  ei = stack.last ();
	  bb = ei_edge (ei)->dest;
	  pred = ei_edge (ei)->src;

	  /* Check if the predecessor has been visited yet.  */
	  if (! bitmap_bit_p (visited, pred->index))
	    {
	      /* Mark that we have visited the destination.  */
	      bitmap_set_bit (visited, pred->index);

	      if (EDGE_COUNT (pred->preds) > 0)
		/* Since the predecessor node has been visited for the first
		   time, check its predecessors.  */
		stack.quick_push (ei_start (pred->preds));
	      else
		post_order->quick_push (pred->index);
	    }
	  else
	    {
	      /* BB is finished when its last predecessor edge has been
		 examined; EXIT is deliberately deferred to the end.  */
	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && ei_one_before_end_p (ei))
		post_order->quick_push (bb->index);

	      if (!ei_one_before_end_p (ei))
		ei_next (&stack.last ());
	      else
		stack.pop ();
	    }
	}

      /* Detect any infinite loop and activate the kludge.
	 Note that this doesn't check EXIT_BLOCK itself
	 since EXIT_BLOCK is always added after the outer do-while loop.  */
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		      EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
	if (!bitmap_bit_p (visited, bb->index))
	  {
	    has_unvisited_bb = true;

	    if (EDGE_COUNT (bb->preds) > 0)
	      {
		edge_iterator ei;
		edge e;
		basic_block visited_pred = NULL;

		/* Find an already visited predecessor.  */
		FOR_EACH_EDGE (e, ei, bb->preds)
		  {
		    if (bitmap_bit_p (visited, e->src->index))
		      visited_pred = e->src;
		  }

		if (visited_pred)
		  {
		    /* BB is an entry into an unvisited region; restart
		       the traversal from a dead end of that region.  */
		    basic_block be = dfs_find_deadend (bb);
		    gcc_assert (be != NULL);
		    bitmap_set_bit (visited, be->index);
		    stack.quick_push (ei_start (be->preds));
		    break;
		  }
	      }
	  }

      if (has_unvisited_bb && stack.is_empty ())
	{
	  /* No blocks are reachable from EXIT at all.
	     Find a dead-end from the ENTRY, and restart the iteration.  */
	  basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR_FOR_FN (cfun));
	  gcc_assert (be != NULL);
	  bitmap_set_bit (visited, be->index);
	  stack.quick_push (ei_start (be->preds));
	}

      /* The only case the below while fires is
	 when there's an infinite loop.  */
    }
  while (!stack.is_empty ());

  /* EXIT_BLOCK is always included.  */
  post_order->quick_push (EXIT_BLOCK);
}
935
f484312f 936/* Compute the depth first search order of FN and store in the array
937 PRE_ORDER if nonzero. If REV_POST_ORDER is nonzero, return the
938 reverse completion number for each node. Returns the number of nodes
939 visited. A depth first search tries to get as far away from the starting
940 point as quickly as possible.
6180f28d 941
f484312f 942 In case the function has unreachable blocks the number of nodes
943 visited does not include them.
944
945 pre_order is a really a preorder numbering of the graph.
946 rev_post_order is really a reverse postorder numbering of the graph. */
65f34de5 947
int
pre_and_rev_post_order_compute_fn (struct function *fn,
				   int *pre_order, int *rev_post_order,
				   bool include_entry_exit)
{
  int pre_order_num = 0;
  /* The reverse postorder array is filled from the back towards the
     front as blocks are completed.  */
  int rev_post_order_num = n_basic_blocks_for_fn (fn) - 1;

  /* Allocate stack for back-tracking up CFG.  One iterator per
     partially-explored block, plus one for the entry block.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (fn) + 1);

  if (include_entry_exit)
    {
      if (pre_order)
	pre_order[pre_order_num] = ENTRY_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
    }
  else
    /* ENTRY and EXIT are not numbered, so the last used slot moves
       towards the front by the number of fixed blocks.  */
    rev_post_order_num -= NUM_FIXED_BLOCKS;

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (fn));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (fn)
	  && ! bitmap_bit_p (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  if (pre_order)
	    pre_order[pre_order_num] = dest->index;

	  pre_order_num++;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack.quick_push (ei_start (dest->succs));
	  else if (rev_post_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = dest->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (fn)
	      && rev_post_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = src->index;

	  /* Advance to the next outgoing edge of SRC, or pop SRC when
	     its last edge has been processed.  */
	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  if (include_entry_exit)
    {
      if (pre_order)
	pre_order[pre_order_num] = EXIT_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
    }

  return pre_order_num;
}
1037
1038/* Like pre_and_rev_post_order_compute_fn but operating on the
1039 current function and asserting that all nodes were visited. */
1040
1041int
1042pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
1043 bool include_entry_exit)
1044{
1045 int pre_order_num
1046 = pre_and_rev_post_order_compute_fn (cfun, pre_order, rev_post_order,
1047 include_entry_exit);
1048 if (include_entry_exit)
1049 /* The number of nodes visited should be the number of blocks. */
a28770e1 1050 gcc_assert (pre_order_num == n_basic_blocks_for_fn (cfun));
6180f28d 1051 else
1052 /* The number of nodes visited should be the number of blocks minus
1053 the entry and exit blocks which are not visited here. */
a28770e1 1054 gcc_assert (pre_order_num
1055 == (n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS));
e4fc8aad 1056
6180f28d 1057 return pre_order_num;
65f34de5 1058}
1059
51e85e64 1060/* Unlike pre_and_rev_post_order_compute we fill rev_post_order backwards
1061 so iterating in RPO order needs to start with rev_post_order[n - 1]
1062 going to rev_post_order[0]. If FOR_ITERATION is true then try to
1063 make CFG cycles fit into small contiguous regions of the RPO order.
1064 When FOR_ITERATION is true this requires up-to-date loop structures. */
1065
int
rev_post_order_and_mark_dfs_back_seme (struct function *fn, edge entry,
				       bitmap exit_bbs, bool for_iteration,
				       int *rev_post_order)
{
  int pre_order_num = 0;
  int rev_post_order_num = 0;

  /* Allocate stack for back-tracking up CFG.  Worst case we need
     O(n^2) edges but the following should suffice in practice without
     a need to re-allocate.  */
  auto_vec<edge, 20> stack (2 * n_basic_blocks_for_fn (fn));

  /* PRE and POST share one allocation; XDELETEVEC (pre) below frees
     both halves.  */
  int *pre = XNEWVEC (int, 2 * last_basic_block_for_fn (fn));
  int *post = pre + last_basic_block_for_fn (fn);

  /* BB flag to track nodes that have been visited.  */
  auto_bb_flag visited (fn);
  /* BB flag to track which nodes have post[] assigned to avoid
     zeroing post.  NOTE(review): post[] itself is only ever written
     in this function, never read -- presumably kept for symmetry
     with pre[]; confirm before removing.  */
  auto_bb_flag post_assigned (fn);

  /* Push the first edge on to the stack.  */
  stack.quick_push (entry);

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      int idx = stack.length () - 1;
      edge e = stack[idx];
      src = e->src;
      dest = e->dest;
      /* Clear any stale back-edge marking; it is re-established below
	 if the edge really is a back edge in this walk.  */
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (! bitmap_bit_p (exit_bbs, dest->index)
	  && ! (dest->flags & visited))
	{
	  /* Mark that we have visited the destination.  */
	  dest->flags |= visited;

	  pre[dest->index] = pre_order_num++;

	  if (EDGE_COUNT (dest->succs) > 0)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      /* Push the edge vector in reverse to match previous behavior.  */
	      stack.reserve (EDGE_COUNT (dest->succs));
	      for (int i = EDGE_COUNT (dest->succs) - 1; i >= 0; --i)
		stack.quick_push (EDGE_SUCC (dest, i));
	      /* For iteration orders, visit the loop exit last so the
		 loop body forms a contiguous RPO region.  */
	      /* Generalize to handle more successors?  */
	      if (for_iteration
		  && EDGE_COUNT (dest->succs) == 2)
		{
		  edge &e1 = stack[stack.length () - 2];
		  if (loop_exit_edge_p (e1->src->loop_father, e1))
		    std::swap (e1, stack.last ());
		}
	    }
	  else
	    {
	      /* There are no successors for the DEST node so assign
		 its reverse completion number.  */
	      post[dest->index] = rev_post_order_num;
	      dest->flags |= post_assigned;
	      rev_post_order[rev_post_order_num] = dest->index;
	      rev_post_order_num++;
	    }
	}
      else
	{
	  /* A back edge targets a visited but not yet completed block
	     (DFS ancestor): earlier preorder number, no postorder yet.  */
	  if (dest->flags & visited
	      && src != entry->src
	      && pre[src->index] >= pre[dest->index]
	      && !(dest->flags & post_assigned))
	    e->flags |= EDGE_DFS_BACK;

	  /* The stack groups all edges of one source contiguously, so
	     SRC is finished exactly when the next stack entry has a
	     different source.  */
	  if (idx != 0 && stack[idx - 1]->src != src)
	    {
	      /* There are no more successors for the SRC node
		 so assign its reverse completion number.  */
	      post[src->index] = rev_post_order_num;
	      src->flags |= post_assigned;
	      rev_post_order[rev_post_order_num] = src->index;
	      rev_post_order_num++;
	    }

	  stack.pop ();
	}
    }

  XDELETEVEC (pre);

  /* Clear the temporarily allocated flags.  */
  for (int i = 0; i < rev_post_order_num; ++i)
    BASIC_BLOCK_FOR_FN (fn, rev_post_order[i])->flags
      &= ~(post_assigned|visited);

  return rev_post_order_num;
}
1170
1171
1172
65f34de5 1173/* Compute the depth first search order on the _reverse_ graph and
51e85e64 1174 store it in the array DFS_ORDER, marking the nodes visited in VISITED.
65f34de5 1175 Returns the number of nodes visited.
1176
1177 The computation is split into three pieces:
1178
1179 flow_dfs_compute_reverse_init () creates the necessary data
1180 structures.
1181
1182 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
1183 structures. The block will start the search.
1184
1185 flow_dfs_compute_reverse_execute () continues (or starts) the
1186 search using the block on the top of the stack, stopping when the
1187 stack is empty.
1188
1189 flow_dfs_compute_reverse_finish () destroys the necessary data
1190 structures.
1191
1192 Thus, the user will probably call ..._init(), call ..._add_bb() to
1193 add a beginning basic block to the stack, call ..._execute(),
1194 possibly add another bb to the stack and again call ..._execute(),
1195 ..., and finally call _finish(). */
1196
/* Initialize the data structures used for depth-first search on the
   reverse graph.  The stack starts empty and no block is marked as
   visited; the caller seeds the walk with add_bb () before calling
   execute ().  */

depth_first_search::depth_first_search () :
  m_stack (n_basic_blocks_for_fn (cfun)),
  m_visited_blocks (last_basic_block_for_fn (cfun))
{
  bitmap_clear (m_visited_blocks);
}
1209
/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  The block is marked visited immediately so it can never be
   pushed a second time.  */

void
depth_first_search::add_bb (basic_block bb)
{
  m_stack.quick_push (bb);
  bitmap_set_bit (m_visited_blocks, bb->index);
}
1220
e4fc8aad 1221/* Continue the depth-first search through the reverse graph starting with the
1222 block at the stack's top and ending when the stack is empty. Visited nodes
1223 are marked. Returns an unvisited basic block, or NULL if there is none
1224 available. */
65f34de5 1225
6a4bbca7 1226basic_block
1227depth_first_search::execute (basic_block last_unvisited)
65f34de5 1228{
1229 basic_block bb;
1230 edge e;
cd665a06 1231 edge_iterator ei;
65f34de5 1232
6a4bbca7 1233 while (!m_stack.is_empty ())
65f34de5 1234 {
6a4bbca7 1235 bb = m_stack.pop ();
e4fc8aad 1236
0ddab289 1237 /* Perform depth-first search on adjacent vertices. */
cd665a06 1238 FOR_EACH_EDGE (e, ei, bb->preds)
6a4bbca7 1239 if (!bitmap_bit_p (m_visited_blocks, e->src->index))
1240 add_bb (e->src);
65f34de5 1241 }
1242
1243 /* Determine if there are unvisited basic blocks. */
23652f37 1244 FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
6a4bbca7 1245 if (!bitmap_bit_p (m_visited_blocks, bb->index))
92b0c449 1246 return bb;
e4fc8aad 1247
65f34de5 1248 return NULL;
1249}
1250
7fb12188 1251/* Performs dfs search from BB over vertices satisfying PREDICATE;
1252 if REVERSE, go against direction of edges. Returns number of blocks
1253 found and their list in RSLT. RSLT can contain at most RSLT_MAX items. */
1254int
4c9e08a4 1255dfs_enumerate_from (basic_block bb, int reverse,
7ecb5bb2 1256 bool (*predicate) (const_basic_block, const void *),
1257 basic_block *rslt, int rslt_max, const void *data)
7fb12188 1258{
1259 basic_block *st, lbb;
1260 int sp = 0, tv = 0;
0f69b266 1261
2515797e 1262 auto_bb_flag visited (cfun);
0f69b266 1263
2515797e 1264#define MARK_VISITED(BB) ((BB)->flags |= visited)
1265#define UNMARK_VISITED(BB) ((BB)->flags &= ~visited)
1266#define VISITED_P(BB) (((BB)->flags & visited) != 0)
7fb12188 1267
ed7e2206 1268 st = XNEWVEC (basic_block, rslt_max);
7fb12188 1269 rslt[tv++] = st[sp++] = bb;
0f69b266 1270 MARK_VISITED (bb);
7fb12188 1271 while (sp)
1272 {
1273 edge e;
cd665a06 1274 edge_iterator ei;
7fb12188 1275 lbb = st[--sp];
1276 if (reverse)
a0c938f0 1277 {
cd665a06 1278 FOR_EACH_EDGE (e, ei, lbb->preds)
0f69b266 1279 if (!VISITED_P (e->src) && predicate (e->src, data))
7fb12188 1280 {
a0c938f0 1281 gcc_assert (tv != rslt_max);
1282 rslt[tv++] = st[sp++] = e->src;
1283 MARK_VISITED (e->src);
7fb12188 1284 }
a0c938f0 1285 }
7fb12188 1286 else
a0c938f0 1287 {
cd665a06 1288 FOR_EACH_EDGE (e, ei, lbb->succs)
0f69b266 1289 if (!VISITED_P (e->dest) && predicate (e->dest, data))
7fb12188 1290 {
a0c938f0 1291 gcc_assert (tv != rslt_max);
1292 rslt[tv++] = st[sp++] = e->dest;
1293 MARK_VISITED (e->dest);
7fb12188 1294 }
1295 }
1296 }
1297 free (st);
1298 for (sp = 0; sp < tv; sp++)
0f69b266 1299 UNMARK_VISITED (rslt[sp]);
7fb12188 1300 return tv;
0f69b266 1301#undef MARK_VISITED
1302#undef UNMARK_VISITED
1303#undef VISITED_P
7fb12188 1304}
9858d888 1305
1306
ddf88afa 1307/* Compute dominance frontiers, ala Harvey, Ferrante, et al.
a0c938f0 1308
ddf88afa 1309 This algorithm can be found in Timothy Harvey's PhD thesis, at
78a1d03f 1310 http://www.cs.rice.edu/~harv/dissertation.pdf in the section on iterative
ddf88afa 1311 dominance algorithms.
9858d888 1312
ddf88afa 1313 First, we identify each join point, j (any node with more than one
a0c938f0 1314 incoming edge is a join point).
9858d888 1315
ddf88afa 1316 We then examine each predecessor, p, of j and walk up the dominator tree
a0c938f0 1317 starting at p.
1318
ddf88afa 1319 We stop the walk when we reach j's immediate dominator - j is in the
1320 dominance frontier of each of the nodes in the walk, except for j's
1321 immediate dominator. Intuitively, all of the rest of j's dominators are
1322 shared by j's predecessors as well.
1323 Since they dominate j, they will not have j in their dominance frontiers.
1324
a0c938f0 1325 The number of nodes touched by this algorithm is equal to the size
ddf88afa 1326 of the dominance frontiers, no more, no less.
1327*/
9858d888 1328
9858d888 1329
1330static void
8a2980be 1331compute_dominance_frontiers_1 (bitmap_head *frontiers)
9858d888 1332{
ddf88afa 1333 edge p;
cd665a06 1334 edge_iterator ei;
ddf88afa 1335 basic_block b;
fc00614f 1336 FOR_EACH_BB_FN (b, cfun)
9858d888 1337 {
ddf88afa 1338 if (EDGE_COUNT (b->preds) >= 2)
9858d888 1339 {
ddf88afa 1340 FOR_EACH_EDGE (p, ei, b->preds)
1341 {
1342 basic_block runner = p->src;
1343 basic_block domsb;
34154e27 1344 if (runner == ENTRY_BLOCK_PTR_FOR_FN (cfun))
ddf88afa 1345 continue;
a0c938f0 1346
ddf88afa 1347 domsb = get_immediate_dominator (CDI_DOMINATORS, b);
1348 while (runner != domsb)
1349 {
8a2980be 1350 if (!bitmap_set_bit (&frontiers[runner->index],
39192870 1351 b->index))
8a14140c 1352 break;
ddf88afa 1353 runner = get_immediate_dominator (CDI_DOMINATORS,
1354 runner);
1355 }
1356 }
0cc4271a 1357 }
9858d888 1358 }
a0c938f0 1359}
1360
9858d888 1361
1362void
8a2980be 1363compute_dominance_frontiers (bitmap_head *frontiers)
9858d888 1364{
9858d888 1365 timevar_push (TV_DOM_FRONTIERS);
1366
ddf88afa 1367 compute_dominance_frontiers_1 (frontiers);
9858d888 1368
1369 timevar_pop (TV_DOM_FRONTIERS);
1370}
6dae3408 1371
/* Given a set of blocks with variable definitions (DEF_BLOCKS),
   return a bitmap with all the blocks in the iterated dominance
   frontier of the blocks in DEF_BLOCKS.  DFS contains dominance
   frontier information as returned by compute_dominance_frontiers.

   The resulting set of blocks are the potential sites where PHI nodes
   are needed.  The caller is responsible for freeing the memory
   allocated for the return value.  */

bitmap
compute_idf (bitmap def_blocks, bitmap_head *dfs)
{
  bitmap_iterator bi;
  unsigned bb_index, i;
  bitmap phi_insertion_points;

  /* Each block can appear at most twice on the work-stack.  */
  auto_vec<int> work_stack (2 * n_basic_blocks_for_fn (cfun));
  phi_insertion_points = BITMAP_ALLOC (NULL);

  /* Seed the work list with all the blocks in DEF_BLOCKS.  We use
     vec::quick_push here for speed.  This is safe because we know that
     the number of definition blocks is no greater than the number of
     basic blocks, which is the initial capacity of WORK_STACK.  */
  EXECUTE_IF_SET_IN_BITMAP (def_blocks, 0, bb_index, bi)
    work_stack.quick_push (bb_index);

  /* Pop a block off the worklist, add every block that appears in
     the original block's DF that we have not already processed to
     the worklist.  Iterate until the worklist is empty.  Blocks
     which are added to the worklist are potential sites for
     PHI nodes.  */
  while (work_stack.length () > 0)
    {
      bb_index = work_stack.pop ();

      /* Since the registration of NEW -> OLD name mappings is done
	 separately from the call to update_ssa, when updating the SSA
	 form, the basic blocks where new and/or old names are defined
	 may have disappeared by CFG cleanup calls.  In this case,
	 we may pull a non-existing block from the work stack.  */
      gcc_checking_assert (bb_index
			   < (unsigned) last_basic_block_for_fn (cfun));

      /* A block is pushed only the first time it enters
	 PHI_INSERTION_POINTS, so together with the seeds each block
	 is pushed at most twice, keeping quick_push within the
	 2 * n_basic_blocks capacity reserved above.  */
      EXECUTE_IF_AND_COMPL_IN_BITMAP (&dfs[bb_index], phi_insertion_points,
				      0, i, bi)
	{
	  work_stack.quick_push (i);
	  bitmap_set_bit (phi_insertion_points, i);
	}
    }

  return phi_insertion_points;
}
1426
34ad4b87 1427/* Intersection and union of preds/succs for sbitmap based data flow
1428 solvers. All four functions defined below take the same arguments:
1429 B is the basic block to perform the operation for. DST is the
1430 target sbitmap, i.e. the result. SRC is an sbitmap vector of size
1431 last_basic_block so that it can be indexed with basic block indices.
1432 DST may be (but does not have to be) SRC[B->index]. */
6dae3408 1433
34ad4b87 1434/* Set the bitmap DST to the intersection of SRC of successors of
1435 basic block B. */
1436
1437void
08b7917c 1438bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
34ad4b87 1439{
1440 unsigned int set_size = dst->size;
1441 edge e;
1442 unsigned ix;
1443
34ad4b87 1444 for (e = NULL, ix = 0; ix < EDGE_COUNT (b->succs); ix++)
1445 {
1446 e = EDGE_SUCC (b, ix);
34154e27 1447 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
34ad4b87 1448 continue;
1449
53c5d9d4 1450 bitmap_copy (dst, src[e->dest->index]);
34ad4b87 1451 break;
1452 }
1453
1454 if (e == 0)
53c5d9d4 1455 bitmap_ones (dst);
34ad4b87 1456 else
1457 for (++ix; ix < EDGE_COUNT (b->succs); ix++)
1458 {
1459 unsigned int i;
1460 SBITMAP_ELT_TYPE *p, *r;
1461
1462 e = EDGE_SUCC (b, ix);
34154e27 1463 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
34ad4b87 1464 continue;
1465
1466 p = src[e->dest->index]->elms;
1467 r = dst->elms;
1468 for (i = 0; i < set_size; i++)
1469 *r++ &= *p++;
1470 }
1471}
1472
/* Set the bitmap DST to the intersection of SRC of predecessors of
   basic block B.  */

void
bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  /* Seed DST with the set of the first predecessor that is not the
     entry block, then AND in the remaining ones.  */
  for (e = NULL, ix = 0; ix < EDGE_COUNT (b->preds); ix++)
    {
      e = EDGE_PRED (b, ix);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->src->index]);
      break;
    }

  /* No predecessors at all: the intersection over an empty set is
     all ones.  NOTE(review): if B's only predecessor is the entry
     block, E is non-null here and DST is left unmodified instead of
     being set to all ones -- presumably callers never hit that case;
     confirm.  */
  if (e == 0)
    bitmap_ones (dst);
  else
    for (++ix; ix < EDGE_COUNT (b->preds); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_PRED (b, ix);
	if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	/* AND the predecessor's set into DST word by word.  */
	p = src[e->src->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ &= *p++;
      }
}
1511
1512/* Set the bitmap DST to the union of SRC of successors of
1513 basic block B. */
1514
1515void
08b7917c 1516bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
34ad4b87 1517{
1518 unsigned int set_size = dst->size;
1519 edge e;
1520 unsigned ix;
1521
34ad4b87 1522 for (ix = 0; ix < EDGE_COUNT (b->succs); ix++)
1523 {
1524 e = EDGE_SUCC (b, ix);
34154e27 1525 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
34ad4b87 1526 continue;
1527
53c5d9d4 1528 bitmap_copy (dst, src[e->dest->index]);
34ad4b87 1529 break;
1530 }
1531
1532 if (ix == EDGE_COUNT (b->succs))
53c5d9d4 1533 bitmap_clear (dst);
34ad4b87 1534 else
1535 for (ix++; ix < EDGE_COUNT (b->succs); ix++)
1536 {
1537 unsigned int i;
1538 SBITMAP_ELT_TYPE *p, *r;
1539
1540 e = EDGE_SUCC (b, ix);
34154e27 1541 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
34ad4b87 1542 continue;
1543
1544 p = src[e->dest->index]->elms;
1545 r = dst->elms;
1546 for (i = 0; i < set_size; i++)
1547 *r++ |= *p++;
1548 }
1549}
1550
1551/* Set the bitmap DST to the union of SRC of predecessors of
1552 basic block B. */
1553
1554void
08b7917c 1555bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
34ad4b87 1556{
1557 unsigned int set_size = dst->size;
1558 edge e;
1559 unsigned ix;
1560
34ad4b87 1561 for (ix = 0; ix < EDGE_COUNT (b->preds); ix++)
1562 {
1563 e = EDGE_PRED (b, ix);
34154e27 1564 if (e->src== ENTRY_BLOCK_PTR_FOR_FN (cfun))
34ad4b87 1565 continue;
1566
53c5d9d4 1567 bitmap_copy (dst, src[e->src->index]);
34ad4b87 1568 break;
1569 }
1570
1571 if (ix == EDGE_COUNT (b->preds))
53c5d9d4 1572 bitmap_clear (dst);
34ad4b87 1573 else
1574 for (ix++; ix < EDGE_COUNT (b->preds); ix++)
1575 {
1576 unsigned int i;
1577 SBITMAP_ELT_TYPE *p, *r;
1578
1579 e = EDGE_PRED (b, ix);
34154e27 1580 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
34ad4b87 1581 continue;
1582
1583 p = src[e->src->index]->elms;
1584 r = dst->elms;
1585 for (i = 0; i < set_size; i++)
1586 *r++ |= *p++;
1587 }
1588}
ba4d2b2f 1589
/* Returns the list of basic blocks in the function in an order that guarantees
   that if a block X has just a single predecessor Y, then Y is after X in the
   ordering.  */

basic_block *
single_pred_before_succ_order (void)
{
  basic_block x, y;
  basic_block *order = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  /* N tracks the first still-unassigned slot past the chains placed
     so far; chains are filled from the back of ORDER.  */
  unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
  unsigned np, i;
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))

  bitmap_clear (visited);

  MARK_VISITED (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  FOR_EACH_BB_FN (x, cfun)
    {
      if (VISITED_P (x))
	continue;

      /* Walk the predecessors of x as long as they have precisely one
	 predecessor and add them to the list, so that they get stored
	 after x.  */
      /* First pass: count the length NP of the single-predecessor
	 chain ending at X.  */
      for (y = x, np = 1;
	   single_pred_p (y) && !VISITED_P (single_pred (y));
	   y = single_pred (y))
	np++;
      /* Second pass: store the chain at ORDER[n - np .. n - 1], with
	 X first so each predecessor lands after its successor.  */
      for (y = x, i = n - np;
	   single_pred_p (y) && !VISITED_P (single_pred (y));
	   y = single_pred (y), i++)
	{
	  order[i] = y;
	  MARK_VISITED (y);
	}
      /* Store the head of the chain (the block with no eligible
	 single predecessor) in the last slot of the range.  */
      order[i] = y;
      MARK_VISITED (y);

      gcc_assert (i == n - 1);
      n -= np;
    }

  /* Every slot of ORDER must have been filled.  */
  gcc_assert (n == 0);
  return order;

#undef MARK_VISITED
#undef VISITED_P
}
1477a5a7 1641
1642/* Ignoring loop backedges, if BB has precisely one incoming edge then
1643 return that edge. Otherwise return NULL.
1644
1645 When IGNORE_NOT_EXECUTABLE is true, also ignore edges that are not marked
1646 as executable. */
1647
1648edge
1649single_pred_edge_ignoring_loop_edges (basic_block bb,
1650 bool ignore_not_executable)
1651{
1652 edge retval = NULL;
1653 edge e;
1654 edge_iterator ei;
1655
1656 FOR_EACH_EDGE (e, ei, bb->preds)
1657 {
1658 /* A loop back edge can be identified by the destination of
1659 the edge dominating the source of the edge. */
1660 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
1661 continue;
1662
1663 /* We can safely ignore edges that are not executable. */
1664 if (ignore_not_executable
1665 && (e->flags & EDGE_EXECUTABLE) == 0)
1666 continue;
1667
1668 /* If we have already seen a non-loop edge, then we must have
1669 multiple incoming non-loop edges and thus we return NULL. */
1670 if (retval)
1671 return NULL;
1672
1673 /* This is the first non-loop incoming edge we have found. Record
1674 it. */
1675 retval = e;
1676 }
1677
1678 return retval;
1679}