/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains various simple utilities to analyze the CFG.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "cfghooks.h"
#include "timevar.h"
#include "cfganal.h"
#include "cfgloop.h"
#include "diagnostic.h"

namespace {
/* Store the data structures necessary for depth-first search.  */
class depth_first_search
{
public:
  depth_first_search ();

  basic_block execute (basic_block);
  void add_bb (basic_block);

private:
  /* stack for backtracking during the algorithm */
  auto_vec<basic_block, 20> m_stack;

  /* record of basic blocks already seen by depth-first search */
  auto_sbitmap m_visited_blocks;
};
}
\f
/* Mark the back edges in DFS traversal.
   Return nonzero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from pre_and_rev_post_order_compute.  */

bool
mark_dfs_back_edges (struct function *fun)
{
  int *pre;
  int *post;
  int prenum = 1;
  int postnum = 1;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = XCNEWVEC (int, last_basic_block_for_fn (fun));
  post = XCNEWVEC (int, last_basic_block_for_fn (fun));

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (fun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (fun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (fun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (fun) && ! bitmap_bit_p (visited,
								 dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (EDGE_COUNT (dest->succs) > 0)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack.quick_push (ei_start (dest->succs));
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (fun)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (fun)
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    ei_edge (ei)->flags |= EDGE_DFS_BACK, found = true;

	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (fun))
	    post[src->index] = postnum++;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  free (pre);
  free (post);

  return found;
}

bool
mark_dfs_back_edges (void)
{
  return mark_dfs_back_edges (cfun);
}
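
/* For illustration, a client that only needs to know whether the
   current function contains a cycle, and which edges close one, might
   use the above along these lines (note_loop_edge being a hypothetical
   callback of that client):

     if (mark_dfs_back_edges ())
       {
	 basic_block bb;
	 edge e;
	 edge_iterator ei;
	 FOR_EACH_BB_FN (bb, cfun)
	   FOR_EACH_EDGE (e, ei, bb->succs)
	     if (e->flags & EDGE_DFS_BACK)
	       note_loop_edge (e);
       }

   The EDGE_DFS_BACK flags stay valid only until the CFG is next
   modified.  */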

/* Verify that EDGE_DFS_BACK is up to date for FUN; fail with an
   internal error if recomputing the back edges disagrees with the
   flags currently set on the edges.  */

void
verify_marked_backedges (struct function *fun)
{
  auto_edge_flag saved_dfs_back (fun);
  basic_block bb;
  edge e;
  edge_iterator ei;

  // Save all the back edges...
  FOR_EACH_BB_FN (bb, fun)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	if (e->flags & EDGE_DFS_BACK)
	  {
	    e->flags |= saved_dfs_back;
	    e->flags &= ~EDGE_DFS_BACK;
	  }
      }

  // ...and verify that recalculating them agrees with the saved ones.
  mark_dfs_back_edges ();
  FOR_EACH_BB_FN (bb, fun)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	if (((e->flags & EDGE_DFS_BACK) != 0)
	    != ((e->flags & saved_dfs_back) != 0))
	  internal_error ("%<verify_marked_backedges%> failed");

	e->flags &= ~saved_dfs_back;
      }
}

/* Find unreachable blocks.  An unreachable block will have 0 in
   the reachable bit in block->flags.  A nonzero value indicates the
   block is reachable.  */

void
find_unreachable_blocks (void)
{
  edge e;
  edge_iterator ei;
  basic_block *tos, *worklist, bb;

  tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* Clear all the reachability flags.  */

  FOR_EACH_BB_FN (bb, cfun)
    bb->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      FOR_EACH_EDGE (e, ei, b->succs)
	{
	  basic_block dest = e->dest;

	  if (!(dest->flags & BB_REACHABLE))
	    {
	      *tos++ = dest;
	      dest->flags |= BB_REACHABLE;
	    }
	}
    }

  free (worklist);
}

/* Verify that there are no unreachable blocks in the current function.  */

void
verify_no_unreachable_blocks (void)
{
  find_unreachable_blocks ();

  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    gcc_assert ((bb->flags & BB_REACHABLE) != 0);
}
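
/* For illustration, a pass that has just disconnected part of the CFG
   could refresh the flags and act on the dead blocks roughly like so,
   with discard_block_contents standing in for whatever the pass needs
   to do:

     find_unreachable_blocks ();
     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       if (!(bb->flags & BB_REACHABLE))
	 discard_block_contents (bb);

   As with the DFS back edge marks, BB_REACHABLE is only valid until
   the next CFG change.  */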

\f
/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */

/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, and all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list (void)
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  basic_block bb;
  edge_iterator ei;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  num_edges = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      num_edges += EDGE_COUNT (bb->succs);
    }

  elist = XNEW (struct edge_list);
  elist->num_edges = num_edges;
  elist->index_to_edge = XNEWVEC (edge, num_edges);

  num_edges = 0;

  /* Follow successors of blocks, and register these edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      elist->index_to_edge[num_edges++] = e;

  return elist;
}
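
/* For illustration, a client that wants a dense numbering of all
   edges, say to index a per-edge scratch array, might use the list as
   sketched below (PRED and SUCC standing for some blocks of interest):

     struct edge_list *elist = create_edge_list ();
     int *weight = XCNEWVEC (int, NUM_EDGES (elist));
     int ix = EDGE_INDEX (elist, pred, succ);
     if (ix != EDGE_INDEX_NO_EDGE)
       weight[ix]++;
     free (weight);
     free_edge_list (elist);  */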

/* This function frees the memory associated with an edge list.  */

void
free_edge_list (struct edge_list *elist)
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

/* This function provides debug output showing an edge list.  */

DEBUG_FUNCTION void
print_edge_list (FILE *f, struct edge_list *elist)
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
	   n_basic_blocks_for_fn (cfun), elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	fprintf (f, "entry,");
      else
	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR_FOR_FN (cfun))
	fprintf (f, "exit)\n");
      else
	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

DEBUG_FUNCTION void
verify_edge_list (FILE *f, struct edge_list *elist)
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;
  edge_iterator ei;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  This is an expensive check!  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
      {
	int found_edge = 0;

	FOR_EACH_EDGE (e, ei, p->succs)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	FOR_EACH_EDGE (e, ei, s->preds)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, p, s)
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   p->index, s->index);
	if (EDGE_INDEX (elist, p, s)
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}


/* Functions to compute control dependences.  */

/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
void
control_dependences::set_control_dependence_map_bit (basic_block bb,
						     int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
  bitmap_set_bit (&control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
void
control_dependences::clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (&control_dependence_map[bb->index]);
}

/* Determine all blocks' control dependences on the edge with index
   EDGE_INDEX in the edge list, ala Morgan, Section 3.6.  */

void
control_dependences::find_control_dependence (int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (get_edge_src (edge_index) != EXIT_BLOCK_PTR_FOR_FN (cfun));

  ending_block = get_immediate_dominator (CDI_POST_DOMINATORS,
					  get_edge_src (edge_index));

  for (current_block = get_edge_dest (edge_index);
       current_block != ending_block
       && current_block != EXIT_BLOCK_PTR_FOR_FN (cfun);
       current_block = get_immediate_dominator (CDI_POST_DOMINATORS,
						current_block))
    set_control_dependence_map_bit (current_block, edge_index);
}

/* Record all blocks' control dependences on all edges in the edge
   list, ala Morgan, Section 3.6.  */

control_dependences::control_dependences ()
{
  timevar_push (TV_CONTROL_DEPENDENCES);

  /* Initialize the edge list.  */
  int num_edges = 0;
  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    num_edges += EDGE_COUNT (bb->succs);
  m_el.create (num_edges);
  edge e;
  edge_iterator ei;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	/* For abnormal edges, we don't make current_block control
	   dependent because instructions that throw are always necessary
	   anyway.  */
	if (e->flags & EDGE_ABNORMAL)
	  {
	    num_edges--;
	    continue;
	  }
	m_el.quick_push (std::make_pair (e->src->index, e->dest->index));
      }

  bitmap_obstack_initialize (&m_bitmaps);
  control_dependence_map.create (last_basic_block_for_fn (cfun));
  control_dependence_map.quick_grow_cleared (last_basic_block_for_fn (cfun));
  for (int i = 0; i < last_basic_block_for_fn (cfun); ++i)
    bitmap_initialize (&control_dependence_map[i], &m_bitmaps);
  for (int i = 0; i < num_edges; ++i)
    find_control_dependence (i);

  timevar_pop (TV_CONTROL_DEPENDENCES);
}

/* Free control dependences and the associated edge list.  */

control_dependences::~control_dependences ()
{
  control_dependence_map.release ();
  m_el.release ();
  bitmap_obstack_release (&m_bitmaps);
}

/* Returns the bitmap of edges the basic-block I is dependent on.  */

bitmap
control_dependences::get_edges_dependent_on (int i)
{
  return &control_dependence_map[i];
}

/* Returns the edge source with index I from the edge list.  */

basic_block
control_dependences::get_edge_src (int i)
{
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].first);
}

/* Returns the edge destination with index I from the edge list.  */

basic_block
control_dependences::get_edge_dest (int i)
{
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].second);
}

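/* For illustration, a typical client (compare the scheme used in
   tree-ssa-dce.cc) computes post-dominators first, queries the
   dependences, and releases everything afterwards:

     calculate_dominance_info (CDI_POST_DOMINATORS);
     control_dependences *cd = new control_dependences ();
     bitmap deps = cd->get_edges_dependent_on (bb->index);
     ... map the edge indexes recorded in DEPS back to blocks via
	 get_edge_src and get_edge_dest ...
     delete cd;
     free_dominance_info (CDI_POST_DOMINATORS);  */
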
/* Given PRED and SUCC blocks, return the edge which connects the blocks.
   If no such edge exists, return NULL.  */

edge
find_edge (basic_block pred, basic_block succ)
{
  edge e;
  edge_iterator ei;

  if (EDGE_COUNT (pred->succs) <= EDGE_COUNT (succ->preds))
    {
      FOR_EACH_EDGE (e, ei, pred->succs)
	if (e->dest == succ)
	  return e;
    }
  else
    {
      FOR_EACH_EDGE (e, ei, succ->preds)
	if (e->src == pred)
	  return e;
    }

  return NULL;
}

/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ)
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
	&& INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return (EDGE_INDEX_NO_EDGE);
}
\f
/* This routine will remove any fake predecessor edges for a basic block.
   When the edge is removed, it is also removed from whatever successor
   list it is in.  */

static void
remove_fake_predecessors (basic_block bb)
{
  edge e;
  edge_iterator ei;

  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      if ((e->flags & EDGE_FAKE) == EDGE_FAKE)
	remove_edge (e);
      else
	ei_next (&ei);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
    remove_fake_predecessors (bb);
}

/* This routine will remove all fake edges to the EXIT_BLOCK.  */

void
remove_fake_exit_edges (void)
{
  remove_fake_predecessors (EXIT_BLOCK_PTR_FOR_FN (cfun));
}

/* This function will add a fake edge between any block which has no
   successors, and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    if (EDGE_COUNT (bb->succs) == 0)
      make_single_succ_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
}

/* This function adds fake edges to the exit block from any noreturn
   block and from infinite loops.  Some optimizations require a path
   from each node to the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit (void)
{
  /* First add fake exits to noreturn blocks, this is required to
     discover only truly infinite loops below.  */
  add_noreturn_fake_exit_edges ();

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  depth_first_search dfs;
  dfs.add_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
  while (1)
    {
      unvisited_block = dfs.execute (unvisited_block);
      if (!unvisited_block)
	break;

      basic_block deadend_block = dfs_find_deadend (unvisited_block);
      edge e = make_edge (deadend_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
			  EDGE_FAKE);
      e->probability = profile_probability::never ();
      dfs.add_bb (deadend_block);
    }
}
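
/* For illustration, passes that need every block to reach the exit,
   e.g. to compute post-dominators over a CFG containing infinite
   loops, typically bracket that computation like:

     connect_infinite_loops_to_exit ();
     calculate_dominance_info (CDI_POST_DOMINATORS);
     ... use the post-dominator tree ...
     free_dominance_info (CDI_POST_DOMINATORS);
     remove_fake_exit_edges ();  */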
\f
/* Compute reverse top sort order.  This is computing a post order
   numbering of the graph.  If INCLUDE_ENTRY_EXIT is true, then
   ENTRY_BLOCK and EXIT_BLOCK are included.  If DELETE_UNREACHABLE is
   true, unreachable blocks are deleted.  */

int
post_order_compute (int *post_order, bool include_entry_exit,
		    bool delete_unreachable)
{
  int post_order_num = 0;
  int count;

  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && ! bitmap_bit_p (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack.quick_push (ei_start (dest->succs));
	  else
	    post_order[post_order_num++] = dest->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    post_order[post_order_num++] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  if (include_entry_exit)
    {
      post_order[post_order_num++] = ENTRY_BLOCK;
      count = post_order_num;
    }
  else
    count = post_order_num + 2;

  /* Delete the unreachable blocks if some were found and we are
     supposed to do it.  */
  if (delete_unreachable && (count != n_basic_blocks_for_fn (cfun)))
    {
      basic_block b;
      basic_block next_bb;
      for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
	   != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
	{
	  next_bb = b->next_bb;

	  if (!(bitmap_bit_p (visited, b->index)))
	    delete_basic_block (b);
	}

      tidy_fallthru_edges ();
    }

  return post_order_num;
}
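
/* For illustration, the caller owns the output array, which must have
   room for every block (n_basic_blocks_for_fn already counts the
   entry and exit blocks):

     int *post_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
     int n = post_order_compute (post_order, true, false);
     ... post_order[0 .. n-1] holds block indexes in postorder, each
	 block appearing after its DFS descendants ...
     free (post_order);  */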

/* Helper routine for inverted_rev_post_order_compute,
   flow_dfs_compute_reverse_execute, and the reverse-CFG
   depth first search in dominance.cc.
   BB has to belong to a region of CFG
   unreachable by inverted traversal from the exit.
   i.e. there's no control flow path from ENTRY to EXIT
   that contains this BB.
   This can happen in two cases - if there's an infinite loop
   or if there's a block that has no successor
   (call to a function with no return).
   Some RTL passes deal with this condition by
   calling connect_infinite_loops_to_exit () and/or
   add_noreturn_fake_exit_edges ().
   However, those methods involve modifying the CFG itself
   which may not be desirable.
   Hence, we deal with the infinite loop/no return cases
   by identifying a unique basic block that can reach all blocks
   in such a region by inverted traversal.
   This function returns a basic block that guarantees
   that all blocks in the region are reachable
   by starting an inverted traversal from the returned block.  */

basic_block
dfs_find_deadend (basic_block bb)
{
  auto_bitmap visited;
  basic_block next = bb;

  for (;;)
    {
      if (EDGE_COUNT (next->succs) == 0)
	return next;

      if (! bitmap_set_bit (visited, next->index))
	return bb;

      bb = next;
      /* If we are in an analyzed cycle make sure to try exiting it.
	 Note this is a heuristic only and expected to work when loop
	 fixup is needed as well.  */
      if (! bb->loop_father
	  || ! loop_outer (bb->loop_father))
	next = EDGE_SUCC (bb, 0)->dest;
      else
	{
	  edge_iterator ei;
	  edge e;
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (loop_exit_edge_p (bb->loop_father, e))
	      break;
	  next = e ? e->dest : EDGE_SUCC (bb, 0)->dest;
	}
    }
}


/* Compute the reverse top sort order of the inverted CFG
   i.e. starting from the exit block and following the edges backward
   (from successors to predecessors).
   This ordering can be used for forward dataflow problems among others.

   Optionally if START_POINTS is specified, start from exit block and all
   basic blocks in START_POINTS.  This is used by CD-DCE.

   This function assumes that all blocks in the CFG are reachable
   from the ENTRY (but not necessarily from EXIT).

   If there's an infinite loop,
   a simple inverted traversal starting from the blocks
   with no successors can't visit all blocks.
   To solve this problem, we first do inverted traversal
   starting from the blocks with no successor.
   And if there's any block left that's not visited by the regular
   inverted traversal from EXIT,
   those blocks are in such a problematic region.
   Among those, we find one block that has
   any visited predecessor (which is an entry into such a region),
   and start looking for a "dead end" from that block
   and do another inverted traversal from that block.  */

int
inverted_rev_post_order_compute (struct function *fn,
				 int *rev_post_order,
				 sbitmap *start_points)
{
  basic_block bb;

  int rev_post_order_num = n_basic_blocks_for_fn (fn) - 1;

  if (flag_checking)
    verify_no_unreachable_blocks ();

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  if (start_points)
    {
      FOR_ALL_BB_FN (bb, cfun)
	if (bitmap_bit_p (*start_points, bb->index)
	    && EDGE_COUNT (bb->preds) > 0)
	  {
	    stack.quick_push (ei_start (bb->preds));
	    bitmap_set_bit (visited, bb->index);
	  }
      if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
	{
	  stack.quick_push (ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds));
	  bitmap_set_bit (visited, EXIT_BLOCK_PTR_FOR_FN (cfun)->index);
	}
    }
  else
    /* Put all blocks that have no successor into the initial work list.  */
    FOR_ALL_BB_FN (bb, cfun)
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  /* Push the initial edge on to the stack.  */
	  if (EDGE_COUNT (bb->preds) > 0)
	    {
	      stack.quick_push (ei_start (bb->preds));
	      bitmap_set_bit (visited, bb->index);
	    }
	}

  do
    {
      bool has_unvisited_bb = false;

      /* The inverted traversal loop.  */
      while (!stack.is_empty ())
	{
	  edge_iterator ei;
	  basic_block pred;

	  /* Look at the edge on the top of the stack.  */
	  ei = stack.last ();
	  bb = ei_edge (ei)->dest;
	  pred = ei_edge (ei)->src;

	  /* Check if the predecessor has been visited yet.  */
	  if (! bitmap_bit_p (visited, pred->index))
	    {
	      /* Mark that we have visited the destination.  */
	      bitmap_set_bit (visited, pred->index);

	      if (EDGE_COUNT (pred->preds) > 0)
		/* Since the predecessor node has been visited for the first
		   time, check its predecessors.  */
		stack.quick_push (ei_start (pred->preds));
	      else
		rev_post_order[rev_post_order_num--] = pred->index;
	    }
	  else
	    {
	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && ei_one_before_end_p (ei))
		rev_post_order[rev_post_order_num--] = bb->index;

	      if (!ei_one_before_end_p (ei))
		ei_next (&stack.last ());
	      else
		stack.pop ();
	    }
	}

      /* Detect any infinite loop and activate the kludge.
	 Note that this doesn't check EXIT_BLOCK itself
	 since EXIT_BLOCK is always added after the outer do-while loop.  */
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		      EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
	if (!bitmap_bit_p (visited, bb->index))
	  {
	    has_unvisited_bb = true;

	    if (EDGE_COUNT (bb->preds) > 0)
	      {
		edge_iterator ei;
		edge e;
		basic_block visited_pred = NULL;

		/* Find an already visited predecessor.  */
		FOR_EACH_EDGE (e, ei, bb->preds)
		  {
		    if (bitmap_bit_p (visited, e->src->index))
		      visited_pred = e->src;
		  }

		if (visited_pred)
		  {
		    basic_block be = dfs_find_deadend (bb);
		    gcc_assert (be != NULL);
		    bitmap_set_bit (visited, be->index);
		    stack.quick_push (ei_start (be->preds));
		    break;
		  }
	      }
	  }

      if (has_unvisited_bb && stack.is_empty ())
	{
	  /* No blocks are reachable from EXIT at all.
	     Find a dead-end from the ENTRY, and restart the iteration.  */
	  basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR_FOR_FN (cfun));
	  gcc_assert (be != NULL);
	  bitmap_set_bit (visited, be->index);
	  stack.quick_push (ei_start (be->preds));
	}

      /* The only case the below while fires is
	 when there's an infinite loop.  */
    }
  while (!stack.is_empty ());

  /* EXIT_BLOCK is always included.  */
  rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
  return n_basic_blocks_for_fn (fn);
}
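
/* For illustration, a consumer in the style of df-core allocates room
   for all blocks, including the fixed ones:

     int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
     int n = inverted_rev_post_order_compute (cfun, rpo, NULL);
     ... rpo[0] is normally EXIT_BLOCK; the remaining entries list the
	 blocks in reverse postorder of the inverted CFG ...
     free (rpo);  */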

/* Compute the depth first search order of FN and store in the array
   PRE_ORDER if nonzero.  If REV_POST_ORDER is nonzero, return the
   reverse completion number for each node.  Returns the number of nodes
   visited.  A depth first search tries to get as far away from the starting
   point as quickly as possible.

   In case the function has unreachable blocks the number of nodes
   visited does not include them.

   pre_order is really a preorder numbering of the graph.
   rev_post_order is really a reverse postorder numbering of the graph.  */

int
pre_and_rev_post_order_compute_fn (struct function *fn,
				   int *pre_order, int *rev_post_order,
				   bool include_entry_exit)
{
  int pre_order_num = 0;
  int rev_post_order_num = n_basic_blocks_for_fn (fn) - 1;

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (fn) + 1);

  if (include_entry_exit)
    {
      if (pre_order)
	pre_order[pre_order_num] = ENTRY_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
    }
  else
    rev_post_order_num -= NUM_FIXED_BLOCKS;

  /* BB flag to track nodes that have been visited.  */
  auto_bb_flag visited (fn);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (fn)
	  && ! (dest->flags & visited))
	{
	  /* Mark that we have visited the destination.  */
	  dest->flags |= visited;

	  if (pre_order)
	    pre_order[pre_order_num] = dest->index;

	  pre_order_num++;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack.quick_push (ei_start (dest->succs));
	  else if (rev_post_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = dest->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (fn)
	      && rev_post_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  if (include_entry_exit)
    {
      if (pre_order)
	pre_order[pre_order_num] = EXIT_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
    }

  /* Clear the temporarily allocated flag.  */
  if (!rev_post_order)
    rev_post_order = pre_order;
  for (int i = 0; i < pre_order_num; ++i)
    BASIC_BLOCK_FOR_FN (fn, rev_post_order[i])->flags &= ~visited;

  return pre_order_num;
}

/* Like pre_and_rev_post_order_compute_fn but operating on the
   current function and asserting that all nodes were visited.  */

int
pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
				bool include_entry_exit)
{
  int pre_order_num
    = pre_and_rev_post_order_compute_fn (cfun, pre_order, rev_post_order,
					 include_entry_exit);
  if (include_entry_exit)
    /* The number of nodes visited should be the number of blocks.  */
    gcc_assert (pre_order_num == n_basic_blocks_for_fn (cfun));
  else
    /* The number of nodes visited should be the number of blocks minus
       the entry and exit blocks which are not visited here.  */
    gcc_assert (pre_order_num
		== (n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS));

  return pre_order_num;
}
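
/* For illustration, the common way to drive a forward walk over the
   current function in reverse postorder, leaving out the entry and
   exit blocks:

     int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun)
			      - NUM_FIXED_BLOCKS);
     int n = pre_and_rev_post_order_compute (NULL, rpo, false);
     for (int i = 0; i < n; ++i)
       {
	 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
	 ... process BB; all of its predecessors not reached via a
	     back edge have been processed already ...
       }
     free (rpo);  */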


/* Per basic-block data for rev_post_order_and_mark_dfs_back_seme,
   element of a sparsely populated array indexed by basic-block number.  */
typedef auto_vec<int, 2> scc_exit_vec_t;
struct rpoamdbs_bb_data {
  int depth;
  int bb_to_pre;
  /* The basic-block index of the SCC entry of the block visited first
     (the SCC leader).  */
  int scc;
  /* The index into the RPO array where the block's SCC entries end
     (only valid for the SCC leader).  */
  int scc_end;
  /* The indexes of the exits destinations of this SCC (only valid
     for the SCC leader).  Initialized upon discovery of SCC leaders.  */
  scc_exit_vec_t scc_exits;
};

/* Tag H as a header of B, weaving H and its loop header list into the
   current loop header list of B.  */

static void
tag_header (int b, int h, rpoamdbs_bb_data *bb_data)
{
  if (h == -1 || b == h)
    return;
  int cur1 = b;
  int cur2 = h;
  while (bb_data[cur1].scc != -1)
    {
      int ih = bb_data[cur1].scc;
      if (ih == cur2)
	return;
      if (bb_data[ih].depth < bb_data[cur2].depth)
	{
	  bb_data[cur1].scc = cur2;
	  cur1 = cur2;
	  cur2 = ih;
	}
      else
	cur1 = ih;
    }
  bb_data[cur1].scc = cur2;
}

/* Comparator for a sort of two edge destinations E1 and E2 after their
   index in the PRE array as specified by BB_TO_PRE.  */

static int
cmp_edge_dest_pre (const void *e1_, const void *e2_, void *data_)
{
  const int *e1 = (const int *)e1_;
  const int *e2 = (const int *)e2_;
  rpoamdbs_bb_data *bb_data = (rpoamdbs_bb_data *)data_;
  return (bb_data[*e1].bb_to_pre - bb_data[*e2].bb_to_pre);
}

/* Compute the reverse completion number of a depth first search
   on the SEME region denoted by the ENTRY edge and the EXIT_BBS set of
   exit block indexes and store it in the array REV_POST_ORDER.
   Also sets the EDGE_DFS_BACK edge flags according to this visitation
   order.
   Returns the number of nodes visited.

   In case the function has unreachable blocks the number of nodes
   visited does not include them.

   If FOR_ITERATION is true then compute an RPO where SCCs form a
   contiguous region in the RPO array.
   *TOPLEVEL_SCC_EXTENTS if not NULL is filled with pairs of
   *REV_POST_ORDER indexes denoting extents of the toplevel SCCs in
   this region.  */

int
rev_post_order_and_mark_dfs_back_seme (struct function *fn, edge entry,
				       bitmap exit_bbs, bool for_iteration,
				       int *rev_post_order,
				       vec<std::pair<int, int> >
					 *toplevel_scc_extents)
{
  int rev_post_order_num = 0;

  /* BB flag to track nodes that have been visited.  */
  auto_bb_flag visited (fn);

  /* Lazily initialized per-BB data for the two DFS walks below.  */
  rpoamdbs_bb_data *bb_data
    = XNEWVEC (rpoamdbs_bb_data, last_basic_block_for_fn (fn));

  /* First DFS walk, loop discovery according to
      A New Algorithm for Identifying Loops in Decompilation
     by Tao Wei, Jian Mao, Wei Zou and You Chen of the Institute of
     Computer Science and Technology of the Peking University.  */
  auto_vec<edge_iterator, 20> ei_stack (n_basic_blocks_for_fn (fn) + 1);
  auto_bb_flag is_header (fn);
  int depth = 1;
  unsigned n_sccs = 0;

  basic_block dest = entry->dest;
  edge_iterator ei;
  int pre_num = 0;

  /* DFS process DEST.  */
find_loops:
  bb_data[dest->index].bb_to_pre = pre_num++;
  bb_data[dest->index].depth = depth;
  bb_data[dest->index].scc = -1;
  depth++;
  gcc_assert ((dest->flags & (is_header|visited)) == 0);
  dest->flags |= visited;
  ei = ei_start (dest->succs);
  while (!ei_end_p (ei))
    {
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;
      if (bitmap_bit_p (exit_bbs, ei_edge (ei)->dest->index))
	;
      else if (!(ei_edge (ei)->dest->flags & visited))
	{
	  ei_stack.quick_push (ei);
	  dest = ei_edge (ei)->dest;
	  /* DFS recurse on DEST.  */
	  goto find_loops;

ret_from_find_loops:
	  /* Return point of DFS recursion.  */
	  ei = ei_stack.pop ();
	  dest = ei_edge (ei)->src;
	  int header = bb_data[ei_edge (ei)->dest->index].scc;
	  tag_header (dest->index, header, bb_data);
	  depth = bb_data[dest->index].depth + 1;
	}
      else
	{
	  if (bb_data[ei_edge (ei)->dest->index].depth > 0) /* on the stack */
	    {
	      ei_edge (ei)->flags |= EDGE_DFS_BACK;
	      n_sccs++;
	      ei_edge (ei)->dest->flags |= is_header;
	      ::new (&bb_data[ei_edge (ei)->dest->index].scc_exits)
		auto_vec<int, 2> ();
	      tag_header (dest->index, ei_edge (ei)->dest->index, bb_data);
	    }
	  else if (bb_data[ei_edge (ei)->dest->index].scc == -1)
	    ;
	  else
	    {
	      int header = bb_data[ei_edge (ei)->dest->index].scc;
	      if (bb_data[header].depth > 0)
		tag_header (dest->index, header, bb_data);
	      else
		{
		  /* A re-entry into an existing loop.  */
		  /* ??? Need to mark is_header?  */
		  while (bb_data[header].scc != -1)
		    {
		      header = bb_data[header].scc;
		      if (bb_data[header].depth > 0)
			{
			  tag_header (dest->index, header, bb_data);
			  break;
			}
		    }
		}
	    }
	}
      ei_next (&ei);
    }
  rev_post_order[rev_post_order_num++] = dest->index;
  /* not on the stack anymore */
  bb_data[dest->index].depth = -bb_data[dest->index].depth;
  if (!ei_stack.is_empty ())
    /* Return from DFS recursion.  */
    goto ret_from_find_loops;

  /* Optimize for no SCCs found or !for_iteration.  */
  if (n_sccs == 0 || !for_iteration)
    {
      /* Clear the temporarily allocated flags.  */
      for (int i = 0; i < rev_post_order_num; ++i)
	BASIC_BLOCK_FOR_FN (fn, rev_post_order[i])->flags
	  &= ~(is_header|visited);
      /* And swap elements.  */
      for (int i = 0; i < rev_post_order_num/2; ++i)
	std::swap (rev_post_order[i], rev_post_order[rev_post_order_num-i-1]);
      XDELETEVEC (bb_data);

      return rev_post_order_num;
    }

  /* Next find SCC exits, clear the visited flag and compute an upper bound
     for the edge stack below.  */
  unsigned edge_count = 0;
  for (int i = 0; i < rev_post_order_num; ++i)
    {
      int bb = rev_post_order[i];
      BASIC_BLOCK_FOR_FN (fn, bb)->flags &= ~visited;
      edge e;
      FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (fn, bb)->succs)
	{
	  if (bitmap_bit_p (exit_bbs, e->dest->index))
	    continue;
	  edge_count++;
	  /* If E is an exit from E->src, record it for
	     bb_data[e->src].scc.  */
	  int src_scc = e->src->index;
	  if (!(e->src->flags & is_header))
	    src_scc = bb_data[src_scc].scc;
	  if (src_scc == -1)
	    continue;
	  int dest_scc = e->dest->index;
	  if (!(e->dest->flags & is_header))
	    dest_scc = bb_data[dest_scc].scc;
	  if (src_scc == dest_scc)
	    continue;
	  /* When dest_scc is nested inside src_scc it's not an
	     exit.  */
	  int tem_dest_scc = dest_scc;
	  unsigned dest_scc_depth = 0;
	  while (tem_dest_scc != -1)
	    {
	      dest_scc_depth++;
	      if ((tem_dest_scc = bb_data[tem_dest_scc].scc) == src_scc)
		break;
	    }
	  if (tem_dest_scc != -1)
	    continue;
	  /* When src_scc is nested inside dest_scc record an
	     exit from the outermost SCC this edge exits.  */
	  int tem_src_scc = src_scc;
	  unsigned src_scc_depth = 0;
	  while (tem_src_scc != -1)
	    {
	      if (bb_data[tem_src_scc].scc == dest_scc)
		{
		  edge_count++;
		  bb_data[tem_src_scc].scc_exits.safe_push (e->dest->index);
		  break;
		}
	      tem_src_scc = bb_data[tem_src_scc].scc;
	      src_scc_depth++;
	    }
	  /* Else find the outermost SCC this edge exits (exits
	     from the inner SCCs are not important for the DFS
	     walk adjustment).  Do so by computing the common
	     ancestor SCC whose immediate child on the path to the
	     source SCC is the exited SCC.  */
	  if (tem_src_scc == -1)
	    {
	      edge_count++;
	      while (src_scc_depth > dest_scc_depth)
		{
		  src_scc = bb_data[src_scc].scc;
		  src_scc_depth--;
		}
	      while (dest_scc_depth > src_scc_depth)
		{
		  dest_scc = bb_data[dest_scc].scc;
		  dest_scc_depth--;
		}
	      while (bb_data[src_scc].scc != bb_data[dest_scc].scc)
		{
		  src_scc = bb_data[src_scc].scc;
		  dest_scc = bb_data[dest_scc].scc;
		}
	      bb_data[src_scc].scc_exits.safe_push (e->dest->index);
	    }
	}
    }

  /* Now the second DFS walk to compute a RPO where the extent of SCCs
     is minimized thus SCC members are adjacent in the RPO array.
     This is done by performing a DFS walk computing RPO with first visiting
     extra direct edges from SCC entry to its exits.
     That simulates a DFS walk over the graph with SCCs collapsed and
     walking the SCCs themselves only when all outgoing edges from the
     SCCs have been visited.
     SCC_END[scc-header-index] is the position in the RPO array of the
     last member of the SCC.  */
  auto_vec<std::pair<basic_block, basic_block>, 20> estack (edge_count + 1);
  int idx = rev_post_order_num;
  basic_block edest;
  dest = entry->dest;

  /* DFS process DEST.  */
dfs_rpo:
  gcc_checking_assert ((dest->flags & visited) == 0);
  /* Verify we enter SCCs through the same header and SCC nesting appears
     the same.  */
  gcc_assert (bb_data[dest->index].scc == -1
	      || (BASIC_BLOCK_FOR_FN (fn, bb_data[dest->index].scc)->flags
		  & visited));
  dest->flags |= visited;
  bb_data[dest->index].scc_end = -1;
  if ((dest->flags & is_header)
      && !bb_data[dest->index].scc_exits.is_empty ())
    {
      /* Push all the SCC exits as outgoing edges from its header to
	 be visited first.
	 To process exits in the same relative order as in the first
	 DFS walk sort them after their destination PRE order index.  */
      gcc_sort_r (&bb_data[dest->index].scc_exits[0],
		  bb_data[dest->index].scc_exits.length (),
		  sizeof (int), cmp_edge_dest_pre, bb_data);
      /* Process edges in reverse to match previous DFS walk order.  */
      for (int i = bb_data[dest->index].scc_exits.length () - 1; i >= 0; --i)
	estack.quick_push (std::make_pair
	  (dest, BASIC_BLOCK_FOR_FN (fn, bb_data[dest->index].scc_exits[i])));
    }
  else
    {
      if (dest->flags & is_header)
	bb_data[dest->index].scc_end = idx - 1;
      /* Push the edge vector in reverse to match the iteration order
	 from the DFS walk above.  */
      for (int i = EDGE_COUNT (dest->succs) - 1; i >= 0; --i)
	if (!bitmap_bit_p (exit_bbs, EDGE_SUCC (dest, i)->dest->index))
	  estack.quick_push (std::make_pair (dest,
					     EDGE_SUCC (dest, i)->dest));
    }
  while (!estack.is_empty ()
	 && estack.last ().first == dest)
    {
      edest = estack.last ().second;
      if (!(edest->flags & visited))
	{
	  dest = edest;
	  /* DFS recurse on DEST.  */
	  goto dfs_rpo;

ret_from_dfs_rpo:
	  /* Return point of DFS recursion.  */
	  dest = estack.last ().first;
	}
      estack.pop ();
      /* If we processed all SCC exits from DEST mark the SCC end
	 since all RPO entries up to DEST itself will now belong
	 to its SCC.  The special-case of no SCC exits is already
	 dealt with above.  */
      if (dest->flags & is_header
	  /* When the last exit edge was processed mark the SCC end
	     and push the regular edges.  */
	  && bb_data[dest->index].scc_end == -1
	  && (estack.is_empty ()
	      || estack.last ().first != dest))
	{
	  bb_data[dest->index].scc_end = idx - 1;
	  /* Push the edge vector in reverse to match the iteration order
	     from the DFS walk above.  */
	  for (int i = EDGE_COUNT (dest->succs) - 1; i >= 0; --i)
	    if (!bitmap_bit_p (exit_bbs, EDGE_SUCC (dest, i)->dest->index))
	      estack.quick_push (std::make_pair (dest,
						 EDGE_SUCC (dest, i)->dest));
	}
    }
  rev_post_order[--idx] = dest->index;
  if (!estack.is_empty ())
    /* Return from DFS recursion.  */
    goto ret_from_dfs_rpo;

  /* Each SCC extends from the position of its header inside
     the RPO array up to RPO array index scc_end[header-index].  */
  if (toplevel_scc_extents)
    for (int i = 0; i < rev_post_order_num; i++)
      {
	basic_block bb = BASIC_BLOCK_FOR_FN (fn, rev_post_order[i]);
	if (bb->flags & is_header)
	  {
	    toplevel_scc_extents->safe_push
	      (std::make_pair (i, bb_data[bb->index].scc_end));
	    i = bb_data[bb->index].scc_end;
	  }
      }

  /* Clear the temporarily allocated flags and free memory.  */
  for (int i = 0; i < rev_post_order_num; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rev_post_order[i]);
      if (bb->flags & is_header)
	bb_data[bb->index].scc_exits.~scc_exit_vec_t ();
      bb->flags &= ~(visited|is_header);
    }

  XDELETEVEC (bb_data);

  return rev_post_order_num;
}
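
/* For illustration, computing such an RPO over the whole function body
   with only the exit block excluded (roughly the way the RPO
   value-numbering in tree-ssa-sccvn.cc drives its iteration) looks
   like:

     auto_bitmap exit_bbs;
     bitmap_set_bit (exit_bbs, EXIT_BLOCK);
     edge entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
     int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun)
			      - NUM_FIXED_BLOCKS);
     int n = rev_post_order_and_mark_dfs_back_seme (cfun, entry, exit_bbs,
						    true, rpo, NULL);
     free (rpo);  */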



/* Performs depth-first search over the _reverse_ graph, i.e. following
   predecessor edges, marking the blocks visited along the way.

   The computation is split into three pieces:

   the depth_first_search constructor creates the necessary data
   structures;

   depth_first_search::add_bb () adds a basic block to the data
   structures; the block will start the search;

   depth_first_search::execute () continues (or starts) the search
   using the block on the top of the stack, stopping when the stack is
   empty.

   Thus, the user will probably call add_bb () to add a beginning basic
   block to the stack, call execute (), possibly add another bb to the
   stack and again call execute (), ..., letting the destructor release
   the data structures.  */

/* Initialize the data structures used for depth-first search on the
   reverse graph.  */

depth_first_search::depth_first_search () :
  m_stack (n_basic_blocks_for_fn (cfun)),
  m_visited_blocks (last_basic_block_for_fn (cfun))
{
  bitmap_clear (m_visited_blocks);
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

void
depth_first_search::add_bb (basic_block bb)
{
  m_stack.quick_push (bb);
  bitmap_set_bit (m_visited_blocks, bb->index);
}

/* Continue the depth-first search through the reverse graph starting with the
   block at the stack's top and ending when the stack is empty.  Visited nodes
   are marked.  Returns an unvisited basic block, or NULL if there is none
   available.  */

basic_block
depth_first_search::execute (basic_block last_unvisited)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  while (!m_stack.is_empty ())
    {
      bb = m_stack.pop ();

      /* Perform depth-first search on adjacent vertices.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (!bitmap_bit_p (m_visited_blocks, e->src->index))
	  add_bb (e->src);
    }

  /* Determine if there are unvisited basic blocks.  */
  FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
    if (!bitmap_bit_p (m_visited_blocks, bb->index))
      return bb;

  return NULL;
}

/* Performs dfs search from BB over vertices satisfying PREDICATE;
   if REVERSE, go against direction of edges.  Returns number of blocks
   found and their list in RSLT.  RSLT can contain at most RSLT_MAX items.  */
int
dfs_enumerate_from (basic_block bb, int reverse,
		    bool (*predicate) (const_basic_block, const void *),
		    basic_block *rslt, int rslt_max, const void *data)
{
  basic_block *st, lbb;
  int sp = 0, tv = 0;

  auto_bb_flag visited (cfun);

#define MARK_VISITED(BB) ((BB)->flags |= visited)
#define UNMARK_VISITED(BB) ((BB)->flags &= ~visited)
#define VISITED_P(BB) (((BB)->flags & visited) != 0)

  st = XNEWVEC (basic_block, rslt_max);
  rslt[tv++] = st[sp++] = bb;
  MARK_VISITED (bb);
  while (sp)
    {
      edge e;
      edge_iterator ei;
      lbb = st[--sp];
      if (reverse)
	{
	  FOR_EACH_EDGE (e, ei, lbb->preds)
	    if (!VISITED_P (e->src) && predicate (e->src, data))
	      {
		gcc_assert (tv != rslt_max);
		rslt[tv++] = st[sp++] = e->src;
		MARK_VISITED (e->src);
	      }
	}
      else
	{
	  FOR_EACH_EDGE (e, ei, lbb->succs)
	    if (!VISITED_P (e->dest) && predicate (e->dest, data))
	      {
		gcc_assert (tv != rslt_max);
		rslt[tv++] = st[sp++] = e->dest;
		MARK_VISITED (e->dest);
	      }
	}
    }
  free (st);
  for (sp = 0; sp < tv; sp++)
    UNMARK_VISITED (rslt[sp]);
  return tv;
#undef MARK_VISITED
#undef UNMARK_VISITED
#undef VISITED_P
}
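
/* For illustration, this is roughly how the loop-body collection in
   get_loop_body works; the predicate (here a hypothetical
   bb_inside_loop_p) just tests loop membership:

     static bool
     bb_inside_loop_p (const_basic_block bb, const void *data)
     {
       return flow_bb_inside_loop_p ((const class loop *) data, bb);
     }

     basic_block *body = XNEWVEC (basic_block, loop->num_nodes);
     int n = dfs_enumerate_from (loop->header, 0, bb_inside_loop_p,
				 body, loop->num_nodes, loop);  */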
bd454efd
SB
1614
1615
4c7f5fea 1616/* Compute dominance frontiers, ala Harvey, Ferrante, et al.
c22cacf3 1617
4c7f5fea 1618 This algorithm can be found in Timothy Harvey's PhD thesis, at
708bde14 1619 http://www.cs.rice.edu/~harv/dissertation.pdf in the section on iterative
4c7f5fea 1620 dominance algorithms.
bd454efd 1621
4c7f5fea 1622 First, we identify each join point, j (any node with more than one
c22cacf3 1623 incoming edge is a join point).
bd454efd 1624
4c7f5fea 1625 We then examine each predecessor, p, of j and walk up the dominator tree
c22cacf3
MS
1626 starting at p.
1627
4c7f5fea
DB
1628 We stop the walk when we reach j's immediate dominator - j is in the
1629 dominance frontier of each of the nodes in the walk, except for j's
1630 immediate dominator. Intuitively, all of the rest of j's dominators are
1631 shared by j's predecessors as well.
1632 Since they dominate j, they will not have j in their dominance frontiers.
1633
c22cacf3 1634 The number of nodes touched by this algorithm is equal to the size
4c7f5fea
DB
1635 of the dominance frontiers, no more, no less.
1636*/
bd454efd 1637
d78b7095
RB
void
compute_dominance_frontiers (bitmap_head *frontiers)
{
  timevar_push (TV_DOM_FRONTIERS);

  edge p;
  edge_iterator ei;
  basic_block b;
  FOR_EACH_BB_FN (b, cfun)
    {
      if (EDGE_COUNT (b->preds) >= 2)
	{
	  basic_block domsb = get_immediate_dominator (CDI_DOMINATORS, b);
	  FOR_EACH_EDGE (p, ei, b->preds)
	    {
	      basic_block runner = p->src;
	      if (runner == ENTRY_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      while (runner != domsb)
		{
		  if (!bitmap_set_bit (&frontiers[runner->index], b->index))
		    break;
		  runner = get_immediate_dominator (CDI_DOMINATORS, runner);
		}
	    }
	}
    }

  timevar_pop (TV_DOM_FRONTIERS);
}
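
/* Illustrative sketch (not part of the original file) of the expected
   calling convention: FRONTIERS is a caller-owned array of bitmap_head,
   one per basic block index, and dominator information must have been
   computed first.  The helper name is hypothetical.  */
#if 0
static void
example_use_dominance_frontiers (void)
{
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_head *dfs = XCNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_initialize (&dfs[bb->index], &bitmap_default_obstack);

  compute_dominance_frontiers (dfs);

  /* ... consult dfs[bb->index] here ...  */

  FOR_EACH_BB_FN (bb, cfun)
    bitmap_clear (&dfs[bb->index]);
  free (dfs);
}
#endif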

/* Given a set of blocks with variable definitions (DEF_BLOCKS),
   return a bitmap with all the blocks in the iterated dominance
   frontier of the blocks in DEF_BLOCKS.  DFS contains dominance
   frontier information as returned by compute_dominance_frontiers.

   The resulting set of blocks is the set of potential sites where
   PHI nodes are needed.  The caller is responsible for freeing the
   memory allocated for the return value.  */

bitmap
compute_idf (bitmap def_blocks, bitmap_head *dfs)
{
  bitmap_iterator bi;
  unsigned bb_index, i;
  bitmap phi_insertion_points;

  phi_insertion_points = BITMAP_ALLOC (NULL);

  /* Seed the work set with all the blocks in DEF_BLOCKS.  */
  auto_bitmap work_set;
  bitmap_copy (work_set, def_blocks);
  bitmap_tree_view (work_set);

  /* Pop a block off the work set and add every block that appears in
     the original block's dominance frontier, but has not yet been
     processed, to the work set.  Iterate until the work set is empty.
     Blocks which are added to the work set are potential sites for
     PHI nodes.  */
  while (!bitmap_empty_p (work_set))
    {
      /* The dominance frontier of a block consists of blocks after it,
	 so iterating over earlier blocks first is better.
	 ??? Basic blocks are by no means guaranteed to be ordered in
	 optimal order for this iteration.  */
      bb_index = bitmap_clear_first_set_bit (work_set);

      /* Since the registration of NEW -> OLD name mappings is done
	 separately from the call to update_ssa, when updating the SSA
	 form, the basic blocks where new and/or old names are defined
	 may have disappeared by CFG cleanup calls.  In this case,
	 we may pull a non-existing block from the work set.  */
      gcc_checking_assert (bb_index
			   < (unsigned) last_basic_block_for_fn (cfun));

      /* The population counts of the dominance frontiers are low
	 compared to that of phi_insertion_points, which approaches
	 the size of the IDF, and to that of work_set, which is at most
	 the size of the IDF as well.  That makes iterating over the
	 bitmaps in DFS preferable to whole-bitmap operations that would
	 also involve phi_insertion_points.  */
      EXECUTE_IF_SET_IN_BITMAP (&dfs[bb_index], 0, i, bi)
	if (bitmap_set_bit (phi_insertion_points, i))
	  bitmap_set_bit (work_set, i);
    }

  return phi_insertion_points;
}
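
/* Illustrative sketch (not part of the original file): the classic
   SSA-construction pairing of the two routines above.  DEF_BLOCKS would
   hold the blocks containing definitions of one variable; the returned
   bitmap then gives the blocks that may need a PHI node for it.  The
   helper name is hypothetical.  */
#if 0
static void
example_phi_sites (bitmap_head *dfs, bitmap def_blocks)
{
  bitmap idf = compute_idf (def_blocks, dfs);

  unsigned i;
  bitmap_iterator bi;
  EXECUTE_IF_SET_IN_BITMAP (idf, 0, i, bi)
    {
      /* BASIC_BLOCK_FOR_FN (cfun, i) is a potential PHI insertion
	 site for the variable described by DEF_BLOCKS.  */
    }

  BITMAP_FREE (idf);
}
#endif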

/* Intersection and union of preds/succs for sbitmap-based data flow
   solvers.  All four functions defined below take the same arguments:
   B is the basic block to perform the operation for.  DST is the
   target sbitmap, i.e. the result.  SRC is an sbitmap vector of size
   last_basic_block so that it can be indexed with basic block indices.
   DST may be (but does not have to be) SRC[B->index].  */

/* Set the bitmap DST to the intersection of SRC of successors of
   basic block B.  */

void
bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (e = NULL, ix = 0; ix < EDGE_COUNT (b->succs); ix++)
    {
      e = EDGE_SUCC (b, ix);
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->dest->index]);
      break;
    }

  if (e == 0)
    bitmap_ones (dst);
  else
    for (++ix; ix < EDGE_COUNT (b->succs); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_SUCC (b, ix);
	if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->dest->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ &= *p++;
      }
}

/* Set the bitmap DST to the intersection of SRC of predecessors of
   basic block B.  */

void
bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (e = NULL, ix = 0; ix < EDGE_COUNT (b->preds); ix++)
    {
      e = EDGE_PRED (b, ix);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->src->index]);
      break;
    }

  if (e == 0)
    bitmap_ones (dst);
  else
    for (++ix; ix < EDGE_COUNT (b->preds); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_PRED (b, ix);
	if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->src->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ &= *p++;
      }
}

/* Set the bitmap DST to the union of SRC of successors of
   basic block B.  */

void
bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (ix = 0; ix < EDGE_COUNT (b->succs); ix++)
    {
      e = EDGE_SUCC (b, ix);
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->dest->index]);
      break;
    }

  if (ix == EDGE_COUNT (b->succs))
    bitmap_clear (dst);
  else
    for (ix++; ix < EDGE_COUNT (b->succs); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_SUCC (b, ix);
	if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->dest->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ |= *p++;
      }
}

/* Set the bitmap DST to the union of SRC of predecessors of
   basic block B.  */

void
bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (ix = 0; ix < EDGE_COUNT (b->preds); ix++)
    {
      e = EDGE_PRED (b, ix);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->src->index]);
      break;
    }

  if (ix == EDGE_COUNT (b->preds))
    bitmap_clear (dst);
  else
    for (ix++; ix < EDGE_COUNT (b->preds); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_PRED (b, ix);
	if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->src->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ |= *p++;
      }
}
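
/* Illustrative sketch (not part of the original file): how one
   confluence step of a simple forward, AND-confluence dataflow solver
   might use the helpers above.  IN and OUT are sbitmap vectors indexed
   by block number; the transfer function and the iterate-to-fixpoint
   loop are elided.  The helper name is hypothetical.  */
#if 0
static void
example_confluence_step (sbitmap *in, sbitmap *out)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    /* A block's IN set is the intersection of its predecessors' OUT
       sets.  Edges from the entry block are skipped by the helper,
       and a block with no other predecessors gets an all-ones set.  */
    bitmap_intersection_of_preds (in[bb->index], out, bb);
}
#endif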

/* Returns the list of basic blocks in the function in an order that
   guarantees that if a block X has just a single predecessor Y,
   then Y is after X in the ordering.  */

basic_block *
single_pred_before_succ_order (void)
{
  basic_block x, y;
  basic_block *order = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
  unsigned np, i;
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))

  bitmap_clear (visited);

  MARK_VISITED (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  FOR_EACH_BB_FN (x, cfun)
    {
      if (VISITED_P (x))
	continue;

      /* Walk the predecessors of x as long as they have precisely one
	 predecessor and add them to the list, so that they get stored
	 after x.  */
      for (y = x, np = 1;
	   single_pred_p (y) && !VISITED_P (single_pred (y));
	   y = single_pred (y))
	np++;
      for (y = x, i = n - np;
	   single_pred_p (y) && !VISITED_P (single_pred (y));
	   y = single_pred (y), i++)
	{
	  order[i] = y;
	  MARK_VISITED (y);
	}
      order[i] = y;
      MARK_VISITED (y);

      gcc_assert (i == n - 1);
      n -= np;
    }

  gcc_assert (n == 0);
  return order;

#undef MARK_VISITED
#undef VISITED_P
}
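
/* Illustrative sketch (not part of the original file): consuming the
   order.  Walking the returned array forward visits a block before its
   unique predecessor, which is what a consumer propagating information
   from a block to its single predecessor wants.  The helper name is
   hypothetical.  */
#if 0
static void
example_walk_order (void)
{
  basic_block *order = single_pred_before_succ_order ();
  unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
  for (unsigned i = 0; i < n; i++)
    {
      /* order[i] is seen here before single_pred (order[i]),
	 if it has one.  */
      if (dump_file)
	fprintf (dump_file, "bb %d\n", order[i]->index);
    }
  free (order);
}
#endif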

/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.

   When IGNORE_NOT_EXECUTABLE is true, also ignore edges that are not
   marked as executable.  */

edge
single_pred_edge_ignoring_loop_edges (basic_block bb,
				      bool ignore_not_executable)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
	 the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
	continue;

      /* We can safely ignore edges that are not executable.  */
      if (ignore_not_executable
	  && (e->flags & EDGE_EXECUTABLE) == 0)
	continue;

      /* If we have already seen a non-loop edge, then we must have
	 multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
	return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
	 it.  */
      retval = e;
    }

  return retval;
}
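
/* Illustrative sketch (not part of the original file): a typical query
   is whether a loop header, disregarding its back edge, is really
   entered from just one block.  Dominance information must be available,
   since back edges are recognized via dominated_by_p.  The helper name
   is hypothetical.  */
#if 0
static basic_block
example_sole_nonloop_pred (basic_block loop_header)
{
  edge e = single_pred_edge_ignoring_loop_edges (loop_header, false);
  return e ? e->src : NULL;
}
#endif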