1 /* Generic partial redundancy elimination with lazy code motion
3 Copyright (C) 1998 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* These routines are meant to be used by various optimization
23 passes which can be modeled as lazy code motion problems.
24 Including, but not limited to:
26 * Traditional partial redundancy elimination.
28 * Placement of caller/callee register save/restores.
34 * Conversion of flat register files to a stacked register
37 * Dead load/store elimination.
39 These routines accept as input:
41 * Basic block information (number of blocks, lists of
42 predecessors and successors). Note the granularity
43 does not need to be basic block, they could be statements
46 * Bitmaps of local properties (computed, transparent and
47 anticipatable expressions).
49 The output of these routines is a bitmap of redundant computations
50 and a bitmap of optimal placement points. */
58 #include "hard-reg-set.h"
61 #include "insn-config.h"
63 #include "basic-block.h"
65 /* Edge based LCM routines. */
66 static void compute_antinout_edge
PROTO ((sbitmap
*, sbitmap
*,
67 sbitmap
*, sbitmap
*));
68 static void compute_earliest
PROTO((struct edge_list
*, int, sbitmap
*,
69 sbitmap
*, sbitmap
*, sbitmap
*,
71 static void compute_laterin
PROTO((struct edge_list
*, sbitmap
*,
72 sbitmap
*, sbitmap
*, sbitmap
*));
73 static void compute_insert_delete
PROTO ((struct edge_list
*edge_list
,
74 sbitmap
*, sbitmap
*, sbitmap
*,
75 sbitmap
*, sbitmap
*));
77 /* Edge based LCM routines on a reverse flowgraph. */
78 static void compute_farthest
PROTO ((struct edge_list
*, int, sbitmap
*,
79 sbitmap
*, sbitmap
*, sbitmap
*,
81 static void compute_nearerout
PROTO((struct edge_list
*, sbitmap
*,
82 sbitmap
*, sbitmap
*, sbitmap
*));
83 static void compute_rev_insert_delete
PROTO ((struct edge_list
*edge_list
,
84 sbitmap
*, sbitmap
*, sbitmap
*,
85 sbitmap
*, sbitmap
*));
88 /* Edge based lcm routines. */
90 /* Compute expression anticipatability at entrance and exit of each block.
91 This is done based on the flow graph, and not on the pred-succ lists.
92 Other than that, its pretty much identical to compute_antinout. */
95 compute_antinout_edge (antloc
, transp
, antin
, antout
)
103 basic_block
*worklist
, *tos
;
105 /* Allocate a worklist array/queue. Entries are only added to the
106 list if they were not already on the list. So the size is
107 bounded by the number of basic blocks. */
108 tos
= worklist
= (basic_block
*) xmalloc (sizeof (basic_block
)
111 /* We want a maximal solution, so make an optimistic initialization of
113 sbitmap_vector_ones (antin
, n_basic_blocks
);
115 /* Put the predecessors of the exit block on the worklist. */
116 for (e
= EXIT_BLOCK_PTR
->pred
; e
; e
= e
->pred_next
)
120 /* We use the block's aux field to track blocks which are in
121 the worklist; we also use it to quickly determine which blocks
122 are predecessors of the EXIT block. */
123 e
->src
->aux
= EXIT_BLOCK_PTR
;
126 /* Iterate until the worklist is empty. */
127 while (tos
!= worklist
)
129 /* Take the first entry off the worklist. */
130 basic_block b
= *--tos
;
133 if (b
->aux
== EXIT_BLOCK_PTR
)
135 /* Do not clear the aux field for blocks which are
136 predecessors of the EXIT block. That way we never
137 add then to the worklist again. */
138 sbitmap_zero (antout
[bb
]);
142 /* Clear the aux field of this block so that it can be added to
143 the worklist again if necessary. */
145 sbitmap_intersection_of_succs (antout
[bb
], antin
, bb
);
148 if (sbitmap_a_or_b_and_c (antin
[bb
], antloc
[bb
], transp
[bb
], antout
[bb
]))
150 /* If the in state of this block changed, then we need
151 to add the predecessors of this block to the worklist
152 if they are not already on the worklist. */
153 for (e
= b
->pred
; e
; e
= e
->pred_next
)
155 if (!e
->src
->aux
&& e
->src
!= ENTRY_BLOCK_PTR
)
166 /* Compute the earliest vector for edge based lcm. */
168 compute_earliest (edge_list
, n_exprs
, antin
, antout
, avout
, kill
, earliest
)
169 struct edge_list
*edge_list
;
171 sbitmap
*antin
, *antout
, *avout
, *kill
, *earliest
;
173 sbitmap difference
, temp_bitmap
;
175 basic_block pred
, succ
;
177 num_edges
= NUM_EDGES (edge_list
);
179 difference
= sbitmap_alloc (n_exprs
);
180 temp_bitmap
= sbitmap_alloc (n_exprs
);
182 for (x
= 0; x
< num_edges
; x
++)
184 pred
= INDEX_EDGE_PRED_BB (edge_list
, x
);
185 succ
= INDEX_EDGE_SUCC_BB (edge_list
, x
);
186 if (pred
== ENTRY_BLOCK_PTR
)
187 sbitmap_copy (earliest
[x
], antin
[succ
->index
]);
190 if (succ
== EXIT_BLOCK_PTR
)
192 sbitmap_zero (earliest
[x
]);
196 sbitmap_difference (difference
, antin
[succ
->index
],
198 sbitmap_not (temp_bitmap
, antout
[pred
->index
]);
199 sbitmap_a_and_b_or_c (earliest
[x
], difference
, kill
[pred
->index
],
208 /* later(p,s) is dependent on the calculation of laterin(p).
209 laterin(p) is dependent on the calculation of later(p2,p).
211 laterin(ENTRY) is defined as all 0's
212 later(ENTRY, succs(ENTRY)) are defined using laterin(ENTRY)
213 laterin(succs(ENTRY)) is defined by later(ENTRY, succs(ENTRY)).
215 If we progress in this manner, starting with all basic blocks
216 in the work list, anytime we change later(bb), we need to add
217 succs(bb) to the worklist if they are not already on the worklist.
221 We prime the worklist all the normal basic blocks. The ENTRY block can
222 never be added to the worklist since it is never the successor of any
223 block. We explicitly prevent the EXIT block from being added to the
226 We optimistically initialize LATER. That is the only time this routine
227 will compute LATER for an edge out of the entry block since the entry
228 block is never on the worklist. Thus, LATERIN is neither used nor
229 computed for the ENTRY block.
231 Since the EXIT block is never added to the worklist, we will neither
232 use nor compute LATERIN for the exit block. Edges which reach the
233 EXIT block are handled in the normal fashion inside the loop. However,
234 the insertion/deletion computation needs LATERIN(EXIT), so we have
238 compute_laterin (edge_list
, earliest
, antloc
, later
, laterin
)
239 struct edge_list
*edge_list
;
240 sbitmap
*earliest
, *antloc
, *later
, *laterin
;
242 int bb
, num_edges
, i
;
244 basic_block
*worklist
, *tos
;
246 num_edges
= NUM_EDGES (edge_list
);
248 /* Allocate a worklist array/queue. Entries are only added to the
249 list if they were not already on the list. So the size is
250 bounded by the number of basic blocks. */
251 tos
= worklist
= (basic_block
*) xmalloc (sizeof (basic_block
)
252 * (n_basic_blocks
+ 1));
254 /* Initialize a mapping from each edge to its index. */
255 for (i
= 0; i
< num_edges
; i
++)
256 INDEX_EDGE (edge_list
, i
)->aux
= (void *)i
;
258 /* We want a maximal solution, so initially consider LATER true for
259 all edges. This allows propagation through a loop since the incoming
260 loop edge will have LATER set, so if all the other incoming edges
261 to the loop are set, then LATERIN will be set for the head of the
264 If the optimistic setting of LATER on that edge was incorrect (for
265 example the expression is ANTLOC in a block within the loop) then
266 this algorithm will detect it when we process the block at the head
267 of the optimistic edge. That will requeue the affected blocks. */
268 sbitmap_vector_ones (later
, num_edges
);
270 /* Add all the blocks to the worklist. This prevents an early exit from
271 the loop given our optimistic initialization of LATER above. */
272 for (bb
= n_basic_blocks
- 1; bb
>= 0; bb
--)
274 basic_block b
= BASIC_BLOCK (bb
);
279 /* Iterate until the worklist is empty. */
280 while (tos
!= worklist
)
282 /* Take the first entry off the worklist. */
283 basic_block b
= *--tos
;
286 /* Compute the intersection of LATERIN for each incoming edge to B. */
288 sbitmap_ones (laterin
[bb
]);
289 for (e
= b
->pred
; e
!= NULL
; e
= e
->pred_next
)
290 sbitmap_a_and_b (laterin
[bb
], laterin
[bb
], later
[(int)e
->aux
]);
292 /* Calculate LATER for all outgoing edges. */
293 for (e
= b
->succ
; e
!= NULL
; e
= e
->succ_next
)
295 if (sbitmap_union_of_diff (later
[(int)e
->aux
],
296 earliest
[(int)e
->aux
],
297 laterin
[e
->src
->index
],
298 antloc
[e
->src
->index
]))
300 /* If LATER for an outgoing edge was changed, then we need
301 to add the target of the outgoing edge to the worklist. */
302 if (e
->dest
!= EXIT_BLOCK_PTR
&& e
->dest
->aux
== 0)
311 /* Computation of insertion and deletion points requires computing LATERIN
312 for the EXIT block. We allocated an extra entry in the LATERIN array
313 for just this purpose. */
314 sbitmap_ones (laterin
[n_basic_blocks
]);
315 for (e
= EXIT_BLOCK_PTR
->pred
; e
!= NULL
; e
= e
->pred_next
)
316 sbitmap_a_and_b (laterin
[n_basic_blocks
],
317 laterin
[n_basic_blocks
],
323 /* Compute the insertion and deletion points for edge based LCM. */
325 compute_insert_delete (edge_list
, antloc
, later
, laterin
,
327 struct edge_list
*edge_list
;
328 sbitmap
*antloc
, *later
, *laterin
, *insert
, *delete;
332 for (x
= 0; x
< n_basic_blocks
; x
++)
333 sbitmap_difference (delete[x
], antloc
[x
], laterin
[x
]);
335 for (x
= 0; x
< NUM_EDGES (edge_list
); x
++)
337 basic_block b
= INDEX_EDGE_SUCC_BB (edge_list
, x
);
338 if (b
== EXIT_BLOCK_PTR
)
339 sbitmap_difference (insert
[x
], later
[x
], laterin
[n_basic_blocks
]);
341 sbitmap_difference (insert
[x
], later
[x
], laterin
[b
->index
]);
345 /* Given local properties TRANSP, ANTLOC, AVOUT, KILL return the
346 insert and delete vectors for edge based LCM. Returns an
347 edgelist which is used to map the insert vector to what edge
348 an expression should be inserted on. */
351 pre_edge_lcm (file
, n_exprs
, transp
, avloc
, antloc
, kill
, insert
, delete)
352 FILE *file ATTRIBUTE_UNUSED
;
361 sbitmap
*antin
, *antout
, *earliest
;
362 sbitmap
*avin
, *avout
;
363 sbitmap
*later
, *laterin
;
364 struct edge_list
*edge_list
;
367 edge_list
= create_edge_list ();
368 num_edges
= NUM_EDGES (edge_list
);
370 #ifdef LCM_DEBUG_INFO
373 fprintf (file
, "Edge List:\n");
374 verify_edge_list (file
, edge_list
);
375 print_edge_list (file
, edge_list
);
376 dump_sbitmap_vector (file
, "transp", "", transp
, n_basic_blocks
);
377 dump_sbitmap_vector (file
, "antloc", "", antloc
, n_basic_blocks
);
378 dump_sbitmap_vector (file
, "avloc", "", avloc
, n_basic_blocks
);
379 dump_sbitmap_vector (file
, "kill", "", kill
, n_basic_blocks
);
383 /* Compute global availability. */
384 avin
= sbitmap_vector_alloc (n_basic_blocks
, n_exprs
);
385 avout
= sbitmap_vector_alloc (n_basic_blocks
, n_exprs
);
386 compute_available (avloc
, kill
, avout
, avin
);
391 /* Compute global anticipatability. */
392 antin
= sbitmap_vector_alloc (n_basic_blocks
, n_exprs
);
393 antout
= sbitmap_vector_alloc (n_basic_blocks
, n_exprs
);
394 compute_antinout_edge (antloc
, transp
, antin
, antout
);
396 #ifdef LCM_DEBUG_INFO
399 dump_sbitmap_vector (file
, "antin", "", antin
, n_basic_blocks
);
400 dump_sbitmap_vector (file
, "antout", "", antout
, n_basic_blocks
);
404 /* Compute earliestness. */
405 earliest
= sbitmap_vector_alloc (num_edges
, n_exprs
);
406 compute_earliest (edge_list
, n_exprs
, antin
, antout
, avout
, kill
, earliest
);
408 #ifdef LCM_DEBUG_INFO
410 dump_sbitmap_vector (file
, "earliest", "", earliest
, num_edges
);
417 later
= sbitmap_vector_alloc (num_edges
, n_exprs
);
418 /* Allocate an extra element for the exit block in the laterin vector. */
419 laterin
= sbitmap_vector_alloc (n_basic_blocks
+ 1, n_exprs
);
420 compute_laterin (edge_list
, earliest
, antloc
, later
, laterin
);
423 #ifdef LCM_DEBUG_INFO
426 dump_sbitmap_vector (file
, "laterin", "", laterin
, n_basic_blocks
+ 1);
427 dump_sbitmap_vector (file
, "later", "", later
, num_edges
);
433 *insert
= sbitmap_vector_alloc (num_edges
, n_exprs
);
434 *delete = sbitmap_vector_alloc (n_basic_blocks
, n_exprs
);
435 compute_insert_delete (edge_list
, antloc
, later
, laterin
, *insert
, *delete);
440 #ifdef LCM_DEBUG_INFO
443 dump_sbitmap_vector (file
, "pre_insert_map", "", *insert
, num_edges
);
444 dump_sbitmap_vector (file
, "pre_delete_map", "", *delete, n_basic_blocks
);
451 /* Compute the AVIN and AVOUT vectors from the AVLOC and KILL vectors.
452 Return the number of passes we performed to iterate to a solution. */
454 compute_available (avloc
, kill
, avout
, avin
)
455 sbitmap
*avloc
, *kill
, *avout
, *avin
;
459 basic_block
*worklist
, *tos
;
461 /* Allocate a worklist array/queue. Entries are only added to the
462 list if they were not already on the list. So the size is
463 bounded by the number of basic blocks. */
464 tos
= worklist
= (basic_block
*) xmalloc (sizeof (basic_block
)
467 /* We want a maximal solution. */
468 sbitmap_vector_ones (avout
, n_basic_blocks
);
470 /* Put the successors of the entry block on the worklist. */
471 for (e
= ENTRY_BLOCK_PTR
->succ
; e
; e
= e
->succ_next
)
475 /* We use the block's aux field to track blocks which are in
476 the worklist; we also use it to quickly determine which blocks
477 are successors of the ENTRY block. */
478 e
->dest
->aux
= ENTRY_BLOCK_PTR
;
481 /* Iterate until the worklist is empty. */
482 while (tos
!= worklist
)
484 /* Take the first entry off the worklist. */
485 basic_block b
= *--tos
;
488 /* If one of the predecessor blocks is the ENTRY block, then the
489 intersection of avouts is the null set. We can identify such blocks
490 by the special value in the AUX field in the block structure. */
491 if (b
->aux
== ENTRY_BLOCK_PTR
)
493 /* Do not clear the aux field for blocks which are
494 successors of the ENTRY block. That way we never
495 add then to the worklist again. */
496 sbitmap_zero (avin
[bb
]);
500 /* Clear the aux field of this block so that it can be added to
501 the worklist again if necessary. */
503 sbitmap_intersection_of_preds (avin
[bb
], avout
, bb
);
506 if (sbitmap_union_of_diff (avout
[bb
], avloc
[bb
], avin
[bb
], kill
[bb
]))
508 /* If the out state of this block changed, then we need
509 to add the successors of this block to the worklist
510 if they are not already on the worklist. */
511 for (e
= b
->succ
; e
; e
= e
->succ_next
)
513 if (!e
->dest
->aux
&& e
->dest
!= EXIT_BLOCK_PTR
)
524 /* Compute the farthest vector for edge based lcm. */
526 compute_farthest (edge_list
, n_exprs
, st_avout
, st_avin
, st_antin
,
528 struct edge_list
*edge_list
;
530 sbitmap
*st_avout
, *st_avin
, *st_antin
, *kill
, *farthest
;
532 sbitmap difference
, temp_bitmap
;
534 basic_block pred
, succ
;
536 num_edges
= NUM_EDGES (edge_list
);
538 difference
= sbitmap_alloc (n_exprs
);
539 temp_bitmap
= sbitmap_alloc (n_exprs
);
541 for (x
= 0; x
< num_edges
; x
++)
543 pred
= INDEX_EDGE_PRED_BB (edge_list
, x
);
544 succ
= INDEX_EDGE_SUCC_BB (edge_list
, x
);
545 if (succ
== EXIT_BLOCK_PTR
)
546 sbitmap_copy (farthest
[x
], st_avout
[pred
->index
]);
549 if (pred
== ENTRY_BLOCK_PTR
)
551 sbitmap_zero (farthest
[x
]);
555 sbitmap_difference (difference
, st_avout
[pred
->index
],
556 st_antin
[succ
->index
]);
557 sbitmap_not (temp_bitmap
, st_avin
[succ
->index
]);
558 sbitmap_a_and_b_or_c (farthest
[x
], difference
,
559 kill
[succ
->index
], temp_bitmap
);
567 /* Compute nearer and nearerout vectors for edge based lcm.
569 This is the mirror of compute_laterin, additional comments on the
570 implementation can be found before compute_laterin. */
573 compute_nearerout (edge_list
, farthest
, st_avloc
, nearer
, nearerout
)
574 struct edge_list
*edge_list
;
575 sbitmap
*farthest
, *st_avloc
, *nearer
, *nearerout
;
577 int bb
, num_edges
, i
;
579 basic_block
*worklist
, *tos
;
581 num_edges
= NUM_EDGES (edge_list
);
583 /* Allocate a worklist array/queue. Entries are only added to the
584 list if they were not already on the list. So the size is
585 bounded by the number of basic blocks. */
586 tos
= worklist
= (basic_block
*) xmalloc (sizeof (basic_block
)
587 * (n_basic_blocks
+ 1));
589 /* Initialize NEARER for each edge and build a mapping from an edge to
591 for (i
= 0; i
< num_edges
; i
++)
592 INDEX_EDGE (edge_list
, i
)->aux
= (void *)i
;
594 /* We want a maximal solution. */
595 sbitmap_vector_ones (nearer
, num_edges
);
597 /* Add all the blocks to the worklist. This prevents an early exit
598 from the loop given our optimistic initialization of NEARER. */
599 for (bb
= 0; bb
< n_basic_blocks
; bb
++)
601 basic_block b
= BASIC_BLOCK (bb
);
606 /* Iterate until the worklist is empty. */
607 while (tos
!= worklist
)
609 /* Take the first entry off the worklist. */
610 basic_block b
= *--tos
;
613 /* Compute the intersection of NEARER for each outgoing edge from B. */
615 sbitmap_ones (nearerout
[bb
]);
616 for (e
= b
->succ
; e
!= NULL
; e
= e
->succ_next
)
617 sbitmap_a_and_b (nearerout
[bb
], nearerout
[bb
], nearer
[(int)e
->aux
]);
619 /* Calculate NEARER for all incoming edges. */
620 for (e
= b
->pred
; e
!= NULL
; e
= e
->pred_next
)
622 if (sbitmap_union_of_diff (nearer
[(int)e
->aux
],
623 farthest
[(int)e
->aux
],
624 nearerout
[e
->dest
->index
],
625 st_avloc
[e
->dest
->index
]))
627 /* If NEARER for an incoming edge was changed, then we need
628 to add the source of the incoming edge to the worklist. */
629 if (e
->src
!= ENTRY_BLOCK_PTR
&& e
->src
->aux
== 0)
638 /* Computation of insertion and deletion points requires computing NEAREROUT
639 for the ENTRY block. We allocated an extra entry in the NEAREROUT array
640 for just this purpose. */
641 sbitmap_ones (nearerout
[n_basic_blocks
]);
642 for (e
= ENTRY_BLOCK_PTR
->succ
; e
!= NULL
; e
= e
->succ_next
)
643 sbitmap_a_and_b (nearerout
[n_basic_blocks
],
644 nearerout
[n_basic_blocks
],
645 nearer
[(int)e
->aux
]);
650 /* Compute the insertion and deletion points for edge based LCM. */
652 compute_rev_insert_delete (edge_list
, st_avloc
, nearer
, nearerout
,
654 struct edge_list
*edge_list
;
655 sbitmap
*st_avloc
, *nearer
, *nearerout
, *insert
, *delete;
659 for (x
= 0; x
< n_basic_blocks
; x
++)
660 sbitmap_difference (delete[x
], st_avloc
[x
], nearerout
[x
]);
662 for (x
= 0; x
< NUM_EDGES (edge_list
); x
++)
664 basic_block b
= INDEX_EDGE_PRED_BB (edge_list
, x
);
665 if (b
== ENTRY_BLOCK_PTR
)
666 sbitmap_difference (insert
[x
], nearer
[x
], nearerout
[n_basic_blocks
]);
668 sbitmap_difference (insert
[x
], nearer
[x
], nearerout
[b
->index
]);
672 /* Given local properties TRANSP, ST_AVLOC, ST_ANTLOC, KILL return the
673 insert and delete vectors for edge based reverse LCM. Returns an
674 edgelist which is used to map the insert vector to what edge
675 an expression should be inserted on. */
678 pre_edge_rev_lcm (file
, n_exprs
, transp
, st_avloc
, st_antloc
, kill
,
680 FILE *file ATTRIBUTE_UNUSED
;
689 sbitmap
*st_antin
, *st_antout
;
690 sbitmap
*st_avout
, *st_avin
, *farthest
;
691 sbitmap
*nearer
, *nearerout
;
692 struct edge_list
*edge_list
;
695 edge_list
= create_edge_list ();
696 num_edges
= NUM_EDGES (edge_list
);
698 st_antin
= (sbitmap
*) sbitmap_vector_alloc (n_basic_blocks
, n_exprs
);
699 st_antout
= (sbitmap
*) sbitmap_vector_alloc (n_basic_blocks
, n_exprs
);
700 sbitmap_vector_zero (st_antin
, n_basic_blocks
);
701 sbitmap_vector_zero (st_antout
, n_basic_blocks
);
702 compute_antinout_edge (st_antloc
, transp
, st_antin
, st_antout
);
704 /* Compute global anticipatability. */
705 st_avout
= sbitmap_vector_alloc (n_basic_blocks
, n_exprs
);
706 st_avin
= sbitmap_vector_alloc (n_basic_blocks
, n_exprs
);
707 compute_available (st_avloc
, kill
, st_avout
, st_avin
);
709 #ifdef LCM_DEBUG_INFO
712 fprintf (file
, "Edge List:\n");
713 verify_edge_list (file
, edge_list
);
714 print_edge_list (file
, edge_list
);
715 dump_sbitmap_vector (file
, "transp", "", transp
, n_basic_blocks
);
716 dump_sbitmap_vector (file
, "st_avloc", "", st_avloc
, n_basic_blocks
);
717 dump_sbitmap_vector (file
, "st_antloc", "", st_antloc
, n_basic_blocks
);
718 dump_sbitmap_vector (file
, "st_antin", "", st_antin
, n_basic_blocks
);
719 dump_sbitmap_vector (file
, "st_antout", "", st_antout
, n_basic_blocks
);
720 dump_sbitmap_vector (file
, "st_kill", "", kill
, n_basic_blocks
);
724 #ifdef LCM_DEBUG_INFO
727 dump_sbitmap_vector (file
, "st_avout", "", st_avout
, n_basic_blocks
);
728 dump_sbitmap_vector (file
, "st_avin", "", st_avin
, n_basic_blocks
);
732 /* Compute farthestness. */
733 farthest
= sbitmap_vector_alloc (num_edges
, n_exprs
);
734 compute_farthest (edge_list
, n_exprs
, st_avout
, st_avin
, st_antin
,
737 #ifdef LCM_DEBUG_INFO
739 dump_sbitmap_vector (file
, "farthest", "", farthest
, num_edges
);
745 nearer
= sbitmap_vector_alloc (num_edges
, n_exprs
);
746 /* Allocate an extra element for the entry block. */
747 nearerout
= sbitmap_vector_alloc (n_basic_blocks
+ 1, n_exprs
);
748 compute_nearerout (edge_list
, farthest
, st_avloc
, nearer
, nearerout
);
750 #ifdef LCM_DEBUG_INFO
753 dump_sbitmap_vector (file
, "nearerout", "", nearerout
,
755 dump_sbitmap_vector (file
, "nearer", "", nearer
, num_edges
);
761 *insert
= sbitmap_vector_alloc (num_edges
, n_exprs
);
762 *delete = sbitmap_vector_alloc (n_basic_blocks
, n_exprs
);
763 compute_rev_insert_delete (edge_list
, st_avloc
, nearer
, nearerout
, *insert
, *delete);
768 #ifdef LCM_DEBUG_INFO
771 dump_sbitmap_vector (file
, "pre_insert_map", "", *insert
, num_edges
);
772 dump_sbitmap_vector (file
, "pre_delete_map", "", *delete, n_basic_blocks
);