]>
Commit | Line | Data |
---|---|---|
9ca8d59e | 1 | /* Generic partial redundancy elimination with lazy code motion support. |
d353bf18 | 2 | Copyright (C) 1998-2015 Free Software Foundation, Inc. |
e48ba7af | 3 | |
f12b58b3 | 4 | This file is part of GCC. |
e48ba7af | 5 | |
f12b58b3 | 6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free | |
8c4c00c1 | 8 | Software Foundation; either version 3, or (at your option) any later |
f12b58b3 | 9 | version. |
e48ba7af | 10 | |
f12b58b3 | 11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
e48ba7af | 15 | |
16 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
e48ba7af | 19 | |
20 | /* These routines are meant to be used by various optimization | |
5ab08585 | 21 | passes which can be modeled as lazy code motion problems. |
e48ba7af | 22 | Including, but not limited to: |
23 | ||
24 | * Traditional partial redundancy elimination. | |
25 | ||
26 | * Placement of caller/callee register save/restores. | |
27 | ||
28 | * Load/store motion. | |
29 | ||
30 | * Copy motion. | |
31 | ||
32 | * Conversion of flat register files to a stacked register | |
33 | model. | |
34 | ||
35 | * Dead load/store elimination. | |
36 | ||
37 | These routines accept as input: | |
38 | ||
39 | * Basic block information (number of blocks, lists of | |
40 | predecessors and successors). Note the granularity | |
41 | does not need to be basic block, they could be statements | |
42 | or functions. | |
43 | ||
44 | * Bitmaps of local properties (computed, transparent and | |
45 | anticipatable expressions). | |
46 | ||
47 | The output of these routines is bitmap of redundant computations | |
48 | and a bitmap of optimal placement points. */ | |
49 | ||
50 | ||
51 | #include "config.h" | |
52 | #include "system.h" | |
805e22b2 | 53 | #include "coretypes.h" |
54 | #include "tm.h" | |
e48ba7af | 55 | #include "rtl.h" |
56 | #include "regs.h" | |
57 | #include "hard-reg-set.h" | |
58 | #include "flags.h" | |
e48ba7af | 59 | #include "insn-config.h" |
60 | #include "recog.h" | |
94ea8568 | 61 | #include "predict.h" |
6ce54148 | 62 | #include "function.h" |
94ea8568 | 63 | #include "dominance.h" |
64 | #include "cfg.h" | |
65 | #include "cfganal.h" | |
66 | #include "lcm.h" | |
67 | #include "basic-block.h" | |
68 | #include "tm_p.h" | |
0f71a633 | 69 | #include "sbitmap.h" |
b9ed1410 | 70 | #include "dumpfile.h" |
e48ba7af | 71 | |
7bcd381b | 72 | /* Edge based LCM routines. */ |
3ad4992f | 73 | static void compute_antinout_edge (sbitmap *, sbitmap *, sbitmap *, sbitmap *); |
74 | static void compute_earliest (struct edge_list *, int, sbitmap *, sbitmap *, | |
75 | sbitmap *, sbitmap *, sbitmap *); | |
76 | static void compute_laterin (struct edge_list *, sbitmap *, sbitmap *, | |
77 | sbitmap *, sbitmap *); | |
78 | static void compute_insert_delete (struct edge_list *edge_list, sbitmap *, | |
79 | sbitmap *, sbitmap *, sbitmap *, sbitmap *); | |
7bcd381b | 80 | |
81 | /* Edge based LCM routines on a reverse flowgraph. */ | |
3ad4992f | 82 | static void compute_farthest (struct edge_list *, int, sbitmap *, sbitmap *, |
83 | sbitmap*, sbitmap *, sbitmap *); | |
84 | static void compute_nearerout (struct edge_list *, sbitmap *, sbitmap *, | |
85 | sbitmap *, sbitmap *); | |
86 | static void compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *, | |
87 | sbitmap *, sbitmap *, sbitmap *, | |
88 | sbitmap *); | |
7bcd381b | 89 | \f |
90 | /* Edge based lcm routines. */ | |
3b7e1f27 | 91 | |
/* Compute expression anticipatability at entrance and exit of each block.
   This is done based on the flow graph, and not on the pred-succ lists.
   Other than that, it's pretty much identical to compute_antinout.

   ANTLOC[B] is the set of expressions locally anticipatable in block B;
   TRANSP[B] is the set of expressions transparent through B.  The results
   are written into ANTIN and ANTOUT, one bitmap per basic block index.
   The fixpoint solved is ANTIN[B] = ANTLOC[B] | (TRANSP[B] & ANTOUT[B]),
   with ANTOUT[B] the intersection of ANTIN over B's successors.  */

static void
compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
		       sbitmap *antout)
{
  basic_block bb;
  edge e;
  /* Circular queue over the worklist array: QIN is the insertion point,
     QOUT the removal point, QEND the end of the usable region.  */
  basic_block *worklist, *qin, *qout, *qend;
  unsigned int qlen;
  edge_iterator ei;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* We want a maximal solution, so make an optimistic initialization of
     ANTIN.  */
  bitmap_vector_ones (antin, last_basic_block_for_fn (cfun));

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of ANTIN above.  Process blocks in
     postorder so that a block's successors tend to be visited first,
     which reduces the number of iterations for this backward problem.  */
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int postorder_num = post_order_compute (postorder, false, false);
  for (int i = 0; i < postorder_num; ++i)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
      *qin++ = bb;
      /* A non-NULL aux field marks the block as being on the worklist.  */
      bb->aux = bb;
    }
  free (postorder);

  qin = worklist;
  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;

  /* Mark blocks which are predecessors of the exit block so that we
     can easily identify them below.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    e->src->aux = EXIT_BLOCK_PTR_FOR_FN (cfun);

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      if (qout >= qend)
	qout = worklist;

      if (bb->aux == EXIT_BLOCK_PTR_FOR_FN (cfun))
	/* Do not clear the aux field for blocks which are predecessors of
	   the EXIT block.  That way we never add then to the worklist
	   again.  Nothing is anticipatable at the end of such blocks.  */
	bitmap_clear (antout[bb->index]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  bitmap_intersection_of_succs (antout[bb->index], antin, bb);
	}

      if (bitmap_or_and (antin[bb->index], antloc[bb->index],
			 transp[bb->index], antout[bb->index]))
	/* If the in state of this block changed, then we need
	   to add the predecessors of this block to the worklist
	   if they are not already on the worklist.  */
	FOR_EACH_EDGE (e, ei, bb->preds)
	  if (!e->src->aux && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    {
	      *qin++ = e->src;
	      e->src->aux = e;
	      qlen++;
	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}
179 | ||
7bcd381b | 180 | /* Compute the earliest vector for edge based lcm. */ |
9ca8d59e | 181 | |
e48ba7af | 182 | static void |
3ad4992f | 183 | compute_earliest (struct edge_list *edge_list, int n_exprs, sbitmap *antin, |
184 | sbitmap *antout, sbitmap *avout, sbitmap *kill, | |
185 | sbitmap *earliest) | |
e48ba7af | 186 | { |
7bcd381b | 187 | sbitmap difference, temp_bitmap; |
5ab08585 | 188 | int x, num_edges; |
7bcd381b | 189 | basic_block pred, succ; |
e48ba7af | 190 | |
7bcd381b | 191 | num_edges = NUM_EDGES (edge_list); |
e48ba7af | 192 | |
7bcd381b | 193 | difference = sbitmap_alloc (n_exprs); |
194 | temp_bitmap = sbitmap_alloc (n_exprs); | |
e48ba7af | 195 | |
7bcd381b | 196 | for (x = 0; x < num_edges; x++) |
e48ba7af | 197 | { |
7bcd381b | 198 | pred = INDEX_EDGE_PRED_BB (edge_list, x); |
199 | succ = INDEX_EDGE_SUCC_BB (edge_list, x); | |
34154e27 | 200 | if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun)) |
53c5d9d4 | 201 | bitmap_copy (earliest[x], antin[succ->index]); |
7bcd381b | 202 | else |
8851e806 | 203 | { |
34154e27 | 204 | if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun)) |
53c5d9d4 | 205 | bitmap_clear (earliest[x]); |
7bcd381b | 206 | else |
e48ba7af | 207 | { |
53c5d9d4 | 208 | bitmap_and_compl (difference, antin[succ->index], |
b3d6de89 | 209 | avout[pred->index]); |
53c5d9d4 | 210 | bitmap_not (temp_bitmap, antout[pred->index]); |
211 | bitmap_and_or (earliest[x], difference, | |
b3d6de89 | 212 | kill[pred->index], temp_bitmap); |
e48ba7af | 213 | } |
214 | } | |
e48ba7af | 215 | } |
9ca8d59e | 216 | |
f5123ed5 | 217 | sbitmap_free (temp_bitmap); |
218 | sbitmap_free (difference); | |
e48ba7af | 219 | } |
220 | ||
2325f0e2 | 221 | /* later(p,s) is dependent on the calculation of laterin(p). |
222 | laterin(p) is dependent on the calculation of later(p2,p). | |
223 | ||
224 | laterin(ENTRY) is defined as all 0's | |
225 | later(ENTRY, succs(ENTRY)) are defined using laterin(ENTRY) | |
226 | laterin(succs(ENTRY)) is defined by later(ENTRY, succs(ENTRY)). | |
227 | ||
228 | If we progress in this manner, starting with all basic blocks | |
229 | in the work list, anytime we change later(bb), we need to add | |
230 | succs(bb) to the worklist if they are not already on the worklist. | |
231 | ||
232 | Boundary conditions: | |
233 | ||
234 | We prime the worklist all the normal basic blocks. The ENTRY block can | |
235 | never be added to the worklist since it is never the successor of any | |
236 | block. We explicitly prevent the EXIT block from being added to the | |
237 | worklist. | |
238 | ||
239 | We optimistically initialize LATER. That is the only time this routine | |
240 | will compute LATER for an edge out of the entry block since the entry | |
241 | block is never on the worklist. Thus, LATERIN is neither used nor | |
242 | computed for the ENTRY block. | |
243 | ||
244 | Since the EXIT block is never added to the worklist, we will neither | |
245 | use nor compute LATERIN for the exit block. Edges which reach the | |
246 | EXIT block are handled in the normal fashion inside the loop. However, | |
247 | the insertion/deletion computation needs LATERIN(EXIT), so we have | |
248 | to compute it. */ | |
5ab08585 | 249 | |
/* Solve the LATER/LATERIN dataflow problem described in the comment
   above.  EDGE_LIST enumerates the CFG edges; EARLIEST is the per-edge
   result of compute_earliest; ANTLOC is local anticipatability.  LATER
   (per edge) and LATERIN (per block, with one extra trailing entry
   standing in for the EXIT block) are the outputs.  */

static void
compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
		 sbitmap *antloc, sbitmap *later, sbitmap *laterin)
{
  int num_edges, i;
  edge e;
  /* Circular queue over the worklist array: QIN inserts, QOUT removes.  */
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;
  edge_iterator ei;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist
    = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* Initialize a mapping from each edge to its index, stashed in the
     edge's aux field so LATER/EARLIEST can be indexed from an edge.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution, so initially consider LATER true for
     all edges.  This allows propagation through a loop since the incoming
     loop edge will have LATER set, so if all the other incoming edges
     to the loop are set, then LATERIN will be set for the head of the
     loop.

     If the optimistic setting of LATER on that edge was incorrect (for
     example the expression is ANTLOC in a block within the loop) then
     this algorithm will detect it when we process the block at the head
     of the optimistic edge.  That will requeue the affected blocks.  */
  bitmap_vector_ones (later, num_edges);

  /* Note that even though we want an optimistic setting of LATER, we
     do not want to be overly optimistic.  Consider an outgoing edge from
     the entry block.  That edge should always have a LATER value the
     same as EARLIEST for that edge.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    bitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit from
     the loop given our optimistic initialization of LATER above.  Use an
     inverted postorder so predecessors tend to be processed first in this
     forward problem.  */
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int postorder_num = inverted_post_order_compute (postorder);
  for (int i = 0; i < postorder_num; ++i)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;
      *qin++ = bb;
      /* A non-NULL aux field marks the block as queued.  */
      bb->aux = bb;
    }
  free (postorder);

  /* Note that we do not use the last allocated element for our queue,
     as EXIT_BLOCK is never inserted into it.  */
  qin = worklist;
  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      bb->aux = NULL;
      qlen--;
      if (qout >= qend)
	qout = worklist;

      /* Compute the intersection of LATERIN for each incoming edge to B.  */
      bitmap_ones (laterin[bb->index]);
      FOR_EACH_EDGE (e, ei, bb->preds)
	bitmap_and (laterin[bb->index], laterin[bb->index],
		    later[(size_t)e->aux]);

      /* Calculate LATER for all outgoing edges:
	 LATER[e] = EARLIEST[e] | (LATERIN[b] & ~ANTLOC[b]).  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (bitmap_ior_and_compl (later[(size_t) e->aux],
				  earliest[(size_t) e->aux],
				  laterin[bb->index],
				  antloc[bb->index])
	    /* If LATER for an outgoing edge was changed, then we need
	       to add the target of the outgoing edge to the worklist.  */
	    && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest->aux == 0)
	  {
	    *qin++ = e->dest;
	    e->dest->aux = e;
	    qlen++;
	    if (qin >= qend)
	      qin = worklist;
	  }
    }

  /* Computation of insertion and deletion points requires computing LATERIN
     for the EXIT block.  We allocated an extra entry in the LATERIN array
     for just this purpose.  */
  bitmap_ones (laterin[last_basic_block_for_fn (cfun)]);
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    bitmap_and (laterin[last_basic_block_for_fn (cfun)],
		laterin[last_basic_block_for_fn (cfun)],
		later[(size_t) e->aux]);

  clear_aux_for_edges ();
  free (worklist);
}
358 | ||
7bcd381b | 359 | /* Compute the insertion and deletion points for edge based LCM. */ |
9ca8d59e | 360 | |
7bcd381b | 361 | static void |
3ad4992f | 362 | compute_insert_delete (struct edge_list *edge_list, sbitmap *antloc, |
363 | sbitmap *later, sbitmap *laterin, sbitmap *insert, | |
9ce37fa7 | 364 | sbitmap *del) |
7bcd381b | 365 | { |
366 | int x; | |
4c26117a | 367 | basic_block bb; |
e48ba7af | 368 | |
fc00614f | 369 | FOR_EACH_BB_FN (bb, cfun) |
53c5d9d4 | 370 | bitmap_and_compl (del[bb->index], antloc[bb->index], |
d4d93ea0 | 371 | laterin[bb->index]); |
5ab08585 | 372 | |
7bcd381b | 373 | for (x = 0; x < NUM_EDGES (edge_list); x++) |
374 | { | |
375 | basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x); | |
9ca8d59e | 376 | |
34154e27 | 377 | if (b == EXIT_BLOCK_PTR_FOR_FN (cfun)) |
fe672ac0 | 378 | bitmap_and_compl (insert[x], later[x], |
379 | laterin[last_basic_block_for_fn (cfun)]); | |
7bcd381b | 380 | else |
53c5d9d4 | 381 | bitmap_and_compl (insert[x], later[x], laterin[b->index]); |
7bcd381b | 382 | } |
383 | } | |
e48ba7af | 384 | |
/* Given local properties TRANSP, ANTLOC, AVLOC and KILL for N_EXPRS
   expressions, run the full edge-based LCM pipeline.  The caller supplies
   AVIN/AVOUT bitmaps (filled in as a side effect); *INSERT and *DEL are
   allocated here and map each edge/block to the expressions to insert on
   that edge or delete from that block.  Returns the edge list whose
   indices the INSERT vector refers to; the caller owns it along with
   *INSERT and *DEL.  */

struct edge_list *
pre_edge_lcm_avs (int n_exprs, sbitmap *transp,
		  sbitmap *avloc, sbitmap *antloc, sbitmap *kill,
		  sbitmap *avin, sbitmap *avout,
		  sbitmap **insert, sbitmap **del)
{
  sbitmap *antin, *antout, *earliest;
  sbitmap *later, *laterin;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      fprintf (dump_file, "Edge List:\n");
      verify_edge_list (dump_file, edge_list);
      print_edge_list (dump_file, edge_list);
      dump_bitmap_vector (dump_file, "transp", "", transp,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "antloc", "", antloc,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "avloc", "", avloc,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "kill", "", kill,
			  last_basic_block_for_fn (cfun));
    }
#endif

  /* Compute global availability.  */
  compute_available (avloc, kill, avout, avin);

  /* Compute global anticipatability.  */
  antin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  antout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  compute_antinout_edge (antloc, transp, antin, antout);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "antin", "", antin,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "antout", "", antout,
			  last_basic_block_for_fn (cfun));
    }
#endif

  /* Compute earliestness.  */
  earliest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_earliest (edge_list, n_exprs, antin, antout, avout, kill, earliest);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    dump_bitmap_vector (dump_file, "earliest", "", earliest, num_edges);
#endif

  /* ANTIN/ANTOUT are only needed to compute EARLIEST.  */
  sbitmap_vector_free (antout);
  sbitmap_vector_free (antin);

  later = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the exit block in the laterin vector.  */
  laterin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun) + 1,
				  n_exprs);
  compute_laterin (edge_list, earliest, antloc, later, laterin);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "laterin", "", laterin,
			  last_basic_block_for_fn (cfun) + 1);
      dump_bitmap_vector (dump_file, "later", "", later, num_edges);
    }
#endif

  sbitmap_vector_free (earliest);

  /* Ownership of *INSERT and *DEL passes to the caller.  */
  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *del = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  bitmap_vector_clear (*insert, num_edges);
  bitmap_vector_clear (*del, last_basic_block_for_fn (cfun));
  compute_insert_delete (edge_list, antloc, later, laterin, *insert, *del);

  sbitmap_vector_free (laterin);
  sbitmap_vector_free (later);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
      dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
			  last_basic_block_for_fn (cfun));
    }
#endif

  return edge_list;
}
3b7e1f27 | 488 | |
7fc0df2f | 489 | /* Wrapper to allocate avin/avout and call pre_edge_lcm_avs. */ |
490 | ||
491 | struct edge_list * | |
492 | pre_edge_lcm (int n_exprs, sbitmap *transp, | |
493 | sbitmap *avloc, sbitmap *antloc, sbitmap *kill, | |
494 | sbitmap **insert, sbitmap **del) | |
495 | { | |
496 | struct edge_list *edge_list; | |
497 | sbitmap *avin, *avout; | |
498 | ||
499 | avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs); | |
500 | avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs); | |
501 | ||
502 | edge_list = pre_edge_lcm_avs (n_exprs, transp, avloc, antloc, kill, | |
503 | avin, avout, insert, del); | |
504 | ||
505 | sbitmap_vector_free (avout); | |
506 | sbitmap_vector_free (avin); | |
507 | ||
508 | return edge_list; | |
509 | } | |
510 | ||
/* Compute the AVIN and AVOUT vectors from the AVLOC and KILL vectors.
   Iterates a worklist to the maximal fixpoint of
   AVOUT[B] = AVLOC[B] | (AVIN[B] & ~KILL[B]), with AVIN[B] the
   intersection of AVOUT over B's predecessors.  */

void
compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
		   sbitmap *avin)
{
  edge e;
  /* Circular queue over the worklist array: QIN inserts, QOUT removes.  */
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;
  edge_iterator ei;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist =
    XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);

  /* We want a maximal solution.  */
  bitmap_vector_ones (avout, last_basic_block_for_fn (cfun));

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of AVOUT above.  Use inverted postorder
     to make the dataflow problem require less iterations.  */
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int postorder_num = inverted_post_order_compute (postorder);
  for (int i = 0; i < postorder_num; ++i)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;
      *qin++ = bb;
      /* A non-NULL aux field marks the block as queued.  */
      bb->aux = bb;
    }
  free (postorder);

  qin = worklist;
  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;

  /* Mark blocks which are successors of the entry block so that we
     can easily identify them below.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->dest->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      if (qout >= qend)
	qout = worklist;

      /* If one of the predecessor blocks is the ENTRY block, then the
	 intersection of avouts is the null set.  We can identify such blocks
	 by the special value in the AUX field in the block structure.  */
      if (bb->aux == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	/* Do not clear the aux field for blocks which are successors of the
	   ENTRY block.  That way we never add then to the worklist again.  */
	bitmap_clear (avin[bb->index]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  bitmap_intersection_of_preds (avin[bb->index], avout, bb);
	}

      if (bitmap_ior_and_compl (avout[bb->index], avloc[bb->index],
				avin[bb->index], kill[bb->index]))
	/* If the out state of this block changed, then we need
	   to add the successors of this block to the worklist
	   if they are not already on the worklist.  */
	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      *qin++ = e->dest;
	      e->dest->aux = e;
	      qlen++;

	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}
603 | ||
7bcd381b | 604 | /* Compute the farthest vector for edge based lcm. */ |
9ca8d59e | 605 | |
e48ba7af | 606 | static void |
3ad4992f | 607 | compute_farthest (struct edge_list *edge_list, int n_exprs, |
608 | sbitmap *st_avout, sbitmap *st_avin, sbitmap *st_antin, | |
609 | sbitmap *kill, sbitmap *farthest) | |
e48ba7af | 610 | { |
7bcd381b | 611 | sbitmap difference, temp_bitmap; |
5ab08585 | 612 | int x, num_edges; |
7bcd381b | 613 | basic_block pred, succ; |
e48ba7af | 614 | |
7bcd381b | 615 | num_edges = NUM_EDGES (edge_list); |
e48ba7af | 616 | |
7bcd381b | 617 | difference = sbitmap_alloc (n_exprs); |
618 | temp_bitmap = sbitmap_alloc (n_exprs); | |
e48ba7af | 619 | |
7bcd381b | 620 | for (x = 0; x < num_edges; x++) |
e48ba7af | 621 | { |
7bcd381b | 622 | pred = INDEX_EDGE_PRED_BB (edge_list, x); |
623 | succ = INDEX_EDGE_SUCC_BB (edge_list, x); | |
34154e27 | 624 | if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun)) |
53c5d9d4 | 625 | bitmap_copy (farthest[x], st_avout[pred->index]); |
7bcd381b | 626 | else |
e48ba7af | 627 | { |
34154e27 | 628 | if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun)) |
53c5d9d4 | 629 | bitmap_clear (farthest[x]); |
7bcd381b | 630 | else |
631 | { | |
53c5d9d4 | 632 | bitmap_and_compl (difference, st_avout[pred->index], |
b3d6de89 | 633 | st_antin[succ->index]); |
53c5d9d4 | 634 | bitmap_not (temp_bitmap, st_avin[succ->index]); |
635 | bitmap_and_or (farthest[x], difference, | |
b3d6de89 | 636 | kill[succ->index], temp_bitmap); |
7bcd381b | 637 | } |
e48ba7af | 638 | } |
e48ba7af | 639 | } |
9ca8d59e | 640 | |
f5123ed5 | 641 | sbitmap_free (temp_bitmap); |
642 | sbitmap_free (difference); | |
e48ba7af | 643 | } |
644 | ||
/* Compute nearer and nearerout vectors for edge based lcm.

   This is the mirror of compute_laterin; additional comments on the
   implementation can be found before compute_laterin.

   EDGE_LIST enumerates the CFG edges; FARTHEST is the per-edge result of
   compute_farthest; ST_AVLOC is local availability for the reverse
   problem.  NEARER (per edge) and NEAREROUT (per block, with one extra
   trailing entry standing in for the ENTRY block) are the outputs.  */

static void
compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
		   sbitmap *st_avloc, sbitmap *nearer, sbitmap *nearerout)
{
  int num_edges, i;
  edge e;
  /* Unlike compute_laterin's circular queue, this uses a simple
     stack: TOS is the top-of-stack pointer into WORKLIST.  */
  basic_block *worklist, *tos, bb;
  edge_iterator ei;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);

  /* Initialize NEARER for each edge and build a mapping from an edge to
     its index, stashed in the edge's aux field.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution.  */
  bitmap_vector_ones (nearer, num_edges);

  /* Note that even though we want an optimistic setting of NEARER, we
     do not want to be overly optimistic.  Consider an incoming edge to
     the exit block.  That edge should always have a NEARER value the
     same as FARTHEST for that edge.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    bitmap_copy (nearer[(size_t)e->aux], farthest[(size_t)e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit
     from the loop given our optimistic initialization of NEARER.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      *tos++ = bb;
      /* A non-NULL aux field marks the block as on the stack.  */
      bb->aux = bb;
    }

  /* Iterate until the worklist is empty.  */
  while (tos != worklist)
    {
      /* Take the first entry off the worklist.  */
      bb = *--tos;
      bb->aux = NULL;

      /* Compute the intersection of NEARER for each outgoing edge from B.  */
      bitmap_ones (nearerout[bb->index]);
      FOR_EACH_EDGE (e, ei, bb->succs)
	bitmap_and (nearerout[bb->index], nearerout[bb->index],
		    nearer[(size_t) e->aux]);

      /* Calculate NEARER for all incoming edges:
	 NEARER[e] = FARTHEST[e] | (NEAREROUT[dest] & ~ST_AVLOC[dest]).  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (bitmap_ior_and_compl (nearer[(size_t) e->aux],
				  farthest[(size_t) e->aux],
				  nearerout[e->dest->index],
				  st_avloc[e->dest->index])
	    /* If NEARER for an incoming edge was changed, then we need
	       to add the source of the incoming edge to the worklist.  */
	    && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) && e->src->aux == 0)
	  {
	    *tos++ = e->src;
	    e->src->aux = e;
	  }
    }

  /* Computation of insertion and deletion points requires computing NEAREROUT
     for the ENTRY block.  We allocated an extra entry in the NEAREROUT array
     for just this purpose.  */
  bitmap_ones (nearerout[last_basic_block_for_fn (cfun)]);
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    bitmap_and (nearerout[last_basic_block_for_fn (cfun)],
		nearerout[last_basic_block_for_fn (cfun)],
		nearer[(size_t) e->aux]);

  clear_aux_for_edges ();
  /* TOS == WORKLIST when the loop above exits, so this frees the
     worklist allocation.  */
  free (tos);
}
e48ba7af | 729 | |
7bcd381b | 730 | /* Compute the insertion and deletion points for edge based LCM. */ |
9ca8d59e | 731 | |
e48ba7af | 732 | static void |
3ad4992f | 733 | compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *st_avloc, |
734 | sbitmap *nearer, sbitmap *nearerout, | |
9ce37fa7 | 735 | sbitmap *insert, sbitmap *del) |
e48ba7af | 736 | { |
7bcd381b | 737 | int x; |
4c26117a | 738 | basic_block bb; |
e48ba7af | 739 | |
fc00614f | 740 | FOR_EACH_BB_FN (bb, cfun) |
53c5d9d4 | 741 | bitmap_and_compl (del[bb->index], st_avloc[bb->index], |
d4d93ea0 | 742 | nearerout[bb->index]); |
5ab08585 | 743 | |
7bcd381b | 744 | for (x = 0; x < NUM_EDGES (edge_list); x++) |
e48ba7af | 745 | { |
7bcd381b | 746 | basic_block b = INDEX_EDGE_PRED_BB (edge_list, x); |
34154e27 | 747 | if (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)) |
fe672ac0 | 748 | bitmap_and_compl (insert[x], nearer[x], |
749 | nearerout[last_basic_block_for_fn (cfun)]); | |
e48ba7af | 750 | else |
53c5d9d4 | 751 | bitmap_and_compl (insert[x], nearer[x], nearerout[b->index]); |
e48ba7af | 752 | } |
e48ba7af | 753 | } |
754 | ||
/* Given local properties TRANSP, ST_AVLOC, ST_ANTLOC, KILL return the
   insert and delete vectors for edge based reverse LCM.  Returns an
   edgelist which is used to map the insert vector to what edge
   an expression should be inserted on.

   N_EXPRS is the number of expressions tracked (the width of every
   bitmap row).  On return *INSERT has one bitmap per edge and *DEL one
   bitmap per basic block; the caller owns the returned edge list and
   both vectors and must free them (free_edge_list /
   sbitmap_vector_free).  */

struct edge_list *
pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
		  sbitmap *st_avloc, sbitmap *st_antloc, sbitmap *kill,
		  sbitmap **insert, sbitmap **del)
{
  sbitmap *st_antin, *st_antout;
  sbitmap *st_avout, *st_avin, *farthest;
  sbitmap *nearer, *nearerout;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

  /* Compute global anticipatability (ANTIN/ANTOUT).  The vectors are
     cleared first because compute_antinout_edge only updates rows for
     blocks it visits.  */
  st_antin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  st_antout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  bitmap_vector_clear (st_antin, last_basic_block_for_fn (cfun));
  bitmap_vector_clear (st_antout, last_basic_block_for_fn (cfun));
  compute_antinout_edge (st_antloc, transp, st_antin, st_antout);

  /* Compute global availability (AVIN/AVOUT).  NOTE(review): the
     historical comment here said "anticipatability", but
     compute_available fills the availability vectors.  */
  st_avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  st_avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  compute_available (st_avloc, kill, st_avout, st_avin);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      fprintf (dump_file, "Edge List:\n");
      verify_edge_list (dump_file, edge_list);
      print_edge_list (dump_file, edge_list);
      dump_bitmap_vector (dump_file, "transp", "", transp,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_avloc", "", st_avloc,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_antloc", "", st_antloc,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_antin", "", st_antin,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_antout", "", st_antout,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_kill", "", kill,
			  last_basic_block_for_fn (cfun));
    }
#endif

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "st_avout", "", st_avout, last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_avin", "", st_avin, last_basic_block_for_fn (cfun));
    }
#endif

  /* Compute farthestness: for each edge, how far from the exit an
     expression may be placed.  One bitmap row per edge.  */
  farthest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_farthest (edge_list, n_exprs, st_avout, st_avin, st_antin,
		    kill, farthest);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    dump_bitmap_vector (dump_file, "farthest", "", farthest, num_edges);
#endif

  /* ANTIN/ANTOUT and AVIN/AVOUT are only inputs to compute_farthest;
     release them before the next solver pass.  */
  sbitmap_vector_free (st_antin);
  sbitmap_vector_free (st_antout);

  sbitmap_vector_free (st_avin);
  sbitmap_vector_free (st_avout);

  nearer = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the entry block; compute_nearerout
     stores the ENTRY block's NEAREROUT in that last slot.  */
  nearerout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun) + 1,
				    n_exprs);
  compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "nearerout", "", nearerout,
			   last_basic_block_for_fn (cfun) + 1);
      dump_bitmap_vector (dump_file, "nearer", "", nearer, num_edges);
    }
#endif

  /* FARTHEST was consumed by compute_nearerout.  */
  sbitmap_vector_free (farthest);

  /* Finally derive the per-edge insertion and per-block deletion maps
     handed back to the caller.  */
  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *del = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
			     *insert, *del);

  sbitmap_vector_free (nearerout);
  sbitmap_vector_free (nearer);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
      dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
			  last_basic_block_for_fn (cfun));
    }
#endif
  return edge_list;
}
18862b5a | 866 |