]>
Commit | Line | Data |
---|---|---|
9ca8d59e | 1 | /* Generic partial redundancy elimination with lazy code motion support. |
b25a2bca | 2 | Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc. |
e48ba7af | 3 | |
f12b58b3 | 4 | This file is part of GCC. |
e48ba7af | 5 | |
f12b58b3 | 6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free | |
8 | Software Foundation; either version 2, or (at your option) any later | |
9 | version. | |
e48ba7af | 10 | |
f12b58b3 | 11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
e48ba7af | 15 | |
16 | You should have received a copy of the GNU General Public License | |
f12b58b3 | 17 | along with GCC; see the file COPYING. If not, write to the Free |
18 | Software Foundation, 59 Temple Place - Suite 330, Boston, MA | |
19 | 02111-1307, USA. */ | |
e48ba7af | 20 | |
21 | /* These routines are meant to be used by various optimization | |
5ab08585 | 22 | passes which can be modeled as lazy code motion problems. |
e48ba7af | 23 | Including, but not limited to: |
24 | ||
25 | * Traditional partial redundancy elimination. | |
26 | ||
27 | * Placement of caller/callee register save/restores. | |
28 | ||
29 | * Load/store motion. | |
30 | ||
31 | * Copy motion. | |
32 | ||
33 | * Conversion of flat register files to a stacked register | |
34 | model. | |
35 | ||
36 | * Dead load/store elimination. | |
37 | ||
38 | These routines accept as input: | |
39 | ||
40 | * Basic block information (number of blocks, lists of | |
41 | predecessors and successors). Note the granularity | |
42 | does not need to be basic block, they could be statements | |
43 | or functions. | |
44 | ||
45 | * Bitmaps of local properties (computed, transparent and | |
46 | anticipatable expressions). | |
47 | ||
48 | The output of these routines is bitmap of redundant computations | |
49 | and a bitmap of optimal placement points. */ | |
50 | ||
51 | ||
52 | #include "config.h" | |
53 | #include "system.h" | |
e48ba7af | 54 | #include "rtl.h" |
55 | #include "regs.h" | |
56 | #include "hard-reg-set.h" | |
57 | #include "flags.h" | |
58 | #include "real.h" | |
59 | #include "insn-config.h" | |
60 | #include "recog.h" | |
61 | #include "basic-block.h" | |
742ee135 | 62 | #include "output.h" |
18862b5a | 63 | #include "tm_p.h" |
9ca8d59e | 64 | |
18862b5a | 65 | /* We want target macros for the mode switching code to be able to refer |
66 | to instruction attribute values. */ | |
67 | #include "insn-attr.h" | |
e48ba7af | 68 | |
/* Edge based LCM routines.  */
static void compute_antinout_edge	PARAMS ((sbitmap *, sbitmap *,
						 sbitmap *, sbitmap *));
static void compute_earliest		PARAMS ((struct edge_list *, int,
						 sbitmap *, sbitmap *,
						 sbitmap *, sbitmap *,
						 sbitmap *));
static void compute_laterin		PARAMS ((struct edge_list *, sbitmap *,
						 sbitmap *, sbitmap *,
						 sbitmap *));
static void compute_insert_delete	PARAMS ((struct edge_list *edge_list,
						 sbitmap *, sbitmap *,
						 sbitmap *, sbitmap *,
						 sbitmap *));

/* Edge based LCM routines on a reverse flowgraph.  */
static void compute_farthest		PARAMS ((struct edge_list *, int,
						 sbitmap *, sbitmap *,
						 sbitmap *, sbitmap *,
						 sbitmap *));
static void compute_nearerout		PARAMS ((struct edge_list *, sbitmap *,
						 sbitmap *, sbitmap *,
						 sbitmap *));
static void compute_rev_insert_delete	PARAMS ((struct edge_list *edge_list,
						 sbitmap *, sbitmap *,
						 sbitmap *, sbitmap *,
						 sbitmap *));
7bcd381b | 96 | \f |
/* Edge based lcm routines.  */

/* Compute expression anticipatability at entrance and exit of each block.
   This is done based on the flow graph, and not on the pred-succ lists.
   Other than that, it's pretty much identical to compute_antinout.

   ANTLOC[B] and TRANSP[B] are the local properties of block B; the global
   solution is written into ANTIN[B] and ANTOUT[B].  The equations solved,
   to a maximal fixed point, are

	ANTOUT[B] = intersection of ANTIN over B's successors
	ANTIN[B]  = ANTLOC[B] | (TRANSP[B] & ANTOUT[B])

   using a circular-buffer worklist of blocks whose solution may still
   change.  A block's AUX field is non-NULL while it is queued (or is
   EXIT_BLOCK_PTR permanently, see below).  */

static void
compute_antinout_edge (antloc, transp, antin, antout)
     sbitmap *antloc;
     sbitmap *transp;
     sbitmap *antin;
     sbitmap *antout;
{
  basic_block bb;
  edge e;
  /* Circular queue: QIN is the insertion point, QOUT the removal point,
     QEND the end of the underlying array.  */
  basic_block *worklist, *qin, *qout, *qend;
  unsigned int qlen;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist
    = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);

  /* We want a maximal solution, so make an optimistic initialization of
     ANTIN.  */
  sbitmap_vector_ones (antin, last_basic_block);

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of ANTIN above.  Reverse order so the
     backward problem converges faster.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  qin = worklist;
  qend = &worklist[n_basic_blocks];
  qlen = n_basic_blocks;

  /* Mark blocks which are predecessors of the exit block so that we
     can easily identify them below.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    e->src->aux = EXIT_BLOCK_PTR;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      if (qout >= qend)
	qout = worklist;

      if (bb->aux == EXIT_BLOCK_PTR)
	/* Do not clear the aux field for blocks which are predecessors of
	   the EXIT block.  That way we never add them to the worklist
	   again.  Their ANTOUT is simply empty.  */
	sbitmap_zero (antout[bb->index]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  sbitmap_intersection_of_succs (antout[bb->index], antin, bb->index);
	}

      if (sbitmap_a_or_b_and_c_cg (antin[bb->index], antloc[bb->index],
				   transp[bb->index], antout[bb->index]))
	/* If the in state of this block changed, then we need
	   to add the predecessors of this block to the worklist
	   if they are not already on the worklist.  */
	for (e = bb->pred; e; e = e->pred_next)
	  if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
	    {
	      *qin++ = e->src;
	      e->src->aux = e;
	      qlen++;
	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}
185 | ||
7bcd381b | 186 | /* Compute the earliest vector for edge based lcm. */ |
9ca8d59e | 187 | |
e48ba7af | 188 | static void |
7bcd381b | 189 | compute_earliest (edge_list, n_exprs, antin, antout, avout, kill, earliest) |
190 | struct edge_list *edge_list; | |
e48ba7af | 191 | int n_exprs; |
7bcd381b | 192 | sbitmap *antin, *antout, *avout, *kill, *earliest; |
e48ba7af | 193 | { |
7bcd381b | 194 | sbitmap difference, temp_bitmap; |
5ab08585 | 195 | int x, num_edges; |
7bcd381b | 196 | basic_block pred, succ; |
e48ba7af | 197 | |
7bcd381b | 198 | num_edges = NUM_EDGES (edge_list); |
e48ba7af | 199 | |
7bcd381b | 200 | difference = sbitmap_alloc (n_exprs); |
201 | temp_bitmap = sbitmap_alloc (n_exprs); | |
e48ba7af | 202 | |
7bcd381b | 203 | for (x = 0; x < num_edges; x++) |
e48ba7af | 204 | { |
7bcd381b | 205 | pred = INDEX_EDGE_PRED_BB (edge_list, x); |
206 | succ = INDEX_EDGE_SUCC_BB (edge_list, x); | |
207 | if (pred == ENTRY_BLOCK_PTR) | |
b3d6de89 | 208 | sbitmap_copy (earliest[x], antin[succ->index]); |
7bcd381b | 209 | else |
210 | { | |
742ee135 | 211 | if (succ == EXIT_BLOCK_PTR) |
9ca8d59e | 212 | sbitmap_zero (earliest[x]); |
7bcd381b | 213 | else |
e48ba7af | 214 | { |
b3d6de89 | 215 | sbitmap_difference (difference, antin[succ->index], |
216 | avout[pred->index]); | |
217 | sbitmap_not (temp_bitmap, antout[pred->index]); | |
9ca8d59e | 218 | sbitmap_a_and_b_or_c (earliest[x], difference, |
b3d6de89 | 219 | kill[pred->index], temp_bitmap); |
e48ba7af | 220 | } |
221 | } | |
e48ba7af | 222 | } |
9ca8d59e | 223 | |
f5123ed5 | 224 | sbitmap_free (temp_bitmap); |
225 | sbitmap_free (difference); | |
e48ba7af | 226 | } |
227 | ||
/* later(p,s) is dependent on the calculation of laterin(p).
   laterin(p) is dependent on the calculation of later(p2,p).

   laterin(ENTRY) is defined as all 0's
   later(ENTRY, succs(ENTRY)) are defined using laterin(ENTRY)
   laterin(succs(ENTRY)) is defined by later(ENTRY, succs(ENTRY)).

   If we progress in this manner, starting with all basic blocks
   in the work list, anytime we change later(bb), we need to add
   succs(bb) to the worklist if they are not already on the worklist.

   Boundary conditions:

   We prime the worklist with all the normal basic blocks.  The ENTRY
   block can never be added to the worklist since it is never the
   successor of any block.  We explicitly prevent the EXIT block from
   being added to the worklist.

   We optimistically initialize LATER.  That is the only time this routine
   will compute LATER for an edge out of the entry block since the entry
   block is never on the worklist.  Thus, LATERIN is neither used nor
   computed for the ENTRY block.

   Since the EXIT block is never added to the worklist, we will neither
   use nor compute LATERIN for the exit block.  Edges which reach the
   EXIT block are handled in the normal fashion inside the loop.  However,
   the insertion/deletion computation needs LATERIN(EXIT), so we have
   to compute it.  */

static void
compute_laterin (edge_list, earliest, antloc, later, laterin)
     struct edge_list *edge_list;
     sbitmap *earliest, *antloc, *later, *laterin;
{
  int num_edges, i;
  edge e;
  /* Circular queue of blocks pending recomputation; a block's AUX
     field is non-NULL while it is queued.  */
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist
    = (basic_block *) xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));

  /* Initialize a mapping from each edge to its index; the index is
     stashed in the edge's AUX field.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution, so initially consider LATER true for
     all edges.  This allows propagation through a loop since the incoming
     loop edge will have LATER set, so if all the other incoming edges
     to the loop are set, then LATERIN will be set for the head of the
     loop.

     If the optimistic setting of LATER on that edge was incorrect (for
     example the expression is ANTLOC in a block within the loop) then
     this algorithm will detect it when we process the block at the head
     of the optimistic edge.  That will requeue the affected blocks.  */
  sbitmap_vector_ones (later, num_edges);

  /* Note that even though we want an optimistic setting of LATER, we
     do not want to be overly optimistic.  Consider an outgoing edge from
     the entry block.  That edge should always have a LATER value the
     same as EARLIEST for that edge.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    sbitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit from
     the loop given our optimistic initialization of LATER above.  */
  FOR_EACH_BB (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  qin = worklist;
  /* Note that we do not use the last allocated element for our queue,
     as EXIT_BLOCK is never inserted into it.  In fact the above allocation
     of n_basic_blocks + 1 elements is not necessary.  */
  qend = &worklist[n_basic_blocks];
  qlen = n_basic_blocks;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      bb->aux = NULL;
      qlen--;
      if (qout >= qend)
	qout = worklist;

      /* Compute the intersection of LATERIN for each incoming edge to B.  */
      sbitmap_ones (laterin[bb->index]);
      for (e = bb->pred; e != NULL; e = e->pred_next)
	sbitmap_a_and_b (laterin[bb->index], laterin[bb->index],
			 later[(size_t) e->aux]);

      /* Calculate LATER for all outgoing edges:
	 later = earliest | (laterin (src) & ~antloc (src)).  */
      for (e = bb->succ; e != NULL; e = e->succ_next)
	if (sbitmap_union_of_diff_cg (later[(size_t) e->aux],
				      earliest[(size_t) e->aux],
				      laterin[e->src->index],
				      antloc[e->src->index])
	    /* If LATER for an outgoing edge was changed, then we need
	       to add the target of the outgoing edge to the worklist.  */
	    && e->dest != EXIT_BLOCK_PTR && e->dest->aux == 0)
	  {
	    *qin++ = e->dest;
	    e->dest->aux = e;
	    qlen++;
	    if (qin >= qend)
	      qin = worklist;
	  }
    }

  /* Computation of insertion and deletion points requires computing LATERIN
     for the EXIT block.  We allocated an extra entry in the LATERIN array
     for just this purpose.  */
  sbitmap_ones (laterin[last_basic_block]);
  for (e = EXIT_BLOCK_PTR->pred; e != NULL; e = e->pred_next)
    sbitmap_a_and_b (laterin[last_basic_block],
		     laterin[last_basic_block],
		     later[(size_t) e->aux]);

  clear_aux_for_edges ();
  free (worklist);
}
357 | ||
7bcd381b | 358 | /* Compute the insertion and deletion points for edge based LCM. */ |
9ca8d59e | 359 | |
7bcd381b | 360 | static void |
361 | compute_insert_delete (edge_list, antloc, later, laterin, | |
362 | insert, delete) | |
363 | struct edge_list *edge_list; | |
364 | sbitmap *antloc, *later, *laterin, *insert, *delete; | |
365 | { | |
366 | int x; | |
4c26117a | 367 | basic_block bb; |
e48ba7af | 368 | |
4c26117a | 369 | FOR_EACH_BB (bb) |
370 | sbitmap_difference (delete[bb->index], antloc[bb->index], laterin[bb->index]); | |
5ab08585 | 371 | |
7bcd381b | 372 | for (x = 0; x < NUM_EDGES (edge_list); x++) |
373 | { | |
374 | basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x); | |
9ca8d59e | 375 | |
7bcd381b | 376 | if (b == EXIT_BLOCK_PTR) |
f20183e6 | 377 | sbitmap_difference (insert[x], later[x], laterin[last_basic_block]); |
7bcd381b | 378 | else |
b3d6de89 | 379 | sbitmap_difference (insert[x], later[x], laterin[b->index]); |
7bcd381b | 380 | } |
381 | } | |
e48ba7af | 382 | |
/* Given local properties TRANSP, ANTLOC, AVOUT, KILL return the insert and
   delete vectors for edge based LCM.  Returns an edgelist which is used to
   map the insert vector to what edge an expression should be inserted on.

   N_EXPRS is the number of expressions being tracked; each sbitmap row
   has one bit per expression.  *INSERT (per edge) and *DELETE (per block)
   are allocated here and returned to the caller, as is the edge list —
   presumably the caller frees all three; verify against callers.
   FILE, when non-NULL and LCM_DEBUG_INFO is defined, receives debug
   dumps of every intermediate vector.  */

struct edge_list *
pre_edge_lcm (file, n_exprs, transp, avloc, antloc, kill, insert, delete)
     FILE *file ATTRIBUTE_UNUSED;
     int n_exprs;
     sbitmap *transp;
     sbitmap *avloc;
     sbitmap *antloc;
     sbitmap *kill;
     sbitmap **insert;
     sbitmap **delete;
{
  sbitmap *antin, *antout, *earliest;
  sbitmap *avin, *avout;
  sbitmap *later, *laterin;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      fprintf (file, "Edge List:\n");
      verify_edge_list (file, edge_list);
      print_edge_list (file, edge_list);
      dump_sbitmap_vector (file, "transp", "", transp, last_basic_block);
      dump_sbitmap_vector (file, "antloc", "", antloc, last_basic_block);
      dump_sbitmap_vector (file, "avloc", "", avloc, last_basic_block);
      dump_sbitmap_vector (file, "kill", "", kill, last_basic_block);
    }
#endif

  /* Compute global availability.  */
  avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_available (avloc, kill, avout, avin);
  /* Only AVOUT is used below, so AVIN can be freed immediately.  */
  sbitmap_vector_free (avin);

  /* Compute global anticipatability.  */
  antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_antinout_edge (antloc, transp, antin, antout);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "antin", "", antin, last_basic_block);
      dump_sbitmap_vector (file, "antout", "", antout, last_basic_block);
    }
#endif

  /* Compute earliestness.  */
  earliest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_earliest (edge_list, n_exprs, antin, antout, avout, kill, earliest);

#ifdef LCM_DEBUG_INFO
  if (file)
    dump_sbitmap_vector (file, "earliest", "", earliest, num_edges);
#endif

  /* ANTIN/ANTOUT/AVOUT are consumed by compute_earliest and no longer
     needed.  */
  sbitmap_vector_free (antout);
  sbitmap_vector_free (antin);
  sbitmap_vector_free (avout);

  later = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the exit block in the laterin vector.  */
  laterin = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
  compute_laterin (edge_list, earliest, antloc, later, laterin);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "laterin", "", laterin, last_basic_block + 1);
      dump_sbitmap_vector (file, "later", "", later, num_edges);
    }
#endif

  sbitmap_vector_free (earliest);

  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *delete = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_insert_delete (edge_list, antloc, later, laterin, *insert, *delete);

  sbitmap_vector_free (laterin);
  sbitmap_vector_free (later);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "pre_insert_map", "", *insert, num_edges);
      dump_sbitmap_vector (file, "pre_delete_map", "", *delete,
			   last_basic_block);
    }
#endif

  return edge_list;
}
3b7e1f27 | 486 | |
/* Compute the AVIN and AVOUT vectors from the AVLOC and KILL vectors.

   The equations solved, to a maximal fixed point, are

	AVIN[B]  = intersection of AVOUT over B's predecessors
	AVOUT[B] = AVLOC[B] | (AVIN[B] & ~KILL[B])

   using a circular-buffer worklist, exactly like compute_antinout_edge
   but in the forward direction.  (The old comment claimed this returned
   the number of passes; the function is void and returns nothing.)  */

void
compute_available (avloc, kill, avout, avin)
     sbitmap *avloc, *kill, *avout, *avin;
{
  edge e;
  /* Circular queue: QIN inserts, QOUT removes, QEND bounds the array.  */
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist
    = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);

  /* We want a maximal solution.  */
  sbitmap_vector_ones (avout, last_basic_block);

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of AVOUT above.  */
  FOR_EACH_BB (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  qin = worklist;
  qend = &worklist[n_basic_blocks];
  qlen = n_basic_blocks;

  /* Mark blocks which are successors of the entry block so that we
     can easily identify them below.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    e->dest->aux = ENTRY_BLOCK_PTR;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      if (qout >= qend)
	qout = worklist;

      /* If one of the predecessor blocks is the ENTRY block, then the
	 intersection of avouts is the null set.  We can identify such blocks
	 by the special value in the AUX field in the block structure.  */
      if (bb->aux == ENTRY_BLOCK_PTR)
	/* Do not clear the aux field for blocks which are successors of the
	   ENTRY block.  That way we never add them to the worklist again.  */
	sbitmap_zero (avin[bb->index]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  sbitmap_intersection_of_preds (avin[bb->index], avout, bb->index);
	}

      if (sbitmap_union_of_diff_cg (avout[bb->index], avloc[bb->index],
				    avin[bb->index], kill[bb->index]))
	/* If the out state of this block changed, then we need
	   to add the successors of this block to the worklist
	   if they are not already on the worklist.  */
	for (e = bb->succ; e; e = e->succ_next)
	  if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
	    {
	      *qin++ = e->dest;
	      e->dest->aux = e;
	      qlen++;

	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}
569 | ||
7bcd381b | 570 | /* Compute the farthest vector for edge based lcm. */ |
9ca8d59e | 571 | |
e48ba7af | 572 | static void |
5ab08585 | 573 | compute_farthest (edge_list, n_exprs, st_avout, st_avin, st_antin, |
7bcd381b | 574 | kill, farthest) |
575 | struct edge_list *edge_list; | |
e48ba7af | 576 | int n_exprs; |
7bcd381b | 577 | sbitmap *st_avout, *st_avin, *st_antin, *kill, *farthest; |
e48ba7af | 578 | { |
7bcd381b | 579 | sbitmap difference, temp_bitmap; |
5ab08585 | 580 | int x, num_edges; |
7bcd381b | 581 | basic_block pred, succ; |
e48ba7af | 582 | |
7bcd381b | 583 | num_edges = NUM_EDGES (edge_list); |
e48ba7af | 584 | |
7bcd381b | 585 | difference = sbitmap_alloc (n_exprs); |
586 | temp_bitmap = sbitmap_alloc (n_exprs); | |
e48ba7af | 587 | |
7bcd381b | 588 | for (x = 0; x < num_edges; x++) |
e48ba7af | 589 | { |
7bcd381b | 590 | pred = INDEX_EDGE_PRED_BB (edge_list, x); |
591 | succ = INDEX_EDGE_SUCC_BB (edge_list, x); | |
592 | if (succ == EXIT_BLOCK_PTR) | |
b3d6de89 | 593 | sbitmap_copy (farthest[x], st_avout[pred->index]); |
7bcd381b | 594 | else |
e48ba7af | 595 | { |
7bcd381b | 596 | if (pred == ENTRY_BLOCK_PTR) |
9ca8d59e | 597 | sbitmap_zero (farthest[x]); |
7bcd381b | 598 | else |
599 | { | |
b3d6de89 | 600 | sbitmap_difference (difference, st_avout[pred->index], |
601 | st_antin[succ->index]); | |
602 | sbitmap_not (temp_bitmap, st_avin[succ->index]); | |
5ab08585 | 603 | sbitmap_a_and_b_or_c (farthest[x], difference, |
b3d6de89 | 604 | kill[succ->index], temp_bitmap); |
7bcd381b | 605 | } |
e48ba7af | 606 | } |
e48ba7af | 607 | } |
9ca8d59e | 608 | |
f5123ed5 | 609 | sbitmap_free (temp_bitmap); |
610 | sbitmap_free (difference); | |
e48ba7af | 611 | } |
612 | ||
2325f0e2 | 613 | /* Compute nearer and nearerout vectors for edge based lcm. |
614 | ||
615 | This is the mirror of compute_laterin, additional comments on the | |
616 | implementation can be found before compute_laterin. */ | |
617 | ||
e48ba7af | 618 | static void |
2325f0e2 | 619 | compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout) |
7bcd381b | 620 | struct edge_list *edge_list; |
7bcd381b | 621 | sbitmap *farthest, *st_avloc, *nearer, *nearerout; |
e48ba7af | 622 | { |
4c26117a | 623 | int num_edges, i; |
2325f0e2 | 624 | edge e; |
4c26117a | 625 | basic_block *worklist, *tos, bb; |
e48ba7af | 626 | |
7bcd381b | 627 | num_edges = NUM_EDGES (edge_list); |
e48ba7af | 628 | |
2325f0e2 | 629 | /* Allocate a worklist array/queue. Entries are only added to the |
630 | list if they were not already on the list. So the size is | |
631 | bounded by the number of basic blocks. */ | |
9ca8d59e | 632 | tos = worklist |
b3d6de89 | 633 | = (basic_block *) xmalloc (sizeof (basic_block) * (n_basic_blocks + 1)); |
e48ba7af | 634 | |
2325f0e2 | 635 | /* Initialize NEARER for each edge and build a mapping from an edge to |
636 | its index. */ | |
637 | for (i = 0; i < num_edges; i++) | |
9ffd5d6d | 638 | INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i; |
7bcd381b | 639 | |
2325f0e2 | 640 | /* We want a maximal solution. */ |
641 | sbitmap_vector_ones (nearer, num_edges); | |
642 | ||
048599b9 | 643 | /* Note that even though we want an optimistic setting of NEARER, we |
644 | do not want to be overly optimistic. Consider an incoming edge to | |
645 | the exit block. That edge should always have a NEARER value the | |
646 | same as FARTHEST for that edge. */ | |
647 | for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next) | |
8b863f57 | 648 | sbitmap_copy (nearer[(size_t)e->aux], farthest[(size_t)e->aux]); |
048599b9 | 649 | |
2325f0e2 | 650 | /* Add all the blocks to the worklist. This prevents an early exit |
651 | from the loop given our optimistic initialization of NEARER. */ | |
4c26117a | 652 | FOR_EACH_BB (bb) |
e48ba7af | 653 | { |
4c26117a | 654 | *tos++ = bb; |
655 | bb->aux = bb; | |
7bcd381b | 656 | } |
5ab08585 | 657 | |
2325f0e2 | 658 | /* Iterate until the worklist is empty. */ |
659 | while (tos != worklist) | |
7bcd381b | 660 | { |
2325f0e2 | 661 | /* Take the first entry off the worklist. */ |
4c26117a | 662 | bb = *--tos; |
663 | bb->aux = NULL; | |
2325f0e2 | 664 | |
665 | /* Compute the intersection of NEARER for each outgoing edge from B. */ | |
4c26117a | 666 | sbitmap_ones (nearerout[bb->index]); |
667 | for (e = bb->succ; e != NULL; e = e->succ_next) | |
668 | sbitmap_a_and_b (nearerout[bb->index], nearerout[bb->index], | |
9ffd5d6d | 669 | nearer[(size_t) e->aux]); |
2325f0e2 | 670 | |
671 | /* Calculate NEARER for all incoming edges. */ | |
4c26117a | 672 | for (e = bb->pred; e != NULL; e = e->pred_next) |
739c050b | 673 | if (sbitmap_union_of_diff_cg (nearer[(size_t) e->aux], |
b3d6de89 | 674 | farthest[(size_t) e->aux], |
675 | nearerout[e->dest->index], | |
676 | st_avloc[e->dest->index]) | |
9ca8d59e | 677 | /* If NEARER for an incoming edge was changed, then we need |
678 | to add the source of the incoming edge to the worklist. */ | |
679 | && e->src != ENTRY_BLOCK_PTR && e->src->aux == 0) | |
680 | { | |
681 | *tos++ = e->src; | |
682 | e->src->aux = e; | |
683 | } | |
7bcd381b | 684 | } |
e48ba7af | 685 | |
2325f0e2 | 686 | /* Computation of insertion and deletion points requires computing NEAREROUT |
687 | for the ENTRY block. We allocated an extra entry in the NEAREROUT array | |
688 | for just this purpose. */ | |
f20183e6 | 689 | sbitmap_ones (nearerout[last_basic_block]); |
2325f0e2 | 690 | for (e = ENTRY_BLOCK_PTR->succ; e != NULL; e = e->succ_next) |
f20183e6 | 691 | sbitmap_a_and_b (nearerout[last_basic_block], |
692 | nearerout[last_basic_block], | |
9ffd5d6d | 693 | nearer[(size_t) e->aux]); |
2325f0e2 | 694 | |
82f7392b | 695 | clear_aux_for_edges (); |
2325f0e2 | 696 | free (tos); |
7bcd381b | 697 | } |
e48ba7af | 698 | |
7bcd381b | 699 | /* Compute the insertion and deletion points for edge based LCM. */ |
9ca8d59e | 700 | |
e48ba7af | 701 | static void |
7bcd381b | 702 | compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout, |
703 | insert, delete) | |
704 | struct edge_list *edge_list; | |
705 | sbitmap *st_avloc, *nearer, *nearerout, *insert, *delete; | |
e48ba7af | 706 | { |
7bcd381b | 707 | int x; |
4c26117a | 708 | basic_block bb; |
e48ba7af | 709 | |
4c26117a | 710 | FOR_EACH_BB (bb) |
711 | sbitmap_difference (delete[bb->index], st_avloc[bb->index], nearerout[bb->index]); | |
5ab08585 | 712 | |
7bcd381b | 713 | for (x = 0; x < NUM_EDGES (edge_list); x++) |
e48ba7af | 714 | { |
7bcd381b | 715 | basic_block b = INDEX_EDGE_PRED_BB (edge_list, x); |
716 | if (b == ENTRY_BLOCK_PTR) | |
f20183e6 | 717 | sbitmap_difference (insert[x], nearer[x], nearerout[last_basic_block]); |
e48ba7af | 718 | else |
b3d6de89 | 719 | sbitmap_difference (insert[x], nearer[x], nearerout[b->index]); |
e48ba7af | 720 | } |
e48ba7af | 721 | } |
722 | ||
/* Given local properties TRANSP, ST_AVLOC, ST_ANTLOC, KILL return the
   insert and delete vectors for edge based reverse LCM.  Returns an
   edgelist which is used to map the insert vector to what edge
   an expression should be inserted on.

   FILE is a dump stream, used only when LCM_DEBUG_INFO is defined.
   N_EXPRS is the number of expressions each bitmap tracks.
   *INSERT (one sbitmap per edge) and *DELETE (one sbitmap per basic
   block) are allocated here; the caller must free them, and must also
   free the returned edge list with free_edge_list.  */

struct edge_list *
pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
		  insert, delete)
     FILE *file ATTRIBUTE_UNUSED;
     int n_exprs;
     sbitmap *transp;
     sbitmap *st_avloc;
     sbitmap *st_antloc;
     sbitmap *kill;
     sbitmap **insert;
     sbitmap **delete;
{
  sbitmap *st_antin, *st_antout;
  sbitmap *st_avout, *st_avin, *farthest;
  sbitmap *nearer, *nearerout;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

  /* Compute global anticipatability (ANTIN/ANTOUT).  */
  st_antin = (sbitmap *) sbitmap_vector_alloc (last_basic_block, n_exprs);
  st_antout = (sbitmap *) sbitmap_vector_alloc (last_basic_block, n_exprs);
  sbitmap_vector_zero (st_antin, last_basic_block);
  sbitmap_vector_zero (st_antout, last_basic_block);
  compute_antinout_edge (st_antloc, transp, st_antin, st_antout);

  /* Compute global availability (AVOUT/AVIN).  */
  st_avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  st_avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_available (st_avloc, kill, st_avout, st_avin);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      fprintf (file, "Edge List:\n");
      verify_edge_list (file, edge_list);
      print_edge_list (file, edge_list);
      dump_sbitmap_vector (file, "transp", "", transp, last_basic_block);
      dump_sbitmap_vector (file, "st_avloc", "", st_avloc, last_basic_block);
      dump_sbitmap_vector (file, "st_antloc", "", st_antloc, last_basic_block);
      dump_sbitmap_vector (file, "st_antin", "", st_antin, last_basic_block);
      dump_sbitmap_vector (file, "st_antout", "", st_antout, last_basic_block);
      dump_sbitmap_vector (file, "st_kill", "", kill, last_basic_block);
    }
#endif

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "st_avout", "", st_avout, last_basic_block);
      dump_sbitmap_vector (file, "st_avin", "", st_avin, last_basic_block);
    }
#endif

  /* Compute farthestness.  */
  farthest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_farthest (edge_list, n_exprs, st_avout, st_avin, st_antin,
		    kill, farthest);

#ifdef LCM_DEBUG_INFO
  if (file)
    dump_sbitmap_vector (file, "farthest", "", farthest, num_edges);
#endif

  /* ANTIN/ANTOUT and AVIN/AVOUT are only needed to compute FARTHEST;
     release them as soon as that is done.  */
  sbitmap_vector_free (st_antin);
  sbitmap_vector_free (st_antout);

  sbitmap_vector_free (st_avin);
  sbitmap_vector_free (st_avout);

  nearer = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the entry block.  */
  nearerout = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
  compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "nearerout", "", nearerout,
			   last_basic_block + 1);
      dump_sbitmap_vector (file, "nearer", "", nearer, num_edges);
    }
#endif

  sbitmap_vector_free (farthest);

  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *delete = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
			     *insert, *delete);

  sbitmap_vector_free (nearerout);
  sbitmap_vector_free (nearer);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "pre_insert_map", "", *insert, num_edges);
      dump_sbitmap_vector (file, "pre_delete_map", "", *delete,
			   last_basic_block);
    }
#endif
  return edge_list;
}
18862b5a | 834 | |
9ca8d59e | 835 | /* Mode switching: |
836 | ||
837 | The algorithm for setting the modes consists of scanning the insn list | |
18862b5a | 838 | and finding all the insns which require a specific mode. Each insn gets |
839 | a unique struct seginfo element. These structures are inserted into a list | |
840 | for each basic block. For each entity, there is an array of bb_info over | |
841 | the flow graph basic blocks (local var 'bb_info'), and contains a list | |
842 | of all insns within that basic block, in the order they are encountered. | |
843 | ||
844 | For each entity, any basic block WITHOUT any insns requiring a specific | |
845 | mode are given a single entry, without a mode. (Each basic block | |
846 | in the flow graph must have at least one entry in the segment table.) | |
847 | ||
848 | The LCM algorithm is then run over the flow graph to determine where to | |
 849 | place the sets to the highest-priority value with respect to the first | 
 850 | insn in any one block. Any adjustments required to the transparency | 
851 | vectors are made, then the next iteration starts for the next-lower | |
 852 | priority mode, till for each entity all modes are exhausted. | 
853 | ||
854 | More details are located in the code for optimize_mode_switching(). */ | |
855 | ||
/* This structure contains the information for each insn which requires
   either single or double mode to be set.
   MODE is the mode this insn must be executed in.
   INSN_PTR is the insn to be executed (may be the note that marks the
   beginning of a basic block).
   BBNUM is the flow graph basic block this insn occurs in.
   NEXT is the next insn in the same basic block.  */
struct seginfo
{
  int mode;			/* Mode required at this point.  */
  rtx insn_ptr;			/* Insn (or bb note) the mode set applies to.  */
  int bbnum;			/* Index of the containing basic block.  */
  struct seginfo *next;		/* Next segment within the same block.  */
  HARD_REG_SET regs_live;	/* Hard regs live just before INSN_PTR
				   (snapshot of LIVE_NOW taken when the
				   segment is created).  */
};
871 | ||
/* Per-basic-block bookkeeping for one mode-switching entity.  */
struct bb_info
{
  struct seginfo *seginfo;	/* Head of the block's segment list.  */
  int computing;		/* Mode in effect at the end of the block
				   (the last segment's mode), or the
				   entity's no-mode value if none.  */
};
877 | ||
878 | /* These bitmaps are used for the LCM algorithm. */ | |
879 | ||
29768226 | 880 | #ifdef OPTIMIZE_MODE_SWITCHING |
18862b5a | 881 | static sbitmap *antic; |
882 | static sbitmap *transp; | |
883 | static sbitmap *comp; | |
884 | static sbitmap *delete; | |
885 | static sbitmap *insert; | |
886 | ||
7dff60c8 | 887 | static struct seginfo * new_seginfo PARAMS ((int, rtx, int, HARD_REG_SET)); |
3b7e1f27 | 888 | static void add_seginfo PARAMS ((struct bb_info *, struct seginfo *)); |
18862b5a | 889 | static void reg_dies PARAMS ((rtx, HARD_REG_SET)); |
890 | static void reg_becomes_live PARAMS ((rtx, rtx, void *)); | |
29768226 | 891 | static void make_preds_opaque PARAMS ((basic_block, int)); |
892 | #endif | |
893 | \f | |
894 | #ifdef OPTIMIZE_MODE_SWITCHING | |
18862b5a | 895 | |
896 | /* This function will allocate a new BBINFO structure, initialized | |
7dff60c8 | 897 | with the MODE, INSN, and basic block BB parameters. */ |
29768226 | 898 | |
18862b5a | 899 | static struct seginfo * |
900 | new_seginfo (mode, insn, bb, regs_live) | |
901 | int mode; | |
902 | rtx insn; | |
903 | int bb; | |
904 | HARD_REG_SET regs_live; | |
905 | { | |
906 | struct seginfo *ptr; | |
907 | ptr = xmalloc (sizeof (struct seginfo)); | |
908 | ptr->mode = mode; | |
909 | ptr->insn_ptr = insn; | |
910 | ptr->bbnum = bb; | |
911 | ptr->next = NULL; | |
912 | COPY_HARD_REG_SET (ptr->regs_live, regs_live); | |
913 | return ptr; | |
914 | } | |
915 | ||
5ab08585 | 916 | /* Add a seginfo element to the end of a list. |
18862b5a | 917 | HEAD is a pointer to the list beginning. |
918 | INFO is the structure to be linked in. */ | |
29768226 | 919 | |
18862b5a | 920 | static void |
921 | add_seginfo (head, info) | |
3b7e1f27 | 922 | struct bb_info *head; |
18862b5a | 923 | struct seginfo *info; |
924 | { | |
925 | struct seginfo *ptr; | |
926 | ||
927 | if (head->seginfo == NULL) | |
928 | head->seginfo = info; | |
929 | else | |
930 | { | |
931 | ptr = head->seginfo; | |
932 | while (ptr->next != NULL) | |
933 | ptr = ptr->next; | |
934 | ptr->next = info; | |
935 | } | |
936 | } | |
937 | ||
938 | /* Make all predecessors of basic block B opaque, recursively, till we hit | |
939 | some that are already non-transparent, or an edge where aux is set; that | |
940 | denotes that a mode set is to be done on that edge. | |
941 | J is the bit number in the bitmaps that corresponds to the entity that | |
942 | we are currently handling mode-switching for. */ | |
29768226 | 943 | |
18862b5a | 944 | static void |
945 | make_preds_opaque (b, j) | |
946 | basic_block b; | |
947 | int j; | |
948 | { | |
949 | edge e; | |
950 | ||
951 | for (e = b->pred; e; e = e->pred_next) | |
952 | { | |
953 | basic_block pb = e->src; | |
9ca8d59e | 954 | |
b3d6de89 | 955 | if (e->aux || ! TEST_BIT (transp[pb->index], j)) |
18862b5a | 956 | continue; |
9ca8d59e | 957 | |
b3d6de89 | 958 | RESET_BIT (transp[pb->index], j); |
18862b5a | 959 | make_preds_opaque (pb, j); |
960 | } | |
961 | } | |
962 | ||
963 | /* Record in LIVE that register REG died. */ | |
29768226 | 964 | |
18862b5a | 965 | static void |
966 | reg_dies (reg, live) | |
967 | rtx reg; | |
968 | HARD_REG_SET live; | |
969 | { | |
9ca8d59e | 970 | int regno, nregs; |
18862b5a | 971 | |
972 | if (GET_CODE (reg) != REG) | |
973 | return; | |
9ca8d59e | 974 | |
18862b5a | 975 | regno = REGNO (reg); |
976 | if (regno < FIRST_PSEUDO_REGISTER) | |
9ca8d59e | 977 | for (nregs = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1; nregs >= 0; |
978 | nregs--) | |
979 | CLEAR_HARD_REG_BIT (live, regno + nregs); | |
18862b5a | 980 | } |
981 | ||
982 | /* Record in LIVE that register REG became live. | |
983 | This is called via note_stores. */ | |
29768226 | 984 | |
18862b5a | 985 | static void |
986 | reg_becomes_live (reg, setter, live) | |
987 | rtx reg; | |
988 | rtx setter ATTRIBUTE_UNUSED; | |
989 | void *live; | |
990 | { | |
9ca8d59e | 991 | int regno, nregs; |
18862b5a | 992 | |
993 | if (GET_CODE (reg) == SUBREG) | |
994 | reg = SUBREG_REG (reg); | |
995 | ||
996 | if (GET_CODE (reg) != REG) | |
997 | return; | |
998 | ||
999 | regno = REGNO (reg); | |
1000 | if (regno < FIRST_PSEUDO_REGISTER) | |
9ca8d59e | 1001 | for (nregs = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1; nregs >= 0; |
1002 | nregs--) | |
1003 | SET_HARD_REG_BIT (* (HARD_REG_SET *) live, regno + nregs); | |
18862b5a | 1004 | } |
1005 | ||
b78e14a8 | 1006 | /* Find all insns that need a particular mode setting, and insert the |
1007 | necessary mode switches. Return true if we did work. */ | |
9ca8d59e | 1008 | |
b78e14a8 | 1009 | int |
18862b5a | 1010 | optimize_mode_switching (file) |
b78e14a8 | 1011 | FILE *file; |
18862b5a | 1012 | { |
18862b5a | 1013 | rtx insn; |
4c26117a | 1014 | int e; |
1015 | basic_block bb; | |
18862b5a | 1016 | int need_commit = 0; |
1017 | sbitmap *kill; | |
1018 | struct edge_list *edge_list; | |
e99c3a1d | 1019 | static const int num_modes[] = NUM_MODES_FOR_MODE_SWITCHING; |
3585dac7 | 1020 | #define N_ENTITIES ARRAY_SIZE (num_modes) |
18862b5a | 1021 | int entity_map[N_ENTITIES]; |
3b7e1f27 | 1022 | struct bb_info *bb_info[N_ENTITIES]; |
18862b5a | 1023 | int i, j; |
1024 | int n_entities; | |
1025 | int max_num_modes = 0; | |
7fb47f9f | 1026 | bool emited = false; |
742ee135 | 1027 | basic_block post_entry, pre_exit ATTRIBUTE_UNUSED; |
18862b5a | 1028 | |
308f9b79 | 1029 | clear_bb_flags (); |
1287c919 | 1030 | |
18862b5a | 1031 | for (e = N_ENTITIES - 1, n_entities = 0; e >= 0; e--) |
9ca8d59e | 1032 | if (OPTIMIZE_MODE_SWITCHING (e)) |
1033 | { | |
742ee135 | 1034 | int entry_exit_extra = 0; |
1035 | ||
1036 | /* Create the list of segments within each basic block. | |
1037 | If NORMAL_MODE is defined, allow for two extra | |
1038 | blocks split from the entry and exit block. */ | |
1039 | #ifdef NORMAL_MODE | |
1040 | entry_exit_extra = 2; | |
1041 | #endif | |
9ca8d59e | 1042 | bb_info[n_entities] |
742ee135 | 1043 | = (struct bb_info *) xcalloc (last_basic_block + entry_exit_extra, |
1044 | sizeof **bb_info); | |
9ca8d59e | 1045 | entity_map[n_entities++] = e; |
1046 | if (num_modes[e] > max_num_modes) | |
1047 | max_num_modes = num_modes[e]; | |
1048 | } | |
1049 | ||
18862b5a | 1050 | if (! n_entities) |
b78e14a8 | 1051 | return 0; |
18862b5a | 1052 | |
1287c919 | 1053 | #ifdef NORMAL_MODE |
742ee135 | 1054 | { |
1055 | /* Split the edge from the entry block and the fallthrough edge to the | |
1056 | exit block, so that we can note that there NORMAL_MODE is supplied / | |
1057 | required. */ | |
1058 | edge eg; | |
1059 | post_entry = split_edge (ENTRY_BLOCK_PTR->succ); | |
1060 | /* The only non-call predecessor at this stage is a block with a | |
1061 | fallthrough edge; there can be at most one, but there could be | |
1062 | none at all, e.g. when exit is called. */ | |
1063 | for (pre_exit = 0, eg = EXIT_BLOCK_PTR->pred; eg; eg = eg->pred_next) | |
1064 | if (eg->flags & EDGE_FALLTHRU) | |
1065 | { | |
1066 | regset live_at_end = eg->src->global_live_at_end; | |
1067 | ||
1068 | if (pre_exit) | |
1069 | abort (); | |
1070 | pre_exit = split_edge (eg); | |
1071 | COPY_REG_SET (pre_exit->global_live_at_start, live_at_end); | |
1072 | COPY_REG_SET (pre_exit->global_live_at_end, live_at_end); | |
1073 | } | |
1074 | } | |
1287c919 | 1075 | #endif |
1076 | ||
18862b5a | 1077 | /* Create the bitmap vectors. */ |
1078 | ||
f20183e6 | 1079 | antic = sbitmap_vector_alloc (last_basic_block, n_entities); |
1080 | transp = sbitmap_vector_alloc (last_basic_block, n_entities); | |
1081 | comp = sbitmap_vector_alloc (last_basic_block, n_entities); | |
18862b5a | 1082 | |
f20183e6 | 1083 | sbitmap_vector_ones (transp, last_basic_block); |
18862b5a | 1084 | |
1085 | for (j = n_entities - 1; j >= 0; j--) | |
1086 | { | |
1087 | int e = entity_map[j]; | |
1088 | int no_mode = num_modes[e]; | |
3b7e1f27 | 1089 | struct bb_info *info = bb_info[j]; |
18862b5a | 1090 | |
1091 | /* Determine what the first use (if any) need for a mode of entity E is. | |
b78e14a8 | 1092 | This will be the mode that is anticipatable for this block. |
18862b5a | 1093 | Also compute the initial transparency settings. */ |
4c26117a | 1094 | FOR_EACH_BB (bb) |
18862b5a | 1095 | { |
1096 | struct seginfo *ptr; | |
1097 | int last_mode = no_mode; | |
1098 | HARD_REG_SET live_now; | |
1099 | ||
1100 | REG_SET_TO_HARD_REG_SET (live_now, | |
4c26117a | 1101 | bb->global_live_at_start); |
1102 | for (insn = bb->head; | |
1103 | insn != NULL && insn != NEXT_INSN (bb->end); | |
18862b5a | 1104 | insn = NEXT_INSN (insn)) |
1105 | { | |
9204e736 | 1106 | if (INSN_P (insn)) |
18862b5a | 1107 | { |
1108 | int mode = MODE_NEEDED (e, insn); | |
1109 | rtx link; | |
1110 | ||
1111 | if (mode != no_mode && mode != last_mode) | |
1112 | { | |
1113 | last_mode = mode; | |
4c26117a | 1114 | ptr = new_seginfo (mode, insn, bb->index, live_now); |
1115 | add_seginfo (info + bb->index, ptr); | |
1116 | RESET_BIT (transp[bb->index], j); | |
18862b5a | 1117 | } |
1118 | ||
1119 | /* Update LIVE_NOW. */ | |
1120 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
1121 | if (REG_NOTE_KIND (link) == REG_DEAD) | |
1122 | reg_dies (XEXP (link, 0), live_now); | |
9ca8d59e | 1123 | |
18862b5a | 1124 | note_stores (PATTERN (insn), reg_becomes_live, &live_now); |
1125 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
1126 | if (REG_NOTE_KIND (link) == REG_UNUSED) | |
1127 | reg_dies (XEXP (link, 0), live_now); | |
1128 | } | |
1129 | } | |
9ca8d59e | 1130 | |
4c26117a | 1131 | info[bb->index].computing = last_mode; |
18862b5a | 1132 | /* Check for blocks without ANY mode requirements. */ |
1133 | if (last_mode == no_mode) | |
1134 | { | |
742ee135 | 1135 | ptr = new_seginfo (no_mode, bb->end, bb->index, live_now); |
4c26117a | 1136 | add_seginfo (info + bb->index, ptr); |
18862b5a | 1137 | } |
1138 | } | |
7dff60c8 | 1139 | #ifdef NORMAL_MODE |
18862b5a | 1140 | { |
7dff60c8 | 1141 | int mode = NORMAL_MODE (e); |
9ca8d59e | 1142 | |
18862b5a | 1143 | if (mode != no_mode) |
1144 | { | |
742ee135 | 1145 | bb = post_entry; |
5ab08585 | 1146 | |
742ee135 | 1147 | /* By always making this nontransparent, we save |
1148 | an extra check in make_preds_opaque. We also | |
1149 | need this to avoid confusing pre_edge_lcm when | |
1150 | antic is cleared but transp and comp are set. */ | |
1151 | RESET_BIT (transp[bb->index], j); | |
1287c919 | 1152 | |
742ee135 | 1153 | /* Insert a fake computing definition of MODE into entry |
1154 | blocks which compute no mode. This represents the mode on | |
1155 | entry. */ | |
1156 | info[bb->index].computing = mode; | |
1157 | ||
1158 | if (pre_exit) | |
1159 | info[pre_exit->index].seginfo->mode = mode; | |
18862b5a | 1160 | } |
1161 | } | |
7dff60c8 | 1162 | #endif /* NORMAL_MODE */ |
18862b5a | 1163 | } |
1164 | ||
f20183e6 | 1165 | kill = sbitmap_vector_alloc (last_basic_block, n_entities); |
18862b5a | 1166 | for (i = 0; i < max_num_modes; i++) |
1167 | { | |
1168 | int current_mode[N_ENTITIES]; | |
1169 | ||
1170 | /* Set the anticipatable and computing arrays. */ | |
f20183e6 | 1171 | sbitmap_vector_zero (antic, last_basic_block); |
1172 | sbitmap_vector_zero (comp, last_basic_block); | |
18862b5a | 1173 | for (j = n_entities - 1; j >= 0; j--) |
1174 | { | |
1175 | int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i); | |
3b7e1f27 | 1176 | struct bb_info *info = bb_info[j]; |
5ab08585 | 1177 | |
4c26117a | 1178 | FOR_EACH_BB (bb) |
18862b5a | 1179 | { |
4c26117a | 1180 | if (info[bb->index].seginfo->mode == m) |
1181 | SET_BIT (antic[bb->index], j); | |
18862b5a | 1182 | |
4c26117a | 1183 | if (info[bb->index].computing == m) |
1184 | SET_BIT (comp[bb->index], j); | |
18862b5a | 1185 | } |
1186 | } | |
1187 | ||
1188 | /* Calculate the optimal locations for the | |
1189 | placement mode switches to modes with priority I. */ | |
1190 | ||
4c26117a | 1191 | FOR_EACH_BB (bb) |
1192 | sbitmap_not (kill[bb->index], transp[bb->index]); | |
18862b5a | 1193 | edge_list = pre_edge_lcm (file, 1, transp, comp, antic, |
1194 | kill, &insert, &delete); | |
1195 | ||
9ca8d59e | 1196 | for (j = n_entities - 1; j >= 0; j--) |
18862b5a | 1197 | { |
1198 | /* Insert all mode sets that have been inserted by lcm. */ | |
1199 | int no_mode = num_modes[entity_map[j]]; | |
9ca8d59e | 1200 | |
18862b5a | 1201 | /* Wherever we have moved a mode setting upwards in the flow graph, |
1202 | the blocks between the new setting site and the now redundant | |
1203 | computation ceases to be transparent for any lower-priority | |
1204 | mode of the same entity. First set the aux field of each | |
1205 | insertion site edge non-transparent, then propagate the new | |
1206 | non-transparency from the redundant computation upwards till | |
1207 | we hit an insertion site or an already non-transparent block. */ | |
1208 | for (e = NUM_EDGES (edge_list) - 1; e >= 0; e--) | |
1209 | { | |
1210 | edge eg = INDEX_EDGE (edge_list, e); | |
1211 | int mode; | |
1212 | basic_block src_bb; | |
1213 | HARD_REG_SET live_at_edge; | |
1214 | rtx mode_set; | |
1215 | ||
1216 | eg->aux = 0; | |
1217 | ||
1218 | if (! TEST_BIT (insert[e], j)) | |
1219 | continue; | |
1220 | ||
1221 | eg->aux = (void *)1; | |
1222 | ||
1223 | mode = current_mode[j]; | |
1224 | src_bb = eg->src; | |
1225 | ||
9ca8d59e | 1226 | REG_SET_TO_HARD_REG_SET (live_at_edge, |
1227 | src_bb->global_live_at_end); | |
1228 | ||
18862b5a | 1229 | start_sequence (); |
1230 | EMIT_MODE_SET (entity_map[j], mode, live_at_edge); | |
1231 | mode_set = gen_sequence (); | |
1232 | end_sequence (); | |
1233 | ||
7fb47f9f | 1234 | /* Do not bother to insert empty sequence. */ |
1235 | if (GET_CODE (mode_set) == SEQUENCE | |
1236 | && !XVECLEN (mode_set, 0)) | |
1237 | continue; | |
1238 | ||
1287c919 | 1239 | /* If this is an abnormal edge, we'll insert at the end |
1240 | of the previous block. */ | |
18862b5a | 1241 | if (eg->flags & EDGE_ABNORMAL) |
1242 | { | |
7fb47f9f | 1243 | emited = true; |
a24d0647 | 1244 | if (GET_CODE (src_bb->end) == JUMP_INSN) |
1245 | emit_insn_before (mode_set, src_bb->end); | |
1287c919 | 1246 | /* It doesn't make sense to switch to normal mode |
1247 | after a CALL_INSN, so we're going to abort if we | |
1248 | find one. The cases in which a CALL_INSN may | |
1249 | have an abnormal edge are sibcalls and EH edges. | |
1250 | In the case of sibcalls, the dest basic-block is | |
1251 | the EXIT_BLOCK, that runs in normal mode; it is | |
1252 | assumed that a sibcall insn requires normal mode | |
1253 | itself, so no mode switch would be required after | |
1254 | the call (it wouldn't make sense, anyway). In | |
1255 | the case of EH edges, EH entry points also start | |
1256 | in normal mode, so a similar reasoning applies. */ | |
1257 | else if (GET_CODE (src_bb->end) == INSN) | |
9dda7915 | 1258 | emit_insn_after (mode_set, src_bb->end); |
1287c919 | 1259 | else |
1260 | abort (); | |
b3d6de89 | 1261 | bb_info[j][src_bb->index].computing = mode; |
1262 | RESET_BIT (transp[src_bb->index], j); | |
18862b5a | 1263 | } |
1264 | else | |
1265 | { | |
1266 | need_commit = 1; | |
1267 | insert_insn_on_edge (mode_set, eg); | |
1268 | } | |
18862b5a | 1269 | } |
1270 | ||
4c26117a | 1271 | FOR_EACH_BB_REVERSE (bb) |
1272 | if (TEST_BIT (delete[bb->index], j)) | |
9ca8d59e | 1273 | { |
4c26117a | 1274 | make_preds_opaque (bb, j); |
9ca8d59e | 1275 | /* Cancel the 'deleted' mode set. */ |
4c26117a | 1276 | bb_info[j][bb->index].seginfo->mode = no_mode; |
9ca8d59e | 1277 | } |
18862b5a | 1278 | } |
9ca8d59e | 1279 | |
82f7392b | 1280 | clear_aux_for_edges (); |
18862b5a | 1281 | free_edge_list (edge_list); |
1282 | } | |
1283 | ||
1284 | /* Now output the remaining mode sets in all the segments. */ | |
1285 | for (j = n_entities - 1; j >= 0; j--) | |
1286 | { | |
7dff60c8 | 1287 | int no_mode = num_modes[entity_map[j]]; |
1288 | ||
4c26117a | 1289 | FOR_EACH_BB_REVERSE (bb) |
18862b5a | 1290 | { |
1291 | struct seginfo *ptr, *next; | |
4c26117a | 1292 | for (ptr = bb_info[j][bb->index].seginfo; ptr; ptr = next) |
18862b5a | 1293 | { |
1294 | next = ptr->next; | |
7dff60c8 | 1295 | if (ptr->mode != no_mode) |
18862b5a | 1296 | { |
1297 | rtx mode_set; | |
1298 | ||
1299 | start_sequence (); | |
1300 | EMIT_MODE_SET (entity_map[j], ptr->mode, ptr->regs_live); | |
1301 | mode_set = gen_sequence (); | |
1302 | end_sequence (); | |
1303 | ||
7fb47f9f | 1304 | /* Do not bother to insert empty sequence. */ |
1305 | if (GET_CODE (mode_set) == SEQUENCE | |
1306 | && !XVECLEN (mode_set, 0)) | |
1307 | continue; | |
1308 | ||
1309 | emited = true; | |
6484cd39 | 1310 | if (GET_CODE (ptr->insn_ptr) == NOTE |
1311 | && (NOTE_LINE_NUMBER (ptr->insn_ptr) | |
1312 | == NOTE_INSN_BASIC_BLOCK)) | |
9dda7915 | 1313 | emit_insn_after (mode_set, ptr->insn_ptr); |
7dff60c8 | 1314 | else |
9dda7915 | 1315 | emit_insn_before (mode_set, ptr->insn_ptr); |
18862b5a | 1316 | } |
9ca8d59e | 1317 | |
18862b5a | 1318 | free (ptr); |
1319 | } | |
1320 | } | |
9ca8d59e | 1321 | |
18862b5a | 1322 | free (bb_info[j]); |
1323 | } | |
1324 | ||
1325 | /* Finished. Free up all the things we've allocated. */ | |
5ab08585 | 1326 | |
18862b5a | 1327 | sbitmap_vector_free (kill); |
1328 | sbitmap_vector_free (antic); | |
1329 | sbitmap_vector_free (transp); | |
1330 | sbitmap_vector_free (comp); | |
1331 | sbitmap_vector_free (delete); | |
1332 | sbitmap_vector_free (insert); | |
1333 | ||
1334 | if (need_commit) | |
1335 | commit_edge_insertions (); | |
b78e14a8 | 1336 | |
742ee135 | 1337 | #ifdef NORMAL_MODE |
1338 | cleanup_cfg (CLEANUP_NO_INSN_DEL); | |
1339 | #else | |
7fb47f9f | 1340 | if (!need_commit && !emited) |
1341 | return 0; | |
742ee135 | 1342 | #endif |
7fb47f9f | 1343 | |
308f9b79 | 1344 | max_regno = max_reg_num (); |
1345 | allocate_reg_info (max_regno, FALSE, FALSE); | |
1346 | update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES, | |
1347 | (PROP_DEATH_NOTES | PROP_KILL_DEAD_CODE | |
1348 | | PROP_SCAN_DEAD_CODE)); | |
b78e14a8 | 1349 | |
1350 | return 1; | |
18862b5a | 1351 | } |
b78e14a8 | 1352 | #endif /* OPTIMIZE_MODE_SWITCHING */ |