/* Generic partial redundancy elimination with lazy code motion support.
   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* These routines are meant to be used by various optimization
   passes which can be modeled as lazy code motion problems.
   Including, but not limited to:

	* Traditional partial redundancy elimination.

	* Placement of caller/callee register save/restores.

	* Load/store motion.

	* Copy motion.

	* Conversion of flat register files to a stacked register
	  model.

	* Dead load/store elimination.

   These routines accept as input:

	* Basic block information (number of blocks, lists of
	  predecessors and successors).  Note the granularity
	  does not need to be basic blocks; it could be statements
	  or functions.

	* Bitmaps of local properties (computed, transparent and
	  anticipatable expressions).

   The output of these routines is a bitmap of redundant computations
   and a bitmap of optimal placement points.  */
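
/* A minimal sketch of a typical caller, to show how the pieces fit
   together.  The pass is assumed to have computed N_EXPRS and the
   local property vectors itself; the names below are hypothetical:

     sbitmap *insert, *delete;
     struct edge_list *edge_list
       = pre_edge_lcm (dump, n_exprs, transp, avloc, antloc, kill,
		       &insert, &delete);

   Expression E is then deleted from block B when
   TEST_BIT (delete[B->index], E), and inserted on edge
   INDEX_EDGE (edge_list, X) when TEST_BIT (insert[X], E);
   free_edge_list (edge_list) releases the mapping.  */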

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "tm_p.h"
#include "function.h"

/* We want target macros for the mode switching code to be able to refer
   to instruction attribute values.  */
#include "insn-attr.h"

/* Edge based LCM routines.  */
static void compute_antinout_edge (sbitmap *, sbitmap *, sbitmap *, sbitmap *);
static void compute_earliest (struct edge_list *, int, sbitmap *, sbitmap *,
			      sbitmap *, sbitmap *, sbitmap *);
static void compute_laterin (struct edge_list *, sbitmap *, sbitmap *,
			     sbitmap *, sbitmap *);
static void compute_insert_delete (struct edge_list *edge_list, sbitmap *,
				   sbitmap *, sbitmap *, sbitmap *, sbitmap *);

/* Edge based LCM routines on a reverse flowgraph.  */
static void compute_farthest (struct edge_list *, int, sbitmap *, sbitmap *,
			      sbitmap *, sbitmap *, sbitmap *);
static void compute_nearerout (struct edge_list *, sbitmap *, sbitmap *,
			       sbitmap *, sbitmap *);
static void compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *,
				       sbitmap *, sbitmap *, sbitmap *,
				       sbitmap *);

/* Edge based lcm routines.  */

/* Compute expression anticipatability at entrance and exit of each block.
   This is done based on the flow graph, and not on the pred-succ lists.
   Other than that, it's pretty much identical to compute_antinout.  */
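
/* In equation form, what the worklist loop below solves (a sketch,
   phrased with the same sbitmap operations the body uses):

     ANTOUT[bb] = 0 if bb is a predecessor of EXIT, otherwise the
		  intersection of ANTIN over the successors of bb
     ANTIN[bb]  = ANTLOC[bb] | (TRANSP[bb] & ANTOUT[bb])

   iterated from the all-ones initialization down to a maximal
   fixed point.  */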

static void
compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
		       sbitmap *antout)
{
  basic_block bb;
  edge e;
  basic_block *worklist, *qin, *qout, *qend;
  unsigned int qlen;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);

  /* We want a maximal solution, so make an optimistic initialization of
     ANTIN.  */
  sbitmap_vector_ones (antin, last_basic_block);

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of ANTIN above.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  qin = worklist;
  qend = &worklist[n_basic_blocks];
  qlen = n_basic_blocks;

  /* Mark blocks which are predecessors of the exit block so that we
     can easily identify them below.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    e->src->aux = EXIT_BLOCK_PTR;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      if (qout >= qend)
	qout = worklist;

      if (bb->aux == EXIT_BLOCK_PTR)
	/* Do not clear the aux field for blocks which are predecessors of
	   the EXIT block.  That way we never add them to the worklist
	   again.  */
	sbitmap_zero (antout[bb->index]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  sbitmap_intersection_of_succs (antout[bb->index], antin, bb->index);
	}

      if (sbitmap_a_or_b_and_c_cg (antin[bb->index], antloc[bb->index],
				   transp[bb->index], antout[bb->index]))
	/* If the in state of this block changed, then we need
	   to add the predecessors of this block to the worklist
	   if they are not already on the worklist.  */
	for (e = bb->pred; e; e = e->pred_next)
	  if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
	    {
	      *qin++ = e->src;
	      e->src->aux = e;
	      qlen++;
	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}

/* Compute the earliest vector for edge based lcm.  */
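
/* For an edge (PRED, SUCC), the loop below evaluates (a sketch of the
   equation the sbitmap calls implement):

     EARLIEST[e] = ANTIN[succ]			if pred == ENTRY
		 = 0				if succ == EXIT
		 = (ANTIN[succ] & ~AVOUT[pred])
		   & (KILL[pred] | ~ANTOUT[pred])  otherwise.  */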

static void
compute_earliest (struct edge_list *edge_list, int n_exprs, sbitmap *antin,
		  sbitmap *antout, sbitmap *avout, sbitmap *kill,
		  sbitmap *earliest)
{
  sbitmap difference, temp_bitmap;
  int x, num_edges;
  basic_block pred, succ;

  num_edges = NUM_EDGES (edge_list);

  difference = sbitmap_alloc (n_exprs);
  temp_bitmap = sbitmap_alloc (n_exprs);

  for (x = 0; x < num_edges; x++)
    {
      pred = INDEX_EDGE_PRED_BB (edge_list, x);
      succ = INDEX_EDGE_SUCC_BB (edge_list, x);
      if (pred == ENTRY_BLOCK_PTR)
	sbitmap_copy (earliest[x], antin[succ->index]);
      else
	{
	  if (succ == EXIT_BLOCK_PTR)
	    sbitmap_zero (earliest[x]);
	  else
	    {
	      sbitmap_difference (difference, antin[succ->index],
				  avout[pred->index]);
	      sbitmap_not (temp_bitmap, antout[pred->index]);
	      sbitmap_a_and_b_or_c (earliest[x], difference,
				    kill[pred->index], temp_bitmap);
	    }
	}
    }

  sbitmap_free (temp_bitmap);
  sbitmap_free (difference);
}

/* later(p,s) is dependent on the calculation of laterin(p).
   laterin(p) is dependent on the calculation of later(p2,p).

     laterin(ENTRY) is defined as all 0's
     later(ENTRY, succs(ENTRY)) are defined using laterin(ENTRY)
     laterin(succs(ENTRY)) is defined by later(ENTRY, succs(ENTRY)).

   If we progress in this manner, starting with all basic blocks
   in the work list, anytime we change later(bb), we need to add
   succs(bb) to the worklist if they are not already on the worklist.

   Boundary conditions:

     We prime the worklist with all the normal basic blocks.  The ENTRY
     block can never be added to the worklist since it is never the
     successor of any block.  We explicitly prevent the EXIT block from
     being added to the worklist.

     We optimistically initialize LATER.  That is the only time this
     routine will compute LATER for an edge out of the entry block since
     the entry block is never on the worklist.  Thus, LATERIN is neither
     used nor computed for the ENTRY block.

     Since the EXIT block is never added to the worklist, we will neither
     use nor compute LATERIN for the exit block.  Edges which reach the
     EXIT block are handled in the normal fashion inside the loop.
     However, the insertion/deletion computation needs LATERIN(EXIT), so
     we have to compute it.  */
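
/* The equations the loop below solves, in sketch form (using the
   sbitmap operations that appear in the body):

     LATERIN[bb]   = intersection of LATER over the edges into bb
     LATER[e(p,s)] = EARLIEST[e] | (LATERIN[p] & ~ANTLOC[p])

   iterated from the optimistic all-ones initialization of LATER.  */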

static void
compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
		 sbitmap *antloc, sbitmap *later, sbitmap *laterin)
{
  int num_edges, i;
  edge e;
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist
    = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));

  /* Initialize a mapping from each edge to its index.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution, so initially consider LATER true for
     all edges.  This allows propagation through a loop since the incoming
     loop edge will have LATER set, so if all the other incoming edges
     to the loop are set, then LATERIN will be set for the head of the
     loop.

     If the optimistic setting of LATER on that edge was incorrect (for
     example the expression is ANTLOC in a block within the loop) then
     this algorithm will detect it when we process the block at the head
     of the optimistic edge.  That will requeue the affected blocks.  */
  sbitmap_vector_ones (later, num_edges);

  /* Note that even though we want an optimistic setting of LATER, we
     do not want to be overly optimistic.  Consider an outgoing edge from
     the entry block.  That edge should always have a LATER value the
     same as EARLIEST for that edge.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    sbitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit from
     the loop given our optimistic initialization of LATER above.  */
  FOR_EACH_BB (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  qin = worklist;
  /* Note that we do not use the last allocated element for our queue,
     as EXIT_BLOCK is never inserted into it.  In fact the above allocation
     of n_basic_blocks + 1 elements is not necessary.  */
  qend = &worklist[n_basic_blocks];
  qlen = n_basic_blocks;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      bb->aux = NULL;
      qlen--;
      if (qout >= qend)
	qout = worklist;

      /* Compute the intersection of LATERIN for each incoming edge to B.  */
      sbitmap_ones (laterin[bb->index]);
      for (e = bb->pred; e != NULL; e = e->pred_next)
	sbitmap_a_and_b (laterin[bb->index], laterin[bb->index],
			 later[(size_t) e->aux]);

      /* Calculate LATER for all outgoing edges.  */
      for (e = bb->succ; e != NULL; e = e->succ_next)
	if (sbitmap_union_of_diff_cg (later[(size_t) e->aux],
				      earliest[(size_t) e->aux],
				      laterin[e->src->index],
				      antloc[e->src->index])
	    /* If LATER for an outgoing edge was changed, then we need
	       to add the target of the outgoing edge to the worklist.  */
	    && e->dest != EXIT_BLOCK_PTR && e->dest->aux == 0)
	  {
	    *qin++ = e->dest;
	    e->dest->aux = e;
	    qlen++;
	    if (qin >= qend)
	      qin = worklist;
	  }
    }

  /* Computation of insertion and deletion points requires computing LATERIN
     for the EXIT block.  We allocated an extra entry in the LATERIN array
     for just this purpose.  */
  sbitmap_ones (laterin[last_basic_block]);
  for (e = EXIT_BLOCK_PTR->pred; e != NULL; e = e->pred_next)
    sbitmap_a_and_b (laterin[last_basic_block],
		     laterin[last_basic_block],
		     later[(size_t) e->aux]);

  clear_aux_for_edges ();
  free (worklist);
}

/* Compute the insertion and deletion points for edge based LCM.  */
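
/* In sketch form, the two equations computed below:

     DELETE[bb]     = ANTLOC[bb] & ~LATERIN[bb]
     INSERT[e(p,s)] = LATER[e] & ~LATERIN[s]

   where LATERIN for the EXIT block lives in the extra element of the
   LATERIN vector.  */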

static void
compute_insert_delete (struct edge_list *edge_list, sbitmap *antloc,
		       sbitmap *later, sbitmap *laterin, sbitmap *insert,
		       sbitmap *delete)
{
  int x;
  basic_block bb;

  FOR_EACH_BB (bb)
    sbitmap_difference (delete[bb->index], antloc[bb->index],
			laterin[bb->index]);

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    {
      basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);

      if (b == EXIT_BLOCK_PTR)
	sbitmap_difference (insert[x], later[x], laterin[last_basic_block]);
      else
	sbitmap_difference (insert[x], later[x], laterin[b->index]);
    }
}

/* Given local properties TRANSP, AVLOC, ANTLOC, KILL return the insert and
   delete vectors for edge based LCM.  Returns an edgelist which is used to
   map the insert vector to what edge an expression should be inserted on.  */
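
/* For orientation, the pipeline below: compute_available and
   compute_antinout_edge derive the global AVOUT and ANTIN/ANTOUT sets
   from the local properties, compute_earliest and compute_laterin turn
   those into the per-edge EARLIEST and LATER/LATERIN sets, and
   compute_insert_delete extracts the final INSERT/DELETE bitmaps.  */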

struct edge_list *
pre_edge_lcm (FILE *file ATTRIBUTE_UNUSED, int n_exprs, sbitmap *transp,
	      sbitmap *avloc, sbitmap *antloc, sbitmap *kill,
	      sbitmap **insert, sbitmap **delete)
{
  sbitmap *antin, *antout, *earliest;
  sbitmap *avin, *avout;
  sbitmap *later, *laterin;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      fprintf (file, "Edge List:\n");
      verify_edge_list (file, edge_list);
      print_edge_list (file, edge_list);
      dump_sbitmap_vector (file, "transp", "", transp, last_basic_block);
      dump_sbitmap_vector (file, "antloc", "", antloc, last_basic_block);
      dump_sbitmap_vector (file, "avloc", "", avloc, last_basic_block);
      dump_sbitmap_vector (file, "kill", "", kill, last_basic_block);
    }
#endif

  /* Compute global availability.  */
  avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_available (avloc, kill, avout, avin);
  sbitmap_vector_free (avin);

  /* Compute global anticipatability.  */
  antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_antinout_edge (antloc, transp, antin, antout);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "antin", "", antin, last_basic_block);
      dump_sbitmap_vector (file, "antout", "", antout, last_basic_block);
    }
#endif

  /* Compute earliestness.  */
  earliest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_earliest (edge_list, n_exprs, antin, antout, avout, kill, earliest);

#ifdef LCM_DEBUG_INFO
  if (file)
    dump_sbitmap_vector (file, "earliest", "", earliest, num_edges);
#endif

  sbitmap_vector_free (antout);
  sbitmap_vector_free (antin);
  sbitmap_vector_free (avout);

  later = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the exit block in the laterin vector.  */
  laterin = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
  compute_laterin (edge_list, earliest, antloc, later, laterin);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "laterin", "", laterin, last_basic_block + 1);
      dump_sbitmap_vector (file, "later", "", later, num_edges);
    }
#endif

  sbitmap_vector_free (earliest);

  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *delete = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_insert_delete (edge_list, antloc, later, laterin, *insert, *delete);

  sbitmap_vector_free (laterin);
  sbitmap_vector_free (later);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "pre_insert_map", "", *insert, num_edges);
      dump_sbitmap_vector (file, "pre_delete_map", "", *delete,
			   last_basic_block);
    }
#endif

  return edge_list;
}

/* Compute the AVIN and AVOUT vectors from the AVLOC and KILL vectors.
   The solution is iterated over a worklist until it reaches a maximal
   fixed point.  */
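
/* A sketch of the dataflow equations the worklist loop below solves:

     AVIN[bb]  = 0 if bb is a successor of ENTRY, otherwise the
		 intersection of AVOUT over the predecessors of bb
     AVOUT[bb] = AVLOC[bb] | (AVIN[bb] & ~KILL[bb])

   iterated from the all-ones initialization of AVOUT down to a
   maximal fixed point.  */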

void
compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
		   sbitmap *avin)
{
  edge e;
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);

  /* We want a maximal solution.  */
  sbitmap_vector_ones (avout, last_basic_block);

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of AVOUT above.  */
  FOR_EACH_BB (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  qin = worklist;
  qend = &worklist[n_basic_blocks];
  qlen = n_basic_blocks;

  /* Mark blocks which are successors of the entry block so that we
     can easily identify them below.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    e->dest->aux = ENTRY_BLOCK_PTR;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      if (qout >= qend)
	qout = worklist;

      /* If one of the predecessor blocks is the ENTRY block, then the
	 intersection of avouts is the null set.  We can identify such blocks
	 by the special value in the AUX field in the block structure.  */
      if (bb->aux == ENTRY_BLOCK_PTR)
	/* Do not clear the aux field for blocks which are successors of the
	   ENTRY block.  That way we never add them to the worklist again.  */
	sbitmap_zero (avin[bb->index]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  sbitmap_intersection_of_preds (avin[bb->index], avout, bb->index);
	}

      if (sbitmap_union_of_diff_cg (avout[bb->index], avloc[bb->index],
				    avin[bb->index], kill[bb->index]))
	/* If the out state of this block changed, then we need
	   to add the successors of this block to the worklist
	   if they are not already on the worklist.  */
	for (e = bb->succ; e; e = e->succ_next)
	  if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
	    {
	      *qin++ = e->dest;
	      e->dest->aux = e;
	      qlen++;

	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}

/* Compute the farthest vector for edge based lcm.  */
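
/* This is the reverse-graph analogue of compute_earliest; for an edge
   (PRED, SUCC), the loop below evaluates (sketch):

     FARTHEST[e] = AVOUT[pred]			if succ == EXIT
		 = 0				if pred == ENTRY
		 = (AVOUT[pred] & ~ANTIN[succ])
		   & (KILL[succ] | ~AVIN[succ])	  otherwise

   where the AV/ANT sets are the ST_ (reverse) variants.  */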

static void
compute_farthest (struct edge_list *edge_list, int n_exprs,
		  sbitmap *st_avout, sbitmap *st_avin, sbitmap *st_antin,
		  sbitmap *kill, sbitmap *farthest)
{
  sbitmap difference, temp_bitmap;
  int x, num_edges;
  basic_block pred, succ;

  num_edges = NUM_EDGES (edge_list);

  difference = sbitmap_alloc (n_exprs);
  temp_bitmap = sbitmap_alloc (n_exprs);

  for (x = 0; x < num_edges; x++)
    {
      pred = INDEX_EDGE_PRED_BB (edge_list, x);
      succ = INDEX_EDGE_SUCC_BB (edge_list, x);
      if (succ == EXIT_BLOCK_PTR)
	sbitmap_copy (farthest[x], st_avout[pred->index]);
      else
	{
	  if (pred == ENTRY_BLOCK_PTR)
	    sbitmap_zero (farthest[x]);
	  else
	    {
	      sbitmap_difference (difference, st_avout[pred->index],
				  st_antin[succ->index]);
	      sbitmap_not (temp_bitmap, st_avin[succ->index]);
	      sbitmap_a_and_b_or_c (farthest[x], difference,
				    kill[succ->index], temp_bitmap);
	    }
	}
    }

  sbitmap_free (temp_bitmap);
  sbitmap_free (difference);
}

/* Compute nearer and nearerout vectors for edge based lcm.

   This is the mirror of compute_laterin; additional comments on the
   implementation can be found before compute_laterin.  */
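
/* In sketch form, the mirrored equations (successors and predecessors
   swap roles relative to compute_laterin):

     NEAREROUT[bb]   = intersection of NEARER over the edges out of bb
     NEARER[e(p,s)]  = FARTHEST[e] | (NEAREROUT[s] & ~ST_AVLOC[s])  */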

static void
compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
		   sbitmap *st_avloc, sbitmap *nearer, sbitmap *nearerout)
{
  int num_edges, i;
  edge e;
  basic_block *worklist, *tos, bb;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  tos = worklist = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));

  /* Initialize NEARER for each edge and build a mapping from an edge to
     its index.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution.  */
  sbitmap_vector_ones (nearer, num_edges);

  /* Note that even though we want an optimistic setting of NEARER, we
     do not want to be overly optimistic.  Consider an incoming edge to
     the exit block.  That edge should always have a NEARER value the
     same as FARTHEST for that edge.  */
  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    sbitmap_copy (nearer[(size_t) e->aux], farthest[(size_t) e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit
     from the loop given our optimistic initialization of NEARER.  */
  FOR_EACH_BB (bb)
    {
      *tos++ = bb;
      bb->aux = bb;
    }

  /* Iterate until the worklist is empty.  */
  while (tos != worklist)
    {
      /* Take the first entry off the worklist.  */
      bb = *--tos;
      bb->aux = NULL;

      /* Compute the intersection of NEARER for each outgoing edge from B.  */
      sbitmap_ones (nearerout[bb->index]);
      for (e = bb->succ; e != NULL; e = e->succ_next)
	sbitmap_a_and_b (nearerout[bb->index], nearerout[bb->index],
			 nearer[(size_t) e->aux]);

      /* Calculate NEARER for all incoming edges.  */
      for (e = bb->pred; e != NULL; e = e->pred_next)
	if (sbitmap_union_of_diff_cg (nearer[(size_t) e->aux],
				      farthest[(size_t) e->aux],
				      nearerout[e->dest->index],
				      st_avloc[e->dest->index])
	    /* If NEARER for an incoming edge was changed, then we need
	       to add the source of the incoming edge to the worklist.  */
	    && e->src != ENTRY_BLOCK_PTR && e->src->aux == 0)
	  {
	    *tos++ = e->src;
	    e->src->aux = e;
	  }
    }

  /* Computation of insertion and deletion points requires computing
     NEAREROUT for the ENTRY block.  We allocated an extra entry in the
     NEAREROUT array for just this purpose.  */
  sbitmap_ones (nearerout[last_basic_block]);
  for (e = ENTRY_BLOCK_PTR->succ; e != NULL; e = e->succ_next)
    sbitmap_a_and_b (nearerout[last_basic_block],
		     nearerout[last_basic_block],
		     nearer[(size_t) e->aux]);

  clear_aux_for_edges ();
  free (tos);
}

/* Compute the insertion and deletion points for edge based reverse LCM.  */
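
/* Sketch of the mirrored equations, with NEAREROUT for the ENTRY block
   kept in the extra element of the NEAREROUT vector:

     DELETE[bb]     = ST_AVLOC[bb] & ~NEAREROUT[bb]
     INSERT[e(p,s)] = NEARER[e] & ~NEAREROUT[p]  */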

static void
compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *st_avloc,
			   sbitmap *nearer, sbitmap *nearerout,
			   sbitmap *insert, sbitmap *delete)
{
  int x;
  basic_block bb;

  FOR_EACH_BB (bb)
    sbitmap_difference (delete[bb->index], st_avloc[bb->index],
			nearerout[bb->index]);

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    {
      basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
      if (b == ENTRY_BLOCK_PTR)
	sbitmap_difference (insert[x], nearer[x], nearerout[last_basic_block]);
      else
	sbitmap_difference (insert[x], nearer[x], nearerout[b->index]);
    }
}

/* Given local properties TRANSP, ST_AVLOC, ST_ANTLOC, KILL return the
   insert and delete vectors for edge based reverse LCM.  Returns an
   edgelist which is used to map the insert vector to what edge
   an expression should be inserted on.  */

struct edge_list *
pre_edge_rev_lcm (FILE *file ATTRIBUTE_UNUSED, int n_exprs, sbitmap *transp,
		  sbitmap *st_avloc, sbitmap *st_antloc, sbitmap *kill,
		  sbitmap **insert, sbitmap **delete)
{
  sbitmap *st_antin, *st_antout;
  sbitmap *st_avout, *st_avin, *farthest;
  sbitmap *nearer, *nearerout;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

  st_antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  st_antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  sbitmap_vector_zero (st_antin, last_basic_block);
  sbitmap_vector_zero (st_antout, last_basic_block);
  compute_antinout_edge (st_antloc, transp, st_antin, st_antout);

  /* Compute global availability.  */
  st_avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  st_avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_available (st_avloc, kill, st_avout, st_avin);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      fprintf (file, "Edge List:\n");
      verify_edge_list (file, edge_list);
      print_edge_list (file, edge_list);
      dump_sbitmap_vector (file, "transp", "", transp, last_basic_block);
      dump_sbitmap_vector (file, "st_avloc", "", st_avloc, last_basic_block);
      dump_sbitmap_vector (file, "st_antloc", "", st_antloc, last_basic_block);
      dump_sbitmap_vector (file, "st_antin", "", st_antin, last_basic_block);
      dump_sbitmap_vector (file, "st_antout", "", st_antout, last_basic_block);
      dump_sbitmap_vector (file, "st_kill", "", kill, last_basic_block);
    }
#endif

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "st_avout", "", st_avout, last_basic_block);
      dump_sbitmap_vector (file, "st_avin", "", st_avin, last_basic_block);
    }
#endif

  /* Compute farthestness.  */
  farthest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_farthest (edge_list, n_exprs, st_avout, st_avin, st_antin,
		    kill, farthest);

#ifdef LCM_DEBUG_INFO
  if (file)
    dump_sbitmap_vector (file, "farthest", "", farthest, num_edges);
#endif

  sbitmap_vector_free (st_antin);
  sbitmap_vector_free (st_antout);

  sbitmap_vector_free (st_avin);
  sbitmap_vector_free (st_avout);

  nearer = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the entry block.  */
  nearerout = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
  compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "nearerout", "", nearerout,
			   last_basic_block + 1);
      dump_sbitmap_vector (file, "nearer", "", nearer, num_edges);
    }
#endif

  sbitmap_vector_free (farthest);

  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *delete = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
			     *insert, *delete);

  sbitmap_vector_free (nearerout);
  sbitmap_vector_free (nearer);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "pre_insert_map", "", *insert, num_edges);
      dump_sbitmap_vector (file, "pre_delete_map", "", *delete,
			   last_basic_block);
    }
#endif
  return edge_list;
}

/* Mode switching:

   The algorithm for setting the modes consists of scanning the insn list
   and finding all the insns which require a specific mode.  Each insn gets
   a unique struct seginfo element.  These structures are inserted into a list
   for each basic block.  For each entity, there is an array of bb_info over
   the flow graph basic blocks (local var 'bb_info'); each element contains
   a list of all insns within that basic block, in the order they are
   encountered.

   For each entity, any basic block WITHOUT any insns requiring a specific
   mode is given a single entry, without a mode.  (Each basic block
   in the flow graph must have at least one entry in the segment table.)

   The LCM algorithm is then run over the flow graph to determine where to
   place the sets to the highest-priority mode with respect to the first
   insn in any one block.  Any adjustments required to the transparency
   vectors are made, then the next iteration starts for the next-lower
   priority mode, till for each entity all modes are exhausted.

   More details are located in the code for optimize_mode_switching().  */
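
/* For orientation, the target macros this code relies on, as used
   below; a target that wants mode switching (the SH port, for
   instance) defines them in its target headers -- this is a sketch of
   their shapes, not a definitive contract:

     NUM_MODES_FOR_MODE_SWITCHING   initializer listing, per entity,
				    the number of modes (the "no mode"
				    value being the last one)
     OPTIMIZE_MODE_SWITCHING (E)    nonzero if entity E should be
				    optimized here
     MODE_NEEDED (E, INSN)          mode of entity E that INSN requires
     MODE_PRIORITY_TO_MODE (E, N)   mode of entity E with priority N
     EMIT_MODE_SET (E, MODE, LIVE)  emit the insns that switch entity E
				    to MODE, given the live hard regs
     MODE_AFTER (MODE, INSN)        (optional) mode after INSN executes
     MODE_ENTRY (E) / MODE_EXIT (E) (optional) modes on function entry
				    and exit.  */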

/* This structure contains the information for each insn which requires
   either single or double mode to be set.
   MODE is the mode this insn must be executed in.
   INSN_PTR is the insn to be executed (may be the note that marks the
   beginning of a basic block).
   BBNUM is the flow graph basic block this insn occurs in.
   NEXT is the next insn in the same basic block.  */
struct seginfo
{
  int mode;
  rtx insn_ptr;
  int bbnum;
  struct seginfo *next;
  HARD_REG_SET regs_live;
};

struct bb_info
{
  struct seginfo *seginfo;
  int computing;
};

/* These bitmaps are used for the LCM algorithm.  */

#ifdef OPTIMIZE_MODE_SWITCHING
static sbitmap *antic;
static sbitmap *transp;
static sbitmap *comp;
static sbitmap *delete;
static sbitmap *insert;

static struct seginfo * new_seginfo (int, rtx, int, HARD_REG_SET);
static void add_seginfo (struct bb_info *, struct seginfo *);
static void reg_dies (rtx, HARD_REG_SET);
static void reg_becomes_live (rtx, rtx, void *);
static void make_preds_opaque (basic_block, int);
#endif

#ifdef OPTIMIZE_MODE_SWITCHING

/* This function will allocate a new SEGINFO structure, initialized
   with the MODE, INSN, and basic block BB parameters.  */

static struct seginfo *
new_seginfo (int mode, rtx insn, int bb, HARD_REG_SET regs_live)
{
  struct seginfo *ptr;
  ptr = xmalloc (sizeof (struct seginfo));
  ptr->mode = mode;
  ptr->insn_ptr = insn;
  ptr->bbnum = bb;
  ptr->next = NULL;
  COPY_HARD_REG_SET (ptr->regs_live, regs_live);
  return ptr;
}

/* Add a seginfo element to the end of a list.
   HEAD is a pointer to the list beginning.
   INFO is the structure to be linked in.  */

static void
add_seginfo (struct bb_info *head, struct seginfo *info)
{
  struct seginfo *ptr;

  if (head->seginfo == NULL)
    head->seginfo = info;
  else
    {
      ptr = head->seginfo;
      while (ptr->next != NULL)
	ptr = ptr->next;
      ptr->next = info;
    }
}

/* Make all predecessors of basic block B opaque, recursively, till we hit
   some that are already non-transparent, or an edge where aux is set; that
   denotes that a mode set is to be done on that edge.
   J is the bit number in the bitmaps that corresponds to the entity that
   we are currently handling mode-switching for.  */

static void
make_preds_opaque (basic_block b, int j)
{
  edge e;

  for (e = b->pred; e; e = e->pred_next)
    {
      basic_block pb = e->src;

      if (e->aux || ! TEST_BIT (transp[pb->index], j))
	continue;

      RESET_BIT (transp[pb->index], j);
      make_preds_opaque (pb, j);
    }
}

/* Record in LIVE that register REG died.  */

static void
reg_dies (rtx reg, HARD_REG_SET live)
{
  int regno, nregs;

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    for (nregs = hard_regno_nregs[regno][GET_MODE (reg)] - 1; nregs >= 0;
	 nregs--)
      CLEAR_HARD_REG_BIT (live, regno + nregs);
}

/* Record in LIVE that register REG became live.
   This is called via note_stores.  */

static void
reg_becomes_live (rtx reg, rtx setter ATTRIBUTE_UNUSED, void *live)
{
  int regno, nregs;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    for (nregs = hard_regno_nregs[regno][GET_MODE (reg)] - 1; nregs >= 0;
	 nregs--)
      SET_HARD_REG_BIT (* (HARD_REG_SET *) live, regno + nregs);
}

/* Make sure that if MODE_ENTRY is defined, MODE_EXIT is also defined,
   and vice versa.  */
#if defined (MODE_ENTRY) != defined (MODE_EXIT)
 #error "Both MODE_ENTRY and MODE_EXIT must be defined"
#endif

/* Find all insns that need a particular mode setting, and insert the
   necessary mode switches.  Return true if we did work.  */

int
optimize_mode_switching (FILE *file)
{
  rtx insn;
  int e;
  basic_block bb;
  int need_commit = 0;
  sbitmap *kill;
  struct edge_list *edge_list;
  static const int num_modes[] = NUM_MODES_FOR_MODE_SWITCHING;
#define N_ENTITIES ARRAY_SIZE (num_modes)
  int entity_map[N_ENTITIES];
  struct bb_info *bb_info[N_ENTITIES];
  int i, j;
  int n_entities;
  int max_num_modes = 0;
  bool emited = false;
  basic_block post_entry ATTRIBUTE_UNUSED, pre_exit ATTRIBUTE_UNUSED;

  clear_bb_flags ();

  for (e = N_ENTITIES - 1, n_entities = 0; e >= 0; e--)
    if (OPTIMIZE_MODE_SWITCHING (e))
      {
	int entry_exit_extra = 0;

	/* Create the list of segments within each basic block.
	   If MODE_ENTRY / MODE_EXIT are defined, allow for two extra
	   blocks split from the entry and exit block.  */
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
	entry_exit_extra = 2;
#endif
	bb_info[n_entities]
	  = xcalloc (last_basic_block + entry_exit_extra, sizeof **bb_info);
	entity_map[n_entities++] = e;
	if (num_modes[e] > max_num_modes)
	  max_num_modes = num_modes[e];
      }

  if (! n_entities)
    return 0;

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  {
    /* Split the edge from the entry block and the fallthrough edge to the
       exit block, so that we can note that NORMAL_MODE is supplied /
       required there.  */
    edge eg;
    post_entry = split_edge (ENTRY_BLOCK_PTR->succ);
    /* The only non-call predecessor at this stage is a block with a
       fallthrough edge; there can be at most one, but there could be
       none at all, e.g. when exit is called.  */
    for (pre_exit = 0, eg = EXIT_BLOCK_PTR->pred; eg; eg = eg->pred_next)
      if (eg->flags & EDGE_FALLTHRU)
	{
	  regset live_at_end = eg->src->global_live_at_end;

	  if (pre_exit)
	    abort ();
	  pre_exit = split_edge (eg);
	  COPY_REG_SET (pre_exit->global_live_at_start, live_at_end);
	  COPY_REG_SET (pre_exit->global_live_at_end, live_at_end);
	}
  }
#endif

  /* Create the bitmap vectors.  */

  antic = sbitmap_vector_alloc (last_basic_block, n_entities);
  transp = sbitmap_vector_alloc (last_basic_block, n_entities);
  comp = sbitmap_vector_alloc (last_basic_block, n_entities);

  sbitmap_vector_ones (transp, last_basic_block);

  for (j = n_entities - 1; j >= 0; j--)
    {
      int e = entity_map[j];
      int no_mode = num_modes[e];
      struct bb_info *info = bb_info[j];

      /* Determine the mode (if any) that the first use of entity E needs.
	 This will be the mode that is anticipatable for this block.
	 Also compute the initial transparency settings.  */
      FOR_EACH_BB (bb)
	{
	  struct seginfo *ptr;
	  int last_mode = no_mode;
	  HARD_REG_SET live_now;

	  REG_SET_TO_HARD_REG_SET (live_now,
				   bb->global_live_at_start);
	  for (insn = BB_HEAD (bb);
	       insn != NULL && insn != NEXT_INSN (BB_END (bb));
	       insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		{
		  int mode = MODE_NEEDED (e, insn);
		  rtx link;

		  if (mode != no_mode && mode != last_mode)
		    {
		      last_mode = mode;
		      ptr = new_seginfo (mode, insn, bb->index, live_now);
		      add_seginfo (info + bb->index, ptr);
		      RESET_BIT (transp[bb->index], j);
		    }
#ifdef MODE_AFTER
		  last_mode = MODE_AFTER (last_mode, insn);
#endif
		  /* Update LIVE_NOW.  */
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_DEAD)
		      reg_dies (XEXP (link, 0), live_now);

		  note_stores (PATTERN (insn), reg_becomes_live, &live_now);
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_UNUSED)
		      reg_dies (XEXP (link, 0), live_now);
		}
	    }

	  info[bb->index].computing = last_mode;
	  /* Check for blocks without ANY mode requirements.  */
	  if (last_mode == no_mode)
	    {
	      ptr = new_seginfo (no_mode, BB_END (bb), bb->index, live_now);
	      add_seginfo (info + bb->index, ptr);
	    }
	}
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
      {
	int mode = MODE_ENTRY (e);

	if (mode != no_mode)
	  {
	    bb = post_entry;

	    /* By always making this nontransparent, we save
	       an extra check in make_preds_opaque.  We also
	       need this to avoid confusing pre_edge_lcm when
	       antic is cleared but transp and comp are set.  */
	    RESET_BIT (transp[bb->index], j);

	    /* Insert a fake computing definition of MODE into entry
	       blocks which compute no mode.  This represents the mode on
	       entry.  */
	    info[bb->index].computing = mode;

	    if (pre_exit)
	      info[pre_exit->index].seginfo->mode = MODE_EXIT (e);
	  }
      }
#endif /* MODE_ENTRY && MODE_EXIT */
    }

  kill = sbitmap_vector_alloc (last_basic_block, n_entities);
  for (i = 0; i < max_num_modes; i++)
    {
      int current_mode[N_ENTITIES];

      /* Set the anticipatable and computing arrays.  */
      sbitmap_vector_zero (antic, last_basic_block);
      sbitmap_vector_zero (comp, last_basic_block);
      for (j = n_entities - 1; j >= 0; j--)
	{
	  int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
	  struct bb_info *info = bb_info[j];

	  FOR_EACH_BB (bb)
	    {
	      if (info[bb->index].seginfo->mode == m)
		SET_BIT (antic[bb->index], j);

	      if (info[bb->index].computing == m)
		SET_BIT (comp[bb->index], j);
	    }
	}

      /* Calculate the optimal locations for the
	 placement of mode switches to modes with priority I.  */

      FOR_EACH_BB (bb)
	sbitmap_not (kill[bb->index], transp[bb->index]);
      edge_list = pre_edge_lcm (file, 1, transp, comp, antic,
				kill, &insert, &delete);

      for (j = n_entities - 1; j >= 0; j--)
	{
	  /* Insert all mode sets that have been inserted by lcm.  */
	  int no_mode = num_modes[entity_map[j]];

	  /* Wherever we have moved a mode setting upwards in the flow graph,
	     the blocks between the new setting site and the now redundant
	     computation cease to be transparent for any lower-priority
	     mode of the same entity.  First set the aux field of each
	     insertion site edge non-transparent, then propagate the new
	     non-transparency from the redundant computation upwards till
	     we hit an insertion site or an already non-transparent block.  */
	  for (e = NUM_EDGES (edge_list) - 1; e >= 0; e--)
	    {
	      edge eg = INDEX_EDGE (edge_list, e);
	      int mode;
	      basic_block src_bb;
	      HARD_REG_SET live_at_edge;
	      rtx mode_set;

	      eg->aux = 0;

	      if (! TEST_BIT (insert[e], j))
		continue;

	      eg->aux = (void *)1;

	      mode = current_mode[j];
	      src_bb = eg->src;

	      REG_SET_TO_HARD_REG_SET (live_at_edge,
				       src_bb->global_live_at_end);

	      start_sequence ();
	      EMIT_MODE_SET (entity_map[j], mode, live_at_edge);
	      mode_set = get_insns ();
	      end_sequence ();

	      /* Do not bother to insert an empty sequence.  */
	      if (mode_set == NULL_RTX)
		continue;

	      /* If this is an abnormal edge, we'll insert at the end
		 of the previous block.  */
	      if (eg->flags & EDGE_ABNORMAL)
		{
		  emited = true;
		  if (JUMP_P (BB_END (src_bb)))
		    emit_insn_before (mode_set, BB_END (src_bb));
		  /* It doesn't make sense to switch to normal mode
		     after a CALL_INSN, so we're going to abort if we
		     find one.  The cases in which a CALL_INSN may
		     have an abnormal edge are sibcalls and EH edges.
		     In the case of sibcalls, the dest basic-block is
		     the EXIT_BLOCK, that runs in normal mode; it is
		     assumed that a sibcall insn requires normal mode
		     itself, so no mode switch would be required after
		     the call (it wouldn't make sense, anyway).  In
		     the case of EH edges, EH entry points also start
		     in normal mode, so a similar reasoning applies.  */
		  else if (NONJUMP_INSN_P (BB_END (src_bb)))
		    emit_insn_after (mode_set, BB_END (src_bb));
		  else
		    abort ();
		  bb_info[j][src_bb->index].computing = mode;
		  RESET_BIT (transp[src_bb->index], j);
		}
	      else
		{
		  need_commit = 1;
		  insert_insn_on_edge (mode_set, eg);
		}
	    }

	  FOR_EACH_BB_REVERSE (bb)
	    if (TEST_BIT (delete[bb->index], j))
	      {
		make_preds_opaque (bb, j);
		/* Cancel the 'deleted' mode set.  */
		bb_info[j][bb->index].seginfo->mode = no_mode;
	      }
	}

      clear_aux_for_edges ();
      free_edge_list (edge_list);
    }

  /* Now output the remaining mode sets in all the segments.  */
  for (j = n_entities - 1; j >= 0; j--)
    {
      int no_mode = num_modes[entity_map[j]];

      FOR_EACH_BB_REVERSE (bb)
	{
	  struct seginfo *ptr, *next;
	  for (ptr = bb_info[j][bb->index].seginfo; ptr; ptr = next)
	    {
	      next = ptr->next;
	      if (ptr->mode != no_mode)
		{
		  rtx mode_set;

		  start_sequence ();
		  EMIT_MODE_SET (entity_map[j], ptr->mode, ptr->regs_live);
		  mode_set = get_insns ();
		  end_sequence ();

		  /* Do not bother to insert an empty sequence.  */
		  if (mode_set == NULL_RTX)
		    continue;

		  emited = true;
		  if (NOTE_P (ptr->insn_ptr)
		      && (NOTE_LINE_NUMBER (ptr->insn_ptr)
			  == NOTE_INSN_BASIC_BLOCK))
		    emit_insn_after (mode_set, ptr->insn_ptr);
		  else
		    emit_insn_before (mode_set, ptr->insn_ptr);
		}

	      free (ptr);
	    }
	}

      free (bb_info[j]);
    }

  /* Finished.  Free up all the things we've allocated.  */

  sbitmap_vector_free (kill);
  sbitmap_vector_free (antic);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);
  sbitmap_vector_free (delete);
  sbitmap_vector_free (insert);

  if (need_commit)
    commit_edge_insertions ();

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
#else
  if (!need_commit && !emited)
    return 0;
#endif

  max_regno = max_reg_num ();
  allocate_reg_info (max_regno, FALSE, FALSE);
  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
				    (PROP_DEATH_NOTES | PROP_KILL_DEAD_CODE
				     | PROP_SCAN_DEAD_CODE));

  return 1;
}
#endif /* OPTIMIZE_MODE_SWITCHING */