]>
Commit | Line | Data |
---|---|---|
9ca8d59e | 1 | /* Generic partial redundancy elimination with lazy code motion support. |
2b4876d2 | 2 | Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 |
3ad4992f | 3 | Free Software Foundation, Inc. |
e48ba7af | 4 | |
f12b58b3 | 5 | This file is part of GCC. |
e48ba7af | 6 | |
f12b58b3 | 7 | GCC is free software; you can redistribute it and/or modify it under |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 2, or (at your option) any later | |
10 | version. | |
e48ba7af | 11 | |
f12b58b3 | 12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
e48ba7af | 16 | |
17 | You should have received a copy of the GNU General Public License | |
f12b58b3 | 18 | along with GCC; see the file COPYING. If not, write to the Free |
19 | Software Foundation, 59 Temple Place - Suite 330, Boston, MA | |
20 | 02111-1307, USA. */ | |
e48ba7af | 21 | |
22 | /* These routines are meant to be used by various optimization | |
5ab08585 | 23 | passes which can be modeled as lazy code motion problems. |
e48ba7af | 24 | Including, but not limited to: |
25 | ||
26 | * Traditional partial redundancy elimination. | |
27 | ||
28 | * Placement of caller/callee register save/restores. | |
29 | ||
30 | * Load/store motion. | |
31 | ||
32 | * Copy motion. | |
33 | ||
34 | * Conversion of flat register files to a stacked register | |
35 | model. | |
36 | ||
37 | * Dead load/store elimination. | |
38 | ||
39 | These routines accept as input: | |
40 | ||
41 | * Basic block information (number of blocks, lists of | |
42 | predecessors and successors). Note the granularity | |
43 | does not need to be basic block, they could be statements | |
44 | or functions. | |
45 | ||
46 | * Bitmaps of local properties (computed, transparent and | |
47 | anticipatable expressions). | |
48 | ||
49 | The output of these routines is bitmap of redundant computations | |
50 | and a bitmap of optimal placement points. */ | |
51 | ||
52 | ||
53 | #include "config.h" | |
54 | #include "system.h" | |
805e22b2 | 55 | #include "coretypes.h" |
56 | #include "tm.h" | |
e48ba7af | 57 | #include "rtl.h" |
58 | #include "regs.h" | |
59 | #include "hard-reg-set.h" | |
60 | #include "flags.h" | |
61 | #include "real.h" | |
62 | #include "insn-config.h" | |
63 | #include "recog.h" | |
64 | #include "basic-block.h" | |
742ee135 | 65 | #include "output.h" |
18862b5a | 66 | #include "tm_p.h" |
6ce54148 | 67 | #include "function.h" |
9ca8d59e | 68 | |
18862b5a | 69 | /* We want target macros for the mode switching code to be able to refer |
70 | to instruction attribute values. */ | |
71 | #include "insn-attr.h" | |
e48ba7af | 72 | |
/* Edge based LCM routines.  */
static void compute_antinout_edge (sbitmap *, sbitmap *, sbitmap *, sbitmap *);
static void compute_earliest (struct edge_list *, int, sbitmap *, sbitmap *,
			      sbitmap *, sbitmap *, sbitmap *);
static void compute_laterin (struct edge_list *, sbitmap *, sbitmap *,
			     sbitmap *, sbitmap *);
static void compute_insert_delete (struct edge_list *edge_list, sbitmap *,
				   sbitmap *, sbitmap *, sbitmap *, sbitmap *);

/* Edge based LCM routines on a reverse flowgraph.  */
static void compute_farthest (struct edge_list *, int, sbitmap *, sbitmap *,
			      sbitmap*, sbitmap *, sbitmap *);
static void compute_nearerout (struct edge_list *, sbitmap *, sbitmap *,
			       sbitmap *, sbitmap *);
static void compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *,
				       sbitmap *, sbitmap *, sbitmap *,
				       sbitmap *);
7bcd381b | 90 | \f |
91 | /* Edge based lcm routines. */ | |
3b7e1f27 | 92 | |
/* Compute expression anticipatability at entrance and exit of each block.
   This is done based on the flow graph, and not on the pred-succ lists.
   Other than that, it's pretty much identical to compute_antinout.

   ANTLOC and TRANSP are the per-block local properties; ANTIN and ANTOUT
   receive the global solution.  The equations solved to a maximal fixed
   point are:

     ANTOUT(bb) = intersection of ANTIN over bb's successors
		  (empty set for predecessors of EXIT)
     ANTIN(bb)  = ANTLOC(bb) | (TRANSP(bb) & ANTOUT(bb))  */

static void
compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
		       sbitmap *antout)
{
  basic_block bb;
  edge e;
  /* Circular queue of blocks pending (re)processing: QIN is the tail
     (insertion point), QOUT the head (removal point), QEND one past the
     end of the underlying storage; both pointers wrap at QEND.  */
  basic_block *worklist, *qin, *qout, *qend;
  unsigned int qlen;
  edge_iterator ei;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);

  /* We want a maximal solution, so make an optimistic initialization of
     ANTIN.  */
  sbitmap_vector_ones (antin, last_basic_block);

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of ANTIN above.  The aux field doubles as
     the "already on the worklist" marker.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  qin = worklist;
  qend = &worklist[n_basic_blocks];
  qlen = n_basic_blocks;

  /* Mark blocks which are predecessors of the exit block so that we
     can easily identify them below.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    e->src->aux = EXIT_BLOCK_PTR;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      if (qout >= qend)
	qout = worklist;

      if (bb->aux == EXIT_BLOCK_PTR)
	/* Do not clear the aux field for blocks which are predecessors of
	   the EXIT block.  That way we never add them to the worklist
	   again.  Nothing is anticipatable past the function exit, so
	   ANTOUT is the empty set here.  */
	sbitmap_zero (antout[bb->index]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  sbitmap_intersection_of_succs (antout[bb->index], antin, bb->index);
	}

      /* ANTIN = ANTLOC | (TRANSP & ANTOUT); the _cg variant reports
	 whether the destination bitmap actually changed.  */
      if (sbitmap_a_or_b_and_c_cg (antin[bb->index], antloc[bb->index],
				   transp[bb->index], antout[bb->index]))
	/* If the in state of this block changed, then we need
	   to add the predecessors of this block to the worklist
	   if they are not already on the worklist.  */
	FOR_EACH_EDGE (e, ei, bb->preds)
	  if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
	    {
	      *qin++ = e->src;
	      e->src->aux = e;
	      qlen++;
	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}
176 | ||
7bcd381b | 177 | /* Compute the earliest vector for edge based lcm. */ |
9ca8d59e | 178 | |
e48ba7af | 179 | static void |
3ad4992f | 180 | compute_earliest (struct edge_list *edge_list, int n_exprs, sbitmap *antin, |
181 | sbitmap *antout, sbitmap *avout, sbitmap *kill, | |
182 | sbitmap *earliest) | |
e48ba7af | 183 | { |
7bcd381b | 184 | sbitmap difference, temp_bitmap; |
5ab08585 | 185 | int x, num_edges; |
7bcd381b | 186 | basic_block pred, succ; |
e48ba7af | 187 | |
7bcd381b | 188 | num_edges = NUM_EDGES (edge_list); |
e48ba7af | 189 | |
7bcd381b | 190 | difference = sbitmap_alloc (n_exprs); |
191 | temp_bitmap = sbitmap_alloc (n_exprs); | |
e48ba7af | 192 | |
7bcd381b | 193 | for (x = 0; x < num_edges; x++) |
e48ba7af | 194 | { |
7bcd381b | 195 | pred = INDEX_EDGE_PRED_BB (edge_list, x); |
196 | succ = INDEX_EDGE_SUCC_BB (edge_list, x); | |
197 | if (pred == ENTRY_BLOCK_PTR) | |
b3d6de89 | 198 | sbitmap_copy (earliest[x], antin[succ->index]); |
7bcd381b | 199 | else |
8851e806 | 200 | { |
742ee135 | 201 | if (succ == EXIT_BLOCK_PTR) |
9ca8d59e | 202 | sbitmap_zero (earliest[x]); |
7bcd381b | 203 | else |
e48ba7af | 204 | { |
b3d6de89 | 205 | sbitmap_difference (difference, antin[succ->index], |
206 | avout[pred->index]); | |
207 | sbitmap_not (temp_bitmap, antout[pred->index]); | |
9ca8d59e | 208 | sbitmap_a_and_b_or_c (earliest[x], difference, |
b3d6de89 | 209 | kill[pred->index], temp_bitmap); |
e48ba7af | 210 | } |
211 | } | |
e48ba7af | 212 | } |
9ca8d59e | 213 | |
f5123ed5 | 214 | sbitmap_free (temp_bitmap); |
215 | sbitmap_free (difference); | |
e48ba7af | 216 | } |
217 | ||
/* later(p,s) is dependent on the calculation of laterin(p).
   laterin(p) is dependent on the calculation of later(p2,p).

   laterin(ENTRY) is defined as all 0's
   later(ENTRY, succs(ENTRY)) are defined using laterin(ENTRY)
   laterin(succs(ENTRY)) is defined by later(ENTRY, succs(ENTRY)).

   If we progress in this manner, starting with all basic blocks
   in the work list, anytime we change later(bb), we need to add
   succs(bb) to the worklist if they are not already on the worklist.

   Boundary conditions:

   We prime the worklist with all the normal basic blocks.  The ENTRY
   block can never be added to the worklist since it is never the
   successor of any block.  We explicitly prevent the EXIT block from
   being added to the worklist.

   We optimistically initialize LATER.  That is the only time this routine
   will compute LATER for an edge out of the entry block since the entry
   block is never on the worklist.  Thus, LATERIN is neither used nor
   computed for the ENTRY block.

   Since the EXIT block is never added to the worklist, we will neither
   use nor compute LATERIN for the exit block.  Edges which reach the
   EXIT block are handled in the normal fashion inside the loop.  However,
   the insertion/deletion computation needs LATERIN(EXIT), so we have
   to compute it.  */

static void
compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
		 sbitmap *antloc, sbitmap *later, sbitmap *laterin)
{
  int num_edges, i;
  edge e;
  /* Circular queue of blocks pending (re)processing: QIN is the tail,
     QOUT the head, QEND one past the storage; both wrap at QEND.  */
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;
  edge_iterator ei;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist
    = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));

  /* Initialize a mapping from each edge to its index, stashed in the
     edge's aux field.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution, so initially consider LATER true for
     all edges.  This allows propagation through a loop since the incoming
     loop edge will have LATER set, so if all the other incoming edges
     to the loop are set, then LATERIN will be set for the head of the
     loop.

     If the optimistic setting of LATER on that edge was incorrect (for
     example the expression is ANTLOC in a block within the loop) then
     this algorithm will detect it when we process the block at the head
     of the optimistic edge.  That will requeue the affected blocks.  */
  sbitmap_vector_ones (later, num_edges);

  /* Note that even though we want an optimistic setting of LATER, we
     do not want to be overly optimistic.  Consider an outgoing edge from
     the entry block.  That edge should always have a LATER value the
     same as EARLIEST for that edge.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    sbitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit from
     the loop given our optimistic initialization of LATER above.  */
  FOR_EACH_BB (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  /* Note that we do not use the last allocated element for our queue,
     as EXIT_BLOCK is never inserted into it.  In fact the above allocation
     of n_basic_blocks + 1 elements is not necessary.  */
  qin = worklist;
  qend = &worklist[n_basic_blocks];
  qlen = n_basic_blocks;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      bb->aux = NULL;
      qlen--;
      if (qout >= qend)
	qout = worklist;

      /* Compute the intersection of LATERIN for each incoming edge to B.  */
      sbitmap_ones (laterin[bb->index]);
      FOR_EACH_EDGE (e, ei, bb->preds)
	sbitmap_a_and_b (laterin[bb->index], laterin[bb->index],
			 later[(size_t)e->aux]);

      /* Calculate LATER for all outgoing edges:
	 LATER(e) = EARLIEST(e) | (LATERIN(src) - ANTLOC(src)).  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (sbitmap_union_of_diff_cg (later[(size_t) e->aux],
				      earliest[(size_t) e->aux],
				      laterin[e->src->index],
				      antloc[e->src->index])
	    /* If LATER for an outgoing edge was changed, then we need
	       to add the target of the outgoing edge to the worklist.  */
	    && e->dest != EXIT_BLOCK_PTR && e->dest->aux == 0)
	  {
	    *qin++ = e->dest;
	    e->dest->aux = e;
	    qlen++;
	    if (qin >= qend)
	      qin = worklist;
	  }
    }

  /* Computation of insertion and deletion points requires computing LATERIN
     for the EXIT block.  We allocated an extra entry in the LATERIN array
     for just this purpose.  */
  sbitmap_ones (laterin[last_basic_block]);
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    sbitmap_a_and_b (laterin[last_basic_block],
		     laterin[last_basic_block],
		     later[(size_t) e->aux]);

  clear_aux_for_edges ();
  free (worklist);
}
349 | ||
7bcd381b | 350 | /* Compute the insertion and deletion points for edge based LCM. */ |
9ca8d59e | 351 | |
7bcd381b | 352 | static void |
3ad4992f | 353 | compute_insert_delete (struct edge_list *edge_list, sbitmap *antloc, |
354 | sbitmap *later, sbitmap *laterin, sbitmap *insert, | |
355 | sbitmap *delete) | |
7bcd381b | 356 | { |
357 | int x; | |
4c26117a | 358 | basic_block bb; |
e48ba7af | 359 | |
4c26117a | 360 | FOR_EACH_BB (bb) |
d4d93ea0 | 361 | sbitmap_difference (delete[bb->index], antloc[bb->index], |
362 | laterin[bb->index]); | |
5ab08585 | 363 | |
7bcd381b | 364 | for (x = 0; x < NUM_EDGES (edge_list); x++) |
365 | { | |
366 | basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x); | |
9ca8d59e | 367 | |
7bcd381b | 368 | if (b == EXIT_BLOCK_PTR) |
f20183e6 | 369 | sbitmap_difference (insert[x], later[x], laterin[last_basic_block]); |
7bcd381b | 370 | else |
b3d6de89 | 371 | sbitmap_difference (insert[x], later[x], laterin[b->index]); |
7bcd381b | 372 | } |
373 | } | |
e48ba7af | 374 | |
/* Given local properties TRANSP, ANTLOC, AVLOC, KILL return the insert and
   delete vectors for edge based LCM.  Returns an edgelist which is used to
   map the insert vector to what edge an expression should be inserted on.

   FILE, when non-NULL and LCM_DEBUG_INFO is defined, receives dumps of
   every intermediate dataflow vector.  N_EXPRS is the size of the
   expression space each bitmap covers.  *INSERT and *DELETE are
   allocated here; presumably the caller is responsible for freeing them
   and the returned edge list — verify against callers.  */

struct edge_list *
pre_edge_lcm (FILE *file ATTRIBUTE_UNUSED, int n_exprs, sbitmap *transp,
	      sbitmap *avloc, sbitmap *antloc, sbitmap *kill,
	      sbitmap **insert, sbitmap **delete)
{
  sbitmap *antin, *antout, *earliest;
  sbitmap *avin, *avout;
  sbitmap *later, *laterin;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      fprintf (file, "Edge List:\n");
      verify_edge_list (file, edge_list);
      print_edge_list (file, edge_list);
      dump_sbitmap_vector (file, "transp", "", transp, last_basic_block);
      dump_sbitmap_vector (file, "antloc", "", antloc, last_basic_block);
      dump_sbitmap_vector (file, "avloc", "", avloc, last_basic_block);
      dump_sbitmap_vector (file, "kill", "", kill, last_basic_block);
    }
#endif

  /* Compute global availability.  Only AVOUT is needed below, so AVIN
     is freed as soon as the solver returns.  */
  avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_available (avloc, kill, avout, avin);
  sbitmap_vector_free (avin);

  /* Compute global anticipatability.  */
  antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_antinout_edge (antloc, transp, antin, antout);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "antin", "", antin, last_basic_block);
      dump_sbitmap_vector (file, "antout", "", antout, last_basic_block);
    }
#endif

  /* Compute earliestness.  */
  earliest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_earliest (edge_list, n_exprs, antin, antout, avout, kill, earliest);

#ifdef LCM_DEBUG_INFO
  if (file)
    dump_sbitmap_vector (file, "earliest", "", earliest, num_edges);
#endif

  /* ANTIN/ANTOUT/AVOUT are no longer needed once EARLIEST exists.  */
  sbitmap_vector_free (antout);
  sbitmap_vector_free (antin);
  sbitmap_vector_free (avout);

  later = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the exit block in the laterin vector.  */
  laterin = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
  compute_laterin (edge_list, earliest, antloc, later, laterin);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "laterin", "", laterin, last_basic_block + 1);
      dump_sbitmap_vector (file, "later", "", later, num_edges);
    }
#endif

  sbitmap_vector_free (earliest);

  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *delete = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_insert_delete (edge_list, antloc, later, laterin, *insert, *delete);

  sbitmap_vector_free (laterin);
  sbitmap_vector_free (later);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "pre_insert_map", "", *insert, num_edges);
      dump_sbitmap_vector (file, "pre_delete_map", "", *delete,
			   last_basic_block);
    }
#endif

  return edge_list;
}
3b7e1f27 | 472 | |
/* Compute the AVIN and AVOUT vectors from the AVLOC and KILL vectors,
   iterating a worklist to a maximal fixed point.  (An earlier comment
   claimed this returned the number of passes performed; the function is
   void and returns nothing.)

   The equations solved are the forward mirror of compute_antinout_edge:

     AVIN(bb)  = intersection of AVOUT over bb's predecessors
		 (empty set for successors of ENTRY)
     AVOUT(bb) = AVLOC(bb) | (AVIN(bb) - KILL(bb))  */

void
compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
		   sbitmap *avin)
{
  edge e;
  /* Circular queue of blocks pending (re)processing: QIN is the tail,
     QOUT the head, QEND one past the storage; both wrap at QEND.  */
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;
  edge_iterator ei;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);

  /* We want a maximal solution.  */
  sbitmap_vector_ones (avout, last_basic_block);

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of AVOUT above.  The aux field doubles as
     the "already on the worklist" marker.  */
  FOR_EACH_BB (bb)
    {
      *qin++ = bb;
      bb->aux = bb;
    }

  qin = worklist;
  qend = &worklist[n_basic_blocks];
  qlen = n_basic_blocks;

  /* Mark blocks which are successors of the entry block so that we
     can easily identify them below.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->dest->aux = ENTRY_BLOCK_PTR;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      if (qout >= qend)
	qout = worklist;

      /* If one of the predecessor blocks is the ENTRY block, then the
	 intersection of avouts is the null set.  We can identify such blocks
	 by the special value in the AUX field in the block structure.  */
      if (bb->aux == ENTRY_BLOCK_PTR)
	/* Do not clear the aux field for blocks which are successors of the
	   ENTRY block.  That way we never add them to the worklist again.  */
	sbitmap_zero (avin[bb->index]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  sbitmap_intersection_of_preds (avin[bb->index], avout, bb->index);
	}

      /* AVOUT = AVLOC | (AVIN - KILL); the _cg variant reports whether
	 the destination bitmap actually changed.  */
      if (sbitmap_union_of_diff_cg (avout[bb->index], avloc[bb->index],
				    avin[bb->index], kill[bb->index]))
	/* If the out state of this block changed, then we need
	   to add the successors of this block to the worklist
	   if they are not already on the worklist.  */
	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
	    {
	      *qin++ = e->dest;
	      e->dest->aux = e;
	      qlen++;

	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}
556 | ||
7bcd381b | 557 | /* Compute the farthest vector for edge based lcm. */ |
9ca8d59e | 558 | |
e48ba7af | 559 | static void |
3ad4992f | 560 | compute_farthest (struct edge_list *edge_list, int n_exprs, |
561 | sbitmap *st_avout, sbitmap *st_avin, sbitmap *st_antin, | |
562 | sbitmap *kill, sbitmap *farthest) | |
e48ba7af | 563 | { |
7bcd381b | 564 | sbitmap difference, temp_bitmap; |
5ab08585 | 565 | int x, num_edges; |
7bcd381b | 566 | basic_block pred, succ; |
e48ba7af | 567 | |
7bcd381b | 568 | num_edges = NUM_EDGES (edge_list); |
e48ba7af | 569 | |
7bcd381b | 570 | difference = sbitmap_alloc (n_exprs); |
571 | temp_bitmap = sbitmap_alloc (n_exprs); | |
e48ba7af | 572 | |
7bcd381b | 573 | for (x = 0; x < num_edges; x++) |
e48ba7af | 574 | { |
7bcd381b | 575 | pred = INDEX_EDGE_PRED_BB (edge_list, x); |
576 | succ = INDEX_EDGE_SUCC_BB (edge_list, x); | |
577 | if (succ == EXIT_BLOCK_PTR) | |
b3d6de89 | 578 | sbitmap_copy (farthest[x], st_avout[pred->index]); |
7bcd381b | 579 | else |
e48ba7af | 580 | { |
7bcd381b | 581 | if (pred == ENTRY_BLOCK_PTR) |
9ca8d59e | 582 | sbitmap_zero (farthest[x]); |
7bcd381b | 583 | else |
584 | { | |
b3d6de89 | 585 | sbitmap_difference (difference, st_avout[pred->index], |
586 | st_antin[succ->index]); | |
587 | sbitmap_not (temp_bitmap, st_avin[succ->index]); | |
5ab08585 | 588 | sbitmap_a_and_b_or_c (farthest[x], difference, |
b3d6de89 | 589 | kill[succ->index], temp_bitmap); |
7bcd381b | 590 | } |
e48ba7af | 591 | } |
e48ba7af | 592 | } |
9ca8d59e | 593 | |
f5123ed5 | 594 | sbitmap_free (temp_bitmap); |
595 | sbitmap_free (difference); | |
e48ba7af | 596 | } |
597 | ||
/* Compute nearer and nearerout vectors for edge based lcm.

   This is the mirror of compute_laterin, additional comments on the
   implementation can be found before compute_laterin.  */

static void
compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
		   sbitmap *st_avloc, sbitmap *nearer, sbitmap *nearerout)
{
  int num_edges, i;
  edge e;
  /* Unlike compute_laterin's circular queue, this worklist is a simple
     stack: TOS is the stack pointer, WORKLIST its base.  */
  basic_block *worklist, *tos, bb;
  edge_iterator ei;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  tos = worklist = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));

  /* Initialize NEARER for each edge and build a mapping from an edge to
     its index, stashed in the edge's aux field.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution.  */
  sbitmap_vector_ones (nearer, num_edges);

  /* Note that even though we want an optimistic setting of NEARER, we
     do not want to be overly optimistic.  Consider an incoming edge to
     the exit block.  That edge should always have a NEARER value the
     same as FARTHEST for that edge.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    sbitmap_copy (nearer[(size_t)e->aux], farthest[(size_t)e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit
     from the loop given our optimistic initialization of NEARER.  The
     aux field doubles as the "already on the worklist" marker.  */
  FOR_EACH_BB (bb)
    {
      *tos++ = bb;
      bb->aux = bb;
    }

  /* Iterate until the worklist is empty.  */
  while (tos != worklist)
    {
      /* Take the first entry off the worklist.  */
      bb = *--tos;
      bb->aux = NULL;

      /* Compute the intersection of NEARER for each outgoing edge from B.  */
      sbitmap_ones (nearerout[bb->index]);
      FOR_EACH_EDGE (e, ei, bb->succs)
	sbitmap_a_and_b (nearerout[bb->index], nearerout[bb->index],
			 nearer[(size_t) e->aux]);

      /* Calculate NEARER for all incoming edges:
	 NEARER(e) = FARTHEST(e) | (NEAREROUT(dest) - ST_AVLOC(dest)).  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (sbitmap_union_of_diff_cg (nearer[(size_t) e->aux],
				      farthest[(size_t) e->aux],
				      nearerout[e->dest->index],
				      st_avloc[e->dest->index])
	    /* If NEARER for an incoming edge was changed, then we need
	       to add the source of the incoming edge to the worklist.  */
	    && e->src != ENTRY_BLOCK_PTR && e->src->aux == 0)
	  {
	    *tos++ = e->src;
	    e->src->aux = e;
	  }
    }

  /* Computation of insertion and deletion points requires computing NEAREROUT
     for the ENTRY block.  We allocated an extra entry in the NEAREROUT array
     for just this purpose.  */
  sbitmap_ones (nearerout[last_basic_block]);
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    sbitmap_a_and_b (nearerout[last_basic_block],
		     nearerout[last_basic_block],
		     nearer[(size_t) e->aux]);

  clear_aux_for_edges ();
  /* TOS == WORKLIST here (the loop above exits on that condition), so
     this frees the block allocated above.  */
  free (tos);
}
e48ba7af | 682 | |
7bcd381b | 683 | /* Compute the insertion and deletion points for edge based LCM. */ |
9ca8d59e | 684 | |
e48ba7af | 685 | static void |
3ad4992f | 686 | compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *st_avloc, |
687 | sbitmap *nearer, sbitmap *nearerout, | |
688 | sbitmap *insert, sbitmap *delete) | |
e48ba7af | 689 | { |
7bcd381b | 690 | int x; |
4c26117a | 691 | basic_block bb; |
e48ba7af | 692 | |
4c26117a | 693 | FOR_EACH_BB (bb) |
d4d93ea0 | 694 | sbitmap_difference (delete[bb->index], st_avloc[bb->index], |
695 | nearerout[bb->index]); | |
5ab08585 | 696 | |
7bcd381b | 697 | for (x = 0; x < NUM_EDGES (edge_list); x++) |
e48ba7af | 698 | { |
7bcd381b | 699 | basic_block b = INDEX_EDGE_PRED_BB (edge_list, x); |
700 | if (b == ENTRY_BLOCK_PTR) | |
f20183e6 | 701 | sbitmap_difference (insert[x], nearer[x], nearerout[last_basic_block]); |
e48ba7af | 702 | else |
b3d6de89 | 703 | sbitmap_difference (insert[x], nearer[x], nearerout[b->index]); |
e48ba7af | 704 | } |
e48ba7af | 705 | } |
706 | ||
5ab08585 | 707 | /* Given local properties TRANSP, ST_AVLOC, ST_ANTLOC, KILL return the |
7bcd381b | 708 | insert and delete vectors for edge based reverse LCM. Returns an |
709 | edgelist which is used to map the insert vector to what edge | |
710 | an expression should be inserted on. */ | |
e48ba7af | 711 | |
struct edge_list *
pre_edge_rev_lcm (FILE *file ATTRIBUTE_UNUSED, int n_exprs, sbitmap *transp,
		  sbitmap *st_avloc, sbitmap *st_antloc, sbitmap *kill,
		  sbitmap **insert, sbitmap **delete)
{
  sbitmap *st_antin, *st_antout;
  sbitmap *st_avout, *st_avin, *farthest;
  sbitmap *nearer, *nearerout;
  struct edge_list *edge_list;
  int num_edges;

  /* Edge list drives the whole computation; *INSERT is indexed by the
     position of an edge in this list.  */
  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

  /* Compute global anticipatability (ST_ANTIN/ST_ANTOUT) from the local
     properties ST_ANTLOC and TRANSP.  */
  st_antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  st_antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  sbitmap_vector_zero (st_antin, last_basic_block);
  sbitmap_vector_zero (st_antout, last_basic_block);
  compute_antinout_edge (st_antloc, transp, st_antin, st_antout);

  /* Compute global anticipatability.  */
  st_avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
  st_avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_available (st_avloc, kill, st_avout, st_avin);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      fprintf (file, "Edge List:\n");
      verify_edge_list (file, edge_list);
      print_edge_list (file, edge_list);
      dump_sbitmap_vector (file, "transp", "", transp, last_basic_block);
      dump_sbitmap_vector (file, "st_avloc", "", st_avloc, last_basic_block);
      dump_sbitmap_vector (file, "st_antloc", "", st_antloc, last_basic_block);
      dump_sbitmap_vector (file, "st_antin", "", st_antin, last_basic_block);
      dump_sbitmap_vector (file, "st_antout", "", st_antout, last_basic_block);
      dump_sbitmap_vector (file, "st_kill", "", kill, last_basic_block);
    }
#endif

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "st_avout", "", st_avout, last_basic_block);
      dump_sbitmap_vector (file, "st_avin", "", st_avin, last_basic_block);
    }
#endif

  /* Compute farthestness.  FARTHEST is per-edge; it is the reverse-problem
     analogue of "earliest" in the forward LCM.  */
  farthest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_farthest (edge_list, n_exprs, st_avout, st_avin, st_antin,
		    kill, farthest);

#ifdef LCM_DEBUG_INFO
  if (file)
    dump_sbitmap_vector (file, "farthest", "", farthest, num_edges);
#endif

  /* The anticipatability and availability vectors are only needed to
     compute FARTHEST; release them early.  */
  sbitmap_vector_free (st_antin);
  sbitmap_vector_free (st_antout);

  sbitmap_vector_free (st_avin);
  sbitmap_vector_free (st_avout);

  nearer = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the entry block.  */
  nearerout = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
  compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "nearerout", "", nearerout,
			   last_basic_block + 1);
      dump_sbitmap_vector (file, "nearer", "", nearer, num_edges);
    }
#endif

  sbitmap_vector_free (farthest);

  /* Finally derive the caller-visible results: *INSERT (per edge) and
     *DELETE (per block).  The caller owns and must free both vectors.  */
  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *delete = sbitmap_vector_alloc (last_basic_block, n_exprs);
  compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
			     *insert, *delete);

  sbitmap_vector_free (nearerout);
  sbitmap_vector_free (nearer);

#ifdef LCM_DEBUG_INFO
  if (file)
    {
      dump_sbitmap_vector (file, "pre_insert_map", "", *insert, num_edges);
      dump_sbitmap_vector (file, "pre_delete_map", "", *delete,
			   last_basic_block);
    }
#endif
  return edge_list;
}
18862b5a | 811 | |
9ca8d59e | 812 | /* Mode switching: |
813 | ||
814 | The algorithm for setting the modes consists of scanning the insn list | |
18862b5a | 815 | and finding all the insns which require a specific mode. Each insn gets |
816 | a unique struct seginfo element. These structures are inserted into a list | |
817 | for each basic block. For each entity, there is an array of bb_info over | |
818 | the flow graph basic blocks (local var 'bb_info'), and contains a list | |
819 | of all insns within that basic block, in the order they are encountered. | |
820 | ||
821 | For each entity, any basic block WITHOUT any insns requiring a specific | |
822 | mode are given a single entry, without a mode. (Each basic block | |
823 | in the flow graph must have at least one entry in the segment table.) | |
824 | ||
   The LCM algorithm is then run over the flow graph to determine where to
   place the sets to the highest-priority mode with respect to the first
   insn in any one block.  Any adjustments required to the transparency
   vectors are made, then the next iteration starts for the next-lower
   priority mode, till for each entity all modes are exhausted.
18862b5a | 830 | |
831 | More details are located in the code for optimize_mode_switching(). */ | |
832 | ||
833 | /* This structure contains the information for each insn which requires | |
5ab08585 | 834 | either single or double mode to be set. |
18862b5a | 835 | MODE is the mode this insn must be executed in. |
f913e47c | 836 | INSN_PTR is the insn to be executed (may be the note that marks the |
837 | beginning of a basic block). | |
18862b5a | 838 | BBNUM is the flow graph basic block this insn occurs in. |
839 | NEXT is the next insn in the same basic block. */ | |
struct seginfo
{
  int mode;			/* Mode required at INSN_PTR.  */
  rtx insn_ptr;			/* Insn needing the mode (or the insn used as
				   an anchor when the block needs no mode).  */
  int bbnum;			/* Index of the basic block containing it.  */
  struct seginfo *next;		/* Next seginfo in the same basic block.  */
  HARD_REG_SET regs_live;	/* Hard registers live before INSN_PTR;
				   snapshot taken when the seginfo is made.  */
};
848 | ||
/* Per-basic-block, per-entity mode-switching state.  */
struct bb_info
{
  struct seginfo *seginfo;	/* Head of the list of mode segments in this
				   block, in insn order.  */
  int computing;		/* Mode this block ends up computing: the last
				   mode set within it, or no_mode.  */
};
854 | ||
855 | /* These bitmaps are used for the LCM algorithm. */ | |
856 | ||
#ifdef OPTIMIZE_MODE_SWITCHING
/* Bit J of ANTIC[BB] is set when the first mode needed in BB for entity J
   is the mode currently being processed; bit J of TRANSP[BB] is cleared
   when BB sets the mode of entity J (BB is not transparent for it); bit J
   of COMP[BB] is set when the last mode set in BB is the mode currently
   being processed.  All are filled in by optimize_mode_switching.  */
static sbitmap *antic;
static sbitmap *transp;
static sbitmap *comp;

static struct seginfo * new_seginfo (int, rtx, int, HARD_REG_SET);
static void add_seginfo (struct bb_info *, struct seginfo *);
static void reg_dies (rtx, HARD_REG_SET);
static void reg_becomes_live (rtx, rtx, void *);
static void make_preds_opaque (basic_block, int);
#endif
868 | \f | |
869 | #ifdef OPTIMIZE_MODE_SWITCHING | |
18862b5a | 870 | |
871 | /* This function will allocate a new BBINFO structure, initialized | |
7dff60c8 | 872 | with the MODE, INSN, and basic block BB parameters. */ |
29768226 | 873 | |
18862b5a | 874 | static struct seginfo * |
3ad4992f | 875 | new_seginfo (int mode, rtx insn, int bb, HARD_REG_SET regs_live) |
18862b5a | 876 | { |
877 | struct seginfo *ptr; | |
878 | ptr = xmalloc (sizeof (struct seginfo)); | |
879 | ptr->mode = mode; | |
880 | ptr->insn_ptr = insn; | |
881 | ptr->bbnum = bb; | |
882 | ptr->next = NULL; | |
883 | COPY_HARD_REG_SET (ptr->regs_live, regs_live); | |
884 | return ptr; | |
885 | } | |
886 | ||
5ab08585 | 887 | /* Add a seginfo element to the end of a list. |
18862b5a | 888 | HEAD is a pointer to the list beginning. |
889 | INFO is the structure to be linked in. */ | |
29768226 | 890 | |
18862b5a | 891 | static void |
3ad4992f | 892 | add_seginfo (struct bb_info *head, struct seginfo *info) |
18862b5a | 893 | { |
894 | struct seginfo *ptr; | |
895 | ||
896 | if (head->seginfo == NULL) | |
897 | head->seginfo = info; | |
898 | else | |
899 | { | |
900 | ptr = head->seginfo; | |
901 | while (ptr->next != NULL) | |
8851e806 | 902 | ptr = ptr->next; |
18862b5a | 903 | ptr->next = info; |
904 | } | |
905 | } | |
906 | ||
907 | /* Make all predecessors of basic block B opaque, recursively, till we hit | |
908 | some that are already non-transparent, or an edge where aux is set; that | |
909 | denotes that a mode set is to be done on that edge. | |
910 | J is the bit number in the bitmaps that corresponds to the entity that | |
911 | we are currently handling mode-switching for. */ | |
29768226 | 912 | |
18862b5a | 913 | static void |
3ad4992f | 914 | make_preds_opaque (basic_block b, int j) |
18862b5a | 915 | { |
916 | edge e; | |
cd665a06 | 917 | edge_iterator ei; |
18862b5a | 918 | |
cd665a06 | 919 | FOR_EACH_EDGE (e, ei, b->preds) |
18862b5a | 920 | { |
921 | basic_block pb = e->src; | |
9ca8d59e | 922 | |
b3d6de89 | 923 | if (e->aux || ! TEST_BIT (transp[pb->index], j)) |
18862b5a | 924 | continue; |
9ca8d59e | 925 | |
b3d6de89 | 926 | RESET_BIT (transp[pb->index], j); |
18862b5a | 927 | make_preds_opaque (pb, j); |
928 | } | |
929 | } | |
930 | ||
931 | /* Record in LIVE that register REG died. */ | |
29768226 | 932 | |
18862b5a | 933 | static void |
3ad4992f | 934 | reg_dies (rtx reg, HARD_REG_SET live) |
18862b5a | 935 | { |
9ca8d59e | 936 | int regno, nregs; |
18862b5a | 937 | |
8ad4c111 | 938 | if (!REG_P (reg)) |
18862b5a | 939 | return; |
9ca8d59e | 940 | |
18862b5a | 941 | regno = REGNO (reg); |
942 | if (regno < FIRST_PSEUDO_REGISTER) | |
67d6c12b | 943 | for (nregs = hard_regno_nregs[regno][GET_MODE (reg)] - 1; nregs >= 0; |
9ca8d59e | 944 | nregs--) |
945 | CLEAR_HARD_REG_BIT (live, regno + nregs); | |
18862b5a | 946 | } |
947 | ||
948 | /* Record in LIVE that register REG became live. | |
949 | This is called via note_stores. */ | |
29768226 | 950 | |
18862b5a | 951 | static void |
3ad4992f | 952 | reg_becomes_live (rtx reg, rtx setter ATTRIBUTE_UNUSED, void *live) |
18862b5a | 953 | { |
9ca8d59e | 954 | int regno, nregs; |
18862b5a | 955 | |
956 | if (GET_CODE (reg) == SUBREG) | |
957 | reg = SUBREG_REG (reg); | |
958 | ||
8ad4c111 | 959 | if (!REG_P (reg)) |
18862b5a | 960 | return; |
961 | ||
962 | regno = REGNO (reg); | |
963 | if (regno < FIRST_PSEUDO_REGISTER) | |
67d6c12b | 964 | for (nregs = hard_regno_nregs[regno][GET_MODE (reg)] - 1; nregs >= 0; |
9ca8d59e | 965 | nregs--) |
966 | SET_HARD_REG_BIT (* (HARD_REG_SET *) live, regno + nregs); | |
18862b5a | 967 | } |
968 | ||
894b8fd9 | 969 | /* Make sure if MODE_ENTRY is defined the MODE_EXIT is defined |
970 | and vice versa. */ | |
971 | #if defined (MODE_ENTRY) != defined (MODE_EXIT) | |
972 | #error "Both MODE_ENTRY and MODE_EXIT must be defined" | |
973 | #endif | |
974 | ||
d4f6028d | 975 | #if defined (MODE_ENTRY) && defined (MODE_EXIT) |
976 | /* Split the fallthrough edge to the exit block, so that we can note | |
977 | that there NORMAL_MODE is required. Return the new block if it's | |
978 | inserted before the exit block. Otherwise return null. */ | |
979 | ||
static basic_block
create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
{
  edge eg;
  edge_iterator ei;
  basic_block pre_exit;

  /* The only non-call predecessor at this stage is a block with a
     fallthrough edge; there can be at most one, but there could be
     none at all, e.g. when exit is called.  */
  pre_exit = 0;
  FOR_EACH_EDGE (eg, ei, EXIT_BLOCK_PTR->preds)
    if (eg->flags & EDGE_FALLTHRU)
      {
	basic_block src_bb = eg->src;
	regset live_at_end = src_bb->global_live_at_end;
	rtx last_insn, ret_reg;

	/* There must be at most one fallthrough predecessor.  */
	gcc_assert (!pre_exit);
	/* If this function returns a value at the end, we have to
	   insert the final mode switch before the return value copy
	   to its hard register.  */
	if (EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 1
	    && GET_CODE ((last_insn = BB_END (src_bb))) == INSN
	    && GET_CODE (PATTERN (last_insn)) == USE
	    && GET_CODE ((ret_reg = XEXP (PATTERN (last_insn), 0))) == REG)
	  {
	    int ret_start = REGNO (ret_reg);
	    int nregs = hard_regno_nregs[ret_start][GET_MODE (ret_reg)];
	    int ret_end = ret_start + nregs;
	    int short_block = 0;
	    int maybe_builtin_apply = 0;
	    int forced_late_switch = 0;
	    rtx before_return_copy;

	    /* Scan backwards from the final USE of the return register,
	       trying to account for the copies that set each of its
	       NREGS hard registers.  LAST_INSN ends up at the earliest
	       insn that must stay below the mode switch.  */
	    do
	      {
		rtx return_copy = PREV_INSN (last_insn);
		rtx return_copy_pat, copy_reg;
		int copy_start, copy_num;
		int j;

		if (INSN_P (return_copy))
		  {
		    /* An extra USE of a function value register suggests
		       a __builtin_apply / __builtin_return sequence.  */
		    if (GET_CODE (PATTERN (return_copy)) == USE
			&& GET_CODE (XEXP (PATTERN (return_copy), 0)) == REG
			&& (FUNCTION_VALUE_REGNO_P
			    (REGNO (XEXP (PATTERN (return_copy), 0)))))
		      {
			maybe_builtin_apply = 1;
			last_insn = return_copy;
			continue;
		      }
		    /* If the return register is not (in its entirety)
		       likely spilled, the return copy might be
		       partially or completely optimized away.  */
		    return_copy_pat = single_set (return_copy);
		    if (!return_copy_pat)
		      {
			return_copy_pat = PATTERN (return_copy);
			if (GET_CODE (return_copy_pat) != CLOBBER)
			  break;
		      }
		    copy_reg = SET_DEST (return_copy_pat);
		    if (GET_CODE (copy_reg) == REG)
		      copy_start = REGNO (copy_reg);
		    else if (GET_CODE (copy_reg) == SUBREG
			     && GET_CODE (SUBREG_REG (copy_reg)) == REG)
		      copy_start = REGNO (SUBREG_REG (copy_reg));
		    else
		      break;
		    if (copy_start >= FIRST_PSEUDO_REGISTER)
		      break;
		    copy_num
		      = hard_regno_nregs[copy_start][GET_MODE (copy_reg)];

		    /* If the return register is not likely spilled, - as is
		       the case for floating point on SH4 - then it might
		       be set by an arithmetic operation that needs a
		       different mode than the exit block.  */
		    for (j = n_entities - 1; j >= 0; j--)
		      {
			int e = entity_map[j];
			int mode = MODE_NEEDED (e, return_copy);

			if (mode != num_modes[e] && mode != MODE_EXIT (e))
			  break;
		      }
		    /* J >= 0 here means some entity needs a non-exit mode
		       at this insn, so the switch cannot go above it.  */
		    if (j >= 0)
		      {
			/* For the SH4, floating point loads depend on fpscr,
			   thus we might need to put the final mode switch
			   after the return value copy.  That is still OK,
			   because a floating point return value does not
			   conflict with address reloads.  */
			if (copy_start >= ret_start
			    && copy_start + copy_num <= ret_end
			    && OBJECT_P (SET_SRC (return_copy_pat)))
			  forced_late_switch = 1;
			break;
		      }

		    if (copy_start >= ret_start
			&& copy_start + copy_num <= ret_end)
		      nregs -= copy_num;
		    else if (!maybe_builtin_apply
			     || !FUNCTION_VALUE_REGNO_P (copy_start))
		      break;
		    last_insn = return_copy;
		  }
		/* ??? Exception handling can lead to the return value
		   copy being already separated from the return value use,
		   as in unwind-dw2.c .
		   Similarly, conditionally returning without a value,
		   and conditionally using builtin_return can lead to an
		   isolated use.  */
		if (return_copy == BB_HEAD (src_bb))
		  {
		    short_block = 1;
		    break;
		  }
		last_insn = return_copy;
	      }
	    while (nregs);
	    /* If we didn't see a full return value copy, verify that there
	       is a plausible reason for this.  If some, but not all of the
	       return register is likely spilled, we can expect that there
	       is a copy for the likely spilled part.  */
	    if (nregs
		&& ! forced_late_switch
		&& ! short_block
		&& CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (ret_start))
		&& nregs == hard_regno_nregs[ret_start][GET_MODE (ret_reg)]
		/* For multi-hard-register floating point values,
		   sometimes the likely-spilled part is ordinarily copied
		   first, then the other part is set with an arithmetic
		   operation.  This doesn't actually cause reload failures,
		   so let it pass.  */
		&& (GET_MODE_CLASS (GET_MODE (ret_reg)) == MODE_INT
		    || nregs == 1))
	      abort ();
	    if (INSN_P (last_insn))
	      {
		/* Emit a placeholder note so we have a stable point to
		   split at, just above the return value copy.  */
		before_return_copy
		  = emit_note_before (NOTE_INSN_DELETED, last_insn);
		/* Instructions preceding LAST_INSN in the same block might
		   require a different mode than MODE_EXIT, so if we might
		   have such instructions, keep them in a separate block
		   from pre_exit.  */
		if (last_insn != BB_HEAD (src_bb))
		  src_bb = split_block (src_bb,
					PREV_INSN (before_return_copy))->dest;
	      }
	    else
	      before_return_copy = last_insn;
	    pre_exit = split_block (src_bb, before_return_copy)->src;
	  }
	else
	  {
	    /* No return value to worry about: just split the fallthrough
	       edge and propagate the liveness info to the new block.  */
	    pre_exit = split_edge (eg);
	    COPY_REG_SET (pre_exit->global_live_at_start, live_at_end);
	    COPY_REG_SET (pre_exit->global_live_at_end, live_at_end);
	  }
      }

  return pre_exit;
}
1147 | #endif | |
1148 | ||
b78e14a8 | 1149 | /* Find all insns that need a particular mode setting, and insert the |
1150 | necessary mode switches. Return true if we did work. */ | |
9ca8d59e | 1151 | |
int
optimize_mode_switching (FILE *file)
{
  rtx insn;
  int e;
  basic_block bb;
  int need_commit = 0;		/* Set when insns are queued on edges.  */
  sbitmap *kill;
  struct edge_list *edge_list;
  static const int num_modes[] = NUM_MODES_FOR_MODE_SWITCHING;
#define N_ENTITIES ARRAY_SIZE (num_modes)
  int entity_map[N_ENTITIES];	/* Maps dense index -> entity number.  */
  struct bb_info *bb_info[N_ENTITIES];
  int i, j;
  int n_entities;		/* Number of entities actually optimized.  */
  int max_num_modes = 0;
  bool emited = false;		/* Set when any mode-set insn was emitted.  */
  basic_block post_entry ATTRIBUTE_UNUSED, pre_exit ATTRIBUTE_UNUSED;

  clear_bb_flags ();

  /* Collect the entities the target wants optimized, and size the
     per-entity bb_info arrays.  */
  for (e = N_ENTITIES - 1, n_entities = 0; e >= 0; e--)
    if (OPTIMIZE_MODE_SWITCHING (e))
      {
	int entry_exit_extra = 0;

	/* Create the list of segments within each basic block.
	   If NORMAL_MODE is defined, allow for two extra
	   blocks split from the entry and exit block.  */
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
	entry_exit_extra = 3;
#endif
	bb_info[n_entities]
	  = xcalloc (last_basic_block + entry_exit_extra, sizeof **bb_info);
	entity_map[n_entities++] = e;
	if (num_modes[e] > max_num_modes)
	  max_num_modes = num_modes[e];
      }

  if (! n_entities)
    return 0;

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  /* Split the edge from the entry block, so that we can note that
     there NORMAL_MODE is supplied.  */
  post_entry = split_edge (EDGE_SUCC (ENTRY_BLOCK_PTR, 0));
  pre_exit = create_pre_exit (n_entities, entity_map, num_modes);
#endif

  /* Create the bitmap vectors.  */

  antic = sbitmap_vector_alloc (last_basic_block, n_entities);
  transp = sbitmap_vector_alloc (last_basic_block, n_entities);
  comp = sbitmap_vector_alloc (last_basic_block, n_entities);

  /* Start fully transparent; bits are cleared below wherever a block
     sets a mode.  */
  sbitmap_vector_ones (transp, last_basic_block);

  for (j = n_entities - 1; j >= 0; j--)
    {
      int e = entity_map[j];
      int no_mode = num_modes[e];
      struct bb_info *info = bb_info[j];

      /* Determine what the first use (if any) need for a mode of entity E is.
	 This will be the mode that is anticipatable for this block.
	 Also compute the initial transparency settings.  */
      FOR_EACH_BB (bb)
	{
	  struct seginfo *ptr;
	  int last_mode = no_mode;
	  HARD_REG_SET live_now;

	  /* Track hard-register liveness insn by insn so each seginfo
	     can record the registers live at its insn.  */
	  REG_SET_TO_HARD_REG_SET (live_now,
				   bb->global_live_at_start);
	  for (insn = BB_HEAD (bb);
	       insn != NULL && insn != NEXT_INSN (BB_END (bb));
	       insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		{
		  int mode = MODE_NEEDED (e, insn);
		  rtx link;

		  /* Start a new segment whenever the required mode
		     changes.  */
		  if (mode != no_mode && mode != last_mode)
		    {
		      last_mode = mode;
		      ptr = new_seginfo (mode, insn, bb->index, live_now);
		      add_seginfo (info + bb->index, ptr);
		      RESET_BIT (transp[bb->index], j);
		    }
#ifdef MODE_AFTER
		  last_mode = MODE_AFTER (last_mode, insn);
#endif
		  /* Update LIVE_NOW.  */
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_DEAD)
		      reg_dies (XEXP (link, 0), live_now);

		  note_stores (PATTERN (insn), reg_becomes_live, &live_now);
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_UNUSED)
		      reg_dies (XEXP (link, 0), live_now);
		}
	    }

	  info[bb->index].computing = last_mode;
	  /* Check for blocks without ANY mode requirements.  */
	  if (last_mode == no_mode)
	    {
	      ptr = new_seginfo (no_mode, BB_END (bb), bb->index, live_now);
	      add_seginfo (info + bb->index, ptr);
	    }
	}
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
      {
	int mode = MODE_ENTRY (e);

	if (mode != no_mode)
	  {
	    bb = post_entry;

	    /* By always making this nontransparent, we save
	       an extra check in make_preds_opaque.  We also
	       need this to avoid confusing pre_edge_lcm when
	       antic is cleared but transp and comp are set.  */
	    RESET_BIT (transp[bb->index], j);

	    /* Insert a fake computing definition of MODE into entry
	       blocks which compute no mode.  This represents the mode on
	       entry.  */
	    info[bb->index].computing = mode;

	    if (pre_exit)
	      info[pre_exit->index].seginfo->mode = MODE_EXIT (e);
	  }
      }
#endif /* NORMAL_MODE */
    }

  kill = sbitmap_vector_alloc (last_basic_block, n_entities);
  /* One LCM pass per priority level, from highest (i == 0) downward.  */
  for (i = 0; i < max_num_modes; i++)
    {
      int current_mode[N_ENTITIES];
      sbitmap *delete;
      sbitmap *insert;

      /* Set the anticipatable and computing arrays.  */
      sbitmap_vector_zero (antic, last_basic_block);
      sbitmap_vector_zero (comp, last_basic_block);
      for (j = n_entities - 1; j >= 0; j--)
	{
	  int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
	  struct bb_info *info = bb_info[j];

	  FOR_EACH_BB (bb)
	    {
	      if (info[bb->index].seginfo->mode == m)
		SET_BIT (antic[bb->index], j);

	      if (info[bb->index].computing == m)
		SET_BIT (comp[bb->index], j);
	    }
	}

      /* Calculate the optimal locations for the
	 placement mode switches to modes with priority I.  */

      /* KILL is simply the complement of transparency here.  */
      FOR_EACH_BB (bb)
	sbitmap_not (kill[bb->index], transp[bb->index]);
      edge_list = pre_edge_lcm (file, 1, transp, comp, antic,
				kill, &insert, &delete);

      for (j = n_entities - 1; j >= 0; j--)
	{
	  /* Insert all mode sets that have been inserted by lcm.  */
	  int no_mode = num_modes[entity_map[j]];

	  /* Wherever we have moved a mode setting upwards in the flow graph,
	     the blocks between the new setting site and the now redundant
	     computation ceases to be transparent for any lower-priority
	     mode of the same entity.  First set the aux field of each
	     insertion site edge non-transparent, then propagate the new
	     non-transparency from the redundant computation upwards till
	     we hit an insertion site or an already non-transparent block.  */
	  for (e = NUM_EDGES (edge_list) - 1; e >= 0; e--)
	    {
	      edge eg = INDEX_EDGE (edge_list, e);
	      int mode;
	      basic_block src_bb;
	      HARD_REG_SET live_at_edge;
	      rtx mode_set;

	      eg->aux = 0;

	      if (! TEST_BIT (insert[e], j))
		continue;

	      /* Mark this edge as an insertion site for
		 make_preds_opaque.  */
	      eg->aux = (void *)1;

	      mode = current_mode[j];
	      src_bb = eg->src;

	      REG_SET_TO_HARD_REG_SET (live_at_edge,
				       src_bb->global_live_at_end);

	      /* Let the target emit the mode-setting insns into a
		 detached sequence.  */
	      start_sequence ();
	      EMIT_MODE_SET (entity_map[j], mode, live_at_edge);
	      mode_set = get_insns ();
	      end_sequence ();

	      /* Do not bother to insert empty sequence.  */
	      if (mode_set == NULL_RTX)
		continue;

	      /* If this is an abnormal edge, we'll insert at the end
		 of the previous block.  */
	      if (eg->flags & EDGE_ABNORMAL)
		{
		  emited = true;
		  if (JUMP_P (BB_END (src_bb)))
		    emit_insn_before (mode_set, BB_END (src_bb));
		  /* It doesn't make sense to switch to normal mode
		     after a CALL_INSN, so we're going to abort if we
		     find one.  The cases in which a CALL_INSN may
		     have an abnormal edge are sibcalls and EH edges.
		     In the case of sibcalls, the dest basic-block is
		     the EXIT_BLOCK, that runs in normal mode; it is
		     assumed that a sibcall insn requires normal mode
		     itself, so no mode switch would be required after
		     the call (it wouldn't make sense, anyway).  In
		     the case of EH edges, EH entry points also start
		     in normal mode, so a similar reasoning applies.  */
		  else if (NONJUMP_INSN_P (BB_END (src_bb)))
		    emit_insn_after (mode_set, BB_END (src_bb));
		  else
		    abort ();
		  bb_info[j][src_bb->index].computing = mode;
		  RESET_BIT (transp[src_bb->index], j);
		}
	      else
		{
		  need_commit = 1;
		  insert_insn_on_edge (mode_set, eg);
		}
	    }

	  FOR_EACH_BB_REVERSE (bb)
	    if (TEST_BIT (delete[bb->index], j))
	      {
		make_preds_opaque (bb, j);
		/* Cancel the 'deleted' mode set.  */
		bb_info[j][bb->index].seginfo->mode = no_mode;
	      }
	}

      sbitmap_vector_free (delete);
      sbitmap_vector_free (insert);
      clear_aux_for_edges ();
      free_edge_list (edge_list);
    }

  /* Now output the remaining mode sets in all the segments.  */
  for (j = n_entities - 1; j >= 0; j--)
    {
      int no_mode = num_modes[entity_map[j]];

      FOR_EACH_BB_REVERSE (bb)
	{
	  struct seginfo *ptr, *next;
	  for (ptr = bb_info[j][bb->index].seginfo; ptr; ptr = next)
	    {
	      next = ptr->next;
	      if (ptr->mode != no_mode)
		{
		  rtx mode_set;

		  start_sequence ();
		  EMIT_MODE_SET (entity_map[j], ptr->mode, ptr->regs_live);
		  mode_set = get_insns ();
		  end_sequence ();

		  /* Insert MODE_SET only if it is nonempty.  */
		  if (mode_set != NULL_RTX)
		    {
		      emited = true;
		      if (NOTE_P (ptr->insn_ptr)
			  && (NOTE_LINE_NUMBER (ptr->insn_ptr)
			      == NOTE_INSN_BASIC_BLOCK))
			emit_insn_after (mode_set, ptr->insn_ptr);
		      else
			emit_insn_before (mode_set, ptr->insn_ptr);
		    }
		}

	      free (ptr);
	    }
	}

      free (bb_info[j]);
    }

  /* Finished. Free up all the things we've allocated.  */

  sbitmap_vector_free (kill);
  sbitmap_vector_free (antic);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  if (need_commit)
    commit_edge_insertions ();

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
#else
  if (!need_commit && !emited)
    return 0;
#endif

  /* Mode switches were emitted: rebuild register life information for
     the blocks we dirtied.  */
  max_regno = max_reg_num ();
  allocate_reg_info (max_regno, FALSE, FALSE);
  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
				    (PROP_DEATH_NOTES | PROP_KILL_DEAD_CODE
				     | PROP_SCAN_DEAD_CODE));

  return 1;
}
b78e14a8 | 1478 | #endif /* OPTIMIZE_MODE_SWITCHING */ |