/* CPU mode switching
   Copyright (C) 1998-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "tm_p.h"
#include "function.h"
#include "tree-pass.h"
#include "df.h"
#include "emit-rtl.h"

/* We want target macros for the mode switching code to be able to refer
   to instruction attribute values.  */
#include "insn-attr.h"

#ifdef OPTIMIZE_MODE_SWITCHING

/* The algorithm for setting the modes consists of scanning the insn list
   and finding all the insns which require a specific mode.  Each insn gets
   a unique struct seginfo element.  These structures are inserted into a list
   for each basic block.  For each entity, there is an array of bb_info over
   the flow graph basic blocks (local var 'bb_info'), which contains a list
   of all insns within that basic block, in the order they are encountered.

   For each entity, any basic block WITHOUT any insns requiring a specific
   mode is given a single entry without a mode (each basic block in the
   flow graph must have at least one entry in the segment table).

   The LCM algorithm is then run over the flow graph to determine where to
   place the sets to the highest-priority mode with respect to the first
   insn in any one block.  Any adjustments required to the transparency
   vectors are made, then the next iteration starts for the next-lower
   priority mode, till for each entity all modes are exhausted.

   More details can be found in the code of optimize_mode_switching.  */
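
/* Illustrative sketch only, not part of GCC: a target opts into this pass
   by defining OPTIMIZE_MODE_SWITCHING and NUM_MODES_FOR_MODE_SWITCHING in
   its headers, and by providing the targetm.mode_switching hooks used below
   (needed, emit and priority, plus optionally entry, exit and after).  A
   hypothetical target with a single entity and two modes might use roughly:

       #define OPTIMIZE_MODE_SWITCHING(ENTITY) (1)
       #define NUM_MODES_FOR_MODE_SWITCHING { 2 }

       static int
       my_mode_needed (int entity ATTRIBUTE_UNUSED, rtx insn)
       {
	 // Return 0 or 1 when INSN requires that mode, or 2 (the number
	 // of modes, i.e. "no requirement") otherwise.
	 return recog_memoized (insn) >= 0 ? get_attr_my_mode (insn) : 2;
       }

   The names my_mode_needed and get_attr_my_mode are invented for this
   example; real ports that use mode switching (e.g. sh, epiphany) supply
   their own hook implementations.  */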
\f
/* This structure contains the information for each insn which requires
   either single or double mode to be set.
   MODE is the mode this insn must be executed in.
   INSN_PTR is the insn to be executed (may be the note that marks the
   beginning of a basic block).
   BBNUM is the flow graph basic block this insn occurs in.
   NEXT is the next insn in the same basic block.  */
struct seginfo
{
  int mode;
  rtx insn_ptr;
  int bbnum;
  struct seginfo *next;
  HARD_REG_SET regs_live;
};

struct bb_info
{
  struct seginfo *seginfo;
  int computing;
};

/* These bitmaps are used for the LCM algorithm.  */
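/* For the mode currently being processed (see optimize_mode_switching
   below), bit J of ANTIC[BB] is set when the first mode needed in block BB
   for entity J is that mode, bit J of COMP[BB] when the mode computed at
   the end of BB is that mode, and bit J of TRANSP[BB] when BB neither
   requires nor clobbers the mode of entity J.  */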

static sbitmap *antic;
static sbitmap *transp;
static sbitmap *comp;

static struct seginfo * new_seginfo (int, rtx, int, HARD_REG_SET);
static void add_seginfo (struct bb_info *, struct seginfo *);
static void reg_dies (rtx, HARD_REG_SET *);
static void reg_becomes_live (rtx, const_rtx, void *);
static void make_preds_opaque (basic_block, int);
\f

/* This function will allocate a new SEGINFO structure, initialized
   with the MODE, INSN, and basic block BB parameters.
   INSN may not be a NOTE_INSN_BASIC_BLOCK, unless it is an empty
   basic block; that allows us later to insert instructions in a FIFO-like
   manner.  */

static struct seginfo *
new_seginfo (int mode, rtx insn, int bb, HARD_REG_SET regs_live)
{
  struct seginfo *ptr;

  gcc_assert (!NOTE_INSN_BASIC_BLOCK_P (insn)
	      || insn == BB_END (NOTE_BASIC_BLOCK (insn)));
  ptr = XNEW (struct seginfo);
  ptr->mode = mode;
  ptr->insn_ptr = insn;
  ptr->bbnum = bb;
  ptr->next = NULL;
  COPY_HARD_REG_SET (ptr->regs_live, regs_live);
  return ptr;
}

/* Add a seginfo element to the end of a list.
   HEAD is a pointer to the list beginning.
   INFO is the structure to be linked in.  */

static void
add_seginfo (struct bb_info *head, struct seginfo *info)
{
  struct seginfo *ptr;

  if (head->seginfo == NULL)
    head->seginfo = info;
  else
    {
      ptr = head->seginfo;
      while (ptr->next != NULL)
	ptr = ptr->next;
      ptr->next = info;
    }
}

/* Make all predecessors of basic block B opaque, recursively, till we hit
   some that are already non-transparent, or an edge where aux is set; that
   denotes that a mode set is to be done on that edge.
   J is the bit number in the bitmaps that corresponds to the entity that
   we are currently handling mode-switching for.  */

static void
make_preds_opaque (basic_block b, int j)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, b->preds)
    {
      basic_block pb = e->src;

      if (e->aux || ! bitmap_bit_p (transp[pb->index], j))
	continue;

      bitmap_clear_bit (transp[pb->index], j);
      make_preds_opaque (pb, j);
    }
}

/* Record in LIVE that register REG died.  */

static void
reg_dies (rtx reg, HARD_REG_SET *live)
{
  int regno;

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    remove_from_hard_reg_set (live, GET_MODE (reg), regno);
}

/* Record in LIVE that register REG became live.
   This is called via note_stores.  */

static void
reg_becomes_live (rtx reg, const_rtx setter ATTRIBUTE_UNUSED, void *live)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set ((HARD_REG_SET *) live, GET_MODE (reg), regno);
}

/* Split the fallthrough edge to the exit block, so that we can note
   that NORMAL_MODE is required there.  Return the new block if it's
   inserted before the exit block.  Otherwise return null.  */

static basic_block
create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
{
  edge eg;
  edge_iterator ei;
  basic_block pre_exit;

  /* The only non-call predecessor at this stage is a block with a
     fallthrough edge; there can be at most one, but there could be
     none at all, e.g. when exit is called.  */
  pre_exit = 0;
  FOR_EACH_EDGE (eg, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (eg->flags & EDGE_FALLTHRU)
      {
	basic_block src_bb = eg->src;
	rtx last_insn, ret_reg;

	gcc_assert (!pre_exit);
	/* If this function returns a value at the end, we have to
	   insert the final mode switch before the return value copy
	   to its hard register.  */
	if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) == 1
	    && NONJUMP_INSN_P ((last_insn = BB_END (src_bb)))
	    && GET_CODE (PATTERN (last_insn)) == USE
	    && GET_CODE ((ret_reg = XEXP (PATTERN (last_insn), 0))) == REG)
	  {
	    int ret_start = REGNO (ret_reg);
	    int nregs = hard_regno_nregs[ret_start][GET_MODE (ret_reg)];
	    int ret_end = ret_start + nregs;
	    bool short_block = false;
	    bool multi_reg_return = false;
	    bool forced_late_switch = false;
	    rtx before_return_copy;

	    do
	      {
		rtx return_copy = PREV_INSN (last_insn);
		rtx return_copy_pat, copy_reg;
		int copy_start, copy_num;
		int j;

		if (NONDEBUG_INSN_P (return_copy))
		  {
		    /* When using SJLJ exceptions, the call to the
		       unregister function is inserted between the
		       clobber of the return value and the copy.
		       We do not want to split the block before this
		       or any other call; if we have not found the
		       copy yet, the copy must have been deleted.  */
		    if (CALL_P (return_copy))
		      {
			short_block = true;
			break;
		      }
		    return_copy_pat = PATTERN (return_copy);
		    switch (GET_CODE (return_copy_pat))
		      {
		      case USE:
			/* Skip USEs of multiple return registers.
			   __builtin_apply pattern is also handled here.  */
			if (GET_CODE (XEXP (return_copy_pat, 0)) == REG
			    && (targetm.calls.function_value_regno_p
				(REGNO (XEXP (return_copy_pat, 0)))))
			  {
			    multi_reg_return = true;
			    last_insn = return_copy;
			    continue;
			  }
			break;

		      case ASM_OPERANDS:
			/* Skip barrier insns.  */
			if (!MEM_VOLATILE_P (return_copy_pat))
			  break;

			/* Fall through.  */

		      case ASM_INPUT:
		      case UNSPEC_VOLATILE:
			last_insn = return_copy;
			continue;

		      default:
			break;
		      }

		    /* If the return register is not (in its entirety)
		       likely spilled, the return copy might be
		       partially or completely optimized away.  */
		    return_copy_pat = single_set (return_copy);
		    if (!return_copy_pat)
		      {
			return_copy_pat = PATTERN (return_copy);
			if (GET_CODE (return_copy_pat) != CLOBBER)
			  break;
			else if (!optimize)
			  {
			    /* This might be (clobber (reg [<result>]))
			       when not optimizing.  Then check if
			       the previous insn is the clobber for
			       the return register.  */
			    copy_reg = SET_DEST (return_copy_pat);
			    if (GET_CODE (copy_reg) == REG
				&& !HARD_REGISTER_NUM_P (REGNO (copy_reg)))
			      {
				if (INSN_P (PREV_INSN (return_copy)))
				  {
				    return_copy = PREV_INSN (return_copy);
				    return_copy_pat = PATTERN (return_copy);
				    if (GET_CODE (return_copy_pat) != CLOBBER)
				      break;
				  }
			      }
			  }
		      }
		    copy_reg = SET_DEST (return_copy_pat);
		    if (GET_CODE (copy_reg) == REG)
		      copy_start = REGNO (copy_reg);
		    else if (GET_CODE (copy_reg) == SUBREG
			     && GET_CODE (SUBREG_REG (copy_reg)) == REG)
		      copy_start = REGNO (SUBREG_REG (copy_reg));
		    else
		      {
			/* When control reaches the end of a non-void
			   function, there are no return copy insns at all.
			   This avoids an ICE on that invalid function.  */
			if (ret_start + nregs == ret_end)
			  short_block = true;
			break;
		      }
		    if (!targetm.calls.function_value_regno_p (copy_start))
		      copy_num = 0;
		    else
		      copy_num
			= hard_regno_nregs[copy_start][GET_MODE (copy_reg)];

		    /* If the return register is not likely spilled - as is
		       the case for floating point on SH4 - then it might
		       be set by an arithmetic operation that needs a
		       different mode than the exit block.  */
		    for (j = n_entities - 1; j >= 0; j--)
		      {
			int e = entity_map[j];
			int mode =
			  targetm.mode_switching.needed (e, return_copy);

			if (mode != num_modes[e]
			    && mode != targetm.mode_switching.exit (e))
			  break;
		      }
		    if (j >= 0)
		      {
			/* __builtin_return emits a sequence of loads to all
			   return registers.  One of them might require
			   another mode than MODE_EXIT, even if it is
			   unrelated to the return value, so we want to put
			   the final mode switch after it.  */
			if (multi_reg_return
			    && targetm.calls.function_value_regno_p
			       (copy_start))
			  forced_late_switch = true;

			/* For the SH4, floating point loads depend on fpscr,
			   thus we might need to put the final mode switch
			   after the return value copy.  That is still OK,
			   because a floating point return value does not
			   conflict with address reloads.  */
			if (copy_start >= ret_start
			    && copy_start + copy_num <= ret_end
			    && OBJECT_P (SET_SRC (return_copy_pat)))
			  forced_late_switch = true;
			break;
		      }
		    if (copy_num == 0)
		      {
			last_insn = return_copy;
			continue;
		      }

		    if (copy_start >= ret_start
			&& copy_start + copy_num <= ret_end)
		      nregs -= copy_num;
		    else if (!multi_reg_return
			     || !targetm.calls.function_value_regno_p
				(copy_start))
		      break;
		    last_insn = return_copy;
		  }
		/* ??? Exception handling can lead to the return value
		   copy being already separated from the return value use,
		   as in unwind-dw2.c.
		   Similarly, conditionally returning without a value,
		   and conditionally using builtin_return can lead to an
		   isolated use.  */
		if (return_copy == BB_HEAD (src_bb))
		  {
		    short_block = true;
		    break;
		  }
		last_insn = return_copy;
	      }
	    while (nregs);

	    /* If we didn't see a full return value copy, verify that there
	       is a plausible reason for this.  If some, but not all of the
	       return register is likely spilled, we can expect that there
	       is a copy for the likely spilled part.  */
	    gcc_assert (!nregs
			|| forced_late_switch
			|| short_block
			|| !(targetm.class_likely_spilled_p
			     (REGNO_REG_CLASS (ret_start)))
			|| (nregs
			    != hard_regno_nregs[ret_start][GET_MODE (ret_reg)])
			/* For multi-hard-register floating point
			   values, sometimes the likely-spilled part
			   is ordinarily copied first, then the other
			   part is set with an arithmetic operation.
			   This doesn't actually cause reload
			   failures, so let it pass.  */
			|| (GET_MODE_CLASS (GET_MODE (ret_reg)) != MODE_INT
			    && nregs != 1));

	    if (!NOTE_INSN_BASIC_BLOCK_P (last_insn))
	      {
		before_return_copy
		  = emit_note_before (NOTE_INSN_DELETED, last_insn);
		/* Instructions preceding LAST_INSN in the same block might
		   require a different mode than MODE_EXIT, so if we might
		   have such instructions, keep them in a separate block
		   from pre_exit.  */
		src_bb = split_block (src_bb,
				      PREV_INSN (before_return_copy))->dest;
	      }
	    else
	      before_return_copy = last_insn;
	    pre_exit = split_block (src_bb, before_return_copy)->src;
	  }
	else
	  {
	    pre_exit = split_edge (eg);
	  }
      }

  return pre_exit;
}

/* Find all insns that need a particular mode setting, and insert the
   necessary mode switches.  Return true if we did work.  */

static int
optimize_mode_switching (void)
{
  rtx insn;
  int e;
  basic_block bb;
  int need_commit = 0;
  sbitmap *kill;
  struct edge_list *edge_list;
  static const int num_modes[] = NUM_MODES_FOR_MODE_SWITCHING;
#define N_ENTITIES ARRAY_SIZE (num_modes)
  int entity_map[N_ENTITIES];
  struct bb_info *bb_info[N_ENTITIES];
  int i, j;
  int n_entities;
  int max_num_modes = 0;
  bool emitted ATTRIBUTE_UNUSED = false;
  basic_block post_entry = 0;
  basic_block pre_exit = 0;

  for (e = N_ENTITIES - 1, n_entities = 0; e >= 0; e--)
    if (OPTIMIZE_MODE_SWITCHING (e))
      {
	int entry_exit_extra = 0;

	/* Create the list of segments within each basic block.
	   If NORMAL_MODE is defined, allow for two extra
	   blocks split from the entry and exit block.  */
	if (targetm.mode_switching.entry && targetm.mode_switching.exit)
	  entry_exit_extra = 3;

	bb_info[n_entities]
	  = XCNEWVEC (struct bb_info,
		      last_basic_block_for_fn (cfun) + entry_exit_extra);
	entity_map[n_entities++] = e;
	if (num_modes[e] > max_num_modes)
	  max_num_modes = num_modes[e];
      }

  if (! n_entities)
    return 0;

  /* Make sure that if MODE_ENTRY is defined, then MODE_EXIT is defined
     too, and vice versa.  */
  gcc_assert ((targetm.mode_switching.entry && targetm.mode_switching.exit)
	      || (!targetm.mode_switching.entry && !targetm.mode_switching.exit));

  if (targetm.mode_switching.entry && targetm.mode_switching.exit)
    {
      /* Split the edge from the entry block, so that we can note that
	 NORMAL_MODE is supplied there.  */
      post_entry = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
      pre_exit = create_pre_exit (n_entities, entity_map, num_modes);
    }

  df_analyze ();

  /* Create the bitmap vectors.  */

  antic = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_entities);
  transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_entities);
  comp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_entities);

  bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));

  for (j = n_entities - 1; j >= 0; j--)
    {
      int e = entity_map[j];
      int no_mode = num_modes[e];
      struct bb_info *info = bb_info[j];

      /* Determine the mode (if any) that the first use of entity E needs.
	 This will be the mode that is anticipatable for this block.
	 Also compute the initial transparency settings.  */
      FOR_EACH_BB_FN (bb, cfun)
	{
	  struct seginfo *ptr;
	  int last_mode = no_mode;
	  bool any_set_required = false;
	  HARD_REG_SET live_now;

	  REG_SET_TO_HARD_REG_SET (live_now, df_get_live_in (bb));

	  /* Pretend the mode is clobbered across abnormal edges.  */
	  {
	    edge_iterator ei;
	    edge e;
	    FOR_EACH_EDGE (e, ei, bb->preds)
	      if (e->flags & EDGE_COMPLEX)
		break;
	    if (e)
	      {
		rtx ins_pos = BB_HEAD (bb);
		if (LABEL_P (ins_pos))
		  ins_pos = NEXT_INSN (ins_pos);
		gcc_assert (NOTE_INSN_BASIC_BLOCK_P (ins_pos));
		if (ins_pos != BB_END (bb))
		  ins_pos = NEXT_INSN (ins_pos);
		ptr = new_seginfo (no_mode, ins_pos, bb->index, live_now);
		add_seginfo (info + bb->index, ptr);
		bitmap_clear_bit (transp[bb->index], j);
	      }
	  }

	  FOR_BB_INSNS (bb, insn)
	    {
	      if (INSN_P (insn))
		{
		  int mode = targetm.mode_switching.needed (e, insn);
		  rtx link;

		  if (mode != no_mode && mode != last_mode)
		    {
		      any_set_required = true;
		      last_mode = mode;
		      ptr = new_seginfo (mode, insn, bb->index, live_now);
		      add_seginfo (info + bb->index, ptr);
		      bitmap_clear_bit (transp[bb->index], j);
		    }

		  if (targetm.mode_switching.after)
		    last_mode = targetm.mode_switching.after (e, last_mode, insn);

		  /* Update LIVE_NOW.  */
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_DEAD)
		      reg_dies (XEXP (link, 0), &live_now);

		  note_stores (PATTERN (insn), reg_becomes_live, &live_now);
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_UNUSED)
		      reg_dies (XEXP (link, 0), &live_now);
		}
	    }

	  info[bb->index].computing = last_mode;
	  /* Check for blocks without ANY mode requirements.
	     N.B. because of MODE_AFTER, last_mode might still
	     be different from no_mode, in which case we need to
	     mark the block as nontransparent.  */
	  if (!any_set_required)
	    {
	      ptr = new_seginfo (no_mode, BB_END (bb), bb->index, live_now);
	      add_seginfo (info + bb->index, ptr);
	      if (last_mode != no_mode)
		bitmap_clear_bit (transp[bb->index], j);
	    }
	}
      if (targetm.mode_switching.entry && targetm.mode_switching.exit)
	{
	  int mode = targetm.mode_switching.entry (e);

	  if (mode != no_mode)
	    {
	      bb = post_entry;

	      /* By always making this nontransparent, we save
		 an extra check in make_preds_opaque.  We also
		 need this to avoid confusing pre_edge_lcm when
		 antic is cleared but transp and comp are set.  */
	      bitmap_clear_bit (transp[bb->index], j);

	      /* Insert a fake computing definition of MODE into entry
		 blocks which compute no mode.  This represents the mode on
		 entry.  */
	      info[bb->index].computing = mode;

	      if (pre_exit)
		info[pre_exit->index].seginfo->mode =
		  targetm.mode_switching.exit (e);
	    }
	}
    }

  kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_entities);
  for (i = 0; i < max_num_modes; i++)
    {
      int current_mode[N_ENTITIES];
      sbitmap *del;
      sbitmap *insert;

      /* Set the anticipatable and computing arrays.  */
      bitmap_vector_clear (antic, last_basic_block_for_fn (cfun));
      bitmap_vector_clear (comp, last_basic_block_for_fn (cfun));
      for (j = n_entities - 1; j >= 0; j--)
	{
	  int m = current_mode[j] =
	    targetm.mode_switching.priority (entity_map[j], i);
	  struct bb_info *info = bb_info[j];

	  FOR_EACH_BB_FN (bb, cfun)
	    {
	      if (info[bb->index].seginfo->mode == m)
		bitmap_set_bit (antic[bb->index], j);

	      if (info[bb->index].computing == m)
		bitmap_set_bit (comp[bb->index], j);
	    }
	}

      /* Calculate the optimal locations for the placement of mode
	 switches to modes with priority I.  */

      FOR_EACH_BB_FN (bb, cfun)
	bitmap_not (kill[bb->index], transp[bb->index]);
      edge_list = pre_edge_lcm (n_entities, transp, comp, antic,
				kill, &insert, &del);

      for (j = n_entities - 1; j >= 0; j--)
	{
	  /* Insert all mode sets that have been inserted by lcm.  */
	  int no_mode = num_modes[entity_map[j]];

	  /* Wherever we have moved a mode setting upwards in the flow graph,
	     the blocks between the new setting site and the now redundant
	     computation cease to be transparent for any lower-priority
	     mode of the same entity.  First set the aux field of each
	     insertion site edge non-transparent, then propagate the new
	     non-transparency from the redundant computation upwards till
	     we hit an insertion site or an already non-transparent block.  */
	  for (e = NUM_EDGES (edge_list) - 1; e >= 0; e--)
	    {
	      edge eg = INDEX_EDGE (edge_list, e);
	      int mode;
	      basic_block src_bb;
	      HARD_REG_SET live_at_edge;
	      rtx mode_set;

	      eg->aux = 0;

	      if (! bitmap_bit_p (insert[e], j))
		continue;

	      eg->aux = (void *)1;

	      mode = current_mode[j];
	      src_bb = eg->src;

	      REG_SET_TO_HARD_REG_SET (live_at_edge, df_get_live_out (src_bb));

	      rtl_profile_for_edge (eg);
	      start_sequence ();
	      targetm.mode_switching.emit (entity_map[j], mode, live_at_edge);
	      mode_set = get_insns ();
	      end_sequence ();
	      default_rtl_profile ();

	      /* Do not bother to insert an empty sequence.  */
	      if (mode_set == NULL_RTX)
		continue;

	      /* We should not get an abnormal edge here.  */
	      gcc_assert (! (eg->flags & EDGE_ABNORMAL));

	      need_commit = 1;
	      insert_insn_on_edge (mode_set, eg);
	    }

	  FOR_EACH_BB_REVERSE_FN (bb, cfun)
	    if (bitmap_bit_p (del[bb->index], j))
	      {
		make_preds_opaque (bb, j);
		/* Cancel the 'deleted' mode set.  */
		bb_info[j][bb->index].seginfo->mode = no_mode;
	      }
	}

      sbitmap_vector_free (del);
      sbitmap_vector_free (insert);
      clear_aux_for_edges ();
      free_edge_list (edge_list);
    }

  /* Now output the remaining mode sets in all the segments.  */
  for (j = n_entities - 1; j >= 0; j--)
    {
      int no_mode = num_modes[entity_map[j]];

      FOR_EACH_BB_REVERSE_FN (bb, cfun)
	{
	  struct seginfo *ptr, *next;
	  for (ptr = bb_info[j][bb->index].seginfo; ptr; ptr = next)
	    {
	      next = ptr->next;
	      if (ptr->mode != no_mode)
		{
		  rtx mode_set;

		  rtl_profile_for_bb (bb);
		  start_sequence ();
		  targetm.mode_switching.emit (entity_map[j],
					       ptr->mode,
					       ptr->regs_live);
		  mode_set = get_insns ();
		  end_sequence ();

		  /* Insert MODE_SET only if it is nonempty.  */
		  if (mode_set != NULL_RTX)
		    {
		      emitted = true;
		      if (NOTE_INSN_BASIC_BLOCK_P (ptr->insn_ptr))
			/* We need to emit the insns in a FIFO-like manner,
			   i.e. the first to be emitted at our insertion
			   point ends up first in the instruction stream.
			   Because we made sure that NOTE_INSN_BASIC_BLOCK is
			   only used for initially empty basic blocks, we
			   can achieve this by appending at the end of
			   the block.  */
			emit_insn_after
			  (mode_set, BB_END (NOTE_BASIC_BLOCK (ptr->insn_ptr)));
		      else
			emit_insn_before (mode_set, ptr->insn_ptr);
		    }

		  default_rtl_profile ();
		}

	      free (ptr);
	    }
	}

      free (bb_info[j]);
    }

  /* Finished.  Free up all the things we've allocated.  */
  sbitmap_vector_free (kill);
  sbitmap_vector_free (antic);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  if (need_commit)
    commit_edge_insertions ();

  if (targetm.mode_switching.entry && targetm.mode_switching.exit)
    cleanup_cfg (CLEANUP_NO_INSN_DEL);
  else if (!need_commit && !emitted)
    return 0;

  return 1;
}

#endif /* OPTIMIZE_MODE_SWITCHING */
\f
namespace {

const pass_data pass_data_mode_switching =
{
  RTL_PASS, /* type */
  "mode_sw", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_MODE_SWITCH, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_mode_switching : public rtl_opt_pass
{
public:
  pass_mode_switching (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_mode_switching, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so we need
     a clone method.  */
  opt_pass * clone () { return new pass_mode_switching (m_ctxt); }
  virtual bool gate (function *)
    {
#ifdef OPTIMIZE_MODE_SWITCHING
      return true;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
#ifdef OPTIMIZE_MODE_SWITCHING
      optimize_mode_switching ();
#endif /* OPTIMIZE_MODE_SWITCHING */
      return 0;
    }

}; // class pass_mode_switching

} // anon namespace

rtl_opt_pass *
make_pass_mode_switching (gcc::context *ctxt)
{
  return new pass_mode_switching (ctxt);
}
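
/* Illustrative sketch only, not taken from GCC sources: a backend that
   wants an additional instance of this pass (as the comment above notes
   the epiphany port does) could register a clone through the usual pass
   registration machinery, for example:

       extern gcc::context *g;

       static struct register_pass_info extra_mode_sw_info =
	 {
	   make_pass_mode_switching (g),  // pass to insert
	   "split4",                      // reference pass name (hypothetical)
	   1,                             // instance number of reference pass
	   PASS_POS_INSERT_AFTER          // where to insert
	 };
       register_pass (&extra_mode_sw_info);

   The reference pass name and insertion position are assumptions made for
   the example; the epiphany port chooses its own insertion points.  */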