/* CPU mode switching
   Copyright (C) 1998-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "cfghooks.h"
#include "df.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "flags.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "lcm.h"
#include "cfgcleanup.h"
#include "tree-pass.h"

/* We want target macros for the mode switching code to be able to refer
   to instruction attribute values.  */
#include "insn-attr.h"

#ifdef OPTIMIZE_MODE_SWITCHING

/* The algorithm for setting the modes consists of scanning the insn list
   and finding all the insns which require a specific mode.  Each insn gets
   a unique struct seginfo element.  These structures are inserted into a list
   for each basic block.  For each entity, there is an array of bb_info over
   the flow graph basic blocks (local var 'bb_info'), which contains a list
   of all insns within that basic block, in the order they are encountered.

   For each entity, any basic block WITHOUT any insns requiring a specific
   mode is given a single entry without a mode (each basic block in the
   flow graph must have at least one entry in the segment table).

   The LCM algorithm is then run over the flow graph to determine where to
   place the sets to the highest-priority mode with respect to the first
   insn in any one block.  Any adjustments required to the transparency
   vectors are made, then the next iteration starts for the next-lower
   priority mode, till for each entity all modes are exhausted.

   More details can be found in the code of optimize_mode_switching.  */
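
/* A minimal sketch of what a target supplies to enable this pass (the
   macro values below are illustrative, not from any particular target):
   the backend headers define, e.g.,

       #define OPTIMIZE_MODE_SWITCHING(ENTITY) 1
       #define NUM_MODES_FOR_MODE_SWITCHING { 2 }

   and the backend implements the targetm.mode_switching hooks used in
   this file: needed, priority and emit, plus optionally entry, exit and
   after.  The SH4 fpscr-based floating point mode switching mentioned
   further down is the classic user.  */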
\f
/* This structure contains the information for each insn which requires
   either single or double mode to be set.
   MODE is the mode this insn must be executed in.
   INSN_PTR is the insn to be executed (may be the note that marks the
   beginning of a basic block).
   BBNUM is the flow graph basic block this insn occurs in.
   NEXT is the next insn in the same basic block.
   REGS_LIVE is the set of hard registers live before the insn.  */
struct seginfo
{
  int mode;
  rtx_insn *insn_ptr;
  int bbnum;
  struct seginfo *next;
  HARD_REG_SET regs_live;
};

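/* Per-basic-block, per-entity bookkeeping (field roles inferred from the
   uses below): SEGINFO is the list of segments in the block; COMPUTING
   is the mode left in effect at the end of the block; MODE_IN and
   MODE_OUT record the single mode available at block entry/exit once
   the LCM availability results are known, or the entity's "no mode"
   value when no single mode is available.  */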
struct bb_info
{
  struct seginfo *seginfo;
  int computing;
  int mode_out;
  int mode_in;
};

static struct seginfo * new_seginfo (int, rtx_insn *, int, HARD_REG_SET);
static void add_seginfo (struct bb_info *, struct seginfo *);
static void reg_dies (rtx, HARD_REG_SET *);
static void reg_becomes_live (rtx, const_rtx, void *);

/* Clear mode I from entity J in bitmap B.  */
#define clear_mode_bit(b, j, i) \
	bitmap_clear_bit (b, (j * max_num_modes) + i)

/* Test mode I from entity J in bitmap B.  */
#define mode_bit_p(b, j, i) \
	bitmap_bit_p (b, (j * max_num_modes) + i)

/* Set mode I from entity J in bitmap B.  */
#define set_mode_bit(b, j, i) \
	bitmap_set_bit (b, (j * max_num_modes) + i)
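
/* For example (numbers illustrative): with max_num_modes == 4, the bit
   for entity J == 1, mode I == 2 is 1 * 4 + 2 == 6 in each per-block
   row; max_num_modes is a local of optimize_mode_switching, where these
   macros are expanded.  */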

/* Emit mode sets on the edges in EDGE_LIST that are associated with
   entity E.  INFO gives the mode availability for each basic block.  */

static bool
commit_mode_sets (struct edge_list *edge_list, int e, struct bb_info *info)
{
  bool need_commit = false;

  for (int ed = NUM_EDGES (edge_list) - 1; ed >= 0; ed--)
    {
      edge eg = INDEX_EDGE (edge_list, ed);
      int mode;

      if ((mode = (int)(intptr_t)(eg->aux)) != -1)
	{
	  HARD_REG_SET live_at_edge;
	  basic_block src_bb = eg->src;
	  int cur_mode = info[src_bb->index].mode_out;
	  rtx_insn *mode_set;

	  REG_SET_TO_HARD_REG_SET (live_at_edge, df_get_live_out (src_bb));

	  rtl_profile_for_edge (eg);
	  start_sequence ();

	  targetm.mode_switching.emit (e, mode, cur_mode, live_at_edge);

	  mode_set = get_insns ();
	  end_sequence ();
	  default_rtl_profile ();

	  /* Do not bother to insert an empty sequence.  */
	  if (mode_set == NULL)
	    continue;

	  /* We should not get an abnormal edge here.  */
	  gcc_assert (! (eg->flags & EDGE_ABNORMAL));

	  need_commit = true;
	  insert_insn_on_edge (mode_set, eg);
	}
    }

  return need_commit;
}
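
/* Note (inferred from the uses here and below): the mode to emit on
   each edge arrives in EG->aux, encoded as an intptr_t, with -1 meaning
   "no switch on this edge"; optimize_mode_switching fills it in from
   the LCM INSERT result before calling commit_mode_sets.  */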

/* Allocate a new SEGINFO structure, initialized with the MODE, INSN,
   and basic block BB parameters.
   INSN may not be a NOTE_INSN_BASIC_BLOCK, unless it is an empty
   basic block; that allows us later to insert instructions in a FIFO-like
   manner.  */

static struct seginfo *
new_seginfo (int mode, rtx_insn *insn, int bb, HARD_REG_SET regs_live)
{
  struct seginfo *ptr;

  gcc_assert (!NOTE_INSN_BASIC_BLOCK_P (insn)
	      || insn == BB_END (NOTE_BASIC_BLOCK (insn)));
  ptr = XNEW (struct seginfo);
  ptr->mode = mode;
  ptr->insn_ptr = insn;
  ptr->bbnum = bb;
  ptr->next = NULL;
  COPY_HARD_REG_SET (ptr->regs_live, regs_live);
  return ptr;
}

/* Add a seginfo element to the end of a list.
   HEAD is a pointer to the list beginning.
   INFO is the structure to be linked in.  */

static void
add_seginfo (struct bb_info *head, struct seginfo *info)
{
  struct seginfo *ptr;

  if (head->seginfo == NULL)
    head->seginfo = info;
  else
    {
      ptr = head->seginfo;
      while (ptr->next != NULL)
	ptr = ptr->next;
      ptr->next = info;
    }
}

/* Record in LIVE that register REG died.  */

static void
reg_dies (rtx reg, HARD_REG_SET *live)
{
  int regno;

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    remove_from_hard_reg_set (live, GET_MODE (reg), regno);
}

/* Record in LIVE that register REG became live.
   This is called via note_stores.  */

static void
reg_becomes_live (rtx reg, const_rtx setter ATTRIBUTE_UNUSED, void *live)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set ((HARD_REG_SET *) live, GET_MODE (reg), regno);
}
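
/* Taken together (see the scan loop in optimize_mode_switching below),
   these two helpers keep a live set current while walking a block:
   registers in REG_DEAD notes die via reg_dies, note_stores
   (PATTERN (insn), reg_becomes_live, &live_now) makes every stored
   register live, and REG_UNUSED notes then kill the unused results.  */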

/* Split the fallthrough edge to the exit block, so that we can note
   that NORMAL_MODE is required there.  Return the new block if it's
   inserted before the exit block.  Otherwise return null.  */

static basic_block
create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
{
  edge eg;
  edge_iterator ei;
  basic_block pre_exit;

  /* The only non-call predecessor at this stage is a block with a
     fallthrough edge; there can be at most one, but there could be
     none at all, e.g. when exit is called.  */
  pre_exit = 0;
  FOR_EACH_EDGE (eg, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (eg->flags & EDGE_FALLTHRU)
      {
	basic_block src_bb = eg->src;
	rtx_insn *last_insn;
	rtx ret_reg;

	gcc_assert (!pre_exit);
	/* If this function returns a value at the end, we have to
	   insert the final mode switch before the return value copy
	   to its hard register.  */
	if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) == 1
	    && NONJUMP_INSN_P ((last_insn = BB_END (src_bb)))
	    && GET_CODE (PATTERN (last_insn)) == USE
	    && GET_CODE ((ret_reg = XEXP (PATTERN (last_insn), 0))) == REG)
	  {
	    int ret_start = REGNO (ret_reg);
	    int nregs = REG_NREGS (ret_reg);
	    int ret_end = ret_start + nregs;
	    bool short_block = false;
	    bool multi_reg_return = false;
	    bool forced_late_switch = false;
	    rtx_insn *before_return_copy;

	    do
	      {
		rtx_insn *return_copy = PREV_INSN (last_insn);
		rtx return_copy_pat, copy_reg;
		int copy_start, copy_num;
		int j;

		if (NONDEBUG_INSN_P (return_copy))
		  {
		    /* When using SJLJ exceptions, the call to the
		       unregister function is inserted between the
		       clobber of the return value and the copy.
		       We do not want to split the block before this
		       or any other call; if we have not found the
		       copy yet, the copy must have been deleted.  */
		    if (CALL_P (return_copy))
		      {
			short_block = true;
			break;
		      }
		    return_copy_pat = PATTERN (return_copy);
		    switch (GET_CODE (return_copy_pat))
		      {
		      case USE:
			/* Skip USEs of multiple return registers.
			   __builtin_apply pattern is also handled here.  */
			if (GET_CODE (XEXP (return_copy_pat, 0)) == REG
			    && (targetm.calls.function_value_regno_p
				(REGNO (XEXP (return_copy_pat, 0)))))
			  {
			    multi_reg_return = true;
			    last_insn = return_copy;
			    continue;
			  }
			break;

		      case ASM_OPERANDS:
			/* Skip barrier insns.  */
			if (!MEM_VOLATILE_P (return_copy_pat))
			  break;

			/* Fall through.  */

		      case ASM_INPUT:
		      case UNSPEC_VOLATILE:
			last_insn = return_copy;
			continue;

		      default:
			break;
		      }

		    /* If the return register is not (in its entirety)
		       likely spilled, the return copy might be
		       partially or completely optimized away.  */
		    return_copy_pat = single_set (return_copy);
		    if (!return_copy_pat)
		      {
			return_copy_pat = PATTERN (return_copy);
			if (GET_CODE (return_copy_pat) != CLOBBER)
			  break;
			else if (!optimize)
			  {
			    /* This might be (clobber (reg [<result>]))
			       when not optimizing.  Then check if
			       the previous insn is the clobber for
			       the return register.  */
			    copy_reg = SET_DEST (return_copy_pat);
			    if (GET_CODE (copy_reg) == REG
				&& !HARD_REGISTER_NUM_P (REGNO (copy_reg)))
			      {
				if (INSN_P (PREV_INSN (return_copy)))
				  {
				    return_copy = PREV_INSN (return_copy);
				    return_copy_pat = PATTERN (return_copy);
				    if (GET_CODE (return_copy_pat) != CLOBBER)
				      break;
				  }
			      }
			  }
		      }
		    copy_reg = SET_DEST (return_copy_pat);
		    if (GET_CODE (copy_reg) == REG)
		      copy_start = REGNO (copy_reg);
		    else if (GET_CODE (copy_reg) == SUBREG
			     && GET_CODE (SUBREG_REG (copy_reg)) == REG)
		      copy_start = REGNO (SUBREG_REG (copy_reg));
		    else
		      {
			/* When control reaches the end of a non-void
			   function, there are no return copy insns at
			   all.  This avoids an ICE on that invalid
			   function.  */
			if (ret_start + nregs == ret_end)
			  short_block = true;
			break;
		      }
		    if (!targetm.calls.function_value_regno_p (copy_start))
		      copy_num = 0;
		    else
		      copy_num
			= hard_regno_nregs[copy_start][GET_MODE (copy_reg)];

		    /* If the return register is not likely spilled, - as is
		       the case for floating point on SH4 - then it might
		       be set by an arithmetic operation that needs a
		       different mode than the exit block.  */
		    for (j = n_entities - 1; j >= 0; j--)
		      {
			int e = entity_map[j];
			int mode =
			  targetm.mode_switching.needed (e, return_copy);

			if (mode != num_modes[e]
			    && mode != targetm.mode_switching.exit (e))
			  break;
		      }
		    if (j >= 0)
		      {
			/* __builtin_return emits a sequence of loads to all
			   return registers.  One of them might require
			   another mode than MODE_EXIT, even if it is
			   unrelated to the return value, so we want to put
			   the final mode switch after it.  */
			if (multi_reg_return
			    && targetm.calls.function_value_regno_p
			       (copy_start))
			  forced_late_switch = true;

			/* For the SH4, floating point loads depend on fpscr,
			   thus we might need to put the final mode switch
			   after the return value copy.  That is still OK,
			   because a floating point return value does not
			   conflict with address reloads.  */
			if (copy_start >= ret_start
			    && copy_start + copy_num <= ret_end
			    && OBJECT_P (SET_SRC (return_copy_pat)))
			  forced_late_switch = true;
			break;
		      }
		    if (copy_num == 0)
		      {
			last_insn = return_copy;
			continue;
		      }

		    if (copy_start >= ret_start
			&& copy_start + copy_num <= ret_end)
		      nregs -= copy_num;
		    else if (!multi_reg_return
			     || !targetm.calls.function_value_regno_p
				(copy_start))
		      break;
		    last_insn = return_copy;
		  }
		/* ??? Exception handling can lead to the return value
		   copy being already separated from the return value use,
		   as in unwind-dw2.c.
		   Similarly, conditionally returning without a value,
		   and conditionally using builtin_return can lead to an
		   isolated use.  */
		if (return_copy == BB_HEAD (src_bb))
		  {
		    short_block = true;
		    break;
		  }
		last_insn = return_copy;
	      }
	    while (nregs);

	    /* If we didn't see a full return value copy, verify that there
	       is a plausible reason for this.  If some, but not all of the
	       return register is likely spilled, we can expect that there
	       is a copy for the likely spilled part.  */
	    gcc_assert (!nregs
			|| forced_late_switch
			|| short_block
			|| !(targetm.class_likely_spilled_p
			     (REGNO_REG_CLASS (ret_start)))
			|| (nregs
			    != hard_regno_nregs[ret_start][GET_MODE (ret_reg)])
			/* For multi-hard-register floating point
			   values, sometimes the likely-spilled part
			   is ordinarily copied first, then the other
			   part is set with an arithmetic operation.
			   This doesn't actually cause reload
			   failures, so let it pass.  */
			|| (GET_MODE_CLASS (GET_MODE (ret_reg)) != MODE_INT
			    && nregs != 1));

	    if (!NOTE_INSN_BASIC_BLOCK_P (last_insn))
	      {
		before_return_copy
		  = emit_note_before (NOTE_INSN_DELETED, last_insn);
		/* Instructions preceding LAST_INSN in the same block might
		   require a different mode than MODE_EXIT, so if we might
		   have such instructions, keep them in a separate block
		   from pre_exit.  */
		src_bb = split_block (src_bb,
				      PREV_INSN (before_return_copy))->dest;
	      }
	    else
	      before_return_copy = last_insn;
	    pre_exit = split_block (src_bb, before_return_copy)->src;
	  }
	else
	  {
	    pre_exit = split_edge (eg);
	  }
      }

  return pre_exit;
}
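
/* Illustrative sketch (hypothetical insns, not from any target): for a
   function returning a float on a target like SH4, the block feeding
   the exit block may end in

       (insn (set (reg:SF fr0) (reg:SF <pseudo>)))   ;; return value copy
       (insn (use (reg:SF fr0)))

   and create_pre_exit splits off a pre-exit block so that the final
   switch back to the exit mode can be emitted before that copy.  */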

/* Find all insns that need a particular mode setting, and insert the
   necessary mode switches.  Return true if we did work.  */

static int
optimize_mode_switching (void)
{
  int e;
  basic_block bb;
  bool need_commit = false;
  static const int num_modes[] = NUM_MODES_FOR_MODE_SWITCHING;
#define N_ENTITIES ARRAY_SIZE (num_modes)
  int entity_map[N_ENTITIES];
  struct bb_info *bb_info[N_ENTITIES];
  int i, j;
  int n_entities = 0;
  int max_num_modes = 0;
  bool emitted ATTRIBUTE_UNUSED = false;
  basic_block post_entry = 0;
  basic_block pre_exit = 0;
  struct edge_list *edge_list = 0;

  /* These bitmaps are used for the LCM algorithm.  */
  sbitmap *kill, *del, *insert, *antic, *transp, *comp;
  sbitmap *avin, *avout;

  for (e = N_ENTITIES - 1; e >= 0; e--)
    if (OPTIMIZE_MODE_SWITCHING (e))
      {
	int entry_exit_extra = 0;

	/* Create the list of segments within each basic block.
	   If NORMAL_MODE is defined, allow for two extra
	   blocks split from the entry and exit block.  */
	if (targetm.mode_switching.entry && targetm.mode_switching.exit)
	  entry_exit_extra = 3;

	bb_info[n_entities]
	  = XCNEWVEC (struct bb_info,
		      last_basic_block_for_fn (cfun) + entry_exit_extra);
	entity_map[n_entities++] = e;
	if (num_modes[e] > max_num_modes)
	  max_num_modes = num_modes[e];
      }

  if (! n_entities)
    return 0;

  /* Make sure that if MODE_ENTRY is defined, MODE_EXIT is defined too.  */
  gcc_assert ((targetm.mode_switching.entry && targetm.mode_switching.exit)
	      || (!targetm.mode_switching.entry
		  && !targetm.mode_switching.exit));

  if (targetm.mode_switching.entry && targetm.mode_switching.exit)
    {
      /* Split the edge from the entry block, so that we can note that
	 NORMAL_MODE is supplied there.  */
      post_entry = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
      pre_exit = create_pre_exit (n_entities, entity_map, num_modes);
    }

  df_analyze ();

  /* Create the bitmap vectors.  */
  antic = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				n_entities * max_num_modes);
  transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				 n_entities * max_num_modes);
  comp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
			       n_entities * max_num_modes);
  avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
			       n_entities * max_num_modes);
  avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				n_entities * max_num_modes);
  kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
			       n_entities * max_num_modes);

  bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));
  bitmap_vector_clear (antic, last_basic_block_for_fn (cfun));
  bitmap_vector_clear (comp, last_basic_block_for_fn (cfun));

  for (j = n_entities - 1; j >= 0; j--)
    {
      int e = entity_map[j];
      int no_mode = num_modes[e];
      struct bb_info *info = bb_info[j];
      rtx_insn *insn;

      /* Determine what mode the first use (if any) of entity E needs.
	 This will be the mode that is anticipatable for this block.
	 Also compute the initial transparency settings.  */
      FOR_EACH_BB_FN (bb, cfun)
	{
	  struct seginfo *ptr;
	  int last_mode = no_mode;
	  bool any_set_required = false;
	  HARD_REG_SET live_now;

	  info[bb->index].mode_out = info[bb->index].mode_in = no_mode;

	  REG_SET_TO_HARD_REG_SET (live_now, df_get_live_in (bb));

	  /* Pretend the mode is clobbered across abnormal edges.  */
	  {
	    edge_iterator ei;
	    edge eg;
	    FOR_EACH_EDGE (eg, ei, bb->preds)
	      if (eg->flags & EDGE_COMPLEX)
		break;
	    if (eg)
	      {
		rtx_insn *ins_pos = BB_HEAD (bb);
		if (LABEL_P (ins_pos))
		  ins_pos = NEXT_INSN (ins_pos);
		gcc_assert (NOTE_INSN_BASIC_BLOCK_P (ins_pos));
		if (ins_pos != BB_END (bb))
		  ins_pos = NEXT_INSN (ins_pos);
		ptr = new_seginfo (no_mode, ins_pos, bb->index, live_now);
		add_seginfo (info + bb->index, ptr);
		for (i = 0; i < no_mode; i++)
		  clear_mode_bit (transp[bb->index], j, i);
	      }
	  }

	  FOR_BB_INSNS (bb, insn)
	    {
	      if (INSN_P (insn))
		{
		  int mode = targetm.mode_switching.needed (e, insn);
		  rtx link;

		  if (mode != no_mode && mode != last_mode)
		    {
		      any_set_required = true;
		      last_mode = mode;
		      ptr = new_seginfo (mode, insn, bb->index, live_now);
		      add_seginfo (info + bb->index, ptr);
		      for (i = 0; i < no_mode; i++)
			clear_mode_bit (transp[bb->index], j, i);
		    }

		  if (targetm.mode_switching.after)
		    last_mode = targetm.mode_switching.after (e, last_mode,
							      insn);

		  /* Update LIVE_NOW.  */
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_DEAD)
		      reg_dies (XEXP (link, 0), &live_now);

		  note_stores (PATTERN (insn), reg_becomes_live, &live_now);
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_UNUSED)
		      reg_dies (XEXP (link, 0), &live_now);
		}
	    }

	  info[bb->index].computing = last_mode;
	  /* Check for blocks without ANY mode requirements.
	     N.B. because of MODE_AFTER, last_mode might still
	     be different from no_mode, in which case we need to
	     mark the block as nontransparent.  */
	  if (!any_set_required)
	    {
	      ptr = new_seginfo (no_mode, BB_END (bb), bb->index, live_now);
	      add_seginfo (info + bb->index, ptr);
	      if (last_mode != no_mode)
		for (i = 0; i < no_mode; i++)
		  clear_mode_bit (transp[bb->index], j, i);
	    }
	}
      if (targetm.mode_switching.entry && targetm.mode_switching.exit)
	{
	  int mode = targetm.mode_switching.entry (e);

	  info[post_entry->index].mode_out =
	    info[post_entry->index].mode_in = no_mode;
	  if (pre_exit)
	    {
	      info[pre_exit->index].mode_out =
		info[pre_exit->index].mode_in = no_mode;
	    }

	  if (mode != no_mode)
	    {
	      bb = post_entry;

	      /* By always making this nontransparent, we save
		 an extra check in make_preds_opaque.  We also
		 need this to avoid confusing pre_edge_lcm when
		 antic is cleared but transp and comp are set.  */
	      for (i = 0; i < no_mode; i++)
		clear_mode_bit (transp[bb->index], j, i);

	      /* Insert a fake computing definition of MODE into entry
		 blocks which compute no mode.  This represents the mode on
		 entry.  */
	      info[bb->index].computing = mode;

	      if (pre_exit)
		info[pre_exit->index].seginfo->mode =
		  targetm.mode_switching.exit (e);
	    }
	}

      /* Set the anticipatable and computing arrays.  */
      for (i = 0; i < no_mode; i++)
	{
	  int m = targetm.mode_switching.priority (entity_map[j], i);

	  FOR_EACH_BB_FN (bb, cfun)
	    {
	      if (info[bb->index].seginfo->mode == m)
		set_mode_bit (antic[bb->index], j, m);

	      if (info[bb->index].computing == m)
		set_mode_bit (comp[bb->index], j, m);
	    }
	}
    }

  /* Calculate the optimal locations for placing the mode-switch insns.  */

  FOR_EACH_BB_FN (bb, cfun)
    bitmap_not (kill[bb->index], transp[bb->index]);

  edge_list = pre_edge_lcm_avs (n_entities * max_num_modes, transp, comp, antic,
				kill, avin, avout, &insert, &del);
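
  /* Inferred from the LCM interface in lcm.c: INSERT is indexed by edge
     number and marks the (entity, mode) sets to add on each edge; DEL is
     indexed by basic block and marks mode sets that became redundant;
     AVIN/AVOUT carry mode availability into/out of each block and are
     folded into mode_in/mode_out below.  */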

  for (j = n_entities - 1; j >= 0; j--)
    {
      int no_mode = num_modes[entity_map[j]];

      /* Record in each edge's AUX field the mode set (if any) that LCM
	 decided to insert on that edge.  */

      for (int ed = NUM_EDGES (edge_list) - 1; ed >= 0; ed--)
	{
	  edge eg = INDEX_EDGE (edge_list, ed);

	  eg->aux = (void *)(intptr_t)-1;

	  for (i = 0; i < no_mode; i++)
	    {
	      int m = targetm.mode_switching.priority (entity_map[j], i);
	      if (mode_bit_p (insert[ed], j, m))
		{
		  eg->aux = (void *)(intptr_t)m;
		  break;
		}
	    }
	}

      FOR_EACH_BB_FN (bb, cfun)
	{
	  struct bb_info *info = bb_info[j];
	  int last_mode = no_mode;

	  /* Initialize mode-out availability for bb.  */
	  for (i = 0; i < no_mode; i++)
	    if (mode_bit_p (avout[bb->index], j, i))
	      {
		if (last_mode == no_mode)
		  last_mode = i;
		if (last_mode != i)
		  {
		    last_mode = no_mode;
		    break;
		  }
	      }
	  info[bb->index].mode_out = last_mode;

	  /* Initialize mode-in availability for bb.  */
	  last_mode = no_mode;
	  for (i = 0; i < no_mode; i++)
	    if (mode_bit_p (avin[bb->index], j, i))
	      {
		if (last_mode == no_mode)
		  last_mode = i;
		if (last_mode != i)
		  {
		    last_mode = no_mode;
		    break;
		  }
	      }
	  info[bb->index].mode_in = last_mode;

	  for (i = 0; i < no_mode; i++)
	    if (mode_bit_p (del[bb->index], j, i))
	      info[bb->index].seginfo->mode = no_mode;
	}

      /* Now output the remaining mode sets in all the segments.  */

      /* If no mode was inserted, the mode information on the edges might
	 not be complete.  Update mode info on edges and commit pending
	 mode sets.  */
      need_commit |= commit_mode_sets (edge_list, entity_map[j], bb_info[j]);

      /* Reset modes for next entity.  */
      clear_aux_for_edges ();

      FOR_EACH_BB_FN (bb, cfun)
	{
	  struct seginfo *ptr, *next;
	  int cur_mode = bb_info[j][bb->index].mode_in;

	  for (ptr = bb_info[j][bb->index].seginfo; ptr; ptr = next)
	    {
	      next = ptr->next;
	      if (ptr->mode != no_mode)
		{
		  rtx_insn *mode_set;

		  rtl_profile_for_bb (bb);
		  start_sequence ();

		  targetm.mode_switching.emit (entity_map[j], ptr->mode,
					       cur_mode, ptr->regs_live);
		  mode_set = get_insns ();
		  end_sequence ();

		  /* Modes kill each other inside a basic block.  */
		  cur_mode = ptr->mode;

		  /* Insert MODE_SET only if it is nonempty.  */
		  if (mode_set != NULL_RTX)
		    {
		      emitted = true;
		      if (NOTE_INSN_BASIC_BLOCK_P (ptr->insn_ptr))
			/* We need to emit the insns in a FIFO-like manner,
			   i.e. the first to be emitted at our insertion
			   point ends up first in the instruction stream.
			   Because we made sure that NOTE_INSN_BASIC_BLOCK is
			   only used for initially empty basic blocks, we
			   can achieve this by appending at the end of
			   the block.  */
			emit_insn_after
			  (mode_set, BB_END (NOTE_BASIC_BLOCK (ptr->insn_ptr)));
		      else
			emit_insn_before (mode_set, ptr->insn_ptr);
		    }

		  default_rtl_profile ();
		}

	      free (ptr);
	    }
	}

      free (bb_info[j]);
    }

  free_edge_list (edge_list);

  /* Finished.  Free up all the things we've allocated.  */
  sbitmap_vector_free (del);
  sbitmap_vector_free (insert);
  sbitmap_vector_free (kill);
  sbitmap_vector_free (antic);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);
  sbitmap_vector_free (avin);
  sbitmap_vector_free (avout);

  if (need_commit)
    commit_edge_insertions ();

  if (targetm.mode_switching.entry && targetm.mode_switching.exit)
    cleanup_cfg (CLEANUP_NO_INSN_DEL);
  else if (!need_commit && !emitted)
    return 0;

  return 1;
}

#endif /* OPTIMIZE_MODE_SWITCHING */
\f
namespace {

const pass_data pass_data_mode_switching =
{
  RTL_PASS, /* type */
  "mode_sw", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_MODE_SWITCH, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_mode_switching : public rtl_opt_pass
{
public:
  pass_mode_switching (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_mode_switching, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so we need
     a clone method.  */
  opt_pass * clone () { return new pass_mode_switching (m_ctxt); }
  virtual bool gate (function *)
    {
#ifdef OPTIMIZE_MODE_SWITCHING
      return true;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
#ifdef OPTIMIZE_MODE_SWITCHING
      optimize_mode_switching ();
#endif /* OPTIMIZE_MODE_SWITCHING */
      return 0;
    }

}; // class pass_mode_switching

} // anon namespace

rtl_opt_pass *
make_pass_mode_switching (gcc::context *ctxt)
{
  return new pass_mode_switching (ctxt);
}