]>
Commit | Line | Data |
---|---|---|
6a73406e | 1 | /* Definitions for computing resource usage of specific insns. |
a5544970 | 2 | Copyright (C) 1999-2019 Free Software Foundation, Inc. |
6a73406e | 3 | |
1322177d | 4 | This file is part of GCC. |
6a73406e | 5 | |
1322177d LB |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free | |
9dcd6f09 | 8 | Software Foundation; either version 3, or (at your option) any later |
1322177d | 9 | version. |
6a73406e | 10 | |
1322177d LB |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
6a73406e RH |
15 | |
16 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
6a73406e | 19 | |
ca545bb5 | 20 | #include "config.h" |
8ce25479 | 21 | #include "system.h" |
4977bab6 | 22 | #include "coretypes.h" |
c7131fb2 | 23 | #include "backend.h" |
ca545bb5 | 24 | #include "rtl.h" |
c7131fb2 | 25 | #include "df.h" |
4d0cdd0c | 26 | #include "memmodel.h" |
6baf1cc8 | 27 | #include "tm_p.h" |
ca545bb5 | 28 | #include "regs.h" |
957060b5 | 29 | #include "emit-rtl.h" |
ca545bb5 | 30 | #include "resource.h" |
7bdb32b9 | 31 | #include "insn-attr.h" |
d5d063d7 | 32 | #include "params.h" |
ca545bb5 BM |
33 | |
/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

/* Number of buckets in the liveness hash table; prime to spread
   INSN_UID keys evenly.  */
#define TARGET_HASH_PRIME 257

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

/* Forward declarations for the static helpers defined below.  */
static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx_insn *, int);
static rtx_insn *next_insn_no_annul (rtx_insn *);
static rtx_insn *find_dead_or_set_registers (rtx_insn *, struct resources*,
					     rtx *, int, struct resources,
					     struct resources);
6a73406e | 82 | \f |
ca545bb5 BM |
83 | /* Utility function called from mark_target_live_regs via note_stores. |
84 | It deadens any CLOBBERed registers and livens any SET registers. */ | |
85 | ||
86 | static void | |
7bc980e1 | 87 | update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED) |
ca545bb5 BM |
88 | { |
89 | int first_regno, last_regno; | |
90 | int i; | |
91 | ||
f8cfc6aa JQ |
92 | if (!REG_P (dest) |
93 | && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest)))) | |
ca545bb5 BM |
94 | return; |
95 | ||
96 | if (GET_CODE (dest) == SUBREG) | |
f1f4e530 JM |
97 | { |
98 | first_regno = subreg_regno (dest); | |
99 | last_regno = first_regno + subreg_nregs (dest); | |
ca545bb5 | 100 | |
f1f4e530 JM |
101 | } |
102 | else | |
103 | { | |
104 | first_regno = REGNO (dest); | |
72d19505 | 105 | last_regno = END_REGNO (dest); |
f1f4e530 | 106 | } |
ca545bb5 BM |
107 | |
108 | if (GET_CODE (x) == CLOBBER) | |
109 | for (i = first_regno; i < last_regno; i++) | |
110 | CLEAR_HARD_REG_BIT (current_live_regs, i); | |
8df47bdf AH |
111 | else if (GET_CODE (x) == CLOBBER_HIGH) |
112 | /* No current target supports both branch delay slots and CLOBBER_HIGH. | |
113 | We'd need more elaborate liveness tracking to handle that | |
114 | combination. */ | |
115 | gcc_unreachable (); | |
ca545bb5 BM |
116 | else |
117 | for (i = first_regno; i < last_regno; i++) | |
118 | { | |
119 | SET_HARD_REG_BIT (current_live_regs, i); | |
120 | CLEAR_HARD_REG_BIT (pending_dead_regs, i); | |
121 | } | |
122 | } | |
d5d063d7 JO |
123 | |
124 | /* Find the number of the basic block with correct live register | |
125 | information that starts closest to INSN. Return -1 if we couldn't | |
126 | find such a basic block or the beginning is more than | |
127 | SEARCH_LIMIT instructions before INSN. Use SEARCH_LIMIT = -1 for | |
128 | an unlimited search. | |
129 | ||
130 | The delay slot filling code destroys the control-flow graph so, | |
131 | instead of finding the basic block containing INSN, we search | |
132 | backwards toward a BARRIER where the live register information is | |
133 | correct. */ | |
925fa227 RH |
134 | |
135 | static int | |
e4685bc8 | 136 | find_basic_block (rtx_insn *insn, int search_limit) |
925fa227 | 137 | { |
925fa227 RH |
138 | /* Scan backwards to the previous BARRIER. Then see if we can find a |
139 | label that starts a basic block. Return the basic block number. */ | |
925fa227 | 140 | for (insn = prev_nonnote_insn (insn); |
4b4bf941 | 141 | insn && !BARRIER_P (insn) && search_limit != 0; |
d5d063d7 | 142 | insn = prev_nonnote_insn (insn), --search_limit) |
925fa227 RH |
143 | ; |
144 | ||
d5d063d7 JO |
145 | /* The closest BARRIER is too far away. */ |
146 | if (search_limit == 0) | |
147 | return -1; | |
148 | ||
f6366fc7 | 149 | /* The start of the function. */ |
d5d063d7 | 150 | else if (insn == 0) |
fefa31b5 | 151 | return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index; |
925fa227 RH |
152 | |
153 | /* See if any of the upcoming CODE_LABELs start a basic block. If we reach | |
154 | anything other than a CODE_LABEL or note, we can't find this code. */ | |
155 | for (insn = next_nonnote_insn (insn); | |
4b4bf941 | 156 | insn && LABEL_P (insn); |
925fa227 | 157 | insn = next_nonnote_insn (insn)) |
a1fa3e79 EB |
158 | if (BLOCK_FOR_INSN (insn)) |
159 | return BLOCK_FOR_INSN (insn)->index; | |
925fa227 RH |
160 | |
161 | return -1; | |
162 | } | |
ca545bb5 BM |
163 | \f |
164 | /* Similar to next_insn, but ignores insns in the delay slots of | |
165 | an annulled branch. */ | |
166 | ||
dc01c3d1 DM |
167 | static rtx_insn * |
168 | next_insn_no_annul (rtx_insn *insn) | |
ca545bb5 BM |
169 | { |
170 | if (insn) | |
171 | { | |
172 | /* If INSN is an annulled branch, skip any insns from the target | |
173 | of the branch. */ | |
8f06d483 | 174 | if (JUMP_P (insn) |
cf40ea15 | 175 | && INSN_ANNULLED_BRANCH_P (insn) |
ca545bb5 | 176 | && NEXT_INSN (PREV_INSN (insn)) != insn) |
cf40ea15 | 177 | { |
fcff5049 | 178 | rtx_insn *next = NEXT_INSN (insn); |
cf40ea15 | 179 | |
b64925dc | 180 | while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next)) |
cf40ea15 DM |
181 | && INSN_FROM_TARGET_P (next)) |
182 | { | |
183 | insn = next; | |
184 | next = NEXT_INSN (insn); | |
cf40ea15 DM |
185 | } |
186 | } | |
ca545bb5 BM |
187 | |
188 | insn = NEXT_INSN (insn); | |
4b4bf941 | 189 | if (insn && NONJUMP_INSN_P (insn) |
ca545bb5 | 190 | && GET_CODE (PATTERN (insn)) == SEQUENCE) |
dc01c3d1 | 191 | insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0); |
ca545bb5 BM |
192 | } |
193 | ||
194 | return insn; | |
195 | } | |
196 | \f | |
/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  RES is accumulated into; it is not cleared here.  */

void
mark_referenced_resources (rtx x, struct resources *res,
			   bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
    case DEBUG_INSN:
      /* Constants and labels reference no resources.  */
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	mark_referenced_resources (SUBREG_REG (x), res, false);
      else
	{
	  /* A SUBREG of a hard register references exactly the hard
	     registers it overlaps.  */
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno = regno + subreg_nregs (x);

	  gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
      /* Only hard registers are expected this late in compilation.  */
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (! MEM_READONLY_P (x))
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  Fall through to the
	 generic sub-expression walk below (note: break, not return).  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (MEM_P (x))
	mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
    case CLOBBER_HIGH:
      /* A clobber sets resources; it references none.  */
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
	  rtx_sequence *sequence = 0;
	  int seq_size = 0;
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = as_a <rtx_sequence *> (PATTERN (NEXT_INSN (insn)));
	      seq_size = sequence->len ();
	      /* NOTE(review): this assert is redundant after the as_a<>
		 cast above, which already checks the code.  */
	      gcc_assert (GET_CODE (sequence) == SEQUENCE);
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
	      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
		SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns nonzero.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    /* Walk CALL_INSN_FUNCTION_USAGE; a USE'd register counts as
	       referenced unless one of the delay slots (elements 1..n of
	       the SEQUENCE) sets it, i.e. the parameter load was moved
	       into the delay slot.  */
	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (sequence->element (i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  XEXP (XEXP (link, 0), 0)))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					       res, false);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */
      gcc_fallthrough ();

    case INSN:
    case JUMP_INSN:

      if (GET_CODE (PATTERN (x)) == COND_EXEC)
	/* In addition to the usual references, also consider all outputs
	   as referenced, to compensate for mark_set_resources treating
	   them as killed.  This is similar to ZERO_EXTRACT / STRICT_LOW_PART
	   handling, except that we got a partial incidence instead of a
	   partial width.  */
	mark_set_resources (x, res, 0,
			    include_delayed_effects
			    ? MARK_SRC_DEST_CALL : MARK_SRC_DEST);

      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}
418 | \f | |
/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.

   RES is the live-register set being pruned; SET accumulates registers
   set so far, NEEDED accumulates registers referenced so far (both are
   passed by value, so recursive calls get private copies).  JUMP_COUNT
   limits how many branches we follow.  Returns the first unconditional
   jump followed (also storing its label through JUMP_TARGET if non-null),
   or 0 if none was usable.  */

static rtx_insn *
find_dead_or_set_registers (rtx_insn *target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx_insn *insn;
  rtx_insn *next_insn;
  rtx_insn *jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next_insn)
    {
      rtx_insn *this_insn = insn;

      next_insn = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	case DEBUG_INSN:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (rtx_sequence *seq =
		     dyn_cast <rtx_sequence *> (PATTERN (insn)))
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < seq->len (); i++)
		{
		  this_insn = seq->insn (i);
		  if (JUMP_P (this_insn))
		    break;
		}
	    }

	default:
	  break;
	}

      /* If we found a jump (possibly buried in a SEQUENCE), decide
	 whether and how to follow it.  */
      if (rtx_jump_insn *this_jump_insn =
	    dyn_cast <rtx_jump_insn *> (this_insn))
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || ANY_RETURN_P (PATTERN (this_jump_insn)))
		{
		  /* Follow an unconditional jump by continuing the scan
		     at its target; a RETURN ends the scan.  */
		  rtx lab_or_return = this_jump_insn->jump_label ();
		  if (ANY_RETURN_P (lab_or_return))
		    next_insn = NULL;
		  else
		    next_insn = as_a <rtx_insn *> (lab_or_return);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, true);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      /* Restore the original INSN_FROM_TARGET_P bits.  */
		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  /* Registers set-before-needed on the taken path are dead
		     there.  */
		  target_res = *res;
		  COPY_HARD_REG_SET (scratch, target_set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

		  /* Likewise for the fallthrough path.  */
		  fallthrough_res = *res;
		  COPY_HARD_REG_SET (scratch, set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

		  if (!ANY_RETURN_P (this_jump_insn->jump_label ()))
		    find_dead_or_set_registers
		      (this_jump_insn->jump_target (),
		       &target_res, 0, jump_count, target_set, needed);
		  find_dead_or_set_registers (next_insn,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  /* Keep only registers live on at least one path.  */
		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      /* Registers set before being needed are dead from here on.  */
      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}
611 | \f | |
/* Given X, a part of an insn, and a pointer to a `struct resource',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
		    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Tail-recursion replaced by a jump back here with X updated.  */
 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    CASE_CONST_ANY:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
    case DEBUG_INSN:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx_call_insn *call_insn = as_a <rtx_call_insn *> (x);
	  rtx link;
	  HARD_REG_SET regs;

	  res->cc = res->memory = 1;

	  get_call_reg_set_usage (call_insn, &regs, regs_invalidated_by_call);
	  IOR_HARD_REG_SET (res->regs, regs);

	  for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
	       link; link = XEXP (link, 1))
	    {
	      /* We could support CLOBBER_HIGH and treat it in the same way as
		 HARD_REGNO_CALL_PART_CLOBBERED, but no port needs that
		 yet.  */
	      gcc_assert (GET_CODE (XEXP (link, 0)) != CLOBBER_HIGH);
	      if (GET_CODE (XEXP (link, 0)) == CLOBBER)
		mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				    MARK_SRC_DEST);
	    }

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (find_reg_note (call_insn, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */
      gcc_fallthrough ();

    case JUMP_INSN:
    case INSN:

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      /* Recurse on the pattern without growing the call stack.  */
      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case CLOBBER_HIGH:
      /* No current target supports both branch delay slots and CLOBBER_HIGH.
	 We'd need more elaborate liveness tracking to handle that
	 combination.  */
      gcc_unreachable ();

    case SEQUENCE:
      {
	rtx_sequence *seq = as_a <rtx_sequence *> (x);
	rtx control = seq->element (0);
	bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);

	/* For a non-annulled branch, slot insns from the target are
	   skipped here (they only execute if the branch is taken).
	   NOTE(review): the INSN_FROM_TARGET_P condition looks inverted
	   relative to that intent — confirm against upstream history.  */
	mark_set_resources (control, res, 0, mark_type);
	for (i = seq->len () - 1; i >= 0; --i)
	  {
	    rtx elt = seq->element (i);
	    if (!annul_p && INSN_FROM_TARGET_P (elt))
	      mark_set_resources (elt, res, 0, mark_type);
	  }
      }
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      /* Auto-modify addressing sets the address register.  */
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* The extracted field inherits IN_DEST; position/size operands are
	 only referenced.  */
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      /* Address registers are referenced, not set.  */
      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (!REG_P (SUBREG_REG (x)))
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      /* Mark every hard register the SUBREG overlaps.  */
	      unsigned int regno = subreg_regno (x);
	      unsigned int last_regno = regno + subreg_nregs (x);

	      gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	{
	  gcc_assert (HARD_REGISTER_P (x));
	  add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
	}
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      /* break (not return): also walk the condition sub-expression.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}
848 | \f | |
6e456a10 EB |
849 | /* Return TRUE if INSN is a return, possibly with a filled delay slot. */ |
850 | ||
851 | static bool | |
ed7a4b4b | 852 | return_insn_p (const_rtx insn) |
6e456a10 | 853 | { |
26898771 | 854 | if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn))) |
6e456a10 EB |
855 | return true; |
856 | ||
2ca202e7 | 857 | if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) |
6e456a10 EB |
858 | return return_insn_p (XVECEXP (PATTERN (insn), 0, 0)); |
859 | ||
860 | return false; | |
861 | } | |
862 | ||
ca545bb5 BM |
/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status (by starting at a label following a
   BARRIER, we are immune to actions taken by reload and jump.)  Then we
   scan all insns between that point and our target.  For each CLOBBER (or
   for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
   registers as dead.  For a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */

void
mark_target_live_regs (rtx_insn *insns, rtx target_maybe_return, struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx_insn *insn;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target_maybe_return == 0 || ANY_RETURN_P (target_maybe_return))
    {
      *res = end_of_function_needs;
      return;
    }

  /* We've handled the case of RETURN/SIMPLE_RETURN; we should now have an
     instruction.  */
  rtx_insn *target = as_a <rtx_insn *> (target_maybe_return);

  /* Handle return insn.  */
  if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, false);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (target))
	  break;

      /* Start by getting the basic block number.  If we have saved
	 information, we can get it from there unless the insn at the
	 start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
	  && ! BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, tinfo->block))->deleted ())
	b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
	{
	  /* If the information is up-to-date, use it.  Otherwise, we will
	     update it below.  */
	  if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	    {
	      COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
	      return;
	    }
	}
      else
	{
	  /* Allocate a place to put our results and chain it into the
	     hash table.  */
	  tinfo = XNEW (struct target_info);
	  tinfo->uid = INSN_UID (target);
	  tinfo->block = b;
	  tinfo->next
	    = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
	}
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET; this custom life analysis is really about registers so we need
     to use the LR problem.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
      rtx_insn *start_insn, *stop_insn;

      /* Compute hard regs live at start of block.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
		    insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);

      if (NONJUMP_INSN_P (stop_insn)
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      /* Walk forward from the block head to TARGET, simulating the effect
	 of each insn on the set of live hard registers.  */
      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx_insn *real_insn = insn;
	  enum rtx_code code = GET_CODE (insn);

	  if (DEBUG_INSN_P (insn))
	    continue;

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
	      && INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (code == INSN
	      && GET_CODE (PATTERN (insn)) == USE
	      && INSN_P (XEXP (PATTERN (insn), 0)))
	    real_insn = as_a <rtx_insn *> (XEXP (PATTERN (insn), 0));

	  if (CALL_P (real_insn))
	    {
	      /* Values in call-clobbered registers survive a COND_EXEC CALL
		 if that is not executed; this matters for resource use because
		 they may be used by a complementarily (or more strictly)
		 predicated instruction, or if the CALL is NORETURN.  */
	      if (GET_CODE (PATTERN (real_insn)) != COND_EXEC)
		{
		  HARD_REG_SET regs_invalidated_by_this_call;
		  get_call_reg_set_usage (real_insn,
					  &regs_invalidated_by_this_call,
					  regs_invalidated_by_call);
		  /* CALL clobbers all call-used regs that aren't fixed except
		     sp, ap, and fp.  Do this before setting the result of the
		     call live.  */
		  AND_COMPL_HARD_REG_SET (current_live_regs,
					  regs_invalidated_by_this_call);
		}

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((NONJUMP_INSN_P (real_insn)
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || JUMP_P (real_insn)
	      || CALL_P (real_insn))
	    {
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  add_to_hard_reg_set (&pending_dead_regs,
				       GET_MODE (XEXP (link, 0)),
				       REGNO (XEXP (link, 0)));

	      note_stores (PATTERN (real_insn), update_live_status, NULL);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  remove_from_hard_reg_set (&current_live_regs,
					    GET_MODE (XEXP (link, 0)),
					    REGNO (XEXP (link, 0)));
	    }

	  else if (LABEL_P (real_insn))
	    {
	      basic_block bb;

	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
	      CLEAR_HARD_REG_SET (pending_dead_regs);

	      /* We must conservatively assume that all registers that used
		 to be live here still are.  The fallthrough edge may have
		 left a live register uninitialized.  */
	      bb = BLOCK_FOR_INSN (real_insn);
	      if (bb)
		{
		  HARD_REG_SET extra_live;

		  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
		  IOR_HARD_REG_SET (current_live_regs, extra_live);
		}
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (NOTE_P (real_insn)
		   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
	}

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      if (tinfo != NULL)
	{
	  tinfo->block = b;
	  tinfo->bb_tick = bb_ticks[b];
	}
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  rtx_insn *jump_insn = find_dead_or_set_registers (target, res, &jump_target,
						    0, set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx_insn *stop_insn = next_active_insn (jump_insn);

      if (!ANY_RETURN_P (jump_target))
	jump_target = next_active_insn (as_a<rtx_insn *> (jump_target));
      mark_target_live_regs (insns, jump_target, &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, true);

	  /* Registers needed here but not yet set are live at the branch
	     target's entry too.  */
	  COPY_HARD_REG_SET (scratch, needed.regs);
	  AND_COMPL_HARD_REG_SET (scratch, set.regs);
	  IOR_HARD_REG_SET (new_resources.regs, scratch);

	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	}

      IOR_HARD_REG_SET (res->regs, new_resources.regs);
    }

  /* Cache the final answer for subsequent calls on the same target.  */
  if (tinfo != NULL)
    {
      COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
    }
}
1174 | \f | |
/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.
   EPILOGUE_INSN is the first insn of the epilogue, or NULL if there is
   no epilogue; it is also used as the scan start for epilogue effects.  */

void
init_resource_info (rtx_insn *epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
      /* On targets with a separate hard frame pointer, that register is
	 live at function end as well.  */
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	SET_HARD_REG_BIT (end_of_function_needs.regs,
			  HARD_FRAME_POINTER_REGNUM);
    }
  if (!(frame_pointer_needed
	&& EXIT_IGNORE_STACK
	&& epilogue_insn
	&& !crtl->sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
			       &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  /* Fold the effects of every epilogue insn (up to and including the
     return, if any) into end_of_function_needs.  */
  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			  MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
	break;
    }

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}
1252 | \f | |
14b493d6 | 1253 | /* Free up the resources allocated to mark_target_live_regs (). This |
ca545bb5 BM |
1254 | should be invoked after the last call to mark_target_live_regs (). */ |
1255 | ||
1256 | void | |
0c20a65f | 1257 | free_resource_info (void) |
ca545bb5 | 1258 | { |
a1fa3e79 EB |
1259 | basic_block bb; |
1260 | ||
ca545bb5 BM |
1261 | if (target_hash_table != NULL) |
1262 | { | |
1f8f4a0b | 1263 | int i; |
a6a2274a KH |
1264 | |
1265 | for (i = 0; i < TARGET_HASH_PRIME; ++i) | |
1f8f4a0b MM |
1266 | { |
1267 | struct target_info *ti = target_hash_table[i]; | |
1268 | ||
a6a2274a | 1269 | while (ti) |
1f8f4a0b MM |
1270 | { |
1271 | struct target_info *next = ti->next; | |
1272 | free (ti); | |
1273 | ti = next; | |
1274 | } | |
1275 | } | |
1276 | ||
ca545bb5 BM |
1277 | free (target_hash_table); |
1278 | target_hash_table = NULL; | |
1279 | } | |
1280 | ||
1281 | if (bb_ticks != NULL) | |
1282 | { | |
1283 | free (bb_ticks); | |
1284 | bb_ticks = NULL; | |
1285 | } | |
a1fa3e79 | 1286 | |
11cd3bed | 1287 | FOR_EACH_BB_FN (bb, cfun) |
a1fa3e79 EB |
1288 | if (LABEL_P (BB_HEAD (bb))) |
1289 | BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL; | |
ca545bb5 BM |
1290 | } |
1291 | \f | |
1292 | /* Clear any hashed information that we have stored for INSN. */ | |
1293 | ||
1294 | void | |
a79b674b | 1295 | clear_hashed_info_for_insn (rtx_insn *insn) |
ca545bb5 BM |
1296 | { |
1297 | struct target_info *tinfo; | |
a6a2274a | 1298 | |
ca545bb5 BM |
1299 | if (target_hash_table != NULL) |
1300 | { | |
1301 | for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME]; | |
1302 | tinfo; tinfo = tinfo->next) | |
1303 | if (tinfo->uid == INSN_UID (insn)) | |
1304 | break; | |
1305 | ||
1306 | if (tinfo) | |
1307 | tinfo->block = -1; | |
1308 | } | |
1309 | } | |
1310 | \f | |
1311 | /* Increment the tick count for the basic block that contains INSN. */ | |
1312 | ||
1313 | void | |
a79b674b | 1314 | incr_ticks_for_insn (rtx_insn *insn) |
ca545bb5 | 1315 | { |
d5d063d7 | 1316 | int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH); |
ca545bb5 BM |
1317 | |
1318 | if (b != -1) | |
1319 | bb_ticks[b]++; | |
1320 | } | |
1321 | \f | |
/* Add TRIAL to the set of resources used at the end of the current
   function.

   TRIAL is an rtx whose referenced resources are accumulated into the
   global end_of_function_needs; INCLUDE_DELAYED_EFFECTS is forwarded
   unchanged to mark_referenced_resources.  */
void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}
1329 | } |