]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/resource.c
function.h: Flatten file.
[thirdparty/gcc.git] / gcc / resource.c
CommitLineData
/* Definitions for computing resource usage of specific insns.
   Copyright (C) 1999-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
ca545bb5 20#include "config.h"
8ce25479 21#include "system.h"
4977bab6
ZW
22#include "coretypes.h"
23#include "tm.h"
718f9c0f 24#include "diagnostic-core.h"
ca545bb5 25#include "rtl.h"
6baf1cc8 26#include "tm_p.h"
ca545bb5 27#include "hard-reg-set.h"
83685514
AM
28#include "hashtab.h"
29#include "hash-set.h"
30#include "vec.h"
31#include "machmode.h"
32#include "input.h"
49ad7cfa 33#include "function.h"
ca545bb5
BM
34#include "regs.h"
35#include "flags.h"
36#include "output.h"
37#include "resource.h"
d80eb1e1 38#include "except.h"
7bdb32b9 39#include "insn-attr.h"
d5d063d7 40#include "params.h"
6fb5fa3c 41#include "df.h"
ca545bb5
BM
42
/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

/* Number of buckets in the target hash table; a prime to spread INSN_UIDs.  */
#define TARGET_HASH_PRIME 257

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* The hash table itself, keyed by INSN_UID modulo TARGET_HASH_PRIME.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

/* Forward declarations for the static helpers defined below.  */
static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx_insn *, int);
static rtx_insn *next_insn_no_annul (rtx_insn *);
static rtx_insn *find_dead_or_set_registers (rtx_insn *, struct resources*,
					     rtx *, int, struct resources,
					     struct resources);
ca545bb5
BM
92/* Utility function called from mark_target_live_regs via note_stores.
93 It deadens any CLOBBERed registers and livens any SET registers. */
94
95static void
7bc980e1 96update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
ca545bb5
BM
97{
98 int first_regno, last_regno;
99 int i;
100
f8cfc6aa
JQ
101 if (!REG_P (dest)
102 && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
ca545bb5
BM
103 return;
104
105 if (GET_CODE (dest) == SUBREG)
f1f4e530
JM
106 {
107 first_regno = subreg_regno (dest);
108 last_regno = first_regno + subreg_nregs (dest);
ca545bb5 109
f1f4e530
JM
110 }
111 else
112 {
113 first_regno = REGNO (dest);
09e18274 114 last_regno = END_HARD_REGNO (dest);
f1f4e530 115 }
ca545bb5
BM
116
117 if (GET_CODE (x) == CLOBBER)
118 for (i = first_regno; i < last_regno; i++)
119 CLEAR_HARD_REG_BIT (current_live_regs, i);
120 else
121 for (i = first_regno; i < last_regno; i++)
122 {
123 SET_HARD_REG_BIT (current_live_regs, i);
124 CLEAR_HARD_REG_BIT (pending_dead_regs, i);
125 }
126}
d5d063d7
JO
127
/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */

static int
find_basic_block (rtx_insn *insn, int search_limit)
{
  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && !BARRIER_P (insn) && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (insn == 0)
    return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && LABEL_P (insn);
       insn = next_nonnote_insn (insn))
    if (BLOCK_FOR_INSN (insn))
      return BLOCK_FOR_INSN (insn)->index;

  /* No label starting a basic block was found.  */
  return -1;
}
ca545bb5
BM
167\f
/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx_insn *
next_insn_no_annul (rtx_insn *insn)
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
	 of the branch.  */
      if (JUMP_P (insn)
	  && INSN_ANNULLED_BRANCH_P (insn)
	  && NEXT_INSN (PREV_INSN (insn)) != insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);

	  /* Skip successive insns that were moved here from the branch
	     target; they only execute when the branch is taken.  */
	  while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
		 && INSN_FROM_TARGET_P (next))
	    {
	      insn = next;
	      next = NEXT_INSN (insn);
	    }
	}

      insn = NEXT_INSN (insn);
      /* If we landed on a delay-slot SEQUENCE, descend to its first
	 (control) insn.  */
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}
200\f
/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

void
mark_referenced_resources (rtx x, struct resources *res,
			   bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Constants and labels reference no resources.  */
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	mark_referenced_resources (SUBREG_REG (x), res, false);
      else
	{
	  /* A SUBREG of a hard reg references the covered hard regs.  */
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno = regno + subreg_nregs (x);

	  gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (! MEM_READONLY_P (x))
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (MEM_P (x))
	mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
      /* A CLOBBER sets but does not reference its operand.  */
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
	  rtx_sequence *sequence = 0;
	  int seq_size = 0;
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = as_a <rtx_sequence *> (PATTERN (NEXT_INSN (insn)));
	      seq_size = sequence->len ();
	      gcc_assert (GET_CODE (sequence) == SEQUENCE);
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
	      SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns nonzero.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    /* Walk CALL_INSN_FUNCTION_USAGE; a USE counts unless the used
	       register is set by an insn already in this delay sequence.  */
	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (sequence->element (i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  XEXP (XEXP (link, 0), 0)))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					       res, false);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

      if (GET_CODE (PATTERN (x)) == COND_EXEC)
	/* In addition to the usual references, also consider all outputs
	   as referenced, to compensate for mark_set_resources treating
	   them as killed.  This is similar to ZERO_EXTRACT / STRICT_LOW_PART
	   handling, except that we got a partial incidence instead of a
	   partial width.  */
	mark_set_resources (x, res, 0,
			    include_delayed_effects
			    ? MARK_SRC_DEST_CALL : MARK_SRC_DEST);

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}
422\f
/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.

   RES holds the registers believed live; SET and NEEDED accumulate what
   has been set and what has been referenced along the scan.  If
   JUMP_TARGET is nonnull, store the label of the first unconditional
   jump followed; return that jump insn (or 0 if none was followed).  */

static rtx_insn *
find_dead_or_set_registers (rtx_insn *target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx_insn *insn;
  rtx_insn *next_insn;
  rtx_insn *jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next_insn)
    {
      rtx_insn *this_jump_insn = insn;

      next_insn = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (rtx_sequence *seq =
		     dyn_cast <rtx_sequence *> (PATTERN (insn)))
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < seq->len (); i++)
		{
		  this_jump_insn = seq->insn (i);
		  if (JUMP_P (this_jump_insn))
		    break;
		}
	    }

	default:
	  break;
	}

      if (JUMP_P (this_jump_insn))
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || ANY_RETURN_P (PATTERN (this_jump_insn)))
		{
		  /* Follow an unconditional jump: continue the scan at its
		     target (or stop at a RETURN).  */
		  rtx lab_or_return = JUMP_LABEL (this_jump_insn);
		  if (ANY_RETURN_P (lab_or_return))
		    next_insn = NULL;
		  else
		    next_insn = as_a <rtx_insn *> (lab_or_return);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, true);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  /* Registers dead on the taken path: set before needed.  */
		  target_res = *res;
		  COPY_HARD_REG_SET (scratch, target_set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

		  /* Likewise for the fallthrough path.  */
		  fallthrough_res = *res;
		  COPY_HARD_REG_SET (scratch, set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

		  if (!ANY_RETURN_P (JUMP_LABEL (this_jump_insn)))
		    find_dead_or_set_registers (JUMP_LABEL_AS_INSN (this_jump_insn),
						&target_res, 0, jump_count,
						target_set, needed);
		  find_dead_or_set_registers (next_insn,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  /* Live iff live on either path.  */
		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      /* Anything set before being needed is dead here.  */
      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}
613\f
/* Given X, a part of an insn, and a pointer to a `struct resource',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
		    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    CASE_CONST_ANY:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx_call_insn *call_insn = as_a <rtx_call_insn *> (x);
	  rtx link;
	  HARD_REG_SET regs;

	  res->cc = res->memory = 1;

	  get_call_reg_set_usage (call_insn, &regs, regs_invalidated_by_call);
	  IOR_HARD_REG_SET (res->regs, regs);

	  for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				  MARK_SRC_DEST);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (find_reg_note (call_insn, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */

    case JUMP_INSN:
    case INSN:

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      {
	rtx_sequence *seq = as_a <rtx_sequence *> (x);
	rtx control = seq->element (0);
	bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);

	mark_set_resources (control, res, 0, mark_type);
	/* For a non-annulled branch, delay-slot insns from the target
	   execute; include their effects.  */
	for (i = seq->len () - 1; i >= 0; --i)
	  {
	    rtx elt = seq->element (i);
	    if (!annul_p && INSN_FROM_TARGET_P (elt))
	      mark_set_resources (elt, res, 0, mark_type);
	  }
      }
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      /* Auto-modify addressing sets the base register.  */
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Only the extracted field inherits IN_DEST; position and width
	 operands are merely referenced.  */
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      /* The address is referenced, never set.  */
      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (!REG_P (SUBREG_REG (x)))
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      unsigned int regno = subreg_regno (x);
	      unsigned int last_regno = regno + subreg_nregs (x);

	      gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	{
	  gcc_assert (HARD_REGISTER_P (x));
	  add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
	}
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}
838\f
6e456a10
EB
839/* Return TRUE if INSN is a return, possibly with a filled delay slot. */
840
841static bool
ed7a4b4b 842return_insn_p (const_rtx insn)
6e456a10 843{
26898771 844 if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
6e456a10
EB
845 return true;
846
2ca202e7 847 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
6e456a10
EB
848 return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));
849
850 return false;
851}
852
ca545bb5
BM
853/* Set the resources that are live at TARGET.
854
855 If TARGET is zero, we refer to the end of the current function and can
856 return our precomputed value.
857
858 Otherwise, we try to find out what is live by consulting the basic block
859 information. This is tricky, because we must consider the actions of
860 reload and jump optimization, which occur after the basic block information
861 has been computed.
862
863 Accordingly, we proceed as follows::
864
865 We find the previous BARRIER and look at all immediately following labels
866 (with no intervening active insns) to see if any of them start a basic
867 block. If we hit the start of the function first, we use block 0.
868
a1fa3e79
EB
869 Once we have found a basic block and a corresponding first insn, we can
870 accurately compute the live status (by starting at a label following a
871 BARRIER, we are immune to actions taken by reload and jump.) Then we
872 scan all insns between that point and our target. For each CLOBBER (or
873 for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
874 registers are dead. For a SET, mark them as live.
ca545bb5
BM
875
876 We have to be careful when using REG_DEAD notes because they are not
877 updated by such things as find_equiv_reg. So keep track of registers
878 marked as dead that haven't been assigned to, and mark them dead at the
879 next CODE_LABEL since reload and jump won't propagate values across labels.
880
881 If we cannot find the start of a basic block (should be a very rare
882 case, if it can happen at all), mark everything as potentially live.
883
884 Next, scan forward from TARGET looking for things set or clobbered
885 before they are used. These are not live.
886
887 Because we can be called many times on the same target, save our results
888 in a hash table indexed by INSN_UID. This is only done if the function
889 init_resource_info () was invoked before we are called. */
890
void
mark_target_live_regs (rtx_insn *insns, rtx target_maybe_return, struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx_insn *insn;
  rtx jump_insn = 0;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target_maybe_return == 0 || ANY_RETURN_P (target_maybe_return))
    {
      *res = end_of_function_needs;
      return;
    }

  /* We've handled the case of RETURN/SIMPLE_RETURN; we should now have an
     instruction.  */
  rtx_insn *target = as_a <rtx_insn *> (target_maybe_return);

  /* Handle return insn.  */
  if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, false);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (target))
	  break;

      /* Start by getting the basic block number.  If we have saved
	 information, we can get it from there unless the insn at the
	 start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
	  && ! BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, tinfo->block))->deleted ())
	b = tinfo->block;
    }

  /* No usable cached block number; search for the enclosing basic block,
     scanning at most MAX_DELAY_SLOT_LIVE_SEARCH insns backward.  */
  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
	{
	  /* If the information is up-to-date, use it.  Otherwise, we will
	     update it below.  */
	  if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	    {
	      COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
	      return;
	    }
	}
      else
	{
	  /* Allocate a place to put our results and chain it into the
	     hash table.  */
	  tinfo = XNEW (struct target_info);
	  tinfo->uid = INSN_UID (target);
	  tinfo->block = b;
	  tinfo->next
	    = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
	}
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET; this custom life analysis is really about registers so we need
     to use the LR problem.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
      rtx_insn *start_insn, *stop_insn;

      /* Compute hard regs live at start of block.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
		    insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);

      if (NONJUMP_INSN_P (stop_insn)
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      /* Walk the insns from the block start up to (but not including)
	 TARGET, updating CURRENT_LIVE_REGS as we go.  */
      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx_insn *real_insn = insn;
	  enum rtx_code code = GET_CODE (insn);

	  /* Debug insns have no effect on liveness.  */
	  if (DEBUG_INSN_P (insn))
	    continue;

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
	      && INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (code == INSN
	      && GET_CODE (PATTERN (insn)) == USE
	      && INSN_P (XEXP (PATTERN (insn), 0)))
	    real_insn = as_a <rtx_insn *> (XEXP (PATTERN (insn), 0));

	  if (CALL_P (real_insn))
	    {
	      /* Values in call-clobbered registers survive a COND_EXEC CALL
		 if that is not executed; this matters for resource use because
		 they may be used by a complementarily (or more strictly)
		 predicated instruction, or if the CALL is NORETURN.  */
	      if (GET_CODE (PATTERN (real_insn)) != COND_EXEC)
		{
		  HARD_REG_SET regs_invalidated_by_this_call;
		  get_call_reg_set_usage (real_insn,
					  &regs_invalidated_by_this_call,
					  regs_invalidated_by_call);
		  /* CALL clobbers all call-used regs that aren't fixed except
		     sp, ap, and fp.  Do this before setting the result of the
		     call live.  */
		  AND_COMPL_HARD_REG_SET (current_live_regs,
					  regs_invalidated_by_this_call);
		}

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((NONJUMP_INSN_P (real_insn)
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || JUMP_P (real_insn)
	      || CALL_P (real_insn))
	    {
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  add_to_hard_reg_set (&pending_dead_regs,
				       GET_MODE (XEXP (link, 0)),
				       REGNO (XEXP (link, 0)));

	      /* Record registers set by this insn into CURRENT_LIVE_REGS
		 via the update_live_status callback.  */
	      note_stores (PATTERN (real_insn), update_live_status, NULL);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  remove_from_hard_reg_set (&current_live_regs,
					    GET_MODE (XEXP (link, 0)),
					    REGNO (XEXP (link, 0)));
	    }

	  else if (LABEL_P (real_insn))
	    {
	      basic_block bb;

	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
	      CLEAR_HARD_REG_SET (pending_dead_regs);

	      /* We must conservatively assume that all registers that used
		 to be live here still are.  The fallthrough edge may have
		 left a live register uninitialized.  */
	      bb = BLOCK_FOR_INSN (real_insn);
	      if (bb)
		{
		  HARD_REG_SET extra_live;

		  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
		  IOR_HARD_REG_SET (current_live_regs, extra_live);
		}
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (NOTE_P (real_insn)
		   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
	}

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      if (tinfo != NULL)
	{
	  tinfo->block = b;
	  tinfo->bb_tick = bb_ticks[b];
	}
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  /* Scan forward from TARGET, removing resources that are set or
     clobbered before they are used.  Returns the terminating
     unconditional jump, if one was reached.  */
  jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
					  set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx_insn *stop_insn = next_active_insn (jump_insn);

      if (!ANY_RETURN_P (jump_target))
	jump_target = next_active_insn (jump_target);
      /* Recurse on the branch target to pick up its live set.  */
      mark_target_live_regs (insns, jump_target, &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, true);

	  /* Registers needed here but not yet set are live into this
	     stretch; add them to the target's live set.  */
	  COPY_HARD_REG_SET (scratch, needed.regs);
	  AND_COMPL_HARD_REG_SET (scratch, set.regs);
	  IOR_HARD_REG_SET (new_resources.regs, scratch);

	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	}

      IOR_HARD_REG_SET (res->regs, new_resources.regs);
    }

  /* Cache the computed live set for future queries on the same target.  */
  if (tinfo != NULL)
    {
      COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
    }
}
1165\f
/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.
   EPILOGUE_INSN is the first insn of the epilogue (or NULL if there is
   none); insns after it are scanned to accumulate epilogue resource
   needs.  */

void
init_resource_info (rtx_insn *epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      /* On targets with a distinct hard frame pointer, it is needed too.  */
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
    }
  if (!(frame_pointer_needed
	&& EXIT_IGNORE_STACK
	&& epilogue_insn
	&& !crtl->sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  /* Registers carrying the return value are live at function end.  */
  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
			       &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  /* Accumulate resources set by epilogue insns into end_of_function_needs,
     stopping at the return insn.  */
  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			  MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
	break;
    }

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}
1247\f
14b493d6 1248/* Free up the resources allocated to mark_target_live_regs (). This
ca545bb5
BM
1249 should be invoked after the last call to mark_target_live_regs (). */
1250
1251void
0c20a65f 1252free_resource_info (void)
ca545bb5 1253{
a1fa3e79
EB
1254 basic_block bb;
1255
ca545bb5
BM
1256 if (target_hash_table != NULL)
1257 {
1f8f4a0b 1258 int i;
a6a2274a
KH
1259
1260 for (i = 0; i < TARGET_HASH_PRIME; ++i)
1f8f4a0b
MM
1261 {
1262 struct target_info *ti = target_hash_table[i];
1263
a6a2274a 1264 while (ti)
1f8f4a0b
MM
1265 {
1266 struct target_info *next = ti->next;
1267 free (ti);
1268 ti = next;
1269 }
1270 }
1271
ca545bb5
BM
1272 free (target_hash_table);
1273 target_hash_table = NULL;
1274 }
1275
1276 if (bb_ticks != NULL)
1277 {
1278 free (bb_ticks);
1279 bb_ticks = NULL;
1280 }
a1fa3e79 1281
11cd3bed 1282 FOR_EACH_BB_FN (bb, cfun)
a1fa3e79
EB
1283 if (LABEL_P (BB_HEAD (bb)))
1284 BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
ca545bb5
BM
1285}
1286\f
1287/* Clear any hashed information that we have stored for INSN. */
1288
1289void
a79b674b 1290clear_hashed_info_for_insn (rtx_insn *insn)
ca545bb5
BM
1291{
1292 struct target_info *tinfo;
a6a2274a 1293
ca545bb5
BM
1294 if (target_hash_table != NULL)
1295 {
1296 for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
1297 tinfo; tinfo = tinfo->next)
1298 if (tinfo->uid == INSN_UID (insn))
1299 break;
1300
1301 if (tinfo)
1302 tinfo->block = -1;
1303 }
1304}
1305\f
1306/* Increment the tick count for the basic block that contains INSN. */
1307
1308void
a79b674b 1309incr_ticks_for_insn (rtx_insn *insn)
ca545bb5 1310{
d5d063d7 1311 int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);
ca545bb5
BM
1312
1313 if (b != -1)
1314 bb_ticks[b]++;
1315}
1316\f
/* Add TRIAL to the set of resources used at the end of the current
   function.  INCLUDE_DELAYED_EFFECTS is passed through to
   mark_referenced_resources and controls whether resources referenced
   by insns in TRIAL's delay slots are included as well.  */
void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}