]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/flow.c
Merge tree-ssa-20020619-branch into mainline.
[thirdparty/gcc.git] / gcc / flow.c
1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
25
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
29
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
33
34 ** find_basic_blocks **
35
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
40
41 find_basic_blocks also finds any unreachable loops and deletes them.
42
43 ** life_analysis **
44
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
48
49 ** live-register info **
50
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
53
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
58
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
65
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
76
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
83
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
87
88 ** Other actions of life_analysis **
89
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
92
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
95
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
101
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
104
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
108
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
111
112 /* TODO:
113
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
119 */
120 \f
121 #include "config.h"
122 #include "system.h"
123 #include "coretypes.h"
124 #include "tm.h"
125 #include "tree.h"
126 #include "rtl.h"
127 #include "tm_p.h"
128 #include "hard-reg-set.h"
129 #include "basic-block.h"
130 #include "insn-config.h"
131 #include "regs.h"
132 #include "flags.h"
133 #include "output.h"
134 #include "function.h"
135 #include "except.h"
136 #include "toplev.h"
137 #include "recog.h"
138 #include "expr.h"
139 #include "timevar.h"
140
141 #include "obstack.h"
142 #include "splay-tree.h"
143
144 #ifndef HAVE_epilogue
145 #define HAVE_epilogue 0
146 #endif
147 #ifndef HAVE_prologue
148 #define HAVE_prologue 0
149 #endif
150 #ifndef HAVE_sibcall_epilogue
151 #define HAVE_sibcall_epilogue 0
152 #endif
153
154 #ifndef EPILOGUE_USES
155 #define EPILOGUE_USES(REGNO) 0
156 #endif
157 #ifndef EH_USES
158 #define EH_USES(REGNO) 0
159 #endif
160
161 #ifdef HAVE_conditional_execution
162 #ifndef REVERSE_CONDEXEC_PREDICATES_P
163 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
164 #endif
165 #endif
166
167 /* Nonzero if the second flow pass has completed. */
168 int flow2_completed;
169
170 /* Maximum register number used in this function, plus one. */
171
172 int max_regno;
173
174 /* Indexed by n, giving various register information */
175
176 varray_type reg_n_info;
177
178 /* Size of a regset for the current function,
179 in (1) bytes and (2) elements. */
180
181 int regset_bytes;
182 int regset_size;
183
184 /* Regset of regs live when calls to `setjmp'-like functions happen. */
185 /* ??? Does this exist only for the setjmp-clobbered warning message? */
186
187 regset regs_live_at_setjmp;
188
189 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
190 that have to go in the same hard reg.
191 The first two regs in the list are a pair, and the next two
192 are another pair, etc. */
193 rtx regs_may_share;
194
195 /* Set of registers that may be eliminable. These are handled specially
196 in updating regs_ever_live. */
197
198 static HARD_REG_SET elim_reg_set;
199
200 /* Holds information for tracking conditional register life information. */
201 struct reg_cond_life_info
202 {
203 /* A boolean expression of conditions under which a register is dead. */
204 rtx condition;
205 /* Conditions under which a register is dead at the basic block end. */
206 rtx orig_condition;
207
208 /* A boolean expression of conditions under which a register has been
209 stored into. */
210 rtx stores;
211
212 /* ??? Could store mask of bytes that are dead, so that we could finally
213 track lifetimes of multi-word registers accessed via subregs. */
214 };
215
216 /* For use in communicating between propagate_block and its subroutines.
217 Holds all information needed to compute life and def-use information. */
218
219 struct propagate_block_info
220 {
221 /* The basic block we're considering. */
222 basic_block bb;
223
224 /* Bit N is set if register N is conditionally or unconditionally live. */
225 regset reg_live;
226
227 /* Bit N is set if register N is set this insn. */
228 regset new_set;
229
230 /* Element N is the next insn that uses (hard or pseudo) register N
231 within the current basic block; or zero, if there is no such insn. */
232 rtx *reg_next_use;
233
234 /* Contains a list of all the MEMs we are tracking for dead store
235 elimination. */
236 rtx mem_set_list;
237
238 /* If non-null, record the set of registers set unconditionally in the
239 basic block. */
240 regset local_set;
241
242 /* If non-null, record the set of registers set conditionally in the
243 basic block. */
244 regset cond_local_set;
245
246 #ifdef HAVE_conditional_execution
247 /* Indexed by register number, holds a reg_cond_life_info for each
248 register that is not unconditionally live or dead. */
249 splay_tree reg_cond_dead;
250
251 /* Bit N is set if register N is in an expression in reg_cond_dead. */
252 regset reg_cond_reg;
253 #endif
254
255 /* The length of mem_set_list. */
256 int mem_set_list_len;
257
258 /* Nonzero if the value of CC0 is live. */
259 int cc0_live;
260
261 /* Flags controlling the set of information propagate_block collects. */
262 int flags;
263 /* Index of instruction being processed. */
264 int insn_num;
265 };
266
267 /* Number of dead insns removed. */
268 static int ndead;
269
270 /* When PROP_REG_INFO set, array contains pbi->insn_num of instruction
271 where given register died. When the register is marked alive, we use the
272 information to compute amount of instructions life range cross.
273 (remember, we are walking backward). This can be computed as current
274 pbi->insn_num - reg_deaths[regno].
275 At the end of processing each basic block, the remaining live registers
276 are inspected and liferanges are increased same way so liverange of global
277 registers are computed correctly.
278
279 The array is maintained clear for dead registers, so it can be safely reused
280 for next basic block without expensive memset of the whole array after
281 reseting pbi->insn_num to 0. */
282
283 static int *reg_deaths;
284
285 /* Maximum length of pbi->mem_set_list before we start dropping
286 new elements on the floor. */
287 #define MAX_MEM_SET_LIST_LEN 100
288
289 /* Forward declarations */
290 static int verify_wide_reg_1 (rtx *, void *);
291 static void verify_wide_reg (int, basic_block);
292 static void verify_local_live_at_start (regset, basic_block);
293 static void notice_stack_pointer_modification_1 (rtx, rtx, void *);
294 static void notice_stack_pointer_modification (rtx);
295 static void mark_reg (rtx, void *);
296 static void mark_regs_live_at_end (regset);
297 static void calculate_global_regs_live (sbitmap, sbitmap, int);
298 static void propagate_block_delete_insn (rtx);
299 static rtx propagate_block_delete_libcall (rtx, rtx);
300 static int insn_dead_p (struct propagate_block_info *, rtx, int, rtx);
301 static int libcall_dead_p (struct propagate_block_info *, rtx, rtx);
302 static void mark_set_regs (struct propagate_block_info *, rtx, rtx);
303 static void mark_set_1 (struct propagate_block_info *, enum rtx_code, rtx,
304 rtx, rtx, int);
305 static int find_regno_partial (rtx *, void *);
306
307 #ifdef HAVE_conditional_execution
308 static int mark_regno_cond_dead (struct propagate_block_info *, int, rtx);
309 static void free_reg_cond_life_info (splay_tree_value);
310 static int flush_reg_cond_reg_1 (splay_tree_node, void *);
311 static void flush_reg_cond_reg (struct propagate_block_info *, int);
312 static rtx elim_reg_cond (rtx, unsigned int);
313 static rtx ior_reg_cond (rtx, rtx, int);
314 static rtx not_reg_cond (rtx);
315 static rtx and_reg_cond (rtx, rtx, int);
316 #endif
317 #ifdef AUTO_INC_DEC
318 static void attempt_auto_inc (struct propagate_block_info *, rtx, rtx, rtx,
319 rtx, rtx);
320 static void find_auto_inc (struct propagate_block_info *, rtx, rtx);
321 static int try_pre_increment_1 (struct propagate_block_info *, rtx);
322 static int try_pre_increment (rtx, rtx, HOST_WIDE_INT);
323 #endif
324 static void mark_used_reg (struct propagate_block_info *, rtx, rtx, rtx);
325 static void mark_used_regs (struct propagate_block_info *, rtx, rtx, rtx);
326 void debug_flow_info (void);
327 static void add_to_mem_set_list (struct propagate_block_info *, rtx);
328 static int invalidate_mems_from_autoinc (rtx *, void *);
329 static void invalidate_mems_from_set (struct propagate_block_info *, rtx);
330 static void clear_log_links (sbitmap);
331 static int count_or_remove_death_notes_bb (basic_block, int);
332 \f
333 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
334 note associated with the BLOCK. */
335
336 rtx
337 first_insn_after_basic_block_note (basic_block block)
338 {
339 rtx insn;
340
341 /* Get the first instruction in the block. */
342 insn = BB_HEAD (block);
343
344 if (insn == NULL_RTX)
345 return NULL_RTX;
346 if (GET_CODE (insn) == CODE_LABEL)
347 insn = NEXT_INSN (insn);
348 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
349 abort ();
350
351 return NEXT_INSN (insn);
352 }
353 \f
/* Perform data flow analysis.
   F is the first insn of the function; FILE, if nonnull, receives a
   dump of the resulting flow info; FLAGS is a set of PROP_* flags to
   be used in accumulating flow info.  */

void
life_analysis (rtx f, FILE *file, int flags)
{
#ifdef ELIMINABLE_REGS
  int i;
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */

  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif


#ifdef CANNOT_CHANGE_MODE_CLASS
  if (flags & PROP_REG_INFO)
    bitmap_initialize (&subregs_of_mode, 1);
#endif

  /* Without optimization, the properties that feed later optimization
     passes are pointless to compute.  */
  if (! optimize)
    flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);

  /* The post-reload life analysis must keep (on a global basis) the
     same registers live as were computed by reload itself; otherwise
     elimination offsets and such may be incorrect.

     Reload will mark some registers as live even though they do not
     appear in the rtl.

     We don't want to create new auto-incs after reload, since they
     are unlikely to be useful and can cause problems with shared
     stack slots.  */
  if (reload_completed)
    flags &= ~(PROP_REG_INFO | PROP_AUTOINC);

  /* We want alias analysis information for local dead store elimination.  */
  if (optimize && (flags & PROP_SCAN_DEAD_STORES))
    init_alias_analysis ();

  /* Always remove no-op moves.  Do this before other processing so
     that we don't have to keep re-scanning them.  */
  delete_noop_moves (f);

  /* Some targets can emit simpler epilogues if they know that sp was
     not ever modified during the function.  After reload, of course,
     we've already emitted the epilogue so there's no sense searching.  */
  if (! reload_completed)
    notice_stack_pointer_modification (f);

  /* Allocate and zero out data structures that will record the
     data from lifetime analysis.  */
  allocate_reg_life_data ();
  allocate_bb_life_data ();

  /* Find the set of registers live on function exit.  */
  mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);

  /* "Update" life info from zero.  It'd be nice to begin the
     relaxation with just the exit and noreturn blocks, but that set
     is not immediately handy.  */

  if (flags & PROP_REG_INFO)
    {
      memset (regs_ever_live, 0, sizeof (regs_ever_live));
      memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));
    }
  update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
  /* update_life_info allocates reg_deaths when PROP_REG_INFO is set;
     it has no further use once the analysis is complete.  */
  if (reg_deaths)
    {
      free (reg_deaths);
      reg_deaths = NULL;
    }

  /* Clean up.  */
  if (optimize && (flags & PROP_SCAN_DEAD_STORES))
    end_alias_analysis ();

  if (file)
    dump_flow_info (file);

  /* Removing dead insns should have made jumptables really dead.  */
  delete_dead_jumptables ();
}
448
449 /* A subroutine of verify_wide_reg, called through for_each_rtx.
450 Search for REGNO. If found, return 2 if it is not wider than
451 word_mode. */
452
453 static int
454 verify_wide_reg_1 (rtx *px, void *pregno)
455 {
456 rtx x = *px;
457 unsigned int regno = *(int *) pregno;
458
459 if (GET_CODE (x) == REG && REGNO (x) == regno)
460 {
461 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
462 return 2;
463 return 1;
464 }
465 return 0;
466 }
467
468 /* A subroutine of verify_local_live_at_start. Search through insns
469 of BB looking for register REGNO. */
470
471 static void
472 verify_wide_reg (int regno, basic_block bb)
473 {
474 rtx head = BB_HEAD (bb), end = BB_END (bb);
475
476 while (1)
477 {
478 if (INSN_P (head))
479 {
480 int r = for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno);
481 if (r == 1)
482 return;
483 if (r == 2)
484 break;
485 }
486 if (head == end)
487 break;
488 head = NEXT_INSN (head);
489 }
490
491 if (dump_file)
492 {
493 fprintf (dump_file, "Register %d died unexpectedly.\n", regno);
494 dump_bb (bb, dump_file, 0);
495 }
496 abort ();
497 }
498
/* A subroutine of update_life_info.  Verify that there are no untoward
   changes in live_at_start during a local update.

   NEW_LIVE_AT_START is the freshly recomputed live-at-start set for BB;
   note that it is destructively modified (XORed) in the pre-reload
   path below, so callers must treat it as scratch afterward.  */

static void
verify_local_live_at_start (regset new_live_at_start, basic_block bb)
{
  if (reload_completed)
    {
      /* After reload, there are no pseudos, nor subregs of multi-word
	 registers.  The regsets should exactly match.  */
      if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
	{
	  if (dump_file)
	    {
	      fprintf (dump_file,
		       "live_at_start mismatch in bb %d, aborting\nNew:\n",
		       bb->index);
	      debug_bitmap_file (dump_file, new_live_at_start);
	      fputs ("Old:\n", dump_file);
	      dump_bb (bb, dump_file, 0);
	    }
	  abort ();
	}
    }
  else
    {
      int i;

      /* Find the set of changed registers.  */
      XOR_REG_SET (new_live_at_start, bb->global_live_at_start);

      /* Every changed register must be one that went live, and its
	 liveness must be explainable by a wide (multi-word) reference
	 in the block; anything else is a broken transformation.  */
      EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
	{
	  /* No registers should die.  */
	  if (REGNO_REG_SET_P (bb->global_live_at_start, i))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file,
			   "Register %d died unexpectedly.\n", i);
		  dump_bb (bb, dump_file, 0);
		}
	      abort ();
	    }

	  /* Verify that the now-live register is wider than word_mode.  */
	  verify_wide_reg (i, bb);
	});
    }
}
549
/* Updates life information starting with the basic blocks set in BLOCKS.
   If BLOCKS is null, consider it to be the universal set.

   If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
   we are only expecting local modifications to basic blocks.  If we find
   extra registers live at the beginning of a block, then we either killed
   useful data, or we have a broken split that wants data not provided.
   If we find registers removed from live_at_start, that means we have
   a broken peephole that is killing a register it shouldn't.

   ??? This is not true in one situation -- when a pre-reload splitter
   generates subregs of a multi-word pseudo, current life analysis will
   lose the kill.  So we _can_ have a pseudo go live.  How irritating.

   It is also not true when a peephole decides that it doesn't need one
   or more of the inputs.

   Including PROP_REG_INFO does not properly refresh regs_ever_live
   unless the caller resets it to zero.

   Returns the number of dead insns removed.  */

int
update_life_info (sbitmap blocks, enum update_life_extent extent, int prop_flags)
{
  regset tmp;
  regset_head tmp_head;
  int i;
  int stabilized_prop_flags = prop_flags;
  basic_block bb;

  tmp = INITIALIZE_REG_SET (tmp_head);
  ndead = 0;

  /* reg_deaths is indexed by regno and records pbi->insn_num at the
     point of death; see the comment at its definition.  */
  if ((prop_flags & PROP_REG_INFO) && !reg_deaths)
    reg_deaths = xcalloc (sizeof (*reg_deaths), max_regno);

  timevar_push ((extent == UPDATE_LIFE_LOCAL || blocks)
		? TV_LIFE_UPDATE : TV_LIFE);

  /* Changes to the CFG are only allowed when
     doing a global update for the entire CFG.  */
  if ((prop_flags & PROP_ALLOW_CFG_CHANGES)
      && (extent == UPDATE_LIFE_LOCAL || blocks))
    abort ();

  /* For a global update, we go through the relaxation process again.  */
  if (extent != UPDATE_LIFE_LOCAL)
    {
      for ( ; ; )
	{
	  int changed = 0;

	  calculate_global_regs_live (blocks, blocks,
				prop_flags & (PROP_SCAN_DEAD_CODE
					      | PROP_SCAN_DEAD_STORES
					      | PROP_ALLOW_CFG_CHANGES));

	  /* Unless both dead-code removal and CFG changes are allowed,
	     one pass of global relaxation is all we do.  */
	  if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
	      != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
	    break;

	  /* Removing dead code may allow the CFG to be simplified which
	     in turn may allow for further dead code detection / removal.  */
	  FOR_EACH_BB_REVERSE (bb)
	    {
	      COPY_REG_SET (tmp, bb->global_live_at_end);
	      changed |= propagate_block (bb, tmp, NULL, NULL,
				prop_flags & (PROP_SCAN_DEAD_CODE
					      | PROP_SCAN_DEAD_STORES
					      | PROP_KILL_DEAD_CODE));
	    }

	  /* Don't pass PROP_SCAN_DEAD_CODE or PROP_KILL_DEAD_CODE to
	     subsequent propagate_block calls, since removing or acting as
	     removing dead code can affect global register liveness, which
	     is supposed to be finalized for this call after this loop.  */
	  stabilized_prop_flags
	    &= ~(PROP_SCAN_DEAD_CODE | PROP_SCAN_DEAD_STORES
		 | PROP_KILL_DEAD_CODE);

	  if (! changed)
	    break;

	  /* We repeat regardless of what cleanup_cfg says.  If there were
	     instructions deleted above, that might have been only a
	     partial improvement (see MAX_MEM_SET_LIST_LEN usage).
	     Further improvement may be possible.  */
	  cleanup_cfg (CLEANUP_EXPENSIVE);

	  /* Zap the life information from the last round.  If we don't
	     do this, we can wind up with registers that no longer appear
	     in the code being marked live at entry.  */
	  FOR_EACH_BB (bb)
	    {
	      CLEAR_REG_SET (bb->global_live_at_start);
	      CLEAR_REG_SET (bb->global_live_at_end);
	    }
	}

      /* If asked, remove notes from the blocks we'll update.  */
      if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
	count_or_remove_death_notes (blocks, 1);
    }

  /* Clear log links in case we are asked to (re)compute them.  */
  if (prop_flags & PROP_LOG_LINKS)
    clear_log_links (blocks);

  /* Now do the final, stabilized local propagation over the requested
     blocks (or all blocks if BLOCKS is null).  */
  if (blocks)
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
	{
	  bb = BASIC_BLOCK (i);

	  COPY_REG_SET (tmp, bb->global_live_at_end);
	  propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);

	  if (extent == UPDATE_LIFE_LOCAL)
	    verify_local_live_at_start (tmp, bb);
	});
    }
  else
    {
      FOR_EACH_BB_REVERSE (bb)
	{
	  COPY_REG_SET (tmp, bb->global_live_at_end);

	  propagate_block (bb, tmp, NULL, NULL, stabilized_prop_flags);

	  if (extent == UPDATE_LIFE_LOCAL)
	    verify_local_live_at_start (tmp, bb);
	}
    }

  FREE_REG_SET (tmp);

  if (prop_flags & PROP_REG_INFO)
    {
      /* The only pseudos that are live at the beginning of the function
	 are those that were not set anywhere in the function.  local-alloc
	 doesn't know how to handle these correctly, so mark them as not
	 local to any one basic block.  */
      EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
				 FIRST_PSEUDO_REGISTER, i,
				 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });

      /* We have a problem with any pseudoreg that lives across the setjmp.
	 ANSI says that if a user variable does not change in value between
	 the setjmp and the longjmp, then the longjmp preserves it.  This
	 includes longjmp from a place where the pseudo appears dead.
	 (In principle, the value still exists if it is in scope.)
	 If the pseudo goes in a hard reg, some other value may occupy
	 that hard reg where this pseudo is dead, thus clobbering the pseudo.
	 Conclusion: such a pseudo must not go in a hard reg.  */
      EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
				 FIRST_PSEUDO_REGISTER, i,
				 {
				   if (regno_reg_rtx[i] != 0)
				     {
				       REG_LIVE_LENGTH (i) = -1;
				       REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
				     }
				 });
    }
  if (reg_deaths)
    {
      free (reg_deaths);
      reg_deaths = NULL;
    }
  timevar_pop ((extent == UPDATE_LIFE_LOCAL || blocks)
	       ? TV_LIFE_UPDATE : TV_LIFE);
  if (ndead && dump_file)
    fprintf (dump_file, "deleted %i dead insns\n", ndead);
  return ndead;
}
724
725 /* Update life information in all blocks where BB_DIRTY is set. */
726
727 int
728 update_life_info_in_dirty_blocks (enum update_life_extent extent, int prop_flags)
729 {
730 sbitmap update_life_blocks = sbitmap_alloc (last_basic_block);
731 int n = 0;
732 basic_block bb;
733 int retval = 0;
734
735 sbitmap_zero (update_life_blocks);
736 FOR_EACH_BB (bb)
737 {
738 if (extent == UPDATE_LIFE_LOCAL)
739 {
740 if (bb->flags & BB_DIRTY)
741 {
742 SET_BIT (update_life_blocks, bb->index);
743 n++;
744 }
745 }
746 else
747 {
748 /* ??? Bootstrap with -march=pentium4 fails to terminate
749 with only a partial life update. */
750 SET_BIT (update_life_blocks, bb->index);
751 if (bb->flags & BB_DIRTY)
752 n++;
753 }
754 }
755
756 if (n)
757 retval = update_life_info (update_life_blocks, extent, prop_flags);
758
759 sbitmap_free (update_life_blocks);
760 return retval;
761 }
762
763 /* Free the variables allocated by find_basic_blocks. */
764
765 void
766 free_basic_block_vars (void)
767 {
768 if (basic_block_info)
769 {
770 clear_edges ();
771 basic_block_info = NULL;
772 }
773 n_basic_blocks = 0;
774 last_basic_block = 0;
775
776 ENTRY_BLOCK_PTR->aux = NULL;
777 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
778 EXIT_BLOCK_PTR->aux = NULL;
779 EXIT_BLOCK_PTR->global_live_at_start = NULL;
780 }
781
782 /* Delete any insns that copy a register to itself. */
783
784 int
785 delete_noop_moves (rtx f ATTRIBUTE_UNUSED)
786 {
787 rtx insn, next;
788 basic_block bb;
789 int nnoops = 0;
790
791 FOR_EACH_BB (bb)
792 {
793 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
794 {
795 next = NEXT_INSN (insn);
796 if (INSN_P (insn) && noop_move_p (insn))
797 {
798 rtx note;
799
800 /* If we're about to remove the first insn of a libcall
801 then move the libcall note to the next real insn and
802 update the retval note. */
803 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX))
804 && XEXP (note, 0) != insn)
805 {
806 rtx new_libcall_insn = next_real_insn (insn);
807 rtx retval_note = find_reg_note (XEXP (note, 0),
808 REG_RETVAL, NULL_RTX);
809 REG_NOTES (new_libcall_insn)
810 = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
811 REG_NOTES (new_libcall_insn));
812 XEXP (retval_note, 0) = new_libcall_insn;
813 }
814
815 delete_insn_and_edges (insn);
816 nnoops++;
817 }
818 }
819 }
820 if (nnoops && dump_file)
821 fprintf (dump_file, "deleted %i noop moves", nnoops);
822 return nnoops;
823 }
824
/* Delete any jump tables never referenced.  We can't delete them at the
   time of removing tablejump insn as they are referenced by the preceding
   insns computing the destination, so we delay deleting and garbagecollect
   them once life information is computed.  */
void
delete_dead_jumptables (void)
{
  rtx insn, next;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      /* A dead jump table is a CODE_LABEL with no uses beyond its
	 preserve flag (LABEL_NUSES == LABEL_PRESERVE_P), immediately
	 followed by the ADDR_VEC or ADDR_DIFF_VEC dispatch table.  */
      if (GET_CODE (insn) == CODE_LABEL
	  && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
	  && GET_CODE (next) == JUMP_INSN
	  && (GET_CODE (PATTERN (next)) == ADDR_VEC
	      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
	{
	  if (dump_file)
	    fprintf (dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
	  /* Delete the table first, then its label, and resume the
	     scan past the deleted pair.  NEXT still points at the
	     table insn here, so its NEXT_INSN is the resume point.  */
	  delete_insn (NEXT_INSN (insn));
	  delete_insn (insn);
	  next = NEXT_INSN (next);
	}
    }
}
850
851 /* Determine if the stack pointer is constant over the life of the function.
852 Only useful before prologues have been emitted. */
853
854 static void
855 notice_stack_pointer_modification_1 (rtx x, rtx pat ATTRIBUTE_UNUSED,
856 void *data ATTRIBUTE_UNUSED)
857 {
858 if (x == stack_pointer_rtx
859 /* The stack pointer is only modified indirectly as the result
860 of a push until later in flow. See the comments in rtl.texi
861 regarding Embedded Side-Effects on Addresses. */
862 || (GET_CODE (x) == MEM
863 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == RTX_AUTOINC
864 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
865 current_function_sp_is_unchanging = 0;
866 }
867
868 static void
869 notice_stack_pointer_modification (rtx f)
870 {
871 rtx insn;
872
873 /* Assume that the stack pointer is unchanging if alloca hasn't
874 been used. */
875 current_function_sp_is_unchanging = !current_function_calls_alloca;
876 if (! current_function_sp_is_unchanging)
877 return;
878
879 for (insn = f; insn; insn = NEXT_INSN (insn))
880 {
881 if (INSN_P (insn))
882 {
883 /* Check if insn modifies the stack pointer. */
884 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
885 NULL);
886 if (! current_function_sp_is_unchanging)
887 return;
888 }
889 }
890 }
891
892 /* Mark a register in SET. Hard registers in large modes get all
893 of their component registers set as well. */
894
895 static void
896 mark_reg (rtx reg, void *xset)
897 {
898 regset set = (regset) xset;
899 int regno = REGNO (reg);
900
901 if (GET_MODE (reg) == BLKmode)
902 abort ();
903
904 SET_REGNO_REG_SET (set, regno);
905 if (regno < FIRST_PSEUDO_REGISTER)
906 {
907 int n = hard_regno_nregs[regno][GET_MODE (reg)];
908 while (--n > 0)
909 SET_REGNO_REG_SET (set, regno + n);
910 }
911 }
912
/* Mark those regs which are needed at the end of the function as live
   at the end of the last basic block.  SET is the regset to populate
   (the caller passes EXIT_BLOCK_PTR->global_live_at_start).  */

static void
mark_regs_live_at_end (regset set)
{
  unsigned int i;

  /* If exiting needs the right stack value, consider the stack pointer
     live at the end of the function.  */
  if ((HAVE_epilogue && epilogue_completed)
      || ! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
	  && ! current_function_calls_alloca
	  && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.  If
     we end up eliminating it, it will be removed from the live list
     of each basic block by reload.  */

  if (! reload_completed || frame_pointer_needed)
    {
      SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
#endif

  /* Mark all global registers, and all registers used by the epilogue
     as being live at the end of the function since they may be
     referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      SET_REGNO_REG_SET (set, i);

  if (HAVE_epilogue && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (regs_ever_live[i] && ! LOCAL_REGNO (i)
	    && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	  SET_REGNO_REG_SET (set, i);
    }

#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && current_function_calls_eh_return)
    for (i = 0; ; ++i)
      {
	unsigned regno = EH_RETURN_DATA_REGNO(i);
	if (regno == INVALID_REGNUM)
	  break;
	SET_REGNO_REG_SET (set, regno);
      }
#endif
#ifdef EH_RETURN_STACKADJ_RTX
  /* Likewise the EH stack-adjustment register, until the epilogue
     that consumes it has been emitted.  */
  if ((! HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
	mark_reg (tmp, set);
    }
#endif
#ifdef EH_RETURN_HANDLER_RTX
  /* Likewise the EH handler-address register.  */
  if ((! HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
	mark_reg (tmp, set);
    }
#endif

  /* Mark function return value.  */
  diddle_return_value (mark_reg, set);
}
1005
/* Propagate global life info around the graph of basic blocks.  Begin
   considering blocks with their corresponding bit set in BLOCKS_IN.
   If BLOCKS_IN is null, consider it the universal set.

   BLOCKS_OUT is set for every block that was changed.

   FLAGS is a set of PROP_* flags that is handed on unchanged to
   propagate_block when a block must be rescanned insn by insn.  */

static void
calculate_global_regs_live (sbitmap blocks_in, sbitmap blocks_out, int flags)
{
  basic_block *queue, *qhead, *qtail, *qend, bb;
  regset tmp, new_live_at_end, invalidated_by_call;
  regset_head tmp_head, invalidated_by_call_head;
  regset_head new_live_at_end_head;
  int i;

  /* Some passes used to forget clear aux field of basic block causing
     sick behavior here.  */
#ifdef ENABLE_CHECKING
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    if (bb->aux)
      abort ();
#endif

  tmp = INITIALIZE_REG_SET (tmp_head);
  new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
  invalidated_by_call = INITIALIZE_REG_SET (invalidated_by_call_head);

  /* Inconveniently, this is only readily available in hard reg set form.
     Copy regs_invalidated_by_call into a regset so it can take part in
     the bitmap operations below.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
      SET_REGNO_REG_SET (invalidated_by_call, i);

  /* Create a worklist.  Allocate an extra slot for ENTRY_BLOCK, and one
     because the `head == tail' style test for an empty queue doesn't
     work with a full queue.  The queue is used circularly: QHEAD and
     QTAIL wrap around at QEND.  */
  queue = xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
  qtail = queue;
  qhead = qend = queue + n_basic_blocks + 2;

  /* Queue the blocks set in the initial mask.  Do this in reverse block
     number order so that we are more likely for the first round to do
     useful work.  We use AUX non-null to flag that the block is queued.  */
  if (blocks_in)
    {
      FOR_EACH_BB (bb)
	if (TEST_BIT (blocks_in, bb->index))
	  {
	    *--qhead = bb;
	    bb->aux = bb;
	  }
    }
  else
    {
      FOR_EACH_BB (bb)
	{
	  *--qhead = bb;
	  bb->aux = bb;
	}
    }

  /* We clean aux when we remove the initially-enqueued bbs, but we
     don't enqueue ENTRY and EXIT initially, so clean them upfront and
     unconditionally.  */
  ENTRY_BLOCK_PTR->aux = EXIT_BLOCK_PTR->aux = NULL;

  if (blocks_out)
    sbitmap_zero (blocks_out);

  /* We work through the queue until there are no more blocks.  What
     is live at the end of this block is precisely the union of what
     is live at the beginning of all its successors.  So, we set its
     GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
     for its successors.  Then, we compute GLOBAL_LIVE_AT_START for
     this block by walking through the instructions in this block in
     reverse order and updating as we go.  If that changed
     GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
     queue; they will now need to recalculate GLOBAL_LIVE_AT_END.

     We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
     never shrinks.  If a register appears in GLOBAL_LIVE_AT_START, it
     must either be live at the end of the block, or used within the
     block.  In the latter case, it will certainly never disappear
     from GLOBAL_LIVE_AT_START.  In the former case, the register
     could go away only if it disappeared from GLOBAL_LIVE_AT_START
     for one of the successor blocks.  By induction, that cannot
     occur.  */
  while (qhead != qtail)
    {
      int rescan, changed;
      basic_block bb;
      edge e;

      /* Dequeue the next block and clear its queued flag.  */
      bb = *qhead++;
      if (qhead == qend)
	qhead = queue;
      bb->aux = NULL;

      /* Begin by propagating live_at_start from the successor blocks.  */
      CLEAR_REG_SET (new_live_at_end);

      if (bb->succ)
	for (e = bb->succ; e; e = e->succ_next)
	  {
	    basic_block sb = e->dest;

	    /* Call-clobbered registers die across exception and
	       call edges.  */
	    /* ??? Abnormal call edges ignored for the moment, as this gets
	       confused by sibling call edges, which crashes reg-stack.  */
	    if (e->flags & EDGE_EH)
	      {
		bitmap_operation (tmp, sb->global_live_at_start,
				  invalidated_by_call, BITMAP_AND_COMPL);
		IOR_REG_SET (new_live_at_end, tmp);
	      }
	    else
	      IOR_REG_SET (new_live_at_end, sb->global_live_at_start);

	    /* If a target saves one register in another (instead of on
	       the stack) the save register will need to be live for EH.  */
	    if (e->flags & EDGE_EH)
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (EH_USES (i))
		  SET_REGNO_REG_SET (new_live_at_end, i);
	  }
      else
	{
	  /* This might be a noreturn function that throws.  And
	     even if it isn't, getting the unwind info right helps
	     debugging.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (EH_USES (i))
	      SET_REGNO_REG_SET (new_live_at_end, i);
	}

      /* The all-important stack pointer must always be live.  */
      SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);

      /* Before reload, there are a few registers that must be forced
	 live everywhere -- which might not already be the case for
	 blocks within infinite loops.  */
      if (! reload_completed)
	{
	  /* Any reference to any pseudo before reload is a potential
	     reference of the frame pointer.  */
	  SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  /* Pseudos with argument area equivalences may require
	     reloading via the argument pointer.  */
	  if (fixed_regs[ARG_POINTER_REGNUM])
	    SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
#endif

	  /* Any constant, or pseudo with constant equivalences, may
	     require reloading from memory using the pic register.  */
	  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	    SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
	}

      /* ENTRY has no insns to scan; just record what its successors
	 need and move on.  */
      if (bb == ENTRY_BLOCK_PTR)
	{
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
	  continue;
	}

      /* On our first pass through this block, we'll go ahead and continue.
	 Recognize first pass by local_set NULL.  On subsequent passes, we
	 get to skip out early if live_at_end wouldn't have changed.  */

      if (bb->local_set == NULL)
	{
	  bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
	  bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
	  rescan = 1;
	}
      else
	{
	  /* If any bits were removed from live_at_end, we'll have to
	     rescan the block.  This wouldn't be necessary if we had
	     precalculated local_live, however with PROP_SCAN_DEAD_CODE
	     local_live is really dependent on live_at_end.  */
	  CLEAR_REG_SET (tmp);
	  rescan = bitmap_operation (tmp, bb->global_live_at_end,
				     new_live_at_end, BITMAP_AND_COMPL);

	  if (! rescan)
	    {
	      /* If any of the registers in the new live_at_end set are
		 conditionally set in this basic block, we must rescan.
		 This is because conditional lifetimes at the end of the
		 block do not just take the live_at_end set into account,
		 but also the liveness at the start of each successor
		 block.  We can miss changes in those sets if we only
		 compare the new live_at_end against the previous one.  */
	      CLEAR_REG_SET (tmp);
	      rescan = bitmap_operation (tmp, new_live_at_end,
					 bb->cond_local_set, BITMAP_AND);
	    }

	  if (! rescan)
	    {
	      /* Find the set of changed bits.  Take this opportunity
		 to notice that this set is empty and early out.  */
	      CLEAR_REG_SET (tmp);
	      changed = bitmap_operation (tmp, bb->global_live_at_end,
					  new_live_at_end, BITMAP_XOR);
	      if (! changed)
		continue;

	      /* If any of the changed bits overlap with local_set,
		 we'll have to rescan the block.  Detect overlap by
		 the AND with ~local_set turning off bits.  */
	      rescan = bitmap_operation (tmp, tmp, bb->local_set,
					 BITMAP_AND_COMPL);
	    }
	}

      /* Let our caller know that BB changed enough to require its
	 death notes updated.  */
      if (blocks_out)
	SET_BIT (blocks_out, bb->index);

      if (! rescan)
	{
	  /* Add to live_at_start the set of all registers in
	     new_live_at_end that aren't in the old live_at_end.  */

	  bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
			    BITMAP_AND_COMPL);
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);

	  changed = bitmap_operation (bb->global_live_at_start,
				      bb->global_live_at_start,
				      tmp, BITMAP_IOR);
	  if (! changed)
	    continue;
	}
      else
	{
	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);

	  /* Rescan the block insn by insn to turn (a copy of) live_at_end
	     into live_at_start.  */
	  propagate_block (bb, new_live_at_end, bb->local_set,
			   bb->cond_local_set, flags);

	  /* If live_at start didn't change, no need to go farther.  */
	  if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
	    continue;

	  COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
	}

      /* Queue all predecessors of BB so that we may re-examine
	 their live_at_end.  */
      for (e = bb->pred; e; e = e->pred_next)
	{
	  basic_block pb = e->src;
	  if (pb->aux == NULL)
	    {
	      *qtail++ = pb;
	      if (qtail == qend)
		qtail = queue;
	      pb->aux = pb;
	    }
	}
    }

  FREE_REG_SET (tmp);
  FREE_REG_SET (new_live_at_end);
  FREE_REG_SET (invalidated_by_call);

  /* The local sets were only needed during the iteration; release
     them for the blocks we touched.  */
  if (blocks_out)
    {
      EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
	{
	  basic_block bb = BASIC_BLOCK (i);
	  FREE_REG_SET (bb->local_set);
	  FREE_REG_SET (bb->cond_local_set);
	});
    }
  else
    {
      FOR_EACH_BB (bb)
	{
	  FREE_REG_SET (bb->local_set);
	  FREE_REG_SET (bb->cond_local_set);
	}
    }

  free (queue);
}
1300
1301 \f
/* This structure is used to pass parameters to and from the function
   find_regno_partial ().  It is used to pass in the register number we
   are looking for, as well as to return any rtx we find.  */

typedef struct {
  unsigned regno_to_find;	/* In: register number to search for.  */
  rtx retval;			/* Out: partial reference found, or NULL_RTX.  */
} find_regno_partial_param;
1311
1312
1313 /* Find the rtx for the reg numbers specified in 'data' if it is
1314 part of an expression which only uses part of the register. Return
1315 it in the structure passed in. */
1316 static int
1317 find_regno_partial (rtx *ptr, void *data)
1318 {
1319 find_regno_partial_param *param = (find_regno_partial_param *)data;
1320 unsigned reg = param->regno_to_find;
1321 param->retval = NULL_RTX;
1322
1323 if (*ptr == NULL_RTX)
1324 return 0;
1325
1326 switch (GET_CODE (*ptr))
1327 {
1328 case ZERO_EXTRACT:
1329 case SIGN_EXTRACT:
1330 case STRICT_LOW_PART:
1331 if (GET_CODE (XEXP (*ptr, 0)) == REG && REGNO (XEXP (*ptr, 0)) == reg)
1332 {
1333 param->retval = XEXP (*ptr, 0);
1334 return 1;
1335 }
1336 break;
1337
1338 case SUBREG:
1339 if (GET_CODE (SUBREG_REG (*ptr)) == REG
1340 && REGNO (SUBREG_REG (*ptr)) == reg)
1341 {
1342 param->retval = SUBREG_REG (*ptr);
1343 return 1;
1344 }
1345 break;
1346
1347 default:
1348 break;
1349 }
1350
1351 return 0;
1352 }
1353
/* Process all immediate successors of the entry block looking for pseudo
   registers which are live on entry.  Find all of those whose first
   instance is a partial register reference of some kind, and initialize
   them to 0 after the entry block.  This will prevent bit sets within
   registers whose value is unknown, and may contain some kind of sticky
   bits we don't want.

   Returns nonzero if any initializing move was emitted (in which case
   the edge insertions have been committed).  */

int
initialize_uninitialized_subregs (void)
{
  rtx insn;
  edge e;
  int reg, did_something = 0;
  find_regno_partial_param param;

  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      basic_block bb = e->dest;
      regset map = bb->global_live_at_start;
      /* Only pseudos are of interest; hard registers are skipped by
	 starting the scan at FIRST_PSEUDO_REGISTER.  */
      EXECUTE_IF_SET_IN_REG_SET (map,
				 FIRST_PSEUDO_REGISTER, reg,
				 {
				   int uid = REGNO_FIRST_UID (reg);
				   rtx i;

				   /* Find an insn which mentions the register we are looking for.
				      It's preferable to have an instance of the register's rtl since
				      there may be various flags set which we need to duplicate.
				      If we can't find it, it's probably an automatic whose initial
				      value doesn't matter, or hopefully something we don't care about.  */
				   for (i = get_insns (); i && INSN_UID (i) != uid; i = NEXT_INSN (i))
				     ;
				   if (i != NULL_RTX)
				     {
				       /* Found the insn, now get the REG rtx, if we can.  */
				       param.regno_to_find = reg;
				       for_each_rtx (&i, find_regno_partial, &param);
				       if (param.retval != NULL_RTX)
					 {
					   /* Emit "reg = 0" on the edge out of ENTRY.  */
					   start_sequence ();
					   emit_move_insn (param.retval,
							   CONST0_RTX (GET_MODE (param.retval)));
					   insn = get_insns ();
					   end_sequence ();
					   insert_insn_on_edge (insn, e);
					   did_something = 1;
					 }
				     }
				 });
    }

  if (did_something)
    commit_edge_insertions ();
  return did_something;
}
1409
1410 \f
1411 /* Subroutines of life analysis. */
1412
1413 /* Allocate the permanent data structures that represent the results
1414 of life analysis. Not static since used also for stupid life analysis. */
1415
1416 void
1417 allocate_bb_life_data (void)
1418 {
1419 basic_block bb;
1420
1421 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1422 {
1423 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1424 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1425 }
1426
1427 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1428 }
1429
1430 void
1431 allocate_reg_life_data (void)
1432 {
1433 int i;
1434
1435 max_regno = max_reg_num ();
1436 if (reg_deaths)
1437 abort ();
1438 reg_deaths = xcalloc (sizeof (*reg_deaths), max_regno);
1439
1440 /* Recalculate the register space, in case it has grown. Old style
1441 vector oriented regsets would set regset_{size,bytes} here also. */
1442 allocate_reg_info (max_regno, FALSE, FALSE);
1443
1444 /* Reset all the data we'll collect in propagate_block and its
1445 subroutines. */
1446 for (i = 0; i < max_regno; i++)
1447 {
1448 REG_N_SETS (i) = 0;
1449 REG_N_REFS (i) = 0;
1450 REG_N_DEATHS (i) = 0;
1451 REG_N_CALLS_CROSSED (i) = 0;
1452 REG_LIVE_LENGTH (i) = 0;
1453 REG_FREQ (i) = 0;
1454 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
1455 }
1456 }
1457
/* Delete dead instructions for propagate_block.  INSN is removed from
   the insn chain (along with any jump table it was the sole user of),
   and the global NDEAD counter is bumped for each insn deleted.  */

static void
propagate_block_delete_insn (rtx insn)
{
  rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);

  /* If the insn referred to a label, and that label was attached to
     an ADDR_VEC, it's safe to delete the ADDR_VEC.  In fact, it's
     pretty much mandatory to delete it, because the ADDR_VEC may be
     referencing labels that no longer exist.

     INSN may reference a deleted label, particularly when a jump
     table has been optimized into a direct jump.  There's no
     real good way to fix up the reference to the deleted label
     when the label is deleted, so we just allow it here.  */

  if (inote && GET_CODE (inote) == CODE_LABEL)
    {
      rtx label = XEXP (inote, 0);
      rtx next;

      /* The label may be forced if it has been put in the constant
	 pool.  If that is the only use we must discard the table
	 jump following it, but not the label itself.  */
      if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
	  && (next = next_nonnote_insn (label)) != NULL
	  && GET_CODE (next) == JUMP_INSN
	  && (GET_CODE (PATTERN (next)) == ADDR_VEC
	      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
	{
	  rtx pat = PATTERN (next);
	  int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
	  int len = XVECLEN (pat, diff_vec_p);
	  int i;

	  /* Drop the use counts of every label the table referenced
	     before deleting the table jump itself.  */
	  for (i = 0; i < len; i++)
	    LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;

	  delete_insn_and_edges (next);
	  ndead++;
	}
    }

  delete_insn_and_edges (insn);
  ndead++;
}
1505
1506 /* Delete dead libcalls for propagate_block. Return the insn
1507 before the libcall. */
1508
1509 static rtx
1510 propagate_block_delete_libcall (rtx insn, rtx note)
1511 {
1512 rtx first = XEXP (note, 0);
1513 rtx before = PREV_INSN (first);
1514
1515 delete_insn_chain_and_edges (first, insn);
1516 ndead++;
1517 return before;
1518 }
1519
/* Update the life-status of regs for one insn.  Return the previous insn.

   PBI carries the liveness state being built up for the block (see
   init_propagate_block_info); INSN is the insn to process.  Depending
   on PBI->flags this may delete INSN (dead stores, dead libcalls) or
   merge it into an auto-increment address.  */

rtx
propagate_one_insn (struct propagate_block_info *pbi, rtx insn)
{
  rtx prev = PREV_INSN (insn);
  int flags = pbi->flags;
  int insn_is_dead = 0;
  int libcall_is_dead = 0;
  rtx note;
  int i;

  /* Notes, barriers, etc. carry no liveness information.  */
  if (! INSN_P (insn))
    return prev;

  note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
  if (flags & PROP_SCAN_DEAD_CODE)
    {
      insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
      libcall_is_dead = (insn_is_dead && note != 0
			 && libcall_dead_p (pbi, note, insn));
    }

  /* If an instruction consists of just dead store(s) on final pass,
     delete it.  */
  if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
    {
      /* If we're trying to delete a prologue or epilogue instruction
	 that isn't flagged as possibly being dead, something is wrong.
	 But if we are keeping the stack pointer depressed, we might well
	 be deleting insns that are used to compute the amount to update
	 it by, so they are fine.  */
      if (reload_completed
	  && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
	       && (TYPE_RETURNS_STACK_DEPRESSED
		   (TREE_TYPE (current_function_decl))))
	  && (((HAVE_epilogue || HAVE_prologue)
	       && prologue_epilogue_contains (insn))
	      || (HAVE_sibcall_epilogue
		  && sibcall_epilogue_contains (insn)))
	  && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
	fatal_insn ("Attempt to delete prologue/epilogue insn:", insn);

      /* Record sets.  Do this even for dead instructions, since they
	 would have killed the values if they hadn't been deleted.  */
      mark_set_regs (pbi, PATTERN (insn), insn);

      /* CC0 is now known to be dead.  Either this insn used it,
	 in which case it doesn't anymore, or clobbered it,
	 so the next insn can't use it.  */
      pbi->cc0_live = 0;

      if (libcall_is_dead)
	prev = propagate_block_delete_libcall ( insn, note);
      else
	{

	  /* If INSN contains a RETVAL note and is dead, but the libcall
	     as a whole is not dead, then we want to remove INSN, but
	     not the whole libcall sequence.

	     However, we need to also remove the dangling REG_LIBCALL
	     note so that we do not have mis-matched LIBCALL/RETVAL
	     notes.  In theory we could find a new location for the
	     REG_RETVAL note, but it hardly seems worth the effort.

	     NOTE at this point will be the RETVAL note if it exists.  */
	  if (note)
	    {
	      rtx libcall_note;

	      libcall_note
		= find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      remove_note (XEXP (note, 0), libcall_note);
	    }

	  /* Similarly if INSN contains a LIBCALL note, remove the
	     dangling REG_RETVAL note.  */
	  note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
	  if (note)
	    {
	      rtx retval_note;

	      retval_note
		= find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      remove_note (XEXP (note, 0), retval_note);
	    }

	  /* Now delete INSN.  */
	  propagate_block_delete_insn (insn);
	}

      return prev;
    }

  /* See if this is an increment or decrement that can be merged into
     a following memory address.  */
#ifdef AUTO_INC_DEC
  {
    rtx x = single_set (insn);

    /* Does this instruction increment or decrement a register?  */
    if ((flags & PROP_AUTOINC)
	&& x != 0
	&& GET_CODE (SET_DEST (x)) == REG
	&& (GET_CODE (SET_SRC (x)) == PLUS
	    || GET_CODE (SET_SRC (x)) == MINUS)
	&& XEXP (SET_SRC (x), 0) == SET_DEST (x)
	&& GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	/* Ok, look for a following memory ref we can combine with.
	   If one is found, change the memory ref to a PRE_INC
	   or PRE_DEC, cancel this insn, and return 1.
	   Return 0 if nothing has been done.  */
	&& try_pre_increment_1 (pbi, insn))
      return prev;
  }
#endif /* AUTO_INC_DEC */

  CLEAR_REG_SET (pbi->new_set);

  /* If this is not the final pass, and this insn is copying the value of
     a library call and it's dead, don't scan the insns that perform the
     library call, so that the call's arguments are not marked live.  */
  if (libcall_is_dead)
    {
      /* Record the death of the dest reg.  */
      mark_set_regs (pbi, PATTERN (insn), insn);

      /* Skip backwards over the whole libcall sequence.  */
      insn = XEXP (note, 0);
      return PREV_INSN (insn);
    }
  else if (GET_CODE (PATTERN (insn)) == SET
	   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
	   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
	   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
	   && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
    {
      /* We have an insn to pop a constant amount off the stack.
         (Such insns use PLUS regardless of the direction of the stack,
         and any insn to adjust the stack by a constant is always a pop
	 or part of a push.)
         These insns, if not dead stores, have no effect on life, though
         they do have an effect on the memory stores we are tracking.  */
      invalidate_mems_from_set (pbi, stack_pointer_rtx);
      /* Still, we need to update local_set, lest ifcvt.c:dead_or_predicable
	 concludes that the stack pointer is not modified.  */
      mark_set_regs (pbi, PATTERN (insn), insn);
    }
  else
    {
      rtx note;
      /* Any regs live at the time of a call instruction must not go
	 in a register clobbered by calls.  Find all regs now live and
	 record this for them.  */

      if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
	EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
				   { REG_N_CALLS_CROSSED (i)++; });

      /* Record sets.  Do this even for dead instructions, since they
	 would have killed the values if they hadn't been deleted.  */
      mark_set_regs (pbi, PATTERN (insn), insn);

      if (GET_CODE (insn) == CALL_INSN)
	{
	  regset live_at_end;
	  bool sibcall_p;
	  rtx note, cond;
	  int i;

	  /* For a conditional call, the clobbers below are themselves
	     conditional on the call's COND_EXEC test.  */
	  cond = NULL_RTX;
	  if (GET_CODE (PATTERN (insn)) == COND_EXEC)
	    cond = COND_EXEC_TEST (PATTERN (insn));

	  /* Non-constant calls clobber memory, constant calls do not
	     clobber memory, though they may clobber outgoing arguments
	     on the stack.  */
	  if (! CONST_OR_PURE_CALL_P (insn))
	    {
	      free_EXPR_LIST_list (&pbi->mem_set_list);
	      pbi->mem_set_list_len = 0;
	    }
	  else
	    invalidate_mems_from_set (pbi, stack_pointer_rtx);

	  /* There may be extra registers to be clobbered.  */
	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
	       note;
	       note = XEXP (note, 1))
	    if (GET_CODE (XEXP (note, 0)) == CLOBBER)
	      mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
			  cond, insn, pbi->flags);

	  /* Calls change all call-used and global registers; sibcalls do not
	     clobber anything that must be preserved at end-of-function,
	     except for return values.  */

	  sibcall_p = SIBLING_CALL_P (insn);
	  live_at_end = EXIT_BLOCK_PTR->global_live_at_start;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)
		&& ! (sibcall_p
		      && REGNO_REG_SET_P (live_at_end, i)
		      && ! refers_to_regno_p (i, i+1,
					      current_function_return_rtx,
					      (rtx *) 0)))
	      {
		enum rtx_code code = global_regs[i] ? SET : CLOBBER;
		/* We do not want REG_UNUSED notes for these registers.  */
		mark_set_1 (pbi, code, regno_reg_rtx[i], cond, insn,
			    pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
	      }
	}

      /* If an insn doesn't use CC0, it becomes dead since we assume
	 that every insn clobbers it.  So show it dead here;
	 mark_used_regs will set it live if it is referenced.  */
      pbi->cc0_live = 0;

      /* Record uses.  */
      if (! insn_is_dead)
	mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
      if ((flags & PROP_EQUAL_NOTES)
	  && ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX))
	      || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX))))
	mark_used_regs (pbi, XEXP (note, 0), NULL_RTX, insn);

      /* Sometimes we may have inserted something before INSN (such as a move)
	 when we make an auto-inc.  So ensure we will scan those insns.  */
#ifdef AUTO_INC_DEC
      prev = PREV_INSN (insn);
#endif

      if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
	{
	  int i;
	  rtx note, cond;

	  cond = NULL_RTX;
	  if (GET_CODE (PATTERN (insn)) == COND_EXEC)
	    cond = COND_EXEC_TEST (PATTERN (insn));

	  /* Calls use their arguments, and may clobber memory which
	     address involves some register.  */
	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
	       note;
	       note = XEXP (note, 1))
	    /* We find USE or CLOBBER entities in a FUNCTION_USAGE list: both
	       of which mark_used_regs knows how to handle.  */
	    mark_used_regs (pbi, XEXP (XEXP (note, 0), 0), cond, insn);

	  /* The stack ptr is used (honorarily) by a CALL insn.  */
	  if ((flags & PROP_REG_INFO)
	      && !REGNO_REG_SET_P (pbi->reg_live, STACK_POINTER_REGNUM))
	    reg_deaths[STACK_POINTER_REGNUM] = pbi->insn_num;
	  SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);

	  /* Calls may also reference any of the global registers,
	     so they are made live.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      mark_used_reg (pbi, regno_reg_rtx[i], cond, insn);
	}
    }

  pbi->insn_num++;

  return prev;
}
1789
/* Initialize a propagate_block_info struct for public consumption.
   Note that the structure itself is opaque to this file, but that
   the user can use the regsets provided here.

   BB is the block to be scanned; LIVE is the regset of registers live
   at the end of BB, updated in place by propagate_one_insn.  LOCAL_SET
   and COND_LOCAL_SET (either may be null) collect registers set
   unconditionally resp. conditionally in the block.  FLAGS is a mask
   of PROP_* flags controlling what information is computed.  The
   returned struct must be released with free_propagate_block_info.  */

struct propagate_block_info *
init_propagate_block_info (basic_block bb, regset live, regset local_set,
			   regset cond_local_set, int flags)
{
  struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));

  /* Record the caller's sets and reset the per-block scan state.  */
  pbi->bb = bb;
  pbi->reg_live = live;
  pbi->mem_set_list = NULL_RTX;
  pbi->mem_set_list_len = 0;
  pbi->local_set = local_set;
  pbi->cond_local_set = cond_local_set;
  pbi->cc0_live = 0;
  pbi->flags = flags;
  pbi->insn_num = 0;

  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
    pbi->reg_next_use = xcalloc (max_reg_num (), sizeof (rtx));
  else
    pbi->reg_next_use = NULL;

  pbi->new_set = BITMAP_XMALLOC ();

#ifdef HAVE_conditional_execution
  pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
				       free_reg_cond_life_info);
  pbi->reg_cond_reg = BITMAP_XMALLOC ();

  /* If this block ends in a conditional branch, for each register
     live from one side of the branch and not the other, record the
     register as conditionally dead.  */
  if (GET_CODE (BB_END (bb)) == JUMP_INSN
      && any_condjump_p (BB_END (bb)))
    {
      regset_head diff_head;
      regset diff = INITIALIZE_REG_SET (diff_head);
      basic_block bb_true, bb_false;
      int i;

      /* Identify the successor blocks.  */
      bb_true = bb->succ->dest;
      if (bb->succ->succ_next != NULL)
	{
	  bb_false = bb->succ->succ_next->dest;

	  /* Make bb_true the branch-taken destination; exactly one of
	     the two edges must be the fallthrough edge.  */
	  if (bb->succ->flags & EDGE_FALLTHRU)
	    {
	      basic_block t = bb_false;
	      bb_false = bb_true;
	      bb_true = t;
	    }
	  else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
	    abort ();
	}
      else
	{
	  /* This can happen with a conditional jump to the next insn.  */
	  if (JUMP_LABEL (BB_END (bb)) != BB_HEAD (bb_true))
	    abort ();

	  /* Simplest way to do nothing.  */
	  bb_false = bb_true;
	}

      /* Compute which register lead different lives in the successors.  */
      if (bitmap_operation (diff, bb_true->global_live_at_start,
			    bb_false->global_live_at_start, BITMAP_XOR))
	{
	  /* Extract the condition from the branch.  */
	  rtx set_src = SET_SRC (pc_set (BB_END (bb)));
	  rtx cond_true = XEXP (set_src, 0);
	  rtx reg = XEXP (cond_true, 0);

	  if (GET_CODE (reg) == SUBREG)
	    reg = SUBREG_REG (reg);

	  /* We can only track conditional lifetimes if the condition is
	     in the form of a comparison of a register against zero.
	     If the condition is more complex than that, then it is safe
	     not to record any information.  */
	  if (GET_CODE (reg) == REG
	      && XEXP (cond_true, 1) == const0_rtx)
	    {
	      rtx cond_false
		= gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
				  GET_MODE (cond_true), XEXP (cond_true, 0),
				  XEXP (cond_true, 1));
	      /* If the branch falls through on true, swap the senses so
		 cond_true really guards the taken destination.  */
	      if (GET_CODE (XEXP (set_src, 1)) == PC)
		{
		  rtx t = cond_false;
		  cond_false = cond_true;
		  cond_true = t;
		}

	      SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));

	      /* For each such register, mark it conditionally dead.  */
	      EXECUTE_IF_SET_IN_REG_SET
		(diff, 0, i,
		 {
		   struct reg_cond_life_info *rcli;
		   rtx cond;

		   rcli = xmalloc (sizeof (*rcli));

		   /* The register is dead under the condition of the
		      branch arm in which it is NOT live.  */
		   if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
		     cond = cond_false;
		   else
		     cond = cond_true;
		   rcli->condition = cond;
		   rcli->stores = const0_rtx;
		   rcli->orig_condition = cond;

		   splay_tree_insert (pbi->reg_cond_dead, i,
				      (splay_tree_value) rcli);
		 });
	    }
	}

      FREE_REG_SET (diff);
    }
#endif

  /* If this block has no successors, any stores to the frame that aren't
     used later in the block are dead.  So make a pass over the block
     recording any such that are made and show them dead at the end.  We do
     a very conservative and simple job here.  */
  if (optimize
      && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
	    && (TYPE_RETURNS_STACK_DEPRESSED
		(TREE_TYPE (current_function_decl))))
      && (flags & PROP_SCAN_DEAD_STORES)
      && (bb->succ == NULL
	  || (bb->succ->succ_next == NULL
	      && bb->succ->dest == EXIT_BLOCK_PTR
	      && ! current_function_calls_eh_return)))
    {
      rtx insn, set;
      for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
	if (GET_CODE (insn) == INSN
	    && (set = single_set (insn))
	    && GET_CODE (SET_DEST (set)) == MEM)
	  {
	    rtx mem = SET_DEST (set);
	    rtx canon_mem = canon_rtx (mem);

	    /* Only frame-pointer-relative stores with a constant
	       offset are considered.  */
	    if (XEXP (canon_mem, 0) == frame_pointer_rtx
		|| (GET_CODE (XEXP (canon_mem, 0)) == PLUS
		    && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
		    && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
	      add_to_mem_set_list (pbi, canon_mem);
	  }
    }

  return pbi;
}
1950
1951 /* Release a propagate_block_info struct. */
1952
1953 void
1954 free_propagate_block_info (struct propagate_block_info *pbi)
1955 {
1956 free_EXPR_LIST_list (&pbi->mem_set_list);
1957
1958 BITMAP_XFREE (pbi->new_set);
1959
1960 #ifdef HAVE_conditional_execution
1961 splay_tree_delete (pbi->reg_cond_dead);
1962 BITMAP_XFREE (pbi->reg_cond_reg);
1963 #endif
1964
1965 if (pbi->flags & PROP_REG_INFO)
1966 {
1967 int num = pbi->insn_num;
1968 int i;
1969
1970 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
1971 { REG_LIVE_LENGTH (i) += num - reg_deaths[i];
1972 reg_deaths[i] = 0;
1973 });
1974 }
1975 if (pbi->reg_next_use)
1976 free (pbi->reg_next_use);
1977
1978 free (pbi);
1979 }
1980
1981 /* Compute the registers live at the beginning of a basic block BB from
1982 those live at the end.
1983
1984 When called, REG_LIVE contains those live at the end. On return, it
1985 contains those live at the beginning.
1986
1987 LOCAL_SET, if non-null, will be set with all registers killed
1988 unconditionally by this basic block.
1989 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
1990 killed conditionally by this basic block. If there is any unconditional
1991 set of a register, then the corresponding bit will be set in LOCAL_SET
1992 and cleared in COND_LOCAL_SET.
1993 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
1994 case, the resulting set will be equal to the union of the two sets that
1995 would otherwise be computed.
1996
1997 Return nonzero if an INSN is deleted (i.e. by dead code removal). */
1998
1999 int
2000 propagate_block (basic_block bb, regset live, regset local_set,
2001 regset cond_local_set, int flags)
2002 {
2003 struct propagate_block_info *pbi;
2004 rtx insn, prev;
2005 int changed;
2006
2007 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
2008
2009 if (flags & PROP_REG_INFO)
2010 {
2011 int i;
2012
2013 /* Process the regs live at the end of the block.
2014 Mark them as not local to any one basic block. */
2015 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
2016 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2017 }
2018
2019 /* Scan the block an insn at a time from end to beginning. */
2020
2021 changed = 0;
2022 for (insn = BB_END (bb); ; insn = prev)
2023 {
2024 /* If this is a call to `setjmp' et al, warn if any
2025 non-volatile datum is live. */
2026 if ((flags & PROP_REG_INFO)
2027 && GET_CODE (insn) == CALL_INSN
2028 && find_reg_note (insn, REG_SETJMP, NULL))
2029 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
2030
2031 prev = propagate_one_insn (pbi, insn);
2032 if (!prev)
2033 changed |= insn != get_insns ();
2034 else
2035 changed |= NEXT_INSN (prev) != insn;
2036
2037 if (insn == BB_HEAD (bb))
2038 break;
2039 }
2040
2041 free_propagate_block_info (pbi);
2042
2043 return changed;
2044 }
2045 \f
/* Return 1 if X (the body of an insn, or part of it) is just dead stores
   (SET expressions whose destinations are registers dead after the insn).
   Liveness after the insn is taken from PBI->reg_live (this replaced an
   older NEEDED regset parameter).

   Unless CALL_OK is nonzero, an insn is needed if it contains a CALL.

   If X is the entire body of an insn, NOTES contains the reg notes
   pertaining to the insn; they are only consulted for REG_INC notes
   when AUTO_INC_DEC is defined.  */

static int
insn_dead_p (struct propagate_block_info *pbi, rtx x, int call_ok,
	     rtx notes ATTRIBUTE_UNUSED)
{
  enum rtx_code code = GET_CODE (x);

  /* Don't eliminate insns that may trap.  */
  if (flag_non_call_exceptions && may_trap_p (x))
    return 0;

#ifdef AUTO_INC_DEC
  /* As flow is invoked after combine, we must take existing AUTO_INC
     expressions into account.  An insn with a REG_INC side effect on a
     live (or global) register is never dead.  */
  for (; notes; notes = XEXP (notes, 1))
    {
      if (REG_NOTE_KIND (notes) == REG_INC)
	{
	  int regno = REGNO (XEXP (notes, 0));

	  /* Don't delete insns to set global regs.  */
	  if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
	      || REGNO_REG_SET_P (pbi->reg_live, regno))
	    return 0;
	}
    }
#endif

  /* If setting something that's a reg or part of one,
     see if that register's altered value will be live.  */

  if (code == SET)
    {
      rtx r = SET_DEST (x);

#ifdef HAVE_cc0
      if (GET_CODE (r) == CC0)
	return ! pbi->cc0_live;
#endif

      /* A SET that is a subroutine call cannot be dead.  */
      if (GET_CODE (SET_SRC (x)) == CALL)
	{
	  if (! call_ok)
	    return 0;
	}

      /* Don't eliminate loads from volatile memory or volatile asms.  */
      else if (volatile_refs_p (SET_SRC (x)))
	return 0;

      if (GET_CODE (r) == MEM)
	{
	  rtx temp, canon_r;

	  /* BLKmode stores have unknown size, so they cannot be proven
	     dead by a later overlapping store.  */
	  if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
	    return 0;

	  canon_r = canon_rtx (r);

	  /* Walk the set of memory locations we are currently tracking
	     and see if one is an identical match to this memory location.
	     If so, this memory write is dead (remember, we're walking
	     backwards from the end of the block to the start).  Since
	     rtx_equal_p does not check the alias set or flags, we also
	     must have the potential for them to conflict (anti_dependence).  */
	  for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
	    if (unchanging_anti_dependence (r, XEXP (temp, 0)))
	      {
		rtx mem = XEXP (temp, 0);

		/* The later store must cover at least as many bytes as
		   this one for this one to be dead.  */
		if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
		    && (GET_MODE_SIZE (GET_MODE (canon_r))
			<= GET_MODE_SIZE (GET_MODE (mem))))
		  return 1;

#ifdef AUTO_INC_DEC
		/* Check if memory reference matches an auto increment. Only
		   post increment/decrement or modify are valid.  */
		if (GET_MODE (mem) == GET_MODE (r)
		    && (GET_CODE (XEXP (mem, 0)) == POST_DEC
			|| GET_CODE (XEXP (mem, 0)) == POST_INC
			|| GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
		    && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
		    && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
		  return 1;
#endif
	      }
	}
      else
	{
	  /* Strip wrappers to reach the underlying register, if any.  */
	  while (GET_CODE (r) == SUBREG
		 || GET_CODE (r) == STRICT_LOW_PART
		 || GET_CODE (r) == ZERO_EXTRACT)
	    r = XEXP (r, 0);

	  if (GET_CODE (r) == REG)
	    {
	      int regno = REGNO (r);

	      /* Obvious.  */
	      if (REGNO_REG_SET_P (pbi->reg_live, regno))
		return 0;

	      /* If this is a hard register, verify that subsequent
		 words are not needed.  */
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  int n = hard_regno_nregs[regno][GET_MODE (r)];

		  while (--n > 0)
		    if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
		      return 0;
		}

	      /* Don't delete insns to set global regs.  */
	      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
		return 0;

	      /* Make sure insns to set the stack pointer aren't deleted.  */
	      if (regno == STACK_POINTER_REGNUM)
		return 0;

	      /* ??? These bits might be redundant with the force live bits
		 in calculate_global_regs_live.  We would delete from
		 sequential sets; whether this actually affects real code
		 for anything but the stack pointer I don't know.  */
	      /* Make sure insns to set the frame pointer aren't deleted.  */
	      if (regno == FRAME_POINTER_REGNUM
		  && (! reload_completed || frame_pointer_needed))
		return 0;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      if (regno == HARD_FRAME_POINTER_REGNUM
		  && (! reload_completed || frame_pointer_needed))
		return 0;
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      /* Make sure insns to set arg pointer are never deleted
		 (if the arg pointer isn't fixed, there will be a USE
		 for it, so we can treat it normally).  */
	      if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
		return 0;
#endif

	      /* Otherwise, the set is dead.  */
	      return 1;
	    }
	}
    }

  /* If performing several activities, insn is dead if each activity
     is individually dead.  Also, CLOBBERs and USEs can be ignored; a
     CLOBBER or USE that's inside a PARALLEL doesn't make the insn
     worth keeping.  */
  else if (code == PARALLEL)
    {
      int i = XVECLEN (x, 0);

      for (i--; i >= 0; i--)
	if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
	    && GET_CODE (XVECEXP (x, 0, i)) != USE
	    && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
	  return 0;

      return 1;
    }

  /* A CLOBBER of a pseudo-register that is dead serves no purpose.  That
     is not necessarily true for hard registers until after reload.  */
  else if (code == CLOBBER)
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && (REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
	      || reload_completed)
	  && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
	return 1;
    }

  /* ??? A base USE is a historical relic.  It ought not be needed anymore.
     Instances where it is still used are either (1) temporary and the USE
     escaped the pass, (2) cruft and the USE need not be emitted anymore,
     or (3) hiding bugs elsewhere that are not properly representing data
     flow.  */

  /* Anything not handled above is conservatively considered live.  */
  return 0;
}
2241
2242 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
2243 return 1 if the entire library call is dead.
2244 This is true if INSN copies a register (hard or pseudo)
2245 and if the hard return reg of the call insn is dead.
2246 (The caller should have tested the destination of the SET inside
2247 INSN already for death.)
2248
2249 If this insn doesn't just copy a register, then we don't
2250 have an ordinary libcall. In that case, cse could not have
2251 managed to substitute the source for the dest later on,
2252 so we can assume the libcall is dead.
2253
2254 PBI is the block info giving pseudoregs live before this insn.
2255 NOTE is the REG_RETVAL note of the insn. */
2256
2257 static int
2258 libcall_dead_p (struct propagate_block_info *pbi, rtx note, rtx insn)
2259 {
2260 rtx x = single_set (insn);
2261
2262 if (x)
2263 {
2264 rtx r = SET_SRC (x);
2265
2266 if (GET_CODE (r) == REG)
2267 {
2268 rtx call = XEXP (note, 0);
2269 rtx call_pat;
2270 int i;
2271
2272 /* Find the call insn. */
2273 while (call != insn && GET_CODE (call) != CALL_INSN)
2274 call = NEXT_INSN (call);
2275
2276 /* If there is none, do nothing special,
2277 since ordinary death handling can understand these insns. */
2278 if (call == insn)
2279 return 0;
2280
2281 /* See if the hard reg holding the value is dead.
2282 If this is a PARALLEL, find the call within it. */
2283 call_pat = PATTERN (call);
2284 if (GET_CODE (call_pat) == PARALLEL)
2285 {
2286 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
2287 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
2288 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
2289 break;
2290
2291 /* This may be a library call that is returning a value
2292 via invisible pointer. Do nothing special, since
2293 ordinary death handling can understand these insns. */
2294 if (i < 0)
2295 return 0;
2296
2297 call_pat = XVECEXP (call_pat, 0, i);
2298 }
2299
2300 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
2301 }
2302 }
2303 return 1;
2304 }
2305
2306 /* 1 if register REGNO was alive at a place where `setjmp' was called
2307 and was set more than once or is an argument.
2308 Such regs may be clobbered by `longjmp'. */
2309
2310 int
2311 regno_clobbered_at_setjmp (int regno)
2312 {
2313 if (n_basic_blocks == 0)
2314 return 0;
2315
2316 return ((REG_N_SETS (regno) > 1
2317 || REGNO_REG_SET_P (ENTRY_BLOCK_PTR->global_live_at_end, regno))
2318 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
2319 }
2320 \f
2321 /* Add MEM to PBI->MEM_SET_LIST. MEM should be canonical. Respect the
2322 maximal list size; look for overlaps in mode and select the largest. */
2323 static void
2324 add_to_mem_set_list (struct propagate_block_info *pbi, rtx mem)
2325 {
2326 rtx i;
2327
2328 /* We don't know how large a BLKmode store is, so we must not
2329 take them into consideration. */
2330 if (GET_MODE (mem) == BLKmode)
2331 return;
2332
2333 for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
2334 {
2335 rtx e = XEXP (i, 0);
2336 if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
2337 {
2338 if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
2339 {
2340 #ifdef AUTO_INC_DEC
2341 /* If we must store a copy of the mem, we can just modify
2342 the mode of the stored copy. */
2343 if (pbi->flags & PROP_AUTOINC)
2344 PUT_MODE (e, GET_MODE (mem));
2345 else
2346 #endif
2347 XEXP (i, 0) = mem;
2348 }
2349 return;
2350 }
2351 }
2352
2353 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN)
2354 {
2355 #ifdef AUTO_INC_DEC
2356 /* Store a copy of mem, otherwise the address may be
2357 scrogged by find_auto_inc. */
2358 if (pbi->flags & PROP_AUTOINC)
2359 mem = shallow_copy_rtx (mem);
2360 #endif
2361 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
2362 pbi->mem_set_list_len++;
2363 }
2364 }
2365
2366 /* INSN references memory, possibly using autoincrement addressing modes.
2367 Find any entries on the mem_set_list that need to be invalidated due
2368 to an address change. */
2369
2370 static int
2371 invalidate_mems_from_autoinc (rtx *px, void *data)
2372 {
2373 rtx x = *px;
2374 struct propagate_block_info *pbi = data;
2375
2376 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
2377 {
2378 invalidate_mems_from_set (pbi, XEXP (x, 0));
2379 return -1;
2380 }
2381
2382 return 0;
2383 }
2384
2385 /* EXP is a REG. Remove any dependent entries from pbi->mem_set_list. */
2386
2387 static void
2388 invalidate_mems_from_set (struct propagate_block_info *pbi, rtx exp)
2389 {
2390 rtx temp = pbi->mem_set_list;
2391 rtx prev = NULL_RTX;
2392 rtx next;
2393
2394 while (temp)
2395 {
2396 next = XEXP (temp, 1);
2397 if (reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
2398 {
2399 /* Splice this entry out of the list. */
2400 if (prev)
2401 XEXP (prev, 1) = next;
2402 else
2403 pbi->mem_set_list = next;
2404 free_EXPR_LIST_node (temp);
2405 pbi->mem_set_list_len--;
2406 }
2407 else
2408 prev = temp;
2409 temp = next;
2410 }
2411 }
2412
/* Process the registers that are set within X.  Their bits are set to
   1 in the regset DEAD, because they are dead prior to this insn.

   If INSN is nonzero, it is the insn being processed.

   FLAGS is the set of operations to perform.  */

static void
mark_set_regs (struct propagate_block_info *pbi, rtx x, rtx insn)
{
  rtx cond = NULL_RTX;
  rtx link;
  enum rtx_code code;
  int flags = pbi->flags;

  /* Registers modified via REG_INC notes (autoincrement addressing)
     count as sets too; treat each as a SET, conditional if the insn
     body is a COND_EXEC.  */
  if (insn)
    for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
      {
	if (REG_NOTE_KIND (link) == REG_INC)
	  mark_set_1 (pbi, SET, XEXP (link, 0),
		      (GET_CODE (x) == COND_EXEC
		       ? COND_EXEC_TEST (x) : NULL_RTX),
		      insn, flags);
      }
 retry:
  switch (code = GET_CODE (x))
    {
    case SET:
      /* An asm source forces PROP_ASM_SCAN so hard-reg destinations
	 get recorded in regs_asm_clobbered.  */
      if (GET_CODE (XEXP (x, 1)) == ASM_OPERANDS)
	flags |= PROP_ASM_SCAN;
      /* Fall through */
    case CLOBBER:
      mark_set_1 (pbi, code, SET_DEST (x), cond, insn, flags);
      return;

    case COND_EXEC:
      /* Unwrap the conditional and retry on its body with COND set.  */
      cond = COND_EXEC_TEST (x);
      x = COND_EXEC_CODE (x);
      goto retry;

    case PARALLEL:
      {
	int i;

	/* We must scan forwards.  If we have an asm, we need to set
	   the PROP_ASM_SCAN flag before scanning the clobbers.  */
	for (i = 0; i < XVECLEN (x, 0); i++)
	  {
	    rtx sub = XVECEXP (x, 0, i);
	    switch (code = GET_CODE (sub))
	      {
	      case COND_EXEC:
		/* Nested COND_EXEC within a PARALLEL is not expected.  */
		if (cond != NULL_RTX)
		  abort ();

		cond = COND_EXEC_TEST (sub);
		sub = COND_EXEC_CODE (sub);
		if (GET_CODE (sub) == SET)
		  goto mark_set;
		if (GET_CODE (sub) == CLOBBER)
		  goto mark_clob;
		break;

	      case SET:
	      mark_set:
		if (GET_CODE (XEXP (sub, 1)) == ASM_OPERANDS)
		  flags |= PROP_ASM_SCAN;
		/* Fall through */
	      case CLOBBER:
	      mark_clob:
		mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, flags);
		break;

	      case ASM_OPERANDS:
		flags |= PROP_ASM_SCAN;
		break;

	      default:
		break;
	      }
	  }
	break;
      }

    default:
      break;
    }
}
2501
/* Process a single set, which appears in INSN.  REG (which may not
   actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
   being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
   If the set is conditional (because it appears in a COND_EXEC), COND
   will be the condition.

   This updates liveness (PBI->reg_live), the local kill sets, the
   tracked dead-store list, and - on final passes - register statistics,
   LOG_LINKS and REG_UNUSED death notes.  */

static void
mark_set_1 (struct propagate_block_info *pbi, enum rtx_code code, rtx reg, rtx cond, rtx insn, int flags)
{
  int regno_first = -1, regno_last = -1;
  /* Bitmask over [regno_first, regno_last]: bit i set means word i of
     the destination is NOT proven dead (partial store or surviving
     conditional life).  */
  unsigned long not_dead = 0;
  int i;

  /* Modifying just one hardware register of a multi-reg value or just a
     byte field of a register does not mean the value from before this insn
     is now dead.  Of course, if it was dead after it's unused now.  */

  switch (GET_CODE (reg))
    {
    case PARALLEL:
      /* Some targets place small structures in registers for return values of
	 functions.  We have to detect this case specially here to get correct
	 flow information.  */
      for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
	if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
	  mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
		      flags);
      return;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case STRICT_LOW_PART:
      /* ??? Assumes STRICT_LOW_PART not used on multi-word registers.  */
      do
	reg = XEXP (reg, 0);
      while (GET_CODE (reg) == SUBREG
	     || GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == SIGN_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART);
      if (GET_CODE (reg) == MEM)
	break;
      /* A partial store leaves the rest of the register live.  */
      not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
      /* Fall through.  */

    case REG:
      regno_last = regno_first = REGNO (reg);
      if (regno_first < FIRST_PSEUDO_REGISTER)
	regno_last += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
      break;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (reg)) == REG)
	{
	  enum machine_mode outer_mode = GET_MODE (reg);
	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));

	  /* Identify the range of registers affected.  This is moderately
	     tricky for hard registers.  See alter_subreg.  */

	  regno_last = regno_first = REGNO (SUBREG_REG (reg));
	  if (regno_first < FIRST_PSEUDO_REGISTER)
	    {
	      regno_first += subreg_regno_offset (regno_first, inner_mode,
						  SUBREG_BYTE (reg),
						  outer_mode);
	      regno_last = (regno_first
			    + hard_regno_nregs[regno_first][outer_mode] - 1);

	      /* Since we've just adjusted the register number ranges, make
		 sure REG matches.  Otherwise some_was_live will be clear
		 when it shouldn't have been, and we'll create incorrect
		 REG_UNUSED notes.  */
	      reg = gen_rtx_REG (outer_mode, regno_first);
	    }
	  else
	    {
	      /* If the number of words in the subreg is less than the number
		 of words in the full register, we have a well-defined partial
		 set.  Otherwise the high bits are undefined.

		 This is only really applicable to pseudos, since we just took
		 care of multi-word hard registers.  */
	      if (((GET_MODE_SIZE (outer_mode)
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  < ((GET_MODE_SIZE (inner_mode)
		      + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
		not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
							    regno_first);

	      reg = SUBREG_REG (reg);
	    }
	}
      else
	reg = SUBREG_REG (reg);
      break;

    default:
      break;
    }

  /* If this set is a MEM, then it kills any aliased writes.
     If this set is a REG, then it kills any MEMs which use the reg.  */
  if (optimize && (flags & PROP_SCAN_DEAD_STORES))
    {
      if (GET_CODE (reg) == REG)
	invalidate_mems_from_set (pbi, reg);

      /* If the memory reference had embedded side effects (autoincrement
	 address modes.  Then we may need to kill some entries on the
	 memory set list.  */
      if (insn && GET_CODE (reg) == MEM)
	for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi);

      if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
	  /* ??? With more effort we could track conditional memory life.  */
	  && ! cond)
	add_to_mem_set_list (pbi, canon_rtx (reg));
    }

  /* Frame/arg pointers that are still needed are handled in the ELSE
     branch below: they are never marked dead.  */
  if (GET_CODE (reg) == REG
      && ! (regno_first == FRAME_POINTER_REGNUM
	    && (! reload_completed || frame_pointer_needed))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      && ! (regno_first == HARD_FRAME_POINTER_REGNUM
	    && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
#endif
      )
    {
      int some_was_live = 0, some_was_dead = 0;

      for (i = regno_first; i <= regno_last; ++i)
	{
	  int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
	  if (pbi->local_set)
	    {
	      /* Order of the set operation matters here since both
		 sets may be the same.  */
	      CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
	      if (cond != NULL_RTX
		  && ! REGNO_REG_SET_P (pbi->local_set, i))
		SET_REGNO_REG_SET (pbi->cond_local_set, i);
	      else
		SET_REGNO_REG_SET (pbi->local_set, i);
	    }
	  if (code != CLOBBER)
	    SET_REGNO_REG_SET (pbi->new_set, i);

	  some_was_live |= needed_regno;
	  some_was_dead |= ! needed_regno;
	}

#ifdef HAVE_conditional_execution
      /* Consider conditional death in deciding that the register needs
	 a death note.  */
      if (some_was_live && ! not_dead
	  /* The stack pointer is never dead.  Well, not strictly true,
	     but it's very difficult to tell from here.  Hopefully
	     combine_stack_adjustments will fix up the most egregious
	     errors.  */
	  && regno_first != STACK_POINTER_REGNUM)
	{
	  for (i = regno_first; i <= regno_last; ++i)
	    if (! mark_regno_cond_dead (pbi, i, cond))
	      not_dead |= ((unsigned long) 1) << (i - regno_first);
	}
#endif

      /* Additional data to record if this is the final pass.  */
      if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
		   | PROP_DEATH_NOTES | PROP_AUTOINC))
	{
	  rtx y;
	  int blocknum = pbi->bb->index;

	  y = NULL_RTX;
	  if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
	    {
	      y = pbi->reg_next_use[regno_first];

	      /* The next use is no longer next, since a store intervenes.  */
	      for (i = regno_first; i <= regno_last; ++i)
		pbi->reg_next_use[i] = 0;
	    }

	  if (flags & PROP_REG_INFO)
	    {
	      for (i = regno_first; i <= regno_last; ++i)
		{
		  /* Count (weighted) references, stores, etc.  This counts a
		     register twice if it is modified, but that is correct.  */
		  REG_N_SETS (i) += 1;
		  REG_N_REFS (i) += 1;
		  REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);

		  /* The insns where a reg is live are normally counted
		     elsewhere, but we want the count to include the insn
		     where the reg is set, and the normal counting mechanism
		     would not count it.  */
		  REG_LIVE_LENGTH (i) += 1;
		}

	      /* If this is a hard reg, record this function uses the reg.  */
	      if (regno_first < FIRST_PSEUDO_REGISTER)
		{
		  for (i = regno_first; i <= regno_last; i++)
		    regs_ever_live[i] = 1;
		  if (flags & PROP_ASM_SCAN)
		    for (i = regno_first; i <= regno_last; i++)
		      regs_asm_clobbered[i] = 1;
		}
	      else
		{
		  /* Keep track of which basic blocks each reg appears in.  */
		  if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
		    REG_BASIC_BLOCK (regno_first) = blocknum;
		  else if (REG_BASIC_BLOCK (regno_first) != blocknum)
		    REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
		}
	    }

	  if (! some_was_dead)
	    {
	      if (flags & PROP_LOG_LINKS)
		{
		  /* Make a logical link from the next following insn
		     that uses this register, back to this insn.
		     The following insns have already been processed.

		     We don't build a LOG_LINK for hard registers containing
		     in ASM_OPERANDs.  If these registers get replaced,
		     we might wind up changing the semantics of the insn,
		     even if reload can make what appear to be valid
		     assignments later.

		     We don't build a LOG_LINK for global registers to
		     or from a function call.  We don't want to let
		     combine think that it knows what is going on with
		     global registers.  */
		  if (y && (BLOCK_NUM (y) == blocknum)
		      && (regno_first >= FIRST_PSEUDO_REGISTER
			  || (asm_noperands (PATTERN (y)) < 0
			      && ! ((GET_CODE (insn) == CALL_INSN
				     || GET_CODE (y) == CALL_INSN)
				    && global_regs[regno_first]))))
		    LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
		}
	    }
	  else if (not_dead)
	    ;
	  else if (! some_was_live)
	    {
	      if (flags & PROP_REG_INFO)
		REG_N_DEATHS (regno_first) += 1;

	      if (flags & PROP_DEATH_NOTES)
		{
		  /* Note that dead stores have already been deleted
		     when possible.  If we get here, we have found a
		     dead store that cannot be eliminated (because the
		     same insn does something useful).  Indicate this
		     by marking the reg being set as dying here.  */
		  REG_NOTES (insn)
		    = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
		}
	    }
	  else
	    {
	      if (flags & PROP_DEATH_NOTES)
		{
		  /* This is a case where we have a multi-word hard register
		     and some, but not all, of the words of the register are
		     needed in subsequent insns.  Write REG_UNUSED notes
		     for those parts that were not needed.  This case should
		     be rare.  */

		  for (i = regno_first; i <= regno_last; ++i)
		    if (! REGNO_REG_SET_P (pbi->reg_live, i))
		      REG_NOTES (insn)
			= alloc_EXPR_LIST (REG_UNUSED,
					   regno_reg_rtx[i],
					   REG_NOTES (insn));
		}
	    }
	}

      /* Mark the register as being dead.  */
      if (some_was_live
	  /* The stack pointer is never dead.  Well, not strictly true,
	     but it's very difficult to tell from here.  Hopefully
	     combine_stack_adjustments will fix up the most egregious
	     errors.  */
	  && regno_first != STACK_POINTER_REGNUM)
	{
	  for (i = regno_first; i <= regno_last; ++i)
	    if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
	      {
		if ((pbi->flags & PROP_REG_INFO)
		    && REGNO_REG_SET_P (pbi->reg_live, i))
		  {
		    REG_LIVE_LENGTH (i) += pbi->insn_num - reg_deaths[i];
		    reg_deaths[i] = 0;
		  }
		CLEAR_REGNO_REG_SET (pbi->reg_live, i);
	      }
	}
    }
  else if (GET_CODE (reg) == REG)
    {
      /* A still-needed frame/arg pointer: don't touch liveness, but
	 keep the bookkeeping consistent.  */
      if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
	pbi->reg_next_use[regno_first] = 0;

      if ((flags & PROP_REG_INFO) != 0
	  && (flags & PROP_ASM_SCAN) != 0
	  && regno_first < FIRST_PSEUDO_REGISTER)
	{
	  for (i = regno_first; i <= regno_last; i++)
	    regs_asm_clobbered[i] = 1;
	}
    }

  /* If this is the last pass and this is a SCRATCH, show it will be dying
     here and count it.  */
  else if (GET_CODE (reg) == SCRATCH)
    {
      if (flags & PROP_DEATH_NOTES)
	REG_NOTES (insn)
	  = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
    }
}
2834 \f
2835 #ifdef HAVE_conditional_execution
/* Mark REGNO conditionally dead under condition COND (NULL_RTX for an
   unconditional store).  PBI holds the conditional-death splay tree
   (reg_cond_dead) and the set of predicate registers referenced by
   recorded conditions (reg_cond_reg).
   Return true if the register is now unconditionally dead.  */

static int
mark_regno_cond_dead (struct propagate_block_info *pbi, int regno, rtx cond)
{
  /* If this is a store to a predicate register, the value of the
     predicate is changing, we don't know that the predicate as seen
     before is the same as that seen after.  Flush all dependent
     conditions from reg_cond_dead.  This will make all such
     conditionally live registers unconditionally live.  */
  if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
    flush_reg_cond_reg (pbi, regno);

  /* If this is an unconditional store, remove any conditional
     life that may have existed.  */
  if (cond == NULL_RTX)
    splay_tree_remove (pbi->reg_cond_dead, regno);
  else
    {
      splay_tree_node node;
      struct reg_cond_life_info *rcli;
      rtx ncond;

      /* Otherwise this is a conditional set.  Record that fact.
	 It may have been conditionally used, or there may be a
	 subsequent set with a complimentary condition.  */

      node = splay_tree_lookup (pbi->reg_cond_dead, regno);
      if (node == NULL)
	{
	  /* The register was unconditionally live previously.
	     Record the current condition as the condition under
	     which it is dead.  */
	  rcli = xmalloc (sizeof (*rcli));
	  rcli->condition = cond;
	  rcli->stores = cond;
	  rcli->orig_condition = const0_rtx;
	  splay_tree_insert (pbi->reg_cond_dead, regno,
			     (splay_tree_value) rcli);

	  /* Remember that the condition's register is used as a
	     predicate, so a later store to it flushes this entry.  */
	  SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));

	  /* Not unconditionally dead.  */
	  return 0;
	}
      else
	{
	  /* The register was conditionally live previously.
	     Add the new condition to the old.  */
	  rcli = (struct reg_cond_life_info *) node->value;
	  ncond = rcli->condition;
	  ncond = ior_reg_cond (ncond, cond, 1);
	  if (rcli->stores == const0_rtx)
	    rcli->stores = cond;
	  else if (rcli->stores != const1_rtx)
	    rcli->stores = ior_reg_cond (rcli->stores, cond, 1);

	  /* If the register is now unconditionally dead, remove the entry
	     in the splay_tree.  A register is unconditionally dead if the
	     dead condition ncond is true.  A register is also unconditionally
	     dead if the sum of all conditional stores is an unconditional
	     store (stores is true), and the dead condition is identically the
	     same as the original dead condition initialized at the end of
	     the block.  This is a pointer compare, not an rtx_equal_p
	     compare.  */
	  if (ncond == const1_rtx
	      || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
	    splay_tree_remove (pbi->reg_cond_dead, regno);
	  else
	    {
	      rcli->condition = ncond;

	      SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));

	      /* Not unconditionally dead.  */
	      return 0;
	    }
	}
    }

  return 1;
}
2919
2920 /* Called from splay_tree_delete for pbi->reg_cond_life. */
2921
2922 static void
2923 free_reg_cond_life_info (splay_tree_value value)
2924 {
2925 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
2926 free (rcli);
2927 }
2928
2929 /* Helper function for flush_reg_cond_reg. */
2930
2931 static int
2932 flush_reg_cond_reg_1 (splay_tree_node node, void *data)
2933 {
2934 struct reg_cond_life_info *rcli;
2935 int *xdata = (int *) data;
2936 unsigned int regno = xdata[0];
2937
2938 /* Don't need to search if last flushed value was farther on in
2939 the in-order traversal. */
2940 if (xdata[1] >= (int) node->key)
2941 return 0;
2942
2943 /* Splice out portions of the expression that refer to regno. */
2944 rcli = (struct reg_cond_life_info *) node->value;
2945 rcli->condition = elim_reg_cond (rcli->condition, regno);
2946 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
2947 rcli->stores = elim_reg_cond (rcli->stores, regno);
2948
2949 /* If the entire condition is now false, signal the node to be removed. */
2950 if (rcli->condition == const0_rtx)
2951 {
2952 xdata[1] = node->key;
2953 return -1;
2954 }
2955 else if (rcli->condition == const1_rtx)
2956 abort ();
2957
2958 return 0;
2959 }
2960
/* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE.
   Each time flush_reg_cond_reg_1 finds a node whose condition collapsed
   to false it returns -1 with the node's key in pair[1]; we remove that
   node and restart the traversal (pair[1] lets the restart skip the
   portion already processed).  Finally REGNO is cleared from the set of
   registers appearing in any tracked condition.  */

static void
flush_reg_cond_reg (struct propagate_block_info *pbi, int regno)
{
  int pair[2];

  pair[0] = regno;
  pair[1] = -1;
  while (splay_tree_foreach (pbi->reg_cond_dead,
			     flush_reg_cond_reg_1, pair) == -1)
    splay_tree_remove (pbi->reg_cond_dead, pair[1]);

  CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
}
2976
/* Logical arithmetic on predicate conditions.  IOR, NOT and AND.
   For ior/and, the ADD flag determines whether we want to add the new
   condition X to the old one unconditionally.  If it is zero, we will
   only return a new expression if X allows us to simplify part of
   OLD, otherwise we return NULL to the caller.
   If ADD is nonzero, we will return a new condition in all cases.  The
   toplevel caller of one of these functions should always pass 1 for
   ADD.

   Note that simplification results are compared by pointer identity
   against const0_rtx/const1_rtx throughout, not with rtx_equal_p.  */

static rtx
ior_reg_cond (rtx old, rtx x, int add)
{
  rtx op0, op1;

  if (COMPARISON_P (old))
    {
      /* (r op 0) | (r rev-op 0) is always true.  */
      if (COMPARISON_P (x)
	  && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
	return const1_rtx;
      /* (r op 0) | (r op 0) simplifies to itself.  */
      if (GET_CODE (x) == GET_CODE (old)
	  && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
	return old;
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);
    }

  switch (GET_CODE (old))
    {
    case IOR:
      /* Try to push X into either arm; NULL means "no simplification".  */
      op0 = ior_reg_cond (XEXP (old, 0), x, 0);
      op1 = ior_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != NULL || op1 != NULL)
	{
	  if (op0 == const0_rtx)
	    return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
	  if (op1 == const0_rtx)
	    return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
	  if (op0 == const1_rtx || op1 == const1_rtx)
	    return const1_rtx;
	  if (op0 == NULL)
	    op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
	  else if (rtx_equal_p (x, op0))
	    /* (x | A) | x ~ (x | A).  */
	    return old;
	  if (op1 == NULL)
	    op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
	  else if (rtx_equal_p (x, op1))
	    /* (A | x) | x ~ (A | x).  */
	    return old;
	  return gen_rtx_IOR (0, op0, op1);
	}
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);

    case AND:
      /* Distribute: (A & B) | x ~ (A | x) & (B | x), then simplify.  */
      op0 = ior_reg_cond (XEXP (old, 0), x, 0);
      op1 = ior_reg_cond (XEXP (old, 1), x, 0);
      if (op0 != NULL || op1 != NULL)
	{
	  if (op0 == const1_rtx)
	    return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
	  if (op1 == const1_rtx)
	    return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
	  if (op0 == const0_rtx || op1 == const0_rtx)
	    return const0_rtx;
	  if (op0 == NULL)
	    op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
	  else if (rtx_equal_p (x, op0))
	    /* (x & A) | x ~ x.  */
	    return op0;
	  if (op1 == NULL)
	    op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
	  else if (rtx_equal_p (x, op1))
	    /* (A & x) | x ~ x.  */
	    return op1;
	  return gen_rtx_AND (0, op0, op1);
	}
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);

    case NOT:
      /* De Morgan: (~A) | x ~ ~(A & ~x).  */
      op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
      if (op0 != NULL)
	return not_reg_cond (op0);
      if (! add)
	return NULL;
      return gen_rtx_IOR (0, old, x);

    default:
      abort ();
    }
}
3073
3074 static rtx
3075 not_reg_cond (rtx x)
3076 {
3077 enum rtx_code x_code;
3078
3079 if (x == const0_rtx)
3080 return const1_rtx;
3081 else if (x == const1_rtx)
3082 return const0_rtx;
3083 x_code = GET_CODE (x);
3084 if (x_code == NOT)
3085 return XEXP (x, 0);
3086 if (COMPARISON_P (x)
3087 && GET_CODE (XEXP (x, 0)) == REG)
3088 {
3089 if (XEXP (x, 1) != const0_rtx)
3090 abort ();
3091
3092 return gen_rtx_fmt_ee (reverse_condition (x_code),
3093 VOIDmode, XEXP (x, 0), const0_rtx);
3094 }
3095 return gen_rtx_NOT (0, x);
3096 }
3097
3098 static rtx
3099 and_reg_cond (rtx old, rtx x, int add)
3100 {
3101 rtx op0, op1;
3102
3103 if (COMPARISON_P (old))
3104 {
3105 if (COMPARISON_P (x)
3106 && GET_CODE (x) == reverse_condition (GET_CODE (old))
3107 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3108 return const0_rtx;
3109 if (GET_CODE (x) == GET_CODE (old)
3110 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3111 return old;
3112 if (! add)
3113 return NULL;
3114 return gen_rtx_AND (0, old, x);
3115 }
3116
3117 switch (GET_CODE (old))
3118 {
3119 case IOR:
3120 op0 = and_reg_cond (XEXP (old, 0), x, 0);
3121 op1 = and_reg_cond (XEXP (old, 1), x, 0);
3122 if (op0 != NULL || op1 != NULL)
3123 {
3124 if (op0 == const0_rtx)
3125 return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
3126 if (op1 == const0_rtx)
3127 return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
3128 if (op0 == const1_rtx || op1 == const1_rtx)
3129 return const1_rtx;
3130 if (op0 == NULL)
3131 op0 = gen_rtx_AND (0, XEXP (old, 0), x);
3132 else if (rtx_equal_p (x, op0))
3133 /* (x | A) & x ~ x. */
3134 return op0;
3135 if (op1 == NULL)
3136 op1 = gen_rtx_AND (0, XEXP (old, 1), x);
3137 else if (rtx_equal_p (x, op1))
3138 /* (A | x) & x ~ x. */
3139 return op1;
3140 return gen_rtx_IOR (0, op0, op1);
3141 }
3142 if (! add)
3143 return NULL;
3144 return gen_rtx_AND (0, old, x);
3145
3146 case AND:
3147 op0 = and_reg_cond (XEXP (old, 0), x, 0);
3148 op1 = and_reg_cond (XEXP (old, 1), x, 0);
3149 if (op0 != NULL || op1 != NULL)
3150 {
3151 if (op0 == const1_rtx)
3152 return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
3153 if (op1 == const1_rtx)
3154 return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
3155 if (op0 == const0_rtx || op1 == const0_rtx)
3156 return const0_rtx;
3157 if (op0 == NULL)
3158 op0 = gen_rtx_AND (0, XEXP (old, 0), x);
3159 else if (rtx_equal_p (x, op0))
3160 /* (x & A) & x ~ (x & A). */
3161 return old;
3162 if (op1 == NULL)
3163 op1 = gen_rtx_AND (0, XEXP (old, 1), x);
3164 else if (rtx_equal_p (x, op1))
3165 /* (A & x) & x ~ (A & x). */
3166 return old;
3167 return gen_rtx_AND (0, op0, op1);
3168 }
3169 if (! add)
3170 return NULL;
3171 return gen_rtx_AND (0, old, x);
3172
3173 case NOT:
3174 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
3175 if (op0 != NULL)
3176 return not_reg_cond (op0);
3177 if (! add)
3178 return NULL;
3179 return gen_rtx_AND (0, old, x);
3180
3181 default:
3182 abort ();
3183 }
3184 }
3185
3186 /* Given a condition X, remove references to reg REGNO and return the
3187 new condition. The removal will be done so that all conditions
3188 involving REGNO are considered to evaluate to false. This function
3189 is used when the value of REGNO changes. */
3190
3191 static rtx
3192 elim_reg_cond (rtx x, unsigned int regno)
3193 {
3194 rtx op0, op1;
3195
3196 if (COMPARISON_P (x))
3197 {
3198 if (REGNO (XEXP (x, 0)) == regno)
3199 return const0_rtx;
3200 return x;
3201 }
3202
3203 switch (GET_CODE (x))
3204 {
3205 case AND:
3206 op0 = elim_reg_cond (XEXP (x, 0), regno);
3207 op1 = elim_reg_cond (XEXP (x, 1), regno);
3208 if (op0 == const0_rtx || op1 == const0_rtx)
3209 return const0_rtx;
3210 if (op0 == const1_rtx)
3211 return op1;
3212 if (op1 == const1_rtx)
3213 return op0;
3214 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
3215 return x;
3216 return gen_rtx_AND (0, op0, op1);
3217
3218 case IOR:
3219 op0 = elim_reg_cond (XEXP (x, 0), regno);
3220 op1 = elim_reg_cond (XEXP (x, 1), regno);
3221 if (op0 == const1_rtx || op1 == const1_rtx)
3222 return const1_rtx;
3223 if (op0 == const0_rtx)
3224 return op1;
3225 if (op1 == const0_rtx)
3226 return op0;
3227 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
3228 return x;
3229 return gen_rtx_IOR (0, op0, op1);
3230
3231 case NOT:
3232 op0 = elim_reg_cond (XEXP (x, 0), regno);
3233 if (op0 == const0_rtx)
3234 return const1_rtx;
3235 if (op0 == const1_rtx)
3236 return const0_rtx;
3237 if (op0 != XEXP (x, 0))
3238 return not_reg_cond (op0);
3239 return x;
3240
3241 default:
3242 abort ();
3243 }
3244 }
3245 #endif /* HAVE_conditional_execution */
3246 \f
3247 #ifdef AUTO_INC_DEC
3248
/* Try to substitute the auto-inc expression INC as the address inside
   MEM which occurs in INSN.  Currently, the address of MEM is an expression
   involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
   that has a single set whose source is a PLUS of INCR_REG and something
   else.  On success INSN is rewritten to use the auto-inc address, a
   REG_INC note is added, and INCR is reduced to a copy (or deleted as a
   no-op); on failure everything is left untouched.  */

static void
attempt_auto_inc (struct propagate_block_info *pbi, rtx inc, rtx insn,
		  rtx mem, rtx incr, rtx incr_reg)
{
  int regno = REGNO (incr_reg);
  rtx set = single_set (incr);
  rtx q = SET_DEST (set);
  rtx y = SET_SRC (set);
  /* Which operand of the PLUS in Y is INCR_REG.  */
  int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;

  /* Make sure this reg appears only once in this insn.  */
  if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
    return;

  if (dead_or_set_p (incr, incr_reg)
      /* Mustn't autoinc an eliminable register.  */
      && (regno >= FIRST_PSEUDO_REGISTER
	  || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
    {
      /* This is the simple case.  Try to make the auto-inc.  If
	 we can't, we are done.  Otherwise, we will do any
	 needed updates below.  */
      if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
	return;
    }
  else if (GET_CODE (q) == REG
	   /* PREV_INSN used here to check the semi-open interval
	      [insn,incr).  */
	   && ! reg_used_between_p (q, PREV_INSN (insn), incr)
	   /* We must also check for sets of q as q may be
	      a call clobbered hard register and there may
	      be a call between PREV_INSN (insn) and incr.  */
	   && ! reg_set_between_p (q, PREV_INSN (insn), incr))
    {
      /* We have *p followed sometime later by q = p+size.
	 Both p and q must be live afterward,
	 and q is not used between INSN and its assignment.
	 Change it to q = p, ...*q..., q = q+size.
	 Then fall into the usual case.  */
      rtx insns, temp;

      /* Build the q = p copy as a detached insn sequence.  */
      start_sequence ();
      emit_move_insn (q, incr_reg);
      insns = get_insns ();
      end_sequence ();

      /* If we can't make the auto-inc, or can't make the
	 replacement into Y, exit.  There's no point in making
	 the change below if we can't do the auto-inc and doing
	 so is not correct in the pre-inc case.  */

      XEXP (inc, 0) = q;
      validate_change (insn, &XEXP (mem, 0), inc, 1);
      validate_change (incr, &XEXP (y, opnum), q, 1);
      if (! apply_change_group ())
	return;

      /* We now know we'll be doing this change, so emit the
	 new insn(s) and do the updates.  */
      emit_insn_before (insns, insn);

      /* Keep the basic block's head pointer valid if INSN was first.  */
      if (BB_HEAD (pbi->bb) == insn)
	BB_HEAD (pbi->bb) = insns;

      /* INCR will become a NOTE and INSN won't contain a
	 use of INCR_REG.  If a use of INCR_REG was just placed in
	 the insn before INSN, make that the next use.
	 Otherwise, invalidate it.  */
      if (GET_CODE (PREV_INSN (insn)) == INSN
	  && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
	  && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
	pbi->reg_next_use[regno] = PREV_INSN (insn);
      else
	pbi->reg_next_use[regno] = 0;

      /* From here on, Q is the register being auto-incremented.  */
      incr_reg = q;
      regno = REGNO (q);

      if ((pbi->flags & PROP_REG_INFO)
	  && !REGNO_REG_SET_P (pbi->reg_live, regno))
	reg_deaths[regno] = pbi->insn_num;

      /* REGNO is now used in INCR which is below INSN, but
	 it previously wasn't live here.  If we don't mark
	 it as live, we'll put a REG_DEAD note for it
	 on this insn, which is incorrect.  */
      SET_REGNO_REG_SET (pbi->reg_live, regno);

      /* If there are any calls between INSN and INCR, show
	 that REGNO now crosses them.  */
      for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
	if (GET_CODE (temp) == CALL_INSN)
	  REG_N_CALLS_CROSSED (regno)++;

      /* Invalidate alias info for Q since we just changed its value.  */
      clear_reg_alias_info (q);
    }
  else
    return;

  /* If we haven't returned, it means we were able to make the
     auto-inc, so update the status.  First, record that this insn
     has an implicit side effect.  */

  REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));

  /* Modify the old increment-insn to simply copy
     the already-incremented value of our register.  */
  if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
    abort ();

  /* If that makes it a no-op (copying the register into itself) delete
     it so it won't appear to be a "use" and a "set" of this
     register.  */
  if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
    {
      /* If the original source was dead, it's dead now.  */
      rtx note;

      while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
	{
	  remove_note (incr, note);
	  if (XEXP (note, 0) != incr_reg)
	    {
	      /* NOTE: this REGNO shadows the outer one on purpose; it
		 refers to the register named in the REG_DEAD note.  */
	      unsigned int regno = REGNO (XEXP (note, 0));

	      if ((pbi->flags & PROP_REG_INFO)
		  && REGNO_REG_SET_P (pbi->reg_live, regno))
		{
		  REG_LIVE_LENGTH (regno) += pbi->insn_num - reg_deaths[regno];
		  reg_deaths[regno] = 0;
		}
	      CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
	    }
	}

      PUT_CODE (incr, NOTE);
      NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
      NOTE_SOURCE_FILE (incr) = 0;
    }

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      /* Count an extra reference to the reg.  When a reg is
	 incremented, spilling it is worse, so we want to make
	 that less likely.  */
      REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);

      /* Count the increment as a setting of the register,
	 even though it isn't a SET in rtl.  */
      REG_N_SETS (regno)++;
    }
}
3408
/* X is a MEM found in INSN.  See if we can convert it into an auto-increment
   reference.  The candidate increment is the next use of the address
   register (recorded in pbi->reg_next_use); depending on the increment
   amount and the constant offset in the address, one of the post/pre
   inc/dec/modify forms supported by the target is attempted via
   attempt_auto_inc.  */

static void
find_auto_inc (struct propagate_block_info *pbi, rtx x, rtx insn)
{
  rtx addr = XEXP (x, 0);
  HOST_WIDE_INT offset = 0;
  rtx set, y, incr, inc_val;
  int regno;
  int size = GET_MODE_SIZE (GET_MODE (x));

  /* Adding a side effect to a jump insn is not safe (see
     try_pre_increment), so never try it.  */
  if (GET_CODE (insn) == JUMP_INSN)
    return;

  /* Here we detect use of an index register which might be good for
     postincrement, postdecrement, preincrement, or predecrement.  */

  /* Strip a constant displacement: the address is (reg + offset).  */
  if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);

  if (GET_CODE (addr) != REG)
    return;

  regno = REGNO (addr);

  /* Is the next use an increment that might make auto-increment?  It
     must be in the same basic block as INSN.  */
  incr = pbi->reg_next_use[regno];
  if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
    return;
  set = single_set (incr);
  if (set == 0 || GET_CODE (set) != SET)
    return;
  y = SET_SRC (set);

  if (GET_CODE (y) != PLUS)
    return;

  /* The increment must be of the form reg = reg + inc_val (either
     operand order); extract the increment amount.  */
  if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
    inc_val = XEXP (y, 1);
  else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
    inc_val = XEXP (y, 0);
  else
    return;

  if (GET_CODE (inc_val) == CONST_INT)
    {
      /* Match the increment amount and address offset against each
	 addressing form the target supports.  */
      if (HAVE_POST_INCREMENT
	  && (INTVAL (inc_val) == size && offset == 0))
	attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_POST_DECREMENT
	       && (INTVAL (inc_val) == -size && offset == 0))
	attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_PRE_INCREMENT
	       && (INTVAL (inc_val) == size && offset == size))
	attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_PRE_DECREMENT
	       && (INTVAL (inc_val) == -size && offset == -size))
	attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
			  incr, addr);
      else if (HAVE_POST_MODIFY_DISP && offset == 0)
	attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
						    gen_rtx_PLUS (Pmode,
								  addr,
								  inc_val)),
			  insn, x, incr, addr);
      else if (HAVE_PRE_MODIFY_DISP && offset == INTVAL (inc_val))
	attempt_auto_inc (pbi, gen_rtx_PRE_MODIFY (Pmode, addr,
						   gen_rtx_PLUS (Pmode,
								 addr,
								 inc_val)),
			  insn, x, incr, addr);
    }
  else if (GET_CODE (inc_val) == REG
	   /* The register holding the increment amount must not change
	      between INSN and the increment insn.  */
	   && ! reg_set_between_p (inc_val, PREV_INSN (insn),
				   NEXT_INSN (incr)))

    {
      if (HAVE_POST_MODIFY_REG && offset == 0)
	attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
						    gen_rtx_PLUS (Pmode,
								  addr,
								  inc_val)),
			  insn, x, incr, addr);
    }
}
3498
3499 #endif /* AUTO_INC_DEC */
3500 \f
/* Record a use of hard or pseudo register REG (possibly multi-word for
   hard regs) occurring in INSN.  Updates next-use chains, register
   statistics, REG_DEAD notes and the live set; under conditional
   execution COND, if non-null, is the condition guarding the use and is
   folded into the conditional-death information in pbi->reg_cond_dead.  */

static void
mark_used_reg (struct propagate_block_info *pbi, rtx reg,
	       rtx cond ATTRIBUTE_UNUSED, rtx insn)
{
  unsigned int regno_first, regno_last, i;
  int some_was_live, some_was_dead, some_not_set;

  /* A hard register may span several consecutive register numbers.  */
  regno_last = regno_first = REGNO (reg);
  if (regno_first < FIRST_PSEUDO_REGISTER)
    regno_last += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;

  /* Find out if any of this register is live after this instruction.  */
  some_was_live = some_was_dead = 0;
  for (i = regno_first; i <= regno_last; ++i)
    {
      int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
      some_was_live |= needed_regno;
      some_was_dead |= ! needed_regno;
    }

  /* Find out if any of the register was set this insn.  */
  some_not_set = 0;
  for (i = regno_first; i <= regno_last; ++i)
    some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);

  if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
    {
      /* Record where each reg is used, so when the reg is set we know
	 the next insn that uses it.  */
      pbi->reg_next_use[regno_first] = insn;
    }

  if (pbi->flags & PROP_REG_INFO)
    {
      if (regno_first < FIRST_PSEUDO_REGISTER)
	{
	  /* If this is a register we are going to try to eliminate,
	     don't mark it live here.  If we are successful in
	     eliminating it, it need not be live unless it is used for
	     pseudos, in which case it will have been set live when it
	     was allocated to the pseudos.  If the register will not
	     be eliminated, reload will set it live at that point.

	     Otherwise, record that this function uses this register.  */
	  /* ??? The PPC backend tries to "eliminate" on the pic
	     register to itself.  This should be fixed.  In the mean
	     time, hack around it.  */

	  if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
		 && (regno_first == FRAME_POINTER_REGNUM
		     || regno_first == ARG_POINTER_REGNUM)))
	    for (i = regno_first; i <= regno_last; ++i)
	      regs_ever_live[i] = 1;
	}
      else
	{
	  /* Keep track of which basic block each reg appears in.  */

	  int blocknum = pbi->bb->index;
	  if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
	    REG_BASIC_BLOCK (regno_first) = blocknum;
	  else if (REG_BASIC_BLOCK (regno_first) != blocknum)
	    REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;

	  /* Count (weighted) number of uses of each reg.  */
	  REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
	  REG_N_REFS (regno_first)++;
	}
      /* Record the insn number at which each dead part of the register
	 (scanning backwards) last died.  */
      for (i = regno_first; i <= regno_last; ++i)
	if (! REGNO_REG_SET_P (pbi->reg_live, i))
	  {
#ifdef ENABLE_CHECKING
	    if (reg_deaths[i])
	      abort ();
#endif
	    reg_deaths[i] = pbi->insn_num;
	  }
    }

  /* Record and count the insns in which a reg dies.  If it is used in
     this insn and was dead below the insn then it dies in this insn.
     If it was set in this insn, we do not make a REG_DEAD note;
     likewise if we already made such a note.  */
  if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
      && some_was_dead
      && some_not_set)
    {
      /* Check for the case where the register dying partially
	 overlaps the register set by this insn.  */
      if (regno_first != regno_last)
	for (i = regno_first; i <= regno_last; ++i)
	  some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);

      /* If none of the words in X is needed, make a REG_DEAD note.
	 Otherwise, we must make partial REG_DEAD notes.  */
      if (! some_was_live)
	{
	  if ((pbi->flags & PROP_DEATH_NOTES)
	      && ! find_regno_note (insn, REG_DEAD, regno_first))
	    REG_NOTES (insn)
	      = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));

	  if (pbi->flags & PROP_REG_INFO)
	    REG_N_DEATHS (regno_first)++;
	}
      else
	{
	  /* Don't make a REG_DEAD note for a part of a register
	     that is set in the insn.  */
	  for (i = regno_first; i <= regno_last; ++i)
	    if (! REGNO_REG_SET_P (pbi->reg_live, i)
		&& ! dead_or_set_regno_p (insn, i))
	      REG_NOTES (insn)
		= alloc_EXPR_LIST (REG_DEAD,
				   regno_reg_rtx[i],
				   REG_NOTES (insn));
	}
    }

  /* Mark the register as being live.  */
  for (i = regno_first; i <= regno_last; ++i)
    {
#ifdef HAVE_conditional_execution
      int this_was_live = REGNO_REG_SET_P (pbi->reg_live, i);
#endif

      SET_REGNO_REG_SET (pbi->reg_live, i);

#ifdef HAVE_conditional_execution
      /* If this is a conditional use, record that fact.  If it is later
	 conditionally set, we'll know to kill the register.  */
      if (cond != NULL_RTX)
	{
	  splay_tree_node node;
	  struct reg_cond_life_info *rcli;
	  rtx ncond;

	  if (this_was_live)
	    {
	      node = splay_tree_lookup (pbi->reg_cond_dead, i);
	      if (node == NULL)
		{
		  /* The register was unconditionally live previously.
		     No need to do anything.  */
		}
	      else
		{
		  /* The register was conditionally live previously.
		     Subtract the new life cond from the old death cond.  */
		  rcli = (struct reg_cond_life_info *) node->value;
		  ncond = rcli->condition;
		  ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);

		  /* If the register is now unconditionally live,
		     remove the entry in the splay_tree.  */
		  if (ncond == const0_rtx)
		    splay_tree_remove (pbi->reg_cond_dead, i);
		  else
		    {
		      rcli->condition = ncond;
		      SET_REGNO_REG_SET (pbi->reg_cond_reg,
					 REGNO (XEXP (cond, 0)));
		    }
		}
	    }
	  else
	    {
	      /* The register was not previously live at all.  Record
		 the condition under which it is still dead.  */
	      rcli = xmalloc (sizeof (*rcli));
	      rcli->condition = not_reg_cond (cond);
	      rcli->stores = const0_rtx;
	      rcli->orig_condition = const0_rtx;
	      splay_tree_insert (pbi->reg_cond_dead, i,
				 (splay_tree_value) rcli);

	      SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
	    }
	}
      else if (this_was_live)
	{
	  /* The register may have been conditionally live previously, but
	     is now unconditionally live.  Remove it from the conditionally
	     dead list, so that a conditional set won't cause us to think
	     it dead.  */
	  splay_tree_remove (pbi->reg_cond_dead, i);
	}
#endif
    }
}
3691
/* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
   This is done assuming the registers needed from X are those that
   have 1-bits in PBI->REG_LIVE.

   COND, if non-null, is the condition under which the uses occur
   (propagated from an enclosing COND_EXEC).  INSN is the containing
   instruction.  If INSN is dead, this function is not called.  */

static void
mark_used_regs (struct propagate_block_info *pbi, rtx x, rtx cond, rtx insn)
{
  RTX_CODE code;
  int regno;
  int flags = pbi->flags;

 retry:
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    /* Constants and labels contain no register uses.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

#ifdef HAVE_cc0
    case CC0:
      pbi->cc0_live = 1;
      return;
#endif

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
      return;

    case MEM:
      /* Don't bother watching stores to mems if this is not the
	 final pass.  We'll not be deleting dead stores this round.  */
      if (optimize && (flags & PROP_SCAN_DEAD_STORES))
	{
	  /* Invalidate the data for the last MEM stored, but only if MEM is
	     something that can be stored into.  */
	  if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	      && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	    /* Needn't clear the memory set list.  */
	    ;
	  else
	    {
	      /* Remove from mem_set_list every remembered store this
		 read may depend on.  */
	      rtx temp = pbi->mem_set_list;
	      rtx prev = NULL_RTX;
	      rtx next;

	      while (temp)
		{
		  next = XEXP (temp, 1);
		  if (unchanging_anti_dependence (XEXP (temp, 0), x))
		    {
		      /* Splice temp out of the list.  */
		      if (prev)
			XEXP (prev, 1) = next;
		      else
			pbi->mem_set_list = next;
		      free_EXPR_LIST_node (temp);
		      pbi->mem_set_list_len--;
		    }
		  else
		    prev = temp;
		  temp = next;
		}
	    }

	  /* If the memory reference had embedded side effects
	     (autoincrement address modes), then we may need to kill
	     some entries on the memory set list.  */
	  if (insn)
	    for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi);
	}

#ifdef AUTO_INC_DEC
      if (flags & PROP_AUTOINC)
	find_auto_inc (pbi, x, insn);
#endif
      break;

    case SUBREG:
#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((flags & PROP_REG_INFO)
	  && GET_CODE (SUBREG_REG (x)) == REG
	  && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
	bitmap_set_bit (&subregs_of_mode, REGNO (SUBREG_REG (x))
					  * MAX_MACHINE_MODE
					  + GET_MODE (x));
#endif

      /* While we're here, optimize this case.  */
      x = SUBREG_REG (x);
      if (GET_CODE (x) != REG)
	goto retry;
      /* Fall through.  */

    case REG:
      /* See a register other than being set => mark it as needed.  */
      mark_used_reg (pbi, x, cond, insn);
      return;

    case SET:
      {
	rtx testreg = SET_DEST (x);
	int mark_dest = 0;

	/* If storing into MEM, don't show it as being used.  But do
	   show the address as being used.  */
	if (GET_CODE (testreg) == MEM)
	  {
#ifdef AUTO_INC_DEC
	    if (flags & PROP_AUTOINC)
	      find_auto_inc (pbi, testreg, insn);
#endif
	    mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
	    mark_used_regs (pbi, SET_SRC (x), cond, insn);
	    return;
	  }

	/* Storing in STRICT_LOW_PART is like storing in a reg
	   in that this SET might be dead, so ignore it in TESTREG.
	   but in some other ways it is like using the reg.

	   Storing in a SUBREG or a bit field is like storing the entire
	   register in that if the register's value is not used
	   then this SET is not needed.  */
	while (GET_CODE (testreg) == STRICT_LOW_PART
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == SUBREG)
	  {
#ifdef CANNOT_CHANGE_MODE_CLASS
	    if ((flags & PROP_REG_INFO)
		&& GET_CODE (testreg) == SUBREG
		&& GET_CODE (SUBREG_REG (testreg)) == REG
		&& REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER)
	      bitmap_set_bit (&subregs_of_mode, REGNO (SUBREG_REG (testreg))
						* MAX_MACHINE_MODE
						+ GET_MODE (testreg));
#endif

	    /* Modifying a single register in an alternate mode
	       does not use any of the old value.  But these other
	       ways of storing in a register do use the old value.  */
	    if (GET_CODE (testreg) == SUBREG
		&& !((REG_BYTES (SUBREG_REG (testreg))
		      + UNITS_PER_WORD - 1) / UNITS_PER_WORD
		     > (REG_BYTES (testreg)
			+ UNITS_PER_WORD - 1) / UNITS_PER_WORD))
	      ;
	    else
	      mark_dest = 1;

	    testreg = XEXP (testreg, 0);
	  }

	/* If this is a store into a register or group of registers,
	   recursively scan the value being stored.  */

	if ((GET_CODE (testreg) == PARALLEL
	     && GET_MODE (testreg) == BLKmode)
	    || (GET_CODE (testreg) == REG
		&& (regno = REGNO (testreg),
		    ! (regno == FRAME_POINTER_REGNUM
		       && (! reload_completed || frame_pointer_needed)))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
		&& ! (regno == HARD_FRAME_POINTER_REGNUM
		      && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
		&& ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
		))
	  {
	    if (mark_dest)
	      mark_used_regs (pbi, SET_DEST (x), cond, insn);
	    mark_used_regs (pbi, SET_SRC (x), cond, insn);
	    return;
	  }
      }
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
	/* Traditional and volatile asm instructions must be considered to use
	   and clobber all hard registers, all pseudo-registers and all of
	   memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.

	   Consider for instance a volatile asm that changes the fpu rounding
	   mode.  An insn should not be moved across this even if it only uses
	   pseudo-regs because it might give an incorrectly rounded result.

	   ?!? Unfortunately, marking all hard registers as live causes massive
	   problems for the register allocator and marking all pseudos as live
	   creates mountains of uninitialized variable warnings.

	   So for now, just clear the memory set list and mark any regs
	   we can find in ASM_OPERANDS as used.  */
	if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
	  {
	    free_EXPR_LIST_list (&pbi->mem_set_list);
	    pbi->mem_set_list_len = 0;
	  }

	/* For all ASM_OPERANDS, we must traverse the vector of input operands.
	   We can not just fall through here since then we would be confused
	   by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	   traditional asms unlike their normal usage.  */
	if (code == ASM_OPERANDS)
	  {
	    int j;

	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
	      mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
	  }
	break;
      }

    case COND_EXEC:
      /* Nested COND_EXECs are not expected.  */
      if (cond != NULL_RTX)
	abort ();

      /* The test itself is an unconditional use.  */
      mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);

      /* The guarded body is used only under the test condition.  */
      cond = COND_EXEC_TEST (x);
      x = COND_EXEC_CODE (x);
      goto retry;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char * const fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		x = XEXP (x, 0);
		goto retry;
	      }
	    mark_used_regs (pbi, XEXP (x, i), cond, insn);
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
	  }
      }
  }
}
3967 \f
3968 #ifdef AUTO_INC_DEC
3969
/* INSN is a single_set insn of the form reg = reg +/- constant.  See
   whether the next use of the register (in the same basic block) can be
   converted to a pre-increment/pre-decrement address; if so, delete INSN
   and return 1, otherwise return 0.  */

static int
try_pre_increment_1 (struct propagate_block_info *pbi, rtx insn)
{
  /* Find the next use of this reg.  If in same basic block,
     make it do pre-increment or pre-decrement if appropriate.  */
  rtx x = single_set (insn);
  /* Signed increment amount; negative when the source is a MINUS.  */
  HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
			  * INTVAL (XEXP (SET_SRC (x), 1)));
  int regno = REGNO (SET_DEST (x));
  rtx y = pbi->reg_next_use[regno];
  if (y != 0
      && SET_DEST (x) != stack_pointer_rtx
      && BLOCK_NUM (y) == BLOCK_NUM (insn)
      /* Don't do this if the reg dies, or gets set in y; a standard addressing
	 mode would be better.  */
      && ! dead_or_set_p (y, SET_DEST (x))
      && try_pre_increment (y, SET_DEST (x), amount))
    {
      /* We have found a suitable auto-increment and already changed
	 insn Y to do it.  So flush this increment instruction.  */
      propagate_block_delete_insn (insn);

      /* Count a reference to this reg for the increment insn we are
	 deleting.  When a reg is incremented, spilling it is worse,
	 so we want to make that less likely.  */
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
	  REG_N_SETS (regno)++;
	}

      /* Flush any remembered memories depending on the value of
	 the incremented register.  */
      invalidate_mems_from_set (pbi, SET_DEST (x));

      return 1;
    }
  return 0;
}
4009
4010 /* Try to change INSN so that it does pre-increment or pre-decrement
4011 addressing on register REG in order to add AMOUNT to REG.
4012 AMOUNT is negative for pre-decrement.
4013 Returns 1 if the change could be made.
4014 This checks all about the validity of the result of modifying INSN. */
4015
4016 static int
4017 try_pre_increment (rtx insn, rtx reg, HOST_WIDE_INT amount)
4018 {
4019 rtx use;
4020
4021 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4022 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4023 int pre_ok = 0;
4024 /* Nonzero if we can try to make a post-increment or post-decrement.
4025 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4026 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4027 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4028 int post_ok = 0;
4029
4030 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4031 int do_post = 0;
4032
4033 /* From the sign of increment, see which possibilities are conceivable
4034 on this target machine. */
4035 if (HAVE_PRE_INCREMENT && amount > 0)
4036 pre_ok = 1;
4037 if (HAVE_POST_INCREMENT && amount > 0)
4038 post_ok = 1;
4039
4040 if (HAVE_PRE_DECREMENT && amount < 0)
4041 pre_ok = 1;
4042 if (HAVE_POST_DECREMENT && amount < 0)
4043 post_ok = 1;
4044
4045 if (! (pre_ok || post_ok))
4046 return 0;
4047
4048 /* It is not safe to add a side effect to a jump insn
4049 because if the incremented register is spilled and must be reloaded
4050 there would be no way to store the incremented value back in memory. */
4051
4052 if (GET_CODE (insn) == JUMP_INSN)
4053 return 0;
4054
4055 use = 0;
4056 if (pre_ok)
4057 use = find_use_as_address (PATTERN (insn), reg, 0);
4058 if (post_ok && (use == 0 || use == (rtx) (size_t) 1))
4059 {
4060 use = find_use_as_address (PATTERN (insn), reg, -amount);
4061 do_post = 1;
4062 }
4063
4064 if (use == 0 || use == (rtx) (size_t) 1)
4065 return 0;
4066
4067 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4068 return 0;
4069
4070 /* See if this combination of instruction and addressing mode exists. */
4071 if (! validate_change (insn, &XEXP (use, 0),
4072 gen_rtx_fmt_e (amount > 0
4073 ? (do_post ? POST_INC : PRE_INC)
4074 : (do_post ? POST_DEC : PRE_DEC),
4075 Pmode, reg), 0))
4076 return 0;
4077
4078 /* Record that this insn now has an implicit side effect on X. */
4079 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4080 return 1;
4081 }
4082
4083 #endif /* AUTO_INC_DEC */
4084 \f
4085 /* Find the place in the rtx X where REG is used as a memory address.
4086 Return the MEM rtx that so uses it.
4087 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4088 (plus REG (const_int PLUSCONST)).
4089
4090 If such an address does not appear, return 0.
4091 If REG appears more than once, or is used other than in such an address,
4092 return (rtx) 1. */
4093
4094 rtx
4095 find_use_as_address (rtx x, rtx reg, HOST_WIDE_INT plusconst)
4096 {
4097 enum rtx_code code = GET_CODE (x);
4098 const char * const fmt = GET_RTX_FORMAT (code);
4099 int i;
4100 rtx value = 0;
4101 rtx tem;
4102
4103 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4104 return x;
4105
4106 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4107 && XEXP (XEXP (x, 0), 0) == reg
4108 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4109 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4110 return x;
4111
4112 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4113 {
4114 /* If REG occurs inside a MEM used in a bit-field reference,
4115 that is unacceptable. */
4116 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4117 return (rtx) (size_t) 1;
4118 }
4119
4120 if (x == reg)
4121 return (rtx) (size_t) 1;
4122
4123 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4124 {
4125 if (fmt[i] == 'e')
4126 {
4127 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4128 if (value == 0)
4129 value = tem;
4130 else if (tem != 0)
4131 return (rtx) (size_t) 1;
4132 }
4133 else if (fmt[i] == 'E')
4134 {
4135 int j;
4136 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4137 {
4138 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4139 if (value == 0)
4140 value = tem;
4141 else if (tem != 0)
4142 return (rtx) (size_t) 1;
4143 }
4144 }
4145 }
4146
4147 return value;
4148 }
4149 \f
4150 /* Write information about registers and basic blocks into FILE.
4151 This is part of making a debugging dump. */
4152
4153 void
4154 dump_regset (regset r, FILE *outf)
4155 {
4156 int i;
4157 if (r == NULL)
4158 {
4159 fputs (" (nil)", outf);
4160 return;
4161 }
4162
4163 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4164 {
4165 fprintf (outf, " %d", i);
4166 if (i < FIRST_PSEUDO_REGISTER)
4167 fprintf (outf, " [%s]",
4168 reg_names[i]);
4169 });
4170 }
4171
4172 /* Print a human-readable representation of R on the standard error
4173 stream. This function is designed to be used from within the
4174 debugger. */
4175
4176 void
4177 debug_regset (regset r)
4178 {
4179 dump_regset (r, stderr);
4180 putc ('\n', stderr);
4181 }
4182
4183 /* Recompute register set/reference counts immediately prior to register
4184 allocation.
4185
4186 This avoids problems with set/reference counts changing to/from values
4187 which have special meanings to the register allocators.
4188
4189 Additionally, the reference counts are the primary component used by the
4190 register allocators to prioritize pseudos for allocation to hard regs.
4191 More accurate reference counts generally lead to better register allocation.
4192
4193 F is the first insn to be scanned.
4194
4195 LOOP_STEP denotes how much loop_depth should be incremented per
4196 loop nesting level in order to increase the ref count more for
4197 references in a loop.
4198
4199 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4200 possibly other information which is used by the register allocators. */
4201
4202 void
4203 recompute_reg_usage (rtx f ATTRIBUTE_UNUSED, int loop_step ATTRIBUTE_UNUSED)
4204 {
4205 allocate_reg_life_data ();
4206 /* distribute_notes in combiner fails to convert some of the REG_UNUSED notes
4207 to REG_DEAD notes. This causes CHECK_DEAD_NOTES in sched1 to abort. To
4208 solve this update the DEATH_NOTES here. */
4209 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO | PROP_DEATH_NOTES);
4210 }
4211
4212 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
4213 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
4214 of the number of registers that died. */
4215
4216 int
4217 count_or_remove_death_notes (sbitmap blocks, int kill)
4218 {
4219 int count = 0;
4220 int i;
4221 basic_block bb;
4222
4223 /* This used to be a loop over all the blocks with a membership test
4224 inside the loop. That can be amazingly expensive on a large CFG
4225 when only a small number of bits are set in BLOCKs (for example,
4226 the calls from the scheduler typically have very few bits set).
4227
4228 For extra credit, someone should convert BLOCKS to a bitmap rather
4229 than an sbitmap. */
4230 if (blocks)
4231 {
4232 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4233 {
4234 count += count_or_remove_death_notes_bb (BASIC_BLOCK (i), kill);
4235 });
4236 }
4237 else
4238 {
4239 FOR_EACH_BB (bb)
4240 {
4241 count += count_or_remove_death_notes_bb (bb, kill);
4242 }
4243 }
4244
4245 return count;
4246 }
4247
/* Optionally removes all the REG_DEAD and REG_UNUSED notes from basic
   block BB.  Returns a count of the number of registers that died
   (REG_UNUSED notes are not counted).  If KILL is nonzero, both kinds
   of note are spliced out of each insn's REG_NOTES list and freed;
   otherwise the list is left intact.  */

static int
count_or_remove_death_notes_bb (basic_block bb, int kill)
{
  int count = 0;
  rtx insn;

  /* Walk every insn in BB, including BB_END itself (hence the
     bottom-tested exit instead of a loop condition).  */
  for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  /* PPREV points at the link field that holds LINK, so LINK can
	     be unspliced in place when KILL is set.  */
	  rtx *pprev = &REG_NOTES (insn);
	  rtx link = *pprev;

	  while (link)
	    {
	      switch (REG_NOTE_KIND (link))
		{
		case REG_DEAD:
		  if (GET_CODE (XEXP (link, 0)) == REG)
		    {
		      rtx reg = XEXP (link, 0);
		      int n;

		      /* A hard register death counts once per machine
			 register the value occupies.  */
		      if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
			n = 1;
		      else
			n = hard_regno_nregs[REGNO (reg)][GET_MODE (reg)];
		      count += n;
		    }

		  /* Fall through.  */

		case REG_UNUSED:
		  if (kill)
		    {
		      /* Unsplice LINK; PPREV stays put because the next
			 note now occupies LINK's old slot.  */
		      rtx next = XEXP (link, 1);
		      free_EXPR_LIST_node (link);
		      *pprev = link = next;
		      break;
		    }
		  /* Fall through.  */

		default:
		  /* Keep this note: advance both PPREV and LINK.  */
		  pprev = &XEXP (link, 1);
		  link = *pprev;
		  break;
		}
	    }
	}

      if (insn == BB_END (bb))
	break;
    }

  return count;
}
4307
4308 /* Clear LOG_LINKS fields of insns in a selected blocks or whole chain
4309 if blocks is NULL. */
4310
4311 static void
4312 clear_log_links (sbitmap blocks)
4313 {
4314 rtx insn;
4315 int i;
4316
4317 if (!blocks)
4318 {
4319 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4320 if (INSN_P (insn))
4321 free_INSN_LIST_list (&LOG_LINKS (insn));
4322 }
4323 else
4324 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4325 {
4326 basic_block bb = BASIC_BLOCK (i);
4327
4328 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
4329 insn = NEXT_INSN (insn))
4330 if (INSN_P (insn))
4331 free_INSN_LIST_list (&LOG_LINKS (insn));
4332 });
4333 }
4334
/* Given a register bitmap FROM, turn on the bits in the HARD_REG_SET
   *TO that correspond to the hard registers, if any, set in that map.
   Bits for pseudo registers in FROM are ignored; *TO is not cleared
   first.  This could be done far more efficiently by having all sorts
   of special-cases with moving single words, but probably isn't worth
   the trouble.  */

void
reg_set_to_hard_reg_set (HARD_REG_SET *to, bitmap from)
{
  int i;

  /* NOTE(review): the early return assumes EXECUTE_IF_SET_IN_BITMAP
     visits bits in ascending order, so the first pseudo-register bit
     means no hard-register bits remain -- confirm against the macro's
     definition in bitmap.h.  */
  EXECUTE_IF_SET_IN_BITMAP
    (from, 0, i,
     {
       if (i >= FIRST_PSEUDO_REGISTER)
	 return;
       SET_HARD_REG_BIT (*to, i);
     });
}
4352 }