1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
25
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
29
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
33
34 ** find_basic_blocks **
35
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
40
41 find_basic_blocks also finds any unreachable loops and deletes them.
42
43 ** life_analysis **
44
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
48
49 ** live-register info **
50
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
53
54 Each basic block's global_live_at_start field is a bit-vector (a
55 regset) with a bit for each hard or pseudo register. The bit is 1
56 if the register is live at the
57 beginning of the basic block.
58
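     For instance (a sketch, not new API -- just the regset bit-test
     macro from basic-block.h; BB and REGNO are assumed to be a block
     and a valid register number):

	if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
	  ... the register is live on entry to BB ...
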
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
65
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
76
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
83
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
87
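     As a concrete sketch of that scan (illustrative only: LIVE is
     assumed to be a scratch regset supplied by the caller, AFTER_INSN
     the insn of interest, and for brevity only single_set insns are
     handled and REG_UNUSED notes are ignored):

	COPY_REG_SET (live, bb->global_live_at_start);
	for (insn = bb->head; ; insn = NEXT_INSN (insn))
	  {
	    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	      {
		rtx set, link;

		for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		  if (REG_NOTE_KIND (link) == REG_DEAD
		      && GET_CODE (XEXP (link, 0)) == REG)
		    CLEAR_REGNO_REG_SET (live, REGNO (XEXP (link, 0)));

		if ((set = single_set (insn)) != NULL_RTX
		    && GET_CODE (SET_DEST (set)) == REG)
		  SET_REGNO_REG_SET (live, REGNO (SET_DEST (set)));
	      }
	    if (insn == after_insn)
	      break;
	  }
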
88 ** Other actions of life_analysis **
89
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
92
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
95
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
101
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
104
105 life_analysis fills in certain vectors containing information about
106 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
107 reg_n_calls_crossed and reg_basic_block.
108
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
111
112 /* TODO:
113
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
119 */
120 \f
121 #include "config.h"
122 #include "system.h"
123 #include "rtl.h"
124 #include "basic-block.h"
125 #include "insn-config.h"
126 #include "regs.h"
127 #include "hard-reg-set.h"
128 #include "flags.h"
129 #include "output.h"
130 #include "function.h"
131 #include "except.h"
132 #include "toplev.h"
133 #include "recog.h"
134 #include "insn-flags.h"
135
136 #include "obstack.h"
137 #define obstack_chunk_alloc xmalloc
138 #define obstack_chunk_free free
139
140
141 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
142 the stack pointer does not matter. The value is tested only in
143 functions that have frame pointers.
144 No definition is equivalent to always zero. */
145 #ifndef EXIT_IGNORE_STACK
146 #define EXIT_IGNORE_STACK 0
147 #endif
148
149
150 /* The contents of the current function definition are allocated
151 in this obstack, and all are freed at the end of the function.
152 For top-level functions, this is temporary_obstack.
153 Separate obstacks are made for nested functions. */
154
155 extern struct obstack *function_obstack;
156
157 /* Number of basic blocks in the current function. */
158
159 int n_basic_blocks;
160
161 /* The basic block array. */
162
163 varray_type basic_block_info;
164
165 /* The special entry and exit blocks. */
166
167 struct basic_block_def entry_exit_blocks[2] =
168 {
169 {
170 NULL, /* head */
171 NULL, /* end */
172 NULL, /* pred */
173 NULL, /* succ */
174 NULL, /* local_set */
175 NULL, /* global_live_at_start */
176 NULL, /* global_live_at_end */
177 NULL, /* aux */
178 ENTRY_BLOCK, /* index */
179 0 /* loop_depth */
180 },
181 {
182 NULL, /* head */
183 NULL, /* end */
184 NULL, /* pred */
185 NULL, /* succ */
186 NULL, /* local_set */
187 NULL, /* global_live_at_start */
188 NULL, /* global_live_at_end */
189 NULL, /* aux */
190 EXIT_BLOCK, /* index */
191 0 /* loop_depth */
192 }
193 };
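
/* ENTRY_BLOCK_PTR and EXIT_BLOCK_PTR, used throughout this file, are the
   conventional way to reach these two structures; for example make_edges
   starts with

	make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

   (The macros themselves live in basic-block.h.)  */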
194
195 /* Nonzero if the second flow pass has completed. */
196 int flow2_completed;
197
198 /* Maximum register number used in this function, plus one. */
199
200 int max_regno;
201
202 /* Indexed by n, giving various register information */
203
204 varray_type reg_n_info;
205
206 /* Size of the reg_n_info table. */
207
208 unsigned int reg_n_max;
209
210 /* Element N is the next insn that uses (hard or pseudo) register number N
211 within the current basic block; or zero, if there is no such insn.
212 This is valid only during the final backward scan in propagate_block. */
213
214 static rtx *reg_next_use;
215
216 /* Size of a regset for the current function,
217 in (1) bytes and (2) elements. */
218
219 int regset_bytes;
220 int regset_size;
221
222 /* Regset of regs live when calls to `setjmp'-like functions happen. */
223 /* ??? Does this exist only for the setjmp-clobbered warning message? */
224
225 regset regs_live_at_setjmp;
226
227 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
228 that have to go in the same hard reg.
229 The first two regs in the list are a pair, and the next two
230 are another pair, etc. */
231 rtx regs_may_share;
232
233 /* Depth within loops of basic block being scanned for lifetime analysis,
234 plus one. This is the weight attached to references to registers. */
235
236 static int loop_depth;
237
238 /* During propagate_block, this is non-zero if the value of CC0 is live. */
239
240 static int cc0_live;
241
242 /* During propagate_block, this contains a list of all the MEMs we are
243 tracking for dead store elimination.
244
245 ?!? Note we leak memory by not freeing items on this list. We need to
246 write some generic routines to operate on memory lists since cse, gcse,
247 loop, sched, flow and possibly other passes all need to do basically the
248 same operations on these lists. */
249
250 static rtx mem_set_list;
251
252 /* Set of registers that may be eliminable. These are handled specially
253 in updating regs_ever_live. */
254
255 static HARD_REG_SET elim_reg_set;
256
257 /* The basic block structure for every insn, indexed by uid. */
258
259 varray_type basic_block_for_insn;
260
261 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
262 /* ??? Should probably be using LABEL_NUSES instead. It would take a
263 bit of surgery to be able to use or co-opt the routines in jump. */
264
265 static rtx label_value_list;
266
267 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
268
269 #define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
270 #define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
271 static bitmap uid_volatile;
272
273 /* Forward declarations */
274 static int count_basic_blocks PROTO((rtx));
275 static rtx find_basic_blocks_1 PROTO((rtx, rtx*));
276 static void create_basic_block PROTO((int, rtx, rtx, rtx));
277 static void compute_bb_for_insn PROTO((varray_type, int));
278 static void clear_edges PROTO((void));
279 static void make_edges PROTO((rtx, rtx*));
280 static void make_edge PROTO((basic_block, basic_block, int));
281 static void make_label_edge PROTO((basic_block, rtx, int));
282 static void mark_critical_edges PROTO((void));
283
284 static void commit_one_edge_insertion PROTO((edge));
285
286 static void delete_unreachable_blocks PROTO((void));
287 static void delete_eh_regions PROTO((void));
288 static int can_delete_note_p PROTO((rtx));
289 static void flow_delete_insn_chain PROTO((rtx, rtx));
290 static int delete_block PROTO((basic_block));
291 static void expunge_block PROTO((basic_block));
292 static rtx flow_delete_insn PROTO((rtx));
293 static int can_delete_label_p PROTO((rtx));
294 static void merge_blocks_nomove PROTO((basic_block, basic_block));
295 static int merge_blocks PROTO((edge,basic_block,basic_block));
296 static void tidy_fallthru_edge PROTO((edge,basic_block,basic_block));
297 static void calculate_loop_depth PROTO((rtx));
298
299 static int set_noop_p PROTO((rtx));
300 static int noop_move_p PROTO((rtx));
301 static void notice_stack_pointer_modification PROTO ((rtx, rtx));
302 static void record_volatile_insns PROTO((rtx));
303 static void mark_regs_live_at_end PROTO((regset));
304 static void life_analysis_1 PROTO((rtx, int, int));
305 static void init_regset_vector PROTO ((regset *, int,
306 struct obstack *));
307 static void propagate_block PROTO((regset, rtx, rtx, int,
308 regset, int, int));
309 static int insn_dead_p PROTO((rtx, regset, int, rtx));
310 static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
311 static void mark_set_regs PROTO((regset, regset, rtx,
312 rtx, regset));
313 static void mark_set_1 PROTO((regset, regset, rtx,
314 rtx, regset));
315 #ifdef AUTO_INC_DEC
316 static void find_auto_inc PROTO((regset, rtx, rtx));
317 static int try_pre_increment_1 PROTO((rtx));
318 static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
319 #endif
320 static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
321 void dump_flow_info PROTO((FILE *));
322 static void dump_edge_info PROTO((FILE *, edge, int));
323
324 static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
325 static int_list_ptr add_int_list_node PROTO ((int_list_block **,
326 int_list **, int));
327
328 static void add_pred_succ PROTO ((int, int, int_list_ptr *,
329 int_list_ptr *, int *, int *));
330
331 static void count_reg_sets_1 PROTO ((rtx));
332 static void count_reg_sets PROTO ((rtx));
333 static void count_reg_references PROTO ((rtx));
334 static void notice_stack_pointer_modification PROTO ((rtx, rtx));
335 static void invalidate_mems_from_autoinc PROTO ((rtx));
336 static void maybe_remove_dead_notes PROTO ((rtx, rtx, rtx, rtx,
337 rtx, rtx));
338 static int maybe_add_dead_note_use PROTO ((rtx, rtx));
339 static int maybe_add_dead_note PROTO ((rtx, rtx, rtx));
340 static int sets_reg_or_subreg PROTO ((rtx, rtx));
341 static void update_n_sets PROTO ((rtx, int));
342 static void new_insn_dead_notes PROTO ((rtx, rtx, rtx, rtx, rtx, rtx));
343 void verify_flow_info PROTO ((void));
344 \f
345 /* Find basic blocks of the current function.
346 F is the first insn of the function and NREGS the number of register
347 numbers in use. */
348
349 void
350 find_basic_blocks (f, nregs, file, do_cleanup)
351 rtx f;
352 int nregs ATTRIBUTE_UNUSED;
353 FILE *file ATTRIBUTE_UNUSED;
354 int do_cleanup;
355 {
356 rtx *bb_eh_end;
357 int max_uid;
358
359 /* Flush out existing data. */
360 if (basic_block_info != NULL)
361 {
362 int i;
363
364 clear_edges ();
365
366 /* Clear bb->aux on all extant basic blocks. We'll use this as a
367 tag for reuse during create_basic_block, just in case some pass
368 copies around basic block notes improperly. */
369 for (i = 0; i < n_basic_blocks; ++i)
370 BASIC_BLOCK (i)->aux = NULL;
371
372 VARRAY_FREE (basic_block_info);
373 }
374
375 n_basic_blocks = count_basic_blocks (f);
376
377 /* Size the basic block table. The actual structures will be allocated
378 by find_basic_blocks_1, since we want to keep the structure pointers
379 stable across calls to find_basic_blocks. */
380 /* ??? This whole issue would be much simpler if we called find_basic_blocks
381 exactly once, and thereafter we don't have a single long chain of
382 instructions at all until close to the end of compilation when we
383 actually lay them out. */
384
385 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
386
387 /* An array to record the active exception region at the end of each
388 basic block. It is filled in by find_basic_blocks_1 for make_edges. */
389 bb_eh_end = (rtx *) alloca (n_basic_blocks * sizeof (rtx));
390
391 label_value_list = find_basic_blocks_1 (f, bb_eh_end);
392
393 /* Record the block to which an insn belongs. */
394 /* ??? This should be done another way, by which (perhaps) a label is
395 tagged directly with the basic block that it starts. It is used for
396 more than that currently, but IMO that is the only valid use. */
397
398 max_uid = get_max_uid ();
399 #ifdef AUTO_INC_DEC
400 /* Leave space for insns life_analysis makes in some cases for auto-inc.
401 These cases are rare, so we don't need too much space. */
402 max_uid += max_uid / 10;
403 #endif
404
405 VARRAY_BB_INIT (basic_block_for_insn, max_uid, "basic_block_for_insn");
406 compute_bb_for_insn (basic_block_for_insn, max_uid);
407
408 /* Discover the edges of our cfg. */
409
410 make_edges (label_value_list, bb_eh_end);
411
412 /* Delete unreachable blocks. */
413
414 if (do_cleanup)
415 delete_unreachable_blocks ();
416
417 /* Mark critical edges. */
418
419 mark_critical_edges ();
420
421 /* Discover the loop depth at the start of each basic block to aid
422 register allocation. */
423 calculate_loop_depth (f);
424
425 /* Kill the data we won't maintain. */
426 label_value_list = 0;
427
428 #ifdef ENABLE_CHECKING
429 verify_flow_info ();
430 #endif
431 }
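
/* Usage sketch (illustrative; real callers such as rest_of_compilation
   may pass different arguments): after emitting or rearranging insns, a
   pass can rebuild the CFG with

	find_basic_blocks (get_insns (), max_reg_num (), NULL, 1);

   where the dump file argument is currently unused and a nonzero
   DO_CLEANUP asks for unreachable blocks to be deleted.  */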
432
433 /* Count the basic blocks of the function. */
434
435 static int
436 count_basic_blocks (f)
437 rtx f;
438 {
439 register rtx insn;
440 register RTX_CODE prev_code;
441 register int count = 0;
442 int eh_region = 0;
443 int call_had_abnormal_edge = 0;
444 rtx prev_call = NULL_RTX;
445
446 prev_code = JUMP_INSN;
447 for (insn = f; insn; insn = NEXT_INSN (insn))
448 {
449 register RTX_CODE code = GET_CODE (insn);
450
451 if (code == CODE_LABEL
452 || (GET_RTX_CLASS (code) == 'i'
453 && (prev_code == JUMP_INSN
454 || prev_code == BARRIER
455 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
456 {
457 count++;
458
459 /* If the previous insn was a call that did not create an
460 abnormal edge, we want to add a nop so that the CALL_INSN
461 itself is not at basic_block_end. This allows us to
462 easily distinguish between normal calls and those which
463 create abnormal edges in the flow graph. */
464
465 if (count > 0 && prev_call != 0 && !call_had_abnormal_edge)
466 {
467 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
468 emit_insn_after (nop, prev_call);
469 }
470 }
471
472 /* Record whether this call created an edge. */
473 if (code == CALL_INSN)
474 {
475 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
476 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
477 prev_call = insn;
478 call_had_abnormal_edge = 0;
479
480 /* If there is a specified EH region, we have an edge. */
481 if (eh_region && region > 0)
482 call_had_abnormal_edge = 1;
483 else
484 {
485 /* If there is a nonlocal goto label and the specified
486 region number isn't -1, we have an edge. (0 means
487 no throw, but might have a nonlocal goto). */
488 if (nonlocal_goto_handler_labels && region >= 0)
489 call_had_abnormal_edge = 1;
490 }
491 }
492 else if (code != NOTE)
493 prev_call = NULL_RTX;
494
495 if (code != NOTE)
496 prev_code = code;
497 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
498 ++eh_region;
499 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
500 --eh_region;
501
502 }
503
504 /* The rest of the compiler works a bit more smoothly when we don't have to
505 check for the edge case of do-nothing functions with no basic blocks. */
506 if (count == 0)
507 {
508 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
509 count = 1;
510 }
511
512 return count;
513 }
514
515 /* Find all basic blocks of the function whose first insn is F.
516 Store the correct data in the tables that describe the basic blocks,
517 set up the chains of references for each CODE_LABEL, and
518 delete any entire basic blocks that cannot be reached.
519
520 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
521 that are otherwise unreachable may be reachable with a non-local goto.
522
523 BB_EH_END is an array in which we record the list of exception regions
524 active at the end of every basic block. */
525
526 static rtx
527 find_basic_blocks_1 (f, bb_eh_end)
528 rtx f;
529 rtx *bb_eh_end;
530 {
531 register rtx insn, next;
532 int call_has_abnormal_edge = 0;
533 int i = 0;
534 rtx bb_note = NULL_RTX;
535 rtx eh_list = NULL_RTX;
536 rtx label_value_list = NULL_RTX;
537 rtx head = NULL_RTX;
538 rtx end = NULL_RTX;
539
540 /* We process the instructions in a slightly different way than we did
541 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
542 closed out the previous block, so that it gets attached at the proper
543 place. Since this form should be equivalent to the previous,
544 find_basic_blocks_0 continues to use the old form as a check. */
545
546 for (insn = f; insn; insn = next)
547 {
548 enum rtx_code code = GET_CODE (insn);
549
550 next = NEXT_INSN (insn);
551
552 if (code == CALL_INSN)
553 {
554 /* Record whether this call created an edge. */
555 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
556 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
557 call_has_abnormal_edge = 0;
558
559 /* If there is an EH region, we have an edge. */
560 if (eh_list && region > 0)
561 call_has_abnormal_edge = 1;
562 else
563 {
564 /* If there is a nonlocal goto label and the specified
565 region number isn't -1, we have an edge. (0 means
566 no throw, but might have a nonlocal goto). */
567 if (nonlocal_goto_handler_labels && region >= 0)
568 call_has_abnormal_edge = 1;
569 }
570 }
571
572 switch (code)
573 {
574 case NOTE:
575 {
576 int kind = NOTE_LINE_NUMBER (insn);
577
578 /* Keep a LIFO list of the currently active exception notes. */
579 if (kind == NOTE_INSN_EH_REGION_BEG)
580 eh_list = gen_rtx_INSN_LIST (VOIDmode, insn, eh_list);
581 else if (kind == NOTE_INSN_EH_REGION_END)
582 eh_list = XEXP (eh_list, 1);
583
584 /* Look for basic block notes with which to keep the
585 basic_block_info pointers stable. Unthread the note now;
586 we'll put it back at the right place in create_basic_block.
587 Or not at all if we've already found a note in this block. */
588 else if (kind == NOTE_INSN_BASIC_BLOCK)
589 {
590 if (bb_note == NULL_RTX)
591 bb_note = insn;
592 next = flow_delete_insn (insn);
593 }
594
595 break;
596 }
597
598 case CODE_LABEL:
599 /* A basic block starts at a label. If we've closed one off due
600 to a barrier or some such, no need to do it again. */
601 if (head != NULL_RTX)
602 {
603 /* While we now have edge lists with which other portions of
604 the compiler might determine a call ending a basic block
605 does not imply an abnormal edge, it will be a bit before
606 everything can be updated. So continue to emit a noop at
607 the end of such a block. */
608 if (GET_CODE (end) == CALL_INSN)
609 {
610 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
611 end = emit_insn_after (nop, end);
612 }
613
614 bb_eh_end[i] = eh_list;
615 create_basic_block (i++, head, end, bb_note);
616 bb_note = NULL_RTX;
617 }
618 head = end = insn;
619 break;
620
621 case JUMP_INSN:
622 /* A basic block ends at a jump. */
623 if (head == NULL_RTX)
624 head = insn;
625 else
626 {
627 /* ??? Make a special check for table jumps. The way this
628 happens is truly and amazingly gross. We are about to
629 create a basic block that contains just a code label and
630 an addr*vec jump insn. Worse, an addr_diff_vec creates
631 its own natural loop.
632
633 Prevent this bit of brain damage, pasting things together
634 correctly in make_edges.
635
636 The correct solution involves emitting the table directly
637 on the tablejump instruction as a note, or JUMP_LABEL. */
638
639 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
640 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
641 {
642 head = end = NULL;
643 n_basic_blocks--;
644 break;
645 }
646 }
647 end = insn;
648 goto new_bb_inclusive;
649
650 case BARRIER:
651 /* A basic block ends at a barrier. It may be that an unconditional
652 jump already closed the basic block -- no need to do it again. */
653 if (head == NULL_RTX)
654 break;
655
656 /* While we now have edge lists with which other portions of the
657 compiler might determine a call ending a basic block does not
658 imply an abnormal edge, it will be a bit before everything can
659 be updated. So continue to emit a noop at the end of such a
660 block. */
661 if (GET_CODE (end) == CALL_INSN)
662 {
663 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
664 end = emit_insn_after (nop, end);
665 }
666 goto new_bb_exclusive;
667
668 case CALL_INSN:
669 /* A basic block ends at a call that can either throw or
670 do a non-local goto. */
671 if (call_has_abnormal_edge)
672 {
673 new_bb_inclusive:
674 if (head == NULL_RTX)
675 head = insn;
676 end = insn;
677
678 new_bb_exclusive:
679 bb_eh_end[i] = eh_list;
680 create_basic_block (i++, head, end, bb_note);
681 head = end = NULL_RTX;
682 bb_note = NULL_RTX;
683 break;
684 }
685 /* FALLTHRU */
686
687 default:
688 if (GET_RTX_CLASS (code) == 'i')
689 {
690 if (head == NULL_RTX)
691 head = insn;
692 end = insn;
693 }
694 break;
695 }
696
697 if (GET_RTX_CLASS (code) == 'i')
698 {
699 rtx note;
700
701 /* Make a list of all labels referred to other than by jumps
702 (which just don't have the REG_LABEL notes).
703
704 Make a special exception for labels followed by an ADDR*VEC,
705 as this would be a part of the tablejump setup code.
706
707 Make a special exception for the eh_return_stub_label, which
708 we know isn't part of any otherwise visible control flow. */
709
710 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
711 if (REG_NOTE_KIND (note) == REG_LABEL)
712 {
713 rtx lab = XEXP (note, 0), next;
714
715 if (lab == eh_return_stub_label)
716 ;
717 else if ((next = next_nonnote_insn (lab)) != NULL
718 && GET_CODE (next) == JUMP_INSN
719 && (GET_CODE (PATTERN (next)) == ADDR_VEC
720 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
721 ;
722 else
723 label_value_list
724 = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0),
725 label_value_list);
726 }
727 }
728 }
729
730 if (head != NULL_RTX)
731 {
732 bb_eh_end[i] = eh_list;
733 create_basic_block (i++, head, end, bb_note);
734 }
735
736 if (i != n_basic_blocks)
737 abort ();
738
739 return label_value_list;
740 }
741
742 /* Create a new basic block consisting of the instructions between
743 HEAD and END inclusive. Reuses the note and basic block struct
744 in BB_NOTE, if any. */
745
746 static void
747 create_basic_block (index, head, end, bb_note)
748 int index;
749 rtx head, end, bb_note;
750 {
751 basic_block bb;
752
753 if (bb_note
754 && ! RTX_INTEGRATED_P (bb_note)
755 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
756 && bb->aux == NULL)
757 {
758 /* If we found an existing note, thread it back onto the chain. */
759
760 if (GET_CODE (head) == CODE_LABEL)
761 add_insn_after (bb_note, head);
762 else
763 {
764 add_insn_before (bb_note, head);
765 head = bb_note;
766 }
767 }
768 else
769 {
770 /* Otherwise we must create a note and a basic block structure.
771 Since we allow basic block structs in rtl, give the struct
772 the same lifetime by allocating it off the function obstack
773 rather than using malloc. */
774
775 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
776 memset (bb, 0, sizeof (*bb));
777
778 if (GET_CODE (head) == CODE_LABEL)
779 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
780 else
781 {
782 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
783 head = bb_note;
784 }
785 NOTE_BASIC_BLOCK (bb_note) = bb;
786 }
787
788 /* Always include the bb note in the block. */
789 if (NEXT_INSN (end) == bb_note)
790 end = bb_note;
791
792 bb->head = head;
793 bb->end = end;
794 bb->index = index;
795 BASIC_BLOCK (index) = bb;
796
797 /* Tag the block so that we know it has been used when considering
798 other basic block notes. */
799 bb->aux = bb;
800 }
801 \f
802 /* Records the basic block struct in BB_FOR_INSN, for every instruction
803 indexed by INSN_UID. MAX is the size of the array. */
804
805 static void
806 compute_bb_for_insn (bb_for_insn, max)
807 varray_type bb_for_insn;
808 int max;
809 {
810 int i;
811
812 for (i = 0; i < n_basic_blocks; ++i)
813 {
814 basic_block bb = BASIC_BLOCK (i);
815 rtx insn, end;
816
817 end = bb->end;
818 insn = bb->head;
819 while (1)
820 {
821 int uid = INSN_UID (insn);
822 if (uid < max)
823 VARRAY_BB (bb_for_insn, uid) = bb;
824 if (insn == end)
825 break;
826 insn = NEXT_INSN (insn);
827 }
828 }
829 }
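
/* Once this mapping exists, BLOCK_FOR_INSN (insn) yields the block
   containing INSN, and set_block_for_insn must be used to keep the
   mapping current when insns are moved (see commit_one_edge_insertion
   below).  A one-line sketch, assuming INSN already belongs to a block:

	basic_block bb = BLOCK_FOR_INSN (insn);
*/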
830
831 /* Free the memory associated with the edge structures. */
832
833 static void
834 clear_edges ()
835 {
836 int i;
837 edge n, e;
838
839 for (i = 0; i < n_basic_blocks; ++i)
840 {
841 basic_block bb = BASIC_BLOCK (i);
842
843 for (e = bb->succ; e ; e = n)
844 {
845 n = e->succ_next;
846 free (e);
847 }
848
849 bb->succ = 0;
850 bb->pred = 0;
851 }
852
853 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
854 {
855 n = e->succ_next;
856 free (e);
857 }
858
859 ENTRY_BLOCK_PTR->succ = 0;
860 EXIT_BLOCK_PTR->pred = 0;
861 }
862
863 /* Identify the edges between basic blocks.
864
865 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
866 that are otherwise unreachable may be reachable with a non-local goto.
867
868 BB_EH_END is an array indexed by basic block number in which we record
869 the list of exception regions active at the end of the basic block. */
870
871 static void
872 make_edges (label_value_list, bb_eh_end)
873 rtx label_value_list;
874 rtx *bb_eh_end;
875 {
876 int i;
877 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
878
879 /* Assume no computed jump; revise as we create edges. */
880 current_function_has_computed_jump = 0;
881
882 /* By nature of the way these get numbered, block 0 is always the entry. */
883 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
884
885 for (i = 0; i < n_basic_blocks; ++i)
886 {
887 basic_block bb = BASIC_BLOCK (i);
888 rtx insn, x, eh_list;
889 enum rtx_code code;
890 int force_fallthru = 0;
891
892 /* If we have asynchronous exceptions, scan the notes for all exception
893 regions active in the block. In the normal case, we only need the
894 one active at the end of the block, which is bb_eh_end[i]. */
895
896 eh_list = bb_eh_end[i];
897 if (asynchronous_exceptions)
898 {
899 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
900 {
901 if (GET_CODE (insn) == NOTE
902 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
903 eh_list = gen_rtx_INSN_LIST (VOIDmode, insn, eh_list);
904 }
905 }
906
907 /* Now examine the last instruction of the block, and discover the
908 ways we can leave the block. */
909
910 insn = bb->end;
911 code = GET_CODE (insn);
912
913 /* A branch. */
914 if (code == JUMP_INSN)
915 {
916 rtx tmp;
917
918 /* ??? Recognize a tablejump and do the right thing. */
919 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
920 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
921 && GET_CODE (tmp) == JUMP_INSN
922 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
923 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
924 {
925 rtvec vec;
926 int j;
927
928 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
929 vec = XVEC (PATTERN (tmp), 0);
930 else
931 vec = XVEC (PATTERN (tmp), 1);
932
933 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
934 make_label_edge (bb, XEXP (RTVEC_ELT (vec, j), 0), 0);
935
936 /* Some targets (e.g., ARM) emit a conditional jump that also
937 contains the out-of-range target. Scan for these and
938 add an edge if necessary. */
939 if ((tmp = single_set (insn)) != NULL
940 && SET_DEST (tmp) == pc_rtx
941 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
942 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
943 make_label_edge (bb, XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
944
945 #ifdef CASE_DROPS_THROUGH
946 /* Silly VAXen. The ADDR_VEC is going to be in the way of
947 us naturally detecting fallthru into the next block. */
948 force_fallthru = 1;
949 #endif
950 }
951
952 /* If this is a computed jump, then mark it as reaching
953 everything on the label_value_list and forced_labels list. */
954 else if (computed_jump_p (insn))
955 {
956 current_function_has_computed_jump = 1;
957
958 for (x = label_value_list; x; x = XEXP (x, 1))
959 make_label_edge (bb, XEXP (x, 0), EDGE_ABNORMAL);
960
961 for (x = forced_labels; x; x = XEXP (x, 1))
962 make_label_edge (bb, XEXP (x, 0), EDGE_ABNORMAL);
963 }
964
965 /* A return instruction creates an edge out to the exit block. */
966 else if (returnjump_p (insn))
967 make_edge (bb, EXIT_BLOCK_PTR, 0);
968
969 /* Otherwise, we have a plain conditional or unconditional jump. */
970 else
971 {
972 if (! JUMP_LABEL (insn))
973 abort ();
974 make_label_edge (bb, JUMP_LABEL (insn), 0);
975 }
976 }
977
978 /* If this is a CALL_INSN, then mark it as reaching the active EH
979 handler for this CALL_INSN. If we're handling asynchronous
980 exceptions then any insn can reach any of the active handlers.
981
982 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
983
984 if (code == CALL_INSN || asynchronous_exceptions)
985 {
986 int is_call = (code == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
987 handler_info **handler_list;
988 int eh_region = -1;
989 int num;
990
991 if (eh_list)
992 eh_region = NOTE_BLOCK_NUMBER (XEXP (eh_list, 0));
993
994 num = reachable_handlers (eh_region, eh_nest_info,
995 insn, &handler_list);
996 for ( ; num > 0; num--)
997 {
998 make_label_edge (bb, handler_list[num - 1]->handler_label,
999 EDGE_ABNORMAL | EDGE_EH | is_call);
1000 }
1001
1002 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1003 {
1004 /* ??? This could be made smarter: in some cases it's possible
1005 to tell that certain calls will not do a nonlocal goto.
1006
1007 For example, if the nested functions that do the nonlocal
1008 gotos do not have their addresses taken, then only calls to
1009 those functions or to other nested functions that use them
1010 could possibly do nonlocal gotos. */
1011 /* We do know that a REG_EH_REGION note with a value less
1012 than 0 is guaranteed not to perform a non-local goto. */
1013 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1014 if (!note || XINT (XEXP (note, 0), 0) >= 0)
1015 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1016 make_label_edge (bb, XEXP (x, 0),
1017 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1018 }
1019 }
1020
1021 /* We know something about the structure of the function __throw in
1022 libgcc2.c. It is the only function that ever contains eh_stub
1023 labels. It modifies its return address so that the last block
1024 returns to one of the eh_stub labels within it. So we have to
1025 make additional edges in the flow graph. */
1026 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1027 make_label_edge (bb, eh_return_stub_label, EDGE_EH);
1028
1029 /* Find out if we can drop through to the next block. */
1030 insn = next_nonnote_insn (insn);
1031 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1032 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1033 else if (i + 1 < n_basic_blocks)
1034 {
1035 rtx tmp = BLOCK_HEAD (i + 1);
1036 if (GET_CODE (tmp) == NOTE)
1037 tmp = next_nonnote_insn (tmp);
1038 if (force_fallthru || insn == tmp)
1039 make_edge (bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1040 }
1041 }
1042 free_eh_nesting_info (eh_nest_info);
1043 }
1044
1045 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1046 about the edge that is accumulated between calls. */
1047
1048 static void
1049 make_edge (src, dst, flags)
1050 basic_block src, dst;
1051 int flags;
1052 {
1053 edge e;
1054
1055 /* Make sure we don't add duplicate edges. */
1056
1057 for (e = src->succ; e ; e = e->succ_next)
1058 if (e->dest == dst)
1059 {
1060 e->flags |= flags;
1061 return;
1062 }
1063
1064 e = (edge) xcalloc (1, sizeof (*e));
1065
1066 e->succ_next = src->succ;
1067 e->pred_next = dst->pred;
1068 e->src = src;
1069 e->dest = dst;
1070 e->flags = flags;
1071
1072 src->succ = e;
1073 dst->pred = e;
1074 }
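
/* The pred/succ lists built here are simple singly linked lists threaded
   through the edge structures themselves.  A typical traversal (with E
   and TARGET as illustrative names) mirrors the duplicate-edge check
   above:

	for (e = bb->succ; e ; e = e->succ_next)
	  if (e->dest == target)
	    break;
*/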
1075
1076 /* Create an edge from a basic block to a label. */
1077
1078 static void
1079 make_label_edge (src, label, flags)
1080 basic_block src;
1081 rtx label;
1082 int flags;
1083 {
1084 if (GET_CODE (label) != CODE_LABEL)
1085 abort ();
1086
1087 /* If the label was never emitted, this insn is junk, but avoid a
1088 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1089 as a result of a syntax error and a diagnostic has already been
1090 printed. */
1091
1092 if (INSN_UID (label) == 0)
1093 return;
1094
1095 make_edge (src, BLOCK_FOR_INSN (label), flags);
1096 }
1097
1098 /* Identify critical edges and set the bits appropriately. */
1099 static void
1100 mark_critical_edges ()
1101 {
1102 int i, n = n_basic_blocks;
1103 basic_block bb;
1104
1105 /* We begin with the entry block. This is not terribly important now,
1106 but could be if a front end (Fortran) implemented alternate entry
1107 points. */
1108 bb = ENTRY_BLOCK_PTR;
1109 i = -1;
1110
1111 while (1)
1112 {
1113 edge e;
1114
1115 /* (1) Critical edges must have a source with multiple successors. */
1116 if (bb->succ && bb->succ->succ_next)
1117 {
1118 for (e = bb->succ; e ; e = e->succ_next)
1119 {
1120 /* (2) Critical edges must have a destination with multiple
1121 predecessors. Note that we know there is at least one
1122 predecessor -- the edge we followed to get here. */
1123 if (e->dest->pred->pred_next)
1124 e->flags |= EDGE_CRITICAL;
1125 else
1126 e->flags &= ~EDGE_CRITICAL;
1127 }
1128 }
1129 else
1130 {
1131 for (e = bb->succ; e ; e = e->succ_next)
1132 e->flags &= ~EDGE_CRITICAL;
1133 }
1134
1135 if (++i >= n)
1136 break;
1137 bb = BASIC_BLOCK (i);
1138 }
1139 }
1140 \f
1141 /* Split a (typically critical) edge. Return the new block.
1142 Abort on abnormal edges.
1143
1144 ??? The code generally expects to be called on critical edges.
1145 The case of a block ending in an unconditional jump to a
1146 block with multiple predecessors is not handled optimally. */
1147
1148 basic_block
1149 split_edge (edge_in)
1150 edge edge_in;
1151 {
1152 basic_block old_pred, bb, old_succ;
1153 edge edge_out;
1154 rtx bb_note;
1155 int i;
1156
1157 /* Abnormal edges cannot be split. */
1158 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1159 abort ();
1160
1161 old_pred = edge_in->src;
1162 old_succ = edge_in->dest;
1163
1164 /* Remove the existing edge from the destination's pred list. */
1165 {
1166 edge *pp;
1167 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1168 continue;
1169 *pp = edge_in->pred_next;
1170 edge_in->pred_next = NULL;
1171 }
1172
1173 /* Create the new structures. */
1174 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1175 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1176
1177 memset (bb, 0, sizeof (*bb));
1178 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
1179 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1180 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1181
1182 /* ??? This info is likely going to be out of date very soon. */
1183 CLEAR_REG_SET (bb->local_set);
1184 if (old_succ->global_live_at_start)
1185 {
1186 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1187 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1188 }
1189 else
1190 {
1191 CLEAR_REG_SET (bb->global_live_at_start);
1192 CLEAR_REG_SET (bb->global_live_at_end);
1193 }
1194
1195 /* Wire them up. */
1196 bb->pred = edge_in;
1197 bb->succ = edge_out;
1198
1199 edge_in->dest = bb;
1200 edge_in->flags &= ~EDGE_CRITICAL;
1201
1202 edge_out->pred_next = old_succ->pred;
1203 edge_out->succ_next = NULL;
1204 edge_out->src = bb;
1205 edge_out->dest = old_succ;
1206 edge_out->flags = EDGE_FALLTHRU;
1207 edge_out->probability = REG_BR_PROB_BASE;
1208
1209 old_succ->pred = edge_out;
1210
1211 /* Tricky case -- if there existed a fallthru into the successor
1212 (and we're not it) we must add a new unconditional jump around
1213 the new block we're actually interested in.
1214
1215 Further, if that edge is critical, this means a second new basic
1216 block must be created to hold it. In order to simplify correct
1217 insn placement, do this before we touch the existing basic block
1218 ordering for the block we were really wanting. */
1219 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1220 {
1221 edge e;
1222 for (e = edge_out->pred_next; e ; e = e->pred_next)
1223 if (e->flags & EDGE_FALLTHRU)
1224 break;
1225
1226 if (e)
1227 {
1228 basic_block jump_block;
1229 rtx pos;
1230
1231 if ((e->flags & EDGE_CRITICAL) == 0)
1232 {
1233 /* Non critical -- we can simply add a jump to the end
1234 of the existing predecessor. */
1235 jump_block = e->src;
1236 }
1237 else
1238 {
1239 /* We need a new block to hold the jump. The simplest
1240 way to do the bulk of the work here is to recursively
1241 call ourselves. */
1242 jump_block = split_edge (e);
1243 e = jump_block->succ;
1244 }
1245
1246 /* Now add the jump insn ... */
1247 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1248 jump_block->end);
1249 jump_block->end = pos;
1250 emit_barrier_after (pos);
1251
1252 /* ... let jump know that label is in use, ... */
1253 JUMP_LABEL (pos) = old_succ->head;
1254 ++LABEL_NUSES (old_succ->head);
1255
1256 /* ... and clear fallthru on the outgoing edge. */
1257 e->flags &= ~EDGE_FALLTHRU;
1258
1259 /* Continue splitting the interesting edge. */
1260 }
1261 }
1262
1263 /* Place the new block just in front of the successor. */
1264 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1265 for (i = n_basic_blocks - 1; i > old_succ->index; --i)
1266 {
1267 basic_block tmp = BASIC_BLOCK (i - 1);
1268 BASIC_BLOCK (i) = tmp;
1269 tmp->index = i;
1270 }
1271 BASIC_BLOCK (i) = bb;
1272 bb->index = i;
1273
1274 /* Create the basic block note. */
1275 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1276 NOTE_BASIC_BLOCK (bb_note) = bb;
1277 bb->head = bb->end = bb_note;
1278
1279 /* Not quite simple -- for non-fallthru edges, we must adjust the
1280 predecessor's jump instruction to target our new block. */
1281 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1282 {
1283 rtx tmp, insn = old_pred->end;
1284 rtx old_label = old_succ->head;
1285 rtx new_label = gen_label_rtx ();
1286
1287 if (GET_CODE (insn) != JUMP_INSN)
1288 abort ();
1289
1290 /* ??? Recognize a tablejump and adjust all matching cases. */
1291 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1292 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1293 && GET_CODE (tmp) == JUMP_INSN
1294 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1295 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1296 {
1297 rtvec vec;
1298 int j;
1299
1300 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1301 vec = XVEC (PATTERN (tmp), 0);
1302 else
1303 vec = XVEC (PATTERN (tmp), 1);
1304
1305 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1306 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1307 {
1308 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1309 --LABEL_NUSES (old_label);
1310 ++LABEL_NUSES (new_label);
1311 }
1312 }
1313 else
1314 {
1315 /* This would have indicated an abnormal edge. */
1316 if (computed_jump_p (insn))
1317 abort ();
1318
1319 /* A return instruction can't be redirected. */
1320 if (returnjump_p (insn))
1321 abort ();
1322
1323 /* If the insn doesn't go where we think, we're confused. */
1324 if (JUMP_LABEL (insn) != old_label)
1325 abort ();
1326
1327 redirect_jump (insn, new_label);
1328 }
1329
1330 emit_label_before (new_label, bb_note);
1331 bb->head = new_label;
1332 }
1333
1334 return bb;
1335 }
1336
1337 /* Queue instructions for insertion on an edge between two basic blocks.
1338 The new instructions and basic blocks (if any) will not appear in the
1339 CFG until commit_edge_insertions is called. */
1340
1341 void
1342 insert_insn_on_edge (pattern, e)
1343 rtx pattern;
1344 edge e;
1345 {
1346 /* We cannot insert instructions on an abnormal critical edge.
1347 It will be easier to find the culprit if we die now. */
1348 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1349 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1350 abort ();
1351
1352 if (e->insns == NULL_RTX)
1353 start_sequence ();
1354 else
1355 push_to_sequence (e->insns);
1356
1357 emit_insn (pattern);
1358
1359 e->insns = get_insns ();
1360 end_sequence ();
1361 }
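
/* A usage sketch (illustrative, not taken from a real caller): to place a
   register copy on edge E, queue the insn now and commit all queued
   insertions in one pass later.  DEST_REG and SRC_REG are assumed to be
   suitable REG rtxes, and gen_move_insn is only one way to build the
   pattern.

	insert_insn_on_edge (gen_move_insn (dest_reg, src_reg), e);
	...
	commit_edge_insertions ();

   commit_edge_insertions may create new basic blocks via split_edge.  */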
1362
1363 /* Update the CFG for the instructions queued on edge E. */
1364
1365 static void
1366 commit_one_edge_insertion (e)
1367 edge e;
1368 {
1369 rtx before = NULL_RTX, after = NULL_RTX, tmp;
1370 basic_block bb;
1371
1372 /* Figure out where to put these things. If the destination has
1373 one predecessor, insert there. Except for the exit block. */
1374 if (e->dest->pred->pred_next == NULL
1375 && e->dest != EXIT_BLOCK_PTR)
1376 {
1377 bb = e->dest;
1378
1379 /* Get the location correct wrt a code label, and "nice" wrt
1380 a basic block note, and before everything else. */
1381 tmp = bb->head;
1382 if (GET_CODE (tmp) == CODE_LABEL)
1383 tmp = NEXT_INSN (tmp);
1384 if (GET_CODE (tmp) == NOTE
1385 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1386 tmp = NEXT_INSN (tmp);
1387 if (tmp == bb->head)
1388 before = tmp;
1389 else
1390 after = PREV_INSN (tmp);
1391 }
1392
1393 /* If the source has one successor and the edge is not abnormal,
1394 insert there. Except for the entry block. */
1395 else if ((e->flags & EDGE_ABNORMAL) == 0
1396 && e->src->succ->succ_next == NULL
1397 && e->src != ENTRY_BLOCK_PTR)
1398 {
1399 bb = e->src;
1400 if (GET_CODE (bb->end) == JUMP_INSN)
1401 {
1402 /* ??? Is it possible to wind up with non-simple jumps? Perhaps
1403 a jump with delay slots already filled? */
1404 if (! simplejump_p (bb->end))
1405 abort ();
1406
1407 before = bb->end;
1408 }
1409 else
1410 {
1411 /* We'd better be fallthru, or we've lost track of what's what. */
1412 if ((e->flags & EDGE_FALLTHRU) == 0)
1413 abort ();
1414
1415 after = bb->end;
1416 }
1417 }
1418
1419 /* Otherwise we must split the edge. */
1420 else
1421 {
1422 bb = split_edge (e);
1423 after = bb->end;
1424 }
1425
1426 /* Now that we've found the spot, do the insertion. */
1427 tmp = e->insns;
1428 e->insns = NULL_RTX;
1429
1430 /* Set the new block number for these insns, if structure is allocated. */
1431 if (basic_block_for_insn)
1432 {
1433 rtx i;
1434 for (i = tmp; i != NULL_RTX; i = NEXT_INSN (i))
1435 set_block_for_insn (i, bb);
1436 }
1437
1438 if (before)
1439 {
1440 emit_insns_before (tmp, before);
1441 if (before == bb->head)
1442 bb->head = tmp;
1443 }
1444 else
1445 {
1446 tmp = emit_insns_after (tmp, after);
1447 if (after == bb->end)
1448 bb->end = tmp;
1449 }
1450 }
1451
1452 /* Update the CFG for all queued instructions. */
1453
1454 void
1455 commit_edge_insertions ()
1456 {
1457 int i;
1458 basic_block bb;
1459
1460 i = -1;
1461 bb = ENTRY_BLOCK_PTR;
1462 while (1)
1463 {
1464 edge e, next;
1465
1466 for (e = bb->succ; e ; e = next)
1467 {
1468 next = e->succ_next;
1469 if (e->insns)
1470 commit_one_edge_insertion (e);
1471 }
1472
1473 if (++i >= n_basic_blocks)
1474 break;
1475 bb = BASIC_BLOCK (i);
1476 }
1477 }
1478 \f
1479 /* Delete all unreachable basic blocks. */
1480
1481 static void
1482 delete_unreachable_blocks ()
1483 {
1484 basic_block *worklist, *tos;
1485 int deleted_handler;
1486 edge e;
1487 int i, n;
1488
1489 n = n_basic_blocks;
1490 tos = worklist = (basic_block *) alloca (sizeof (basic_block) * n);
1491
1492 /* Use basic_block->aux as a marker. Clear them all. */
1493
1494 for (i = 0; i < n; ++i)
1495 BASIC_BLOCK (i)->aux = NULL;
1496
1497 /* Add our starting points to the worklist. Almost always there will
1498 be only one. It isn't inconceivable that we might one day directly
1499 support Fortran alternate entry points. */
1500
1501 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1502 {
1503 *tos++ = e->dest;
1504
1505 /* Mark the block with a handy non-null value. */
1506 e->dest->aux = e;
1507 }
1508
1509 /* Iterate: find everything reachable from what we've already seen. */
1510
1511 while (tos != worklist)
1512 {
1513 basic_block b = *--tos;
1514
1515 for (e = b->succ; e ; e = e->succ_next)
1516 if (!e->dest->aux)
1517 {
1518 *tos++ = e->dest;
1519 e->dest->aux = e;
1520 }
1521 }
1522
1523 /* Delete all unreachable basic blocks. Count down so that we don't
1524 interfere with the block renumbering that happens in delete_block. */
1525
1526 deleted_handler = 0;
1527
1528 for (i = n - 1; i >= 0; --i)
1529 {
1530 basic_block b = BASIC_BLOCK (i);
1531
1532 if (b->aux != NULL)
1533 /* This block was found. Tidy up the mark. */
1534 b->aux = NULL;
1535 else
1536 deleted_handler |= delete_block (b);
1537 }
1538
1539 /* Fix up edges that now fall through, or rather should now fall through
1540 but previously required a jump around now deleted blocks. Simplify
1541 the search by only examining blocks numerically adjacent, since this
1542 is how find_basic_blocks created them. */
1543
1544 for (i = 1; i < n_basic_blocks; ++i)
1545 {
1546 basic_block b = BASIC_BLOCK (i - 1);
1547 basic_block c = BASIC_BLOCK (i);
1548 edge s;
1549
1550 /* We care about simple conditional or unconditional jumps with
1551 a single successor.
1552
1553 If we had a conditional branch to the next instruction when
1554 find_basic_blocks was called, then there will only be one
1555 out edge for the block which ended with the conditional
1556 branch (since we do not create duplicate edges).
1557
1558 Furthermore, the edge will be marked as a fallthru because we
1559 merge the flags for the duplicate edges. So we do not want to
1560 check that the edge is not a FALLTHRU edge. */
1561 if ((s = b->succ) != NULL
1562 && s->succ_next == NULL
1563 && s->dest == c
1564 /* If the jump insn has side effects, we can't tidy the edge. */
1565 && (GET_CODE (b->end) != JUMP_INSN
1566 || onlyjump_p (b->end)))
1567 tidy_fallthru_edge (s, b, c);
1568 }
1569
1570 /* Attempt to merge blocks as made possible by edge removal. If a block
1571 has only one successor, and the successor has only one predecessor,
1572 they may be combined. */
1573
1574 for (i = 0; i < n_basic_blocks; )
1575 {
1576 basic_block c, b = BASIC_BLOCK (i);
1577 edge s;
1578
1579 /* A loop because chains of blocks might be combinable. */
1580 while ((s = b->succ) != NULL
1581 && s->succ_next == NULL
1582 && (s->flags & EDGE_EH) == 0
1583 && (c = s->dest) != EXIT_BLOCK_PTR
1584 && c->pred->pred_next == NULL
1585 /* If the jump insn has side effects, we can't kill the edge. */
1586 && (GET_CODE (b->end) != JUMP_INSN
1587 || onlyjump_p (b->end))
1588 && merge_blocks (s, b, c))
1589 continue;
1590
1591 /* Don't get confused by the index shift caused by deleting blocks. */
1592 i = b->index + 1;
1593 }
1594
1595 /* If we deleted an exception handler, we may have EH region begin/end
1596 blocks to remove as well. */
1597 if (deleted_handler)
1598 delete_eh_regions ();
1599 }
1600
1601 /* Find EH regions for which there is no longer a handler, and delete them. */
1602
1603 static void
1604 delete_eh_regions ()
1605 {
1606 rtx insn;
1607
1608 update_rethrow_references ();
1609
1610 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1611 if (GET_CODE (insn) == NOTE)
1612 {
1613 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1614 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1615 {
1616 int num = NOTE_EH_HANDLER (insn);
1617 /* A NULL handler indicates a region is no longer needed,
1618 as long as it isn't the target of a rethrow. */
1619 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1620 {
1621 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1622 NOTE_SOURCE_FILE (insn) = 0;
1623 }
1624 }
1625 }
1626 }
1627
1628 /* Return true if NOTE is not one of the ones that must be kept paired,
1629 so that we may simply delete them. */
1630
1631 static int
1632 can_delete_note_p (note)
1633 rtx note;
1634 {
1635 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1636 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1637 }
1638
1639 /* Unlink a chain of insns between START and FINISH, leaving notes
1640 that must be paired. */
1641
1642 static void
1643 flow_delete_insn_chain (start, finish)
1644 rtx start, finish;
1645 {
1646 /* Unchain the insns one by one. It would be quicker to delete all
1647 of these with a single unchaining, rather than one at a time, but
1648 we need to keep the NOTEs. */
1649
1650 rtx next;
1651
1652 while (1)
1653 {
1654 next = NEXT_INSN (start);
1655 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1656 ;
1657 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1658 ;
1659 else
1660 next = flow_delete_insn (start);
1661
1662 if (start == finish)
1663 break;
1664 start = next;
1665 }
1666 }
1667
1668 /* Delete the insns in a (non-live) block. We physically delete every
1669 non-deleted-note insn, and update the flow graph appropriately.
1670
1671 Return nonzero if we deleted an exception handler. */
1672
1673 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1674 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1675
1676 static int
1677 delete_block (b)
1678 basic_block b;
1679 {
1680 int deleted_handler = 0;
1681 rtx insn, end;
1682
1683 /* If the head of this block is a CODE_LABEL, then it might be the
1684 label for an exception handler which can't be reached.
1685
1686 We need to remove the label from the exception_handler_label list
1687 and remove the associated NOTE_INSN_EH_REGION_BEG and
1688 NOTE_INSN_EH_REGION_END notes. */
1689
1690 insn = b->head;
1691
1692 never_reached_warning (insn);
1693
1694 if (GET_CODE (insn) == CODE_LABEL)
1695 {
1696 rtx x, *prev = &exception_handler_labels;
1697
1698 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1699 {
1700 if (XEXP (x, 0) == insn)
1701 {
1702 /* Found a match, splice this label out of the EH label list. */
1703 *prev = XEXP (x, 1);
1704 XEXP (x, 1) = NULL_RTX;
1705 XEXP (x, 0) = NULL_RTX;
1706
1707 /* Remove the handler from all regions */
1708 remove_handler (insn);
1709 deleted_handler = 1;
1710 break;
1711 }
1712 prev = &XEXP (x, 1);
1713 }
1714
1715 /* This label may be referenced by code solely for its value, or
1716 referenced by static data, or something. We have determined
1717 that it is not reachable, but cannot delete the label itself.
1718 Save code space and continue to delete the balance of the block,
1719 along with properly updating the cfg. */
1720 if (!can_delete_label_p (insn))
1721 {
1722 /* If we've only got one of these, skip the whole deleting
1723 insns thing. */
1724 if (insn == b->end)
1725 goto no_delete_insns;
1726 insn = NEXT_INSN (insn);
1727 }
1728 }
1729
1730 /* Selectively unlink the insn chain. Include any BARRIER that may
1731 follow the basic block. */
1732 end = next_nonnote_insn (b->end);
1733 if (!end || GET_CODE (end) != BARRIER)
1734 end = b->end;
1735 flow_delete_insn_chain (insn, end);
1736
1737 no_delete_insns:
1738
1739 /* Remove the edges into and out of this block. Note that there may
1740 indeed be edges in, if we are removing an unreachable loop. */
1741 {
1742 edge e, next, *q;
1743
1744 for (e = b->pred; e ; e = next)
1745 {
1746 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1747 continue;
1748 *q = e->succ_next;
1749 next = e->pred_next;
1750 free (e);
1751 }
1752 for (e = b->succ; e ; e = next)
1753 {
1754 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1755 continue;
1756 *q = e->pred_next;
1757 next = e->succ_next;
1758 free (e);
1759 }
1760
1761 b->pred = NULL;
1762 b->succ = NULL;
1763 }
1764
1765 /* Remove the basic block from the array, and compact behind it. */
1766 expunge_block (b);
1767
1768 return deleted_handler;
1769 }
1770
1771 /* Remove block B from the basic block array and compact behind it. */
1772
1773 static void
1774 expunge_block (b)
1775 basic_block b;
1776 {
1777 int i, n = n_basic_blocks;
1778
1779 for (i = b->index; i + 1 < n; ++i)
1780 {
1781 basic_block x = BASIC_BLOCK (i + 1);
1782 BASIC_BLOCK (i) = x;
1783 x->index = i;
1784 }
1785
1786 basic_block_info->num_elements--;
1787 n_basic_blocks--;
1788 }
1789
1790 /* Delete INSN by patching it out. Return the next insn. */
1791
1792 static rtx
1793 flow_delete_insn (insn)
1794 rtx insn;
1795 {
1796 rtx prev = PREV_INSN (insn);
1797 rtx next = NEXT_INSN (insn);
1798
1799 PREV_INSN (insn) = NULL_RTX;
1800 NEXT_INSN (insn) = NULL_RTX;
1801
1802 if (prev)
1803 NEXT_INSN (prev) = next;
1804 if (next)
1805 PREV_INSN (next) = prev;
1806 else
1807 set_last_insn (prev);
1808
1809 if (GET_CODE (insn) == CODE_LABEL)
1810 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1811
1812 /* If deleting a jump, decrement the use count of the label. Deleting
1813 the label itself should happen in the normal course of block merging. */
1814 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1815 LABEL_NUSES (JUMP_LABEL (insn))--;
1816
1817 return next;
1818 }
1819
1820 /* True if a given label can be deleted. */
1821
1822 static int
1823 can_delete_label_p (label)
1824 rtx label;
1825 {
1826 rtx x;
1827
1828 if (LABEL_PRESERVE_P (label))
1829 return 0;
1830
1831 for (x = forced_labels; x ; x = XEXP (x, 1))
1832 if (label == XEXP (x, 0))
1833 return 0;
1834 for (x = label_value_list; x ; x = XEXP (x, 1))
1835 if (label == XEXP (x, 0))
1836 return 0;
1837 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
1838 if (label == XEXP (x, 0))
1839 return 0;
1840
1841 /* User declared labels must be preserved. */
1842 if (LABEL_NAME (label) != 0)
1843 return 0;
1844
1845 return 1;
1846 }
1847
1848 /* Blocks A and B are to be merged into a single block. The insns
1849 are already contiguous, hence `nomove'. */
1850
1851 static void
1852 merge_blocks_nomove (a, b)
1853 basic_block a, b;
1854 {
1855 edge e;
1856 rtx b_head, b_end, a_end;
1857 int b_empty = 0;
1858
1859 /* If there was a CODE_LABEL beginning B, delete it. */
1860 b_head = b->head;
1861 b_end = b->end;
1862 if (GET_CODE (b_head) == CODE_LABEL)
1863 {
1864 /* Detect basic blocks with nothing but a label. This can happen
1865 in particular at the end of a function. */
1866 if (b_head == b_end)
1867 b_empty = 1;
1868 b_head = flow_delete_insn (b_head);
1869 }
1870
1871 /* Delete the basic block note. */
1872 if (GET_CODE (b_head) == NOTE
1873 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
1874 {
1875 if (b_head == b_end)
1876 b_empty = 1;
1877 b_head = flow_delete_insn (b_head);
1878 }
1879
1880 /* If there was a jump out of A, delete it. */
1881 a_end = a->end;
1882 if (GET_CODE (a_end) == JUMP_INSN)
1883 {
1884 rtx prev;
1885
1886 prev = prev_nonnote_insn (a_end);
1887 if (!prev)
1888 prev = a->head;
1889
1890 #ifdef HAVE_cc0
1891 /* If this was a conditional jump, we need to also delete
1892 the insn that set cc0. */
1893
1894 if (prev && sets_cc0_p (prev))
1895 {
1896 rtx tmp = prev;
1897 prev = prev_nonnote_insn (prev);
1898 if (!prev)
1899 prev = a->head;
1900 flow_delete_insn (tmp);
1901 }
1902 #endif
1903
1904 /* Note that a->head != a->end, since we should have at least a
1905 bb note plus the jump, so prev != a_end. */
1906 flow_delete_insn (a_end);
1907 a_end = prev;
1908 }
1909
1910 /* By definition, there should only be one successor of A, and that is
1911 B. Free that edge struct. */
1912 free (a->succ);
1913
1914 /* Adjust the edges out of B for the new owner. */
1915 for (e = b->succ; e ; e = e->succ_next)
1916 e->src = a;
1917 a->succ = b->succ;
1918
1919 /* Reassociate the insns of B with A. */
1920 if (!b_empty)
1921 {
1922 BLOCK_FOR_INSN (b_head) = a;
1923 while (b_head != b_end)
1924 {
1925 b_head = NEXT_INSN (b_head);
1926 BLOCK_FOR_INSN (b_head) = a;
1927 }
1928 a_end = b_head;
1929 }
1930 a->end = a_end;
1931
1932 /* Compact the basic block array. */
1933 expunge_block (b);
1934 }
1935
1936 /* Attempt to merge basic blocks that are potentially non-adjacent.
1937 Return true iff the attempt succeeded. */
1938
1939 static int
1940 merge_blocks (e, b, c)
1941 edge e;
1942 basic_block b, c;
1943 {
1944 /* If B has a fallthru edge to C, no need to move anything. */
1945 if (!(e->flags & EDGE_FALLTHRU))
1946 {
1947 /* ??? From here on out we must make sure to not munge nesting
1948 of exception regions and lexical blocks. Need to think about
1949 these cases before this gets implemented. */
1950 return 0;
1951
1952 /* If C has an outgoing fallthru, and B does not have an incoming
1953 fallthru, move B before C. The latter clause is somewhat arbitrary,
1954 but avoids modifying blocks other than the two we've been given. */
1955
1956 /* Otherwise, move C after B. If C had a fallthru, which doesn't
1957 happen to be the physical successor to B, insert an unconditional
1958 branch. If C already ended with a conditional branch, the new
1959 jump must go in a new basic block D. */
1960 }
1961
1962 /* If a label still appears somewhere and we cannot delete the label,
1963 then we cannot merge the blocks. The edge was tidied already. */
1964 {
1965 rtx insn, stop = NEXT_INSN (c->head);
1966 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
1967 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
1968 return 0;
1969 }
1970
1971 merge_blocks_nomove (b, c);
1972 return 1;
1973 }
1974
1975 /* The given edge should potentially be a fallthru edge. If that is in
1976 fact true, delete the unconditional jump and barriers that are in
1977 the way. */
1978
1979 static void
1980 tidy_fallthru_edge (e, b, c)
1981 edge e;
1982 basic_block b, c;
1983 {
1984 rtx q;
1985
1986 /* ??? In a late-running flow pass, other folks may have deleted basic
1987 blocks by nopping out blocks, leaving multiple BARRIERs between here
1988 and the target label. They ought to be chastised and fixed.
1989
1990 We can also wind up with a sequence of undeletable labels between
1991 one block and the next.
1992
1993 So search through a sequence of barriers, labels, and notes for
1994 the head of block C and assert that we really do fall through. */
1995
1996 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
1997 return;
1998
1999 /* Remove what will soon cease being the jump insn from the source block.
2000 If block B consisted only of this single jump, turn it into a deleted
2001 note. */
2002 q = b->end;
2003 if (GET_CODE (q) == JUMP_INSN)
2004 {
2005 #ifdef HAVE_cc0
2006 /* If this was a conditional jump, we need to also delete
2007 the insn that set cc0. */
2008 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2009 q = PREV_INSN (q);
2010 #endif
2011
2012 if (b->head == q)
2013 {
2014 PUT_CODE (q, NOTE);
2015 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2016 NOTE_SOURCE_FILE (q) = 0;
2017 }
2018 else
2019 b->end = q = PREV_INSN (q);
2020 }
2021
2022 /* Selectively unlink the sequence. */
2023 if (q != PREV_INSN (c->head))
2024 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2025
2026 e->flags |= EDGE_FALLTHRU;
2027 }
2028
2029 /* Discover and record the loop depth at the head of each basic block. */
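/* The depth is derived from the nesting of NOTE_INSN_LOOP_BEG and
   NOTE_INSN_LOOP_END notes: code outside every loop has depth 1, and each
   enclosing loop adds one.  */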
2030
2031 static void
2032 calculate_loop_depth (insns)
2033 rtx insns;
2034 {
2035 basic_block bb;
2036 rtx insn;
2037 int i = 0, depth = 1;
2038
2039 bb = BASIC_BLOCK (i);
2040 for (insn = insns; insn ; insn = NEXT_INSN (insn))
2041 {
2042 if (insn == bb->head)
2043 {
2044 bb->loop_depth = depth;
2045 if (++i >= n_basic_blocks)
2046 break;
2047 bb = BASIC_BLOCK (i);
2048 }
2049
2050 if (GET_CODE (insn) == NOTE)
2051 {
2052 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2053 depth++;
2054 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
2055 depth--;
2056
2057 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error. */
2058 if (depth == 0)
2059 abort ();
2060 }
2061 }
2062 }
2063 \f
2064 /* Perform data flow analysis.
2065 F is the first insn of the function and NREGS the number of register numbers
2066 in use. */
2067
2068 void
2069 life_analysis (f, nregs, file, remove_dead_code)
2070 rtx f;
2071 int nregs;
2072 FILE *file;
2073 int remove_dead_code;
2074 {
2075 #ifdef ELIMINABLE_REGS
2076 register size_t i;
2077 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2078 #endif
2079
2080 /* Record which registers will be eliminated. We use this in
2081 mark_used_regs. */
2082
2083 CLEAR_HARD_REG_SET (elim_reg_set);
2084
2085 #ifdef ELIMINABLE_REGS
2086 for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
2087 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2088 #else
2089 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2090 #endif
2091
2092 /* Allocate a bitmap to be filled in by record_volatile_insns. */
2093 uid_volatile = BITMAP_ALLOCA ();
2094
2095 /* We want alias analysis information for local dead store elimination. */
2096 init_alias_analysis ();
2097
2098 life_analysis_1 (f, nregs, remove_dead_code);
2099
2100 if (! reload_completed)
2101 mark_constant_function ();
2102
2103 end_alias_analysis ();
2104
2105 if (file)
2106 dump_flow_info (file);
2107
2108 BITMAP_FREE (uid_volatile);
2109 free_basic_block_vars (1);
2110 }
2111
2112 /* Free the variables allocated by find_basic_blocks.
2113
2114 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2115
2116 void
2117 free_basic_block_vars (keep_head_end_p)
2118 int keep_head_end_p;
2119 {
2120 if (basic_block_for_insn)
2121 {
2122 VARRAY_FREE (basic_block_for_insn);
2123 basic_block_for_insn = NULL;
2124 }
2125
2126 if (! keep_head_end_p)
2127 {
2128 clear_edges ();
2129 VARRAY_FREE (basic_block_info);
2130 n_basic_blocks = 0;
2131
2132 ENTRY_BLOCK_PTR->aux = NULL;
2133 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2134 EXIT_BLOCK_PTR->aux = NULL;
2135 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2136 }
2137 }
2138
2139 /* Return nonzero if the destination of SET equals the source. */
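/* For example (register numbers here are arbitrary), both

     (set (reg:SI 3) (reg:SI 3))
     (set (subreg:SI (reg:DI 70) 1) (subreg:SI (reg:DI 70) 1))

   are no-ops: the destination and source name the same register, or the
   same word of the same register.  */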
2140 static int
2141 set_noop_p (set)
2142 rtx set;
2143 {
2144 rtx src = SET_SRC (set);
2145 rtx dst = SET_DEST (set);
2146 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2147 && REGNO (src) == REGNO (dst))
2148 return 1;
2149 if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
2150 || SUBREG_WORD (src) != SUBREG_WORD (dst))
2151 return 0;
2152 src = SUBREG_REG (src);
2153 dst = SUBREG_REG (dst);
2154 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2155 && REGNO (src) == REGNO (dst))
2156 return 1;
2157 return 0;
2158 }
2159
2160 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2161 value to itself. */
2162 static int
2163 noop_move_p (insn)
2164 rtx insn;
2165 {
2166 rtx pat = PATTERN (insn);
2167
2168 /* Insns carrying these notes are useful later on. */
2169 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2170 return 0;
2171
2172 if (GET_CODE (pat) == SET && set_noop_p (pat))
2173 return 1;
2174
2175 if (GET_CODE (pat) == PARALLEL)
2176 {
2177 int i;
2178 /* If nothing but SETs of registers to themselves,
2179 this insn can also be deleted. */
2180 for (i = 0; i < XVECLEN (pat, 0); i++)
2181 {
2182 rtx tem = XVECEXP (pat, 0, i);
2183
2184 if (GET_CODE (tem) == USE
2185 || GET_CODE (tem) == CLOBBER)
2186 continue;
2187
2188 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2189 return 0;
2190 }
2191
2192 return 1;
2193 }
2194 return 0;
2195 }
2196
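/* Callback for note_stores: if X, the thing being stored into, is the stack
   pointer, or a push/pop of it via a pre/post increment or decrement address,
   record that the stack pointer is not unchanging in this function.  */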
2197 static void
2198 notice_stack_pointer_modification (x, pat)
2199 rtx x;
2200 rtx pat ATTRIBUTE_UNUSED;
2201 {
2202 if (x == stack_pointer_rtx
2203 /* The stack pointer is only modified indirectly as the result
2204 of a push until later in flow. See the comments in rtl.texi
2205 regarding Embedded Side-Effects on Addresses. */
2206 || (GET_CODE (x) == MEM
2207 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2208 || GET_CODE (XEXP (x, 0)) == PRE_INC
2209 || GET_CODE (XEXP (x, 0)) == POST_DEC
2210 || GET_CODE (XEXP (x, 0)) == POST_INC)
2211 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2212 current_function_sp_is_unchanging = 0;
2213 }
2214
2215 /* Record which insns refer to any volatile memory
2216 or for any reason can't be deleted just because they are dead stores.
2217 Also, delete any insns that copy a register to itself.
2218 And see if the stack pointer is modified. */
2219 static void
2220 record_volatile_insns (f)
2221 rtx f;
2222 {
2223 rtx insn;
2224 for (insn = f; insn; insn = NEXT_INSN (insn))
2225 {
2226 enum rtx_code code1 = GET_CODE (insn);
2227 if (code1 == CALL_INSN)
2228 SET_INSN_VOLATILE (insn);
2229 else if (code1 == INSN || code1 == JUMP_INSN)
2230 {
2231 if (GET_CODE (PATTERN (insn)) != USE
2232 && volatile_refs_p (PATTERN (insn)))
2233 SET_INSN_VOLATILE (insn);
2234
2235 /* A SET that makes space on the stack cannot be dead.
2236 (Such SETs occur only for allocating variable-size data,
2237 so they will always have a PLUS or MINUS according to the
2238 direction of stack growth.)
2239 Even if this function never uses this stack pointer value,
2240 signal handlers do! */
2241 else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
2242 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2243 #ifdef STACK_GROWS_DOWNWARD
2244 && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
2245 #else
2246 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2247 #endif
2248 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
2249 SET_INSN_VOLATILE (insn);
2250
2251 /* Delete (in effect) any obvious no-op moves. */
2252 else if (noop_move_p (insn))
2253 {
2254 PUT_CODE (insn, NOTE);
2255 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2256 NOTE_SOURCE_FILE (insn) = 0;
2257 }
2258 }
2259
2260 /* Check if insn modifies the stack pointer. */
2261 if ( current_function_sp_is_unchanging
2262 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2263 note_stores (PATTERN (insn), notice_stack_pointer_modification);
2264 }
2265 }
2266
2267 /* Mark those regs which are needed at the end of the function as live
2268 at the end of the last basic block. */
2269 static void
2270 mark_regs_live_at_end (set)
2271 regset set;
2272 {
2273 int i;
2274
2275 /* If exiting needs the right stack value, consider the stack pointer
2276 live at the end of the function. */
2277 if (! EXIT_IGNORE_STACK
2278 || (! FRAME_POINTER_REQUIRED
2279 && ! current_function_calls_alloca
2280 && flag_omit_frame_pointer)
2281 || current_function_sp_is_unchanging)
2282 {
2283 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2284 }
2285
2286 /* Mark the frame pointer if needed at the end of the function. If
2287 we end up eliminating it, it will be removed from the live list
2288 of each basic block by reload. */
2289
2290 if (! reload_completed || frame_pointer_needed)
2291 {
2292 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2293 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2294 /* If they are different, also mark the hard frame pointer as live */
2295 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2296 #endif
2297 }
2298
2299 /* Mark all global registers, and all registers used by the epilogue
2300 as being live at the end of the function since they may be
2301 referenced by our caller. */
2302 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2303 if (global_regs[i]
2304 #ifdef EPILOGUE_USES
2305 || EPILOGUE_USES (i)
2306 #endif
2307 )
2308 SET_REGNO_REG_SET (set, i);
2309
2310 /* ??? Mark function return value here rather than as uses. */
2311 }
2312
2313 /* Determine which registers are live at the start of each
2314 basic block of the function whose first insn is F.
2315 NREGS is the number of registers used in F.
2316 We allocate the vector basic_block_live_at_start
2317 and the regsets that it points to, and fill them with the data.
2318 regset_size and regset_bytes are also set here. */
2319
2320 static void
2321 life_analysis_1 (f, nregs, remove_dead_code)
2322 rtx f;
2323 int nregs;
2324 int remove_dead_code;
2325 {
2326 int first_pass;
2327 int changed;
2328 register int i;
2329 char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
2330 regset *new_live_at_end;
2331
2332 struct obstack flow_obstack;
2333
2334 gcc_obstack_init (&flow_obstack);
2335
2336 max_regno = nregs;
2337
2338 /* Allocate and zero out many data structures
2339 that will record the data from lifetime analysis. */
2340
2341 allocate_reg_life_data ();
2342 allocate_bb_life_data ();
2343
2344 reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
2345 memset (reg_next_use, 0, nregs * sizeof (rtx));
2346
2347 /* Set up regset-vectors used internally within this function.
2348 Their meanings are documented above, with their declarations. */
2349
2350 new_live_at_end = (regset *) alloca ((n_basic_blocks + 1) * sizeof (regset));
2351 init_regset_vector (new_live_at_end, n_basic_blocks + 1, &flow_obstack);
2352
2353 /* Stick these vectors into the AUX field of the basic block, so that
2354 we don't have to keep going through the index. */
2355
2356 for (i = 0; i < n_basic_blocks; ++i)
2357 BASIC_BLOCK (i)->aux = new_live_at_end[i];
2358 ENTRY_BLOCK_PTR->aux = new_live_at_end[i];
2359
2360 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
2361 This will be cleared by record_volatile_insns if it encounters an insn
2362 which modifies the stack pointer. */
2363 current_function_sp_is_unchanging = !current_function_calls_alloca;
2364
2365 record_volatile_insns (f);
2366
2367 if (n_basic_blocks > 0)
2368 {
2369 regset theend;
2370 register edge e;
2371
2372 theend = EXIT_BLOCK_PTR->global_live_at_start;
2373 mark_regs_live_at_end (theend);
2374
2375 /* Propagate this exit data to each of EXIT's predecessors. */
2376 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
2377 {
2378 COPY_REG_SET (e->src->global_live_at_end, theend);
2379 COPY_REG_SET ((regset) e->src->aux, theend);
2380 }
2381 }
2382
2383 /* The post-reload life analysis must have (on a global basis) the same
2384 registers live as were computed by reload itself.
2385
2386 Otherwise elimination offsets and such may be incorrect.
2387
2388 Reload will mark some registers as live even though they do not appear
2389 in the rtl. */
2390 if (reload_completed)
2391 memcpy (save_regs_ever_live, regs_ever_live, sizeof (regs_ever_live));
2392 memset (regs_ever_live, 0, sizeof regs_ever_live);
2393
2394 /* Propagate life info through the basic blocks
2395 around the graph of basic blocks.
2396
2397 This is a relaxation process: each time a new register
2398 is live at the end of the basic block, we must scan the block
2399 to determine which registers are, as a consequence, live at the beginning
2400 of that block. These registers must then be marked live at the ends
2401 of all the blocks that can transfer control to that block.
2402 The process continues until it reaches a fixed point. */
2403
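  /* Blocks are visited from the highest index down; since liveness flows
     backward (a block's live-at-start feeds its predecessors' live-at-end),
     this order usually converges in few passes.  */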
2404 first_pass = 1;
2405 changed = 1;
2406 while (changed)
2407 {
2408 changed = 0;
2409 for (i = n_basic_blocks - 1; i >= 0; i--)
2410 {
2411 basic_block bb = BASIC_BLOCK (i);
2412 int consider = first_pass;
2413 int must_rescan = first_pass;
2414 register int j;
2415
2416 if (!first_pass)
2417 {
2418 /* Set CONSIDER if this block needs thinking about at all
2419 (that is, if the regs live now at the end of it
2420 are not the same as were live at the end of it when
2421 we last thought about it).
2422 Set must_rescan if it needs to be thought about
2423 instruction by instruction (that is, if any additional
2424 reg that is live at the end now but was not live there before
2425 is one of the significant regs of this basic block). */
2426
2427 EXECUTE_IF_AND_COMPL_IN_REG_SET
2428 ((regset) bb->aux, bb->global_live_at_end, 0, j,
2429 {
2430 consider = 1;
2431 if (REGNO_REG_SET_P (bb->local_set, j))
2432 {
2433 must_rescan = 1;
2434 goto done;
2435 }
2436 });
2437 done:
2438 if (! consider)
2439 continue;
2440 }
2441
2442 /* The live_at_start of this block may be changing,
2443 so another pass will be required after this one. */
2444 changed = 1;
2445
2446 if (! must_rescan)
2447 {
2448 /* No complete rescan needed;
2449 just record those variables newly known live at end
2450 as live at start as well. */
2451 IOR_AND_COMPL_REG_SET (bb->global_live_at_start,
2452 (regset) bb->aux,
2453 bb->global_live_at_end);
2454
2455 IOR_AND_COMPL_REG_SET (bb->global_live_at_end,
2456 (regset) bb->aux,
2457 bb->global_live_at_end);
2458 }
2459 else
2460 {
2461 /* Update the basic_block_live_at_start
2462 by propagation backwards through the block. */
2463 COPY_REG_SET (bb->global_live_at_end, (regset) bb->aux);
2464 COPY_REG_SET (bb->global_live_at_start,
2465 bb->global_live_at_end);
2466 propagate_block (bb->global_live_at_start,
2467 bb->head, bb->end, 0,
2468 first_pass ? bb->local_set : (regset) 0,
2469 i, remove_dead_code);
2470 }
2471
2472 /* Update the new_live_at_end's of the block's predecessors. */
2473 {
2474 register edge e;
2475
2476 for (e = bb->pred; e ; e = e->pred_next)
2477 IOR_REG_SET ((regset) e->src->aux, bb->global_live_at_start);
2478 }
2479
2480 #ifdef USE_C_ALLOCA
2481 alloca (0);
2482 #endif
2483 }
2484 first_pass = 0;
2485 }
2486
2487 /* The only pseudos that are live at the beginning of the function are
2488 those that were not set anywhere in the function. local-alloc doesn't
2489 know how to handle these correctly, so mark them as not local to any
2490 one basic block. */
2491
2492 if (n_basic_blocks > 0)
2493 EXECUTE_IF_SET_IN_REG_SET (BASIC_BLOCK (0)->global_live_at_start,
2494 FIRST_PSEUDO_REGISTER, i,
2495 {
2496 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
2497 });
2498
2499 /* Now the life information is accurate. Make one more pass over each
2500 basic block to delete dead stores, create autoincrement addressing
2501 and record how many times each register is used, is set, or dies. */
2502
2503 for (i = 0; i < n_basic_blocks; i++)
2504 {
2505 basic_block bb = BASIC_BLOCK (i);
2506
2507 /* We start with global_live_at_end to determine which stores are
2508 dead. This process is destructive, and we wish to preserve the
2509 contents of global_live_at_end for posterity. Fortunately,
2510 new_live_at_end, due to the way we converged on a solution,
2511 contains a duplicate of global_live_at_end that we can kill. */
2512 propagate_block ((regset) bb->aux, bb->head, bb->end, 1, (regset) 0, i, remove_dead_code);
2513
2514 #ifdef USE_C_ALLOCA
2515 alloca (0);
2516 #endif
2517 }
2518
2519 /* We have a problem with any pseudoreg that lives across the setjmp.
2520 ANSI says that if a user variable does not change in value between
2521 the setjmp and the longjmp, then the longjmp preserves it. This
2522 includes longjmp from a place where the pseudo appears dead.
2523 (In principle, the value still exists if it is in scope.)
2524 If the pseudo goes in a hard reg, some other value may occupy
2525 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2526 Conclusion: such a pseudo must not go in a hard reg. */
2527 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2528 FIRST_PSEUDO_REGISTER, i,
2529 {
2530 if (regno_reg_rtx[i] != 0)
2531 {
2532 REG_LIVE_LENGTH (i) = -1;
2533 REG_BASIC_BLOCK (i) = -1;
2534 }
2535 });
2536
2537 /* Restore regs_ever_live that was provided by reload. */
2538 if (reload_completed)
2539 memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));
2540
2541 free_regset_vector (new_live_at_end, n_basic_blocks);
2542 obstack_free (&flow_obstack, NULL_PTR);
2543
2544 for (i = 0; i < n_basic_blocks; ++i)
2545 BASIC_BLOCK (i)->aux = NULL;
2546 ENTRY_BLOCK_PTR->aux = NULL;
2547 }
2548 \f
2549 /* Subroutines of life analysis. */
2550
2551 /* Allocate the permanent data structures that represent the results
2552 of life analysis. Not static since used also for stupid life analysis. */
2553
2554 void
2555 allocate_bb_life_data ()
2556 {
2557 register int i;
2558
2559 for (i = 0; i < n_basic_blocks; i++)
2560 {
2561 basic_block bb = BASIC_BLOCK (i);
2562
2563 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
2564 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
2565 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
2566 }
2567
2568 ENTRY_BLOCK_PTR->global_live_at_end
2569 = OBSTACK_ALLOC_REG_SET (function_obstack);
2570 EXIT_BLOCK_PTR->global_live_at_start
2571 = OBSTACK_ALLOC_REG_SET (function_obstack);
2572
2573 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
2574 }
2575
2576 void
2577 allocate_reg_life_data ()
2578 {
2579 int i;
2580
2581 /* Recalculate the register space, in case it has grown. Old style
2582 vector oriented regsets would set regset_{size,bytes} here also. */
2583 allocate_reg_info (max_regno, FALSE, FALSE);
2584
2585 /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
2586 information, explicitly reset it here. The allocation should have
2587 already happened on the previous reg_scan pass. Make sure in case
2588 some more registers were allocated. */
2589 for (i = 0; i < max_regno; i++)
2590 REG_N_SETS (i) = 0;
2591 }
2592
2593 /* Make each element of VECTOR point at a regset. The vector has
2594 NELTS elements, and space is allocated from the ALLOC_OBSTACK
2595 obstack. */
2596
2597 static void
2598 init_regset_vector (vector, nelts, alloc_obstack)
2599 regset *vector;
2600 int nelts;
2601 struct obstack *alloc_obstack;
2602 {
2603 register int i;
2604
2605 for (i = 0; i < nelts; i++)
2606 {
2607 vector[i] = OBSTACK_ALLOC_REG_SET (alloc_obstack);
2608 CLEAR_REG_SET (vector[i]);
2609 }
2610 }
2611
2612 /* Release any additional space allocated for each element of VECTOR,
2613 other than the regset header itself. The vector has NELTS elements. */
2614
2615 void
2616 free_regset_vector (vector, nelts)
2617 regset *vector;
2618 int nelts;
2619 {
2620 register int i;
2621
2622 for (i = 0; i < nelts; i++)
2623 FREE_REG_SET (vector[i]);
2624 }
2625
2626 /* Compute the registers live at the beginning of a basic block
2627 from those live at the end.
2628
2629 When called, OLD contains those live at the end.
2630 On return, it contains those live at the beginning.
2631 FIRST and LAST are the first and last insns of the basic block.
2632
2633 FINAL is nonzero if we are doing the final pass which is not
2634 for computing the life info (since that has already been done)
2635 but for acting on it. On this pass, we delete dead stores,
2636 set up the logical links and dead-variables lists of instructions,
2637 and merge instructions for autoincrement and autodecrement addresses.
2638
2639 SIGNIFICANT is nonzero only the first time for each basic block.
2640 If it is nonzero, it points to a regset in which we store
2641 a 1 for each register that is set within the block.
2642
2643 BNUM is the number of the basic block. */
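/* REMOVE_DEAD_CODE is nonzero if dead insns may be detected here and, on the
   final pass, deleted (by turning them into NOTE_INSN_DELETED notes).  */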
2644
2645 static void
2646 propagate_block (old, first, last, final, significant, bnum, remove_dead_code)
2647 register regset old;
2648 rtx first;
2649 rtx last;
2650 int final;
2651 regset significant;
2652 int bnum;
2653 int remove_dead_code;
2654 {
2655 register rtx insn;
2656 rtx prev;
2657 regset live;
2658 regset dead;
2659
2660 /* Find the loop depth for this block. Ignore loop level changes in the
2661 middle of the basic block -- for register allocation purposes, the
2662 important uses will be in the blocks wholly contained within the loop
2663 not in the loop pre-header or post-trailer. */
2664 loop_depth = BASIC_BLOCK (bnum)->loop_depth;
2665
2666 dead = ALLOCA_REG_SET ();
2667 live = ALLOCA_REG_SET ();
2668
2669 cc0_live = 0;
2670 mem_set_list = NULL_RTX;
2671
2672 if (final)
2673 {
2674 register int i;
2675
2676 /* Process the regs live at the end of the block.
2677 Mark them as not local to any one basic block. */
2678 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2679 {
2680 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
2681 });
2682 }
2683
2684 /* Scan the block an insn at a time from end to beginning. */
2685
2686 for (insn = last; ; insn = prev)
2687 {
2688 prev = PREV_INSN (insn);
2689
2690 if (GET_CODE (insn) == NOTE)
2691 {
2692 /* If this is a call to `setjmp' et al,
2693 warn if any non-volatile datum is live. */
2694
2695 if (final && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
2696 IOR_REG_SET (regs_live_at_setjmp, old);
2697 }
2698
2699 /* Update the life-status of regs for this insn.
2700 First DEAD gets which regs are set in this insn
2701 then LIVE gets which regs are used in this insn.
2702 Then the regs live before the insn
2703 are those live after, with DEAD regs turned off,
2704 and then LIVE regs turned on. */
2705
2706 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2707 {
2708 register int i;
2709 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
2710 int insn_is_dead = 0;
2711 int libcall_is_dead = 0;
2712
2713 if (remove_dead_code)
2714 {
2715 insn_is_dead = (insn_dead_p (PATTERN (insn), old, 0, REG_NOTES (insn))
2716 /* Don't delete something that refers to volatile storage! */
2717 && ! INSN_VOLATILE (insn));
2718 libcall_is_dead = (insn_is_dead && note != 0
2719 && libcall_dead_p (PATTERN (insn), old, note, insn));
2720 }
2721
2722 /* If an instruction consists of just dead store(s) on final pass,
2723 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
2724 We could really delete it with delete_insn, but that
2725 can cause trouble for first or last insn in a basic block. */
2726 if (final && insn_is_dead)
2727 {
2728 PUT_CODE (insn, NOTE);
2729 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2730 NOTE_SOURCE_FILE (insn) = 0;
2731
2732 /* CC0 is now known to be dead. Either this insn used it,
2733 in which case it doesn't anymore, or clobbered it,
2734 so the next insn can't use it. */
2735 cc0_live = 0;
2736
2737 /* If this insn is copying the return value from a library call,
2738 delete the entire library call. */
2739 if (libcall_is_dead)
2740 {
2741 rtx first = XEXP (note, 0);
2742 rtx p = insn;
2743 while (INSN_DELETED_P (first))
2744 first = NEXT_INSN (first);
2745 while (p != first)
2746 {
2747 p = PREV_INSN (p);
2748 PUT_CODE (p, NOTE);
2749 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
2750 NOTE_SOURCE_FILE (p) = 0;
2751 }
2752 }
2753 goto flushed;
2754 }
2755
2756 CLEAR_REG_SET (dead);
2757 CLEAR_REG_SET (live);
2758
2759 /* See if this is an increment or decrement that can be
2760 merged into a following memory address. */
2761 #ifdef AUTO_INC_DEC
2762 {
2763 register rtx x = single_set (insn);
2764
2765 /* Does this instruction increment or decrement a register? */
2766 if (!reload_completed
2767 && final && x != 0
2768 && GET_CODE (SET_DEST (x)) == REG
2769 && (GET_CODE (SET_SRC (x)) == PLUS
2770 || GET_CODE (SET_SRC (x)) == MINUS)
2771 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
2772 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2773 /* Ok, look for a following memory ref we can combine with.
2774 If one is found, change the memory ref to a PRE_INC
2775 or PRE_DEC, cancel this insn, and return 1.
2776 Return 0 if nothing has been done. */
2777 && try_pre_increment_1 (insn))
2778 goto flushed;
2779 }
2780 #endif /* AUTO_INC_DEC */
2781
2782 /* If this is not the final pass, and this insn is copying the
2783 value of a library call and it's dead, don't scan the
2784 insns that perform the library call, so that the call's
2785 arguments are not marked live. */
2786 if (libcall_is_dead)
2787 {
2788 /* Mark the dest reg as `significant'. */
2789 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX, significant);
2790
2791 insn = XEXP (note, 0);
2792 prev = PREV_INSN (insn);
2793 }
2794 else if (GET_CODE (PATTERN (insn)) == SET
2795 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2796 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2797 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
2798 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
2799 /* We have an insn to pop a constant amount off the stack.
2800 (Such insns use PLUS regardless of the direction of the stack,
2801 and any insn to adjust the stack by a constant is always a pop.)
2802 These insns, if not dead stores, have no effect on life. */
2803 ;
2804 else
2805 {
2806 /* Any regs live at the time of a call instruction
2807 must not go in a register clobbered by calls.
2808 Find all regs now live and record this for them. */
2809
2810 if (GET_CODE (insn) == CALL_INSN && final)
2811 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2812 {
2813 REG_N_CALLS_CROSSED (i)++;
2814 });
2815
2816 /* LIVE gets the regs used in INSN;
2817 DEAD gets those set by it. Dead insns don't make anything
2818 live. */
2819
2820 mark_set_regs (old, dead, PATTERN (insn),
2821 final ? insn : NULL_RTX, significant);
2822
2823 /* If an insn doesn't use CC0, it becomes dead since we
2824 assume that every insn clobbers it. So show it dead here;
2825 mark_used_regs will set it live if it is referenced. */
2826 cc0_live = 0;
2827
2828 if (! insn_is_dead)
2829 mark_used_regs (old, live, PATTERN (insn), final, insn);
2830
2831 /* Sometimes we may have inserted something before INSN (such as
2832 a move) when we make an auto-inc. So ensure we will scan
2833 those insns. */
2834 #ifdef AUTO_INC_DEC
2835 prev = PREV_INSN (insn);
2836 #endif
2837
2838 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
2839 {
2840 register int i;
2841
2842 rtx note;
2843
2844 for (note = CALL_INSN_FUNCTION_USAGE (insn);
2845 note;
2846 note = XEXP (note, 1))
2847 if (GET_CODE (XEXP (note, 0)) == USE)
2848 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
2849 final, insn);
2850
2851 /* Each call clobbers all call-clobbered regs that are not
2852 global or fixed. Note that the function-value reg is a
2853 call-clobbered reg, and mark_set_regs has already had
2854 a chance to handle it. */
2855
2856 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2857 if (call_used_regs[i] && ! global_regs[i]
2858 && ! fixed_regs[i])
2859 SET_REGNO_REG_SET (dead, i);
2860
2861 /* The stack ptr is used (honorarily) by a CALL insn. */
2862 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
2863
2864 /* Calls may also reference any of the global registers,
2865 so they are made live. */
2866 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2867 if (global_regs[i])
2868 mark_used_regs (old, live,
2869 gen_rtx_REG (reg_raw_mode[i], i),
2870 final, insn);
2871
2872 /* Calls also clobber memory. */
2873 mem_set_list = NULL_RTX;
2874 }
2875
2876 /* Update OLD for the registers used or set. */
2877 AND_COMPL_REG_SET (old, dead);
2878 IOR_REG_SET (old, live);
2879
2880 }
2881
2882 /* On final pass, update counts of how many insns each reg is live
2883 at. */
2884 if (final)
2885 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
2886 { REG_LIVE_LENGTH (i)++; });
2887 }
2888 flushed: ;
2889 if (insn == first)
2890 break;
2891 }
2892
2893 FREE_REG_SET (dead);
2894 FREE_REG_SET (live);
2895 }
2896 \f
2897 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
2898 (SET expressions whose destinations are registers dead after the insn).
2899 NEEDED is the regset that says which regs are alive after the insn.
2900
2901 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
2902
2903 If X is the entire body of an insn, NOTES contains the reg notes
2904 pertaining to the insn. */
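/* For example (pseudo-register numbers are arbitrary),

     (set (reg:SI 120) (plus:SI (reg:SI 121) (const_int 1)))

   is dead when pseudo 120 is not marked in NEEDED, since nothing after the
   insn reads the value it computes.  */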
2905
2906 static int
2907 insn_dead_p (x, needed, call_ok, notes)
2908 rtx x;
2909 regset needed;
2910 int call_ok;
2911 rtx notes ATTRIBUTE_UNUSED;
2912 {
2913 enum rtx_code code = GET_CODE (x);
2914
2915 #ifdef AUTO_INC_DEC
2916 /* If flow is invoked after reload, we must take existing AUTO_INC
2917 expressions into account. */
2918 if (reload_completed)
2919 {
2920 for ( ; notes; notes = XEXP (notes, 1))
2921 {
2922 if (REG_NOTE_KIND (notes) == REG_INC)
2923 {
2924 int regno = REGNO (XEXP (notes, 0));
2925
2926 /* Don't delete insns to set global regs. */
2927 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2928 || REGNO_REG_SET_P (needed, regno))
2929 return 0;
2930 }
2931 }
2932 }
2933 #endif
2934
2935 /* If setting something that's a reg or part of one,
2936 see if that register's altered value will be live. */
2937
2938 if (code == SET)
2939 {
2940 rtx r = SET_DEST (x);
2941
2942 /* A SET that is a subroutine call cannot be dead. */
2943 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
2944 return 0;
2945
2946 #ifdef HAVE_cc0
2947 if (GET_CODE (r) == CC0)
2948 return ! cc0_live;
2949 #endif
2950
2951 if (GET_CODE (r) == MEM && ! MEM_VOLATILE_P (r))
2952 {
2953 rtx temp;
2954 /* Walk the set of memory locations we are currently tracking
2955 and see if one is an identical match to this memory location.
2956 If so, this memory write is dead (remember, we're walking
2957 backwards from the end of the block to the start). */
2958 temp = mem_set_list;
2959 while (temp)
2960 {
2961 if (rtx_equal_p (XEXP (temp, 0), r))
2962 return 1;
2963 temp = XEXP (temp, 1);
2964 }
2965 }
2966
2967 while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
2968 || GET_CODE (r) == ZERO_EXTRACT)
2969 r = SUBREG_REG (r);
2970
2971 if (GET_CODE (r) == REG)
2972 {
2973 int regno = REGNO (r);
2974
2975 /* Don't delete insns to set global regs. */
2976 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2977 /* Make sure insns to set frame pointer aren't deleted. */
2978 || (regno == FRAME_POINTER_REGNUM
2979 && (! reload_completed || frame_pointer_needed))
2980 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2981 || (regno == HARD_FRAME_POINTER_REGNUM
2982 && (! reload_completed || frame_pointer_needed))
2983 #endif
2984 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2985 /* Make sure insns to set arg pointer are never deleted
2986 (if the arg pointer isn't fixed, there will be a USE for
2987 it, so we can treat it normally). */
2988 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2989 #endif
2990 || REGNO_REG_SET_P (needed, regno))
2991 return 0;
2992
2993 /* If this is a hard register, verify that subsequent words are
2994 not needed. */
2995 if (regno < FIRST_PSEUDO_REGISTER)
2996 {
2997 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
2998
2999 while (--n > 0)
3000 if (REGNO_REG_SET_P (needed, regno+n))
3001 return 0;
3002 }
3003
3004 return 1;
3005 }
3006 }
3007
3008 /* If performing several activities,
3009 insn is dead if each activity is individually dead.
3010 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
3011 that's inside a PARALLEL doesn't make the insn worth keeping. */
3012 else if (code == PARALLEL)
3013 {
3014 int i = XVECLEN (x, 0);
3015
3016 for (i--; i >= 0; i--)
3017 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3018 && GET_CODE (XVECEXP (x, 0, i)) != USE
3019 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3020 return 0;
3021
3022 return 1;
3023 }
3024
3025 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3026 is not necessarily true for hard registers. */
3027 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3028 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3029 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3030 return 1;
3031
3032 /* We do not check other CLOBBER or USE here. An insn consisting of just
3033 a CLOBBER or just a USE should not be deleted. */
3034 return 0;
3035 }
3036
3037 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3038 return 1 if the entire library call is dead.
3039 This is true if X copies a register (hard or pseudo)
3040 and if the hard return reg of the call insn is dead.
3041 (The caller should have tested the destination of X already for death.)
3042
3043 If this insn doesn't just copy a register, then we don't
3044 have an ordinary libcall. In that case, cse could not have
3045 managed to substitute the source for the dest later on,
3046 so we can assume the libcall is dead.
3047
3048 NEEDED is the bit vector of pseudoregs live before this insn.
3049 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3050
3051 static int
3052 libcall_dead_p (x, needed, note, insn)
3053 rtx x;
3054 regset needed;
3055 rtx note;
3056 rtx insn;
3057 {
3058 register RTX_CODE code = GET_CODE (x);
3059
3060 if (code == SET)
3061 {
3062 register rtx r = SET_SRC (x);
3063 if (GET_CODE (r) == REG)
3064 {
3065 rtx call = XEXP (note, 0);
3066 rtx call_pat;
3067 register int i;
3068
3069 /* Find the call insn. */
3070 while (call != insn && GET_CODE (call) != CALL_INSN)
3071 call = NEXT_INSN (call);
3072
3073 /* If there is none, do nothing special,
3074 since ordinary death handling can understand these insns. */
3075 if (call == insn)
3076 return 0;
3077
3078 /* See if the hard reg holding the value is dead.
3079 If this is a PARALLEL, find the call within it. */
3080 call_pat = PATTERN (call);
3081 if (GET_CODE (call_pat) == PARALLEL)
3082 {
3083 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3084 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3085 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3086 break;
3087
3088 /* This may be a library call that is returning a value
3089 via an invisible pointer. Do nothing special, since
3090 ordinary death handling can understand these insns. */
3091 if (i < 0)
3092 return 0;
3093
3094 call_pat = XVECEXP (call_pat, 0, i);
3095 }
3096
3097 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3098 }
3099 }
3100 return 1;
3101 }
3102
3103 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3104 live at function entry. Don't count global register variables, variables
3105 in registers that can be used for function arg passing, or variables in
3106 fixed hard registers. */
3107
3108 int
3109 regno_uninitialized (regno)
3110 int regno;
3111 {
3112 if (n_basic_blocks == 0
3113 || (regno < FIRST_PSEUDO_REGISTER
3114 && (global_regs[regno]
3115 || fixed_regs[regno]
3116 || FUNCTION_ARG_REGNO_P (regno))))
3117 return 0;
3118
3119 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3120 }
3121
3122 /* 1 if register REGNO was alive at a place where `setjmp' was called
3123 and was set more than once or is an argument.
3124 Such regs may be clobbered by `longjmp'. */
3125
3126 int
3127 regno_clobbered_at_setjmp (regno)
3128 int regno;
3129 {
3130 if (n_basic_blocks == 0)
3131 return 0;
3132
3133 return ((REG_N_SETS (regno) > 1
3134 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3135 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3136 }
3137 \f
3138 /* INSN references memory, possibly using autoincrement addressing modes.
3139 Find any entries on the mem_set_list that need to be invalidated due
3140 to an address change. */
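/* For instance, if INSN carries a (REG_INC (reg 100)) note, any entry on
   mem_set_list whose address mentions reg 100 no longer describes the same
   location after the increment, so it is spliced out below.  */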
3141 static void
3142 invalidate_mems_from_autoinc (insn)
3143 rtx insn;
3144 {
3145 rtx note = REG_NOTES (insn);
3146 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3147 {
3148 if (REG_NOTE_KIND (note) == REG_INC)
3149 {
3150 rtx temp = mem_set_list;
3151 rtx prev = NULL_RTX;
3152
3153 while (temp)
3154 {
3155 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3156 {
3157 /* Splice temp out of list. */
3158 if (prev)
3159 XEXP (prev, 1) = XEXP (temp, 1);
3160 else
3161 mem_set_list = XEXP (temp, 1);
3162 }
3163 else
3164 prev = temp;
3165 temp = XEXP (temp, 1);
3166 }
3167 }
3168 }
3169 }
3170
3171 /* Process the registers that are set within X.
3172 Their bits are set to 1 in the regset DEAD,
3173 because they are dead prior to this insn.
3174
3175 If INSN is nonzero, it is the insn being processed
3176 and the fact that it is nonzero implies this is the FINAL pass
3177 in propagate_block. In this case, various info about register
3178 usage is stored, LOG_LINKS fields of insns are set up. */
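/* NEEDED is the set of registers live after the insn.  SIGNIFICANT, if
   nonzero, is a regset in which to record each register set here; it is
   passed only on the first pass over a block (see propagate_block).  */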
3179
3180 static void
3181 mark_set_regs (needed, dead, x, insn, significant)
3182 regset needed;
3183 regset dead;
3184 rtx x;
3185 rtx insn;
3186 regset significant;
3187 {
3188 register RTX_CODE code = GET_CODE (x);
3189
3190 if (code == SET || code == CLOBBER)
3191 mark_set_1 (needed, dead, x, insn, significant);
3192 else if (code == PARALLEL)
3193 {
3194 register int i;
3195 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3196 {
3197 code = GET_CODE (XVECEXP (x, 0, i));
3198 if (code == SET || code == CLOBBER)
3199 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn, significant);
3200 }
3201 }
3202 }
3203
3204 /* Process a single SET rtx, X. */
3205
3206 static void
3207 mark_set_1 (needed, dead, x, insn, significant)
3208 regset needed;
3209 regset dead;
3210 rtx x;
3211 rtx insn;
3212 regset significant;
3213 {
3214 register int regno;
3215 register rtx reg = SET_DEST (x);
3216
3217 /* Some targets place small structures in registers for
3218 return values of functions. We have to detect this
3219 case specially here to get correct flow information. */
3220 if (GET_CODE (reg) == PARALLEL
3221 && GET_MODE (reg) == BLKmode)
3222 {
3223 register int i;
3224
3225 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3226 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn, significant);
3227 return;
3228 }
3229
3230 /* Modifying just one hardware register of a multi-reg value
3231 or just a byte field of a register
3232 does not mean the value from before this insn is now dead.
3233 But it does mean liveness of that register at the end of the block
3234 is significant.
3235
3236 Within mark_set_1, however, we treat it as if the register is
3237 indeed modified. mark_used_regs will, however, also treat this
3238 register as being used. Thus, we treat these insns as setting a
3239 new value for the register as a function of its old value. This
3240 causes LOG_LINKS to be made appropriately, and this will help combine. */
3241
3242 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3243 || GET_CODE (reg) == SIGN_EXTRACT
3244 || GET_CODE (reg) == STRICT_LOW_PART)
3245 reg = XEXP (reg, 0);
3246
3247 /* If this set is a MEM, then it kills any aliased writes.
3248 If this set is a REG, then it kills any MEMs which use the reg. */
3249 if (GET_CODE (reg) == MEM
3250 || GET_CODE (reg) == REG)
3251 {
3252 rtx temp = mem_set_list;
3253 rtx prev = NULL_RTX;
3254
3255 while (temp)
3256 {
3257 if ((GET_CODE (reg) == MEM
3258 && output_dependence (XEXP (temp, 0), reg))
3259 || (GET_CODE (reg) == REG
3260 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3261 {
3262 /* Splice this entry out of the list. */
3263 if (prev)
3264 XEXP (prev, 1) = XEXP (temp, 1);
3265 else
3266 mem_set_list = XEXP (temp, 1);
3267 }
3268 else
3269 prev = temp;
3270 temp = XEXP (temp, 1);
3271 }
3272 }
3273
3274 /* If the memory reference had embedded side effects (autoincrement
3275 address modes), then we may need to kill some entries on the
3276 memory set list. */
3277 if (insn && GET_CODE (reg) == MEM)
3278 invalidate_mems_from_autoinc (insn);
3279
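      /* Remember this store on mem_set_list so that insn_dead_p can later
	 recognize an earlier, identical store to the same location as dead.  */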
3280 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3281 /* We do not know the size of a BLKmode store, so we do not track
3282 them for redundant store elimination. */
3283 && GET_MODE (reg) != BLKmode
3284 /* There are no REG_INC notes for SP, so we can't assume we'll see
3285 everything that invalidates it. To be safe, don't eliminate any
3286 stores through SP; none of them should be redundant anyway. */
3287 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3288 mem_set_list = gen_rtx_EXPR_LIST (VOIDmode, reg, mem_set_list);
3289
3290 if (GET_CODE (reg) == REG
3291 && (regno = REGNO (reg), ! (regno == FRAME_POINTER_REGNUM
3292 && (! reload_completed || frame_pointer_needed)))
3293 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3294 && ! (regno == HARD_FRAME_POINTER_REGNUM
3295 && (! reload_completed || frame_pointer_needed))
3296 #endif
3297 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3298 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3299 #endif
3300 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3301 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3302 {
3303 int some_needed = REGNO_REG_SET_P (needed, regno);
3304 int some_not_needed = ! some_needed;
3305
3306 /* Mark it as a significant register for this basic block. */
3307 if (significant)
3308 SET_REGNO_REG_SET (significant, regno);
3309
3310 /* Mark it as dead before this insn. */
3311 SET_REGNO_REG_SET (dead, regno);
3312
3313 /* A hard reg in a wide mode may really be multiple registers.
3314 If so, mark all of them just like the first. */
3315 if (regno < FIRST_PSEUDO_REGISTER)
3316 {
3317 int n;
3318
3319 /* Nothing below is needed for the stack pointer; get out asap.
3320 Eg, log links aren't needed, since combine won't use them. */
3321 if (regno == STACK_POINTER_REGNUM)
3322 return;
3323
3324 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3325 while (--n > 0)
3326 {
3327 int regno_n = regno + n;
3328 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3329 if (significant)
3330 SET_REGNO_REG_SET (significant, regno_n);
3331
3332 SET_REGNO_REG_SET (dead, regno_n);
3333 some_needed |= needed_regno;
3334 some_not_needed |= ! needed_regno;
3335 }
3336 }
3337 /* Additional data to record if this is the final pass. */
3338 if (insn)
3339 {
3340 register rtx y = reg_next_use[regno];
3341 register int blocknum = BLOCK_NUM (insn);
3342
3343 /* If this is a hard reg, record this function uses the reg. */
3344
3345 if (regno < FIRST_PSEUDO_REGISTER)
3346 {
3347 register int i;
3348 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
3349
3350 for (i = regno; i < endregno; i++)
3351 {
3352 /* The next use is no longer "next", since a store
3353 intervenes. */
3354 reg_next_use[i] = 0;
3355
3356 regs_ever_live[i] = 1;
3357 REG_N_SETS (i)++;
3358 }
3359 }
3360 else
3361 {
3362 /* The next use is no longer "next", since a store
3363 intervenes. */
3364 reg_next_use[regno] = 0;
3365
3366 /* Keep track of which basic blocks each reg appears in. */
3367
3368 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
3369 REG_BASIC_BLOCK (regno) = blocknum;
3370 else if (REG_BASIC_BLOCK (regno) != blocknum)
3371 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3372
3373 /* Count (weighted) references, stores, etc. This counts a
3374 register twice if it is modified, but that is correct. */
3375 REG_N_SETS (regno)++;
3376
3377 REG_N_REFS (regno) += loop_depth;
3378
3379 /* The insns where a reg is live are normally counted
3380 elsewhere, but we want the count to include the insn
3381 where the reg is set, and the normal counting mechanism
3382 would not count it. */
3383 REG_LIVE_LENGTH (regno)++;
3384 }
3385
3386 if (! some_not_needed)
3387 {
3388 /* Make a logical link from the next following insn
3389 that uses this register, back to this insn.
3390 The following insns have already been processed.
3391
3392 We don't build a LOG_LINK for hard registers contained
3393 in ASM_OPERANDs. If these registers get replaced,
3394 we might wind up changing the semantics of the insn,
3395 even if reload can make what appear to be valid assignments
3396 later. */
3397 if (y && (BLOCK_NUM (y) == blocknum)
3398 && (regno >= FIRST_PSEUDO_REGISTER
3399 || asm_noperands (PATTERN (y)) < 0))
3400 LOG_LINKS (y)
3401 = gen_rtx_INSN_LIST (VOIDmode, insn, LOG_LINKS (y));
3402 }
3403 else if (! some_needed)
3404 {
3405 /* Note that dead stores have already been deleted when possible.
3406 If we get here, we have found a dead store that cannot
3407 be eliminated (because the same insn does something useful).
3408 Indicate this by marking the reg being set as dying here. */
3409 REG_NOTES (insn)
3410 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3411 REG_N_DEATHS (REGNO (reg))++;
3412 }
3413 else
3414 {
3415 /* This is a case where we have a multi-word hard register
3416 and some, but not all, of the words of the register are
3417 needed in subsequent insns. Write REG_UNUSED notes
3418 for those parts that were not needed. This case should
3419 be rare. */
3420
3421 int i;
3422
3423 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
3424 i >= 0; i--)
3425 if (!REGNO_REG_SET_P (needed, regno + i))
3426 REG_NOTES (insn)
3427 = gen_rtx_EXPR_LIST (REG_UNUSED,
3428 gen_rtx_REG (reg_raw_mode[regno + i],
3429 regno + i),
3430 REG_NOTES (insn));
3431 }
3432 }
3433 }
3434 else if (GET_CODE (reg) == REG)
3435 reg_next_use[regno] = 0;
3436
3437 /* If this is the last pass and this is a SCRATCH, show it will be dying
3438 here and count it. */
3439 else if (GET_CODE (reg) == SCRATCH && insn != 0)
3440 {
3441 REG_NOTES (insn)
3442 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3443 }
3444 }
3445 \f
3446 #ifdef AUTO_INC_DEC
3447
3448 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
3449 reference. */
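/* An illustrative sketch (register numbers arbitrary, assuming a 4-byte mode
   and HAVE_POST_INCREMENT):

     (set (reg:SI 101) (mem:SI (reg:SI 100)))
     ...
     (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))

   becomes

     (set (reg:SI 101) (mem:SI (post_inc:SI (reg:SI 100))))

   with the increment insn reduced to a no-op copy and then deleted.  */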
3450
3451 static void
3452 find_auto_inc (needed, x, insn)
3453 regset needed;
3454 rtx x;
3455 rtx insn;
3456 {
3457 rtx addr = XEXP (x, 0);
3458 HOST_WIDE_INT offset = 0;
3459 rtx set;
3460
3461 /* Here we detect use of an index register which might be good for
3462 postincrement, postdecrement, preincrement, or predecrement. */
3463
3464 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3465 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
3466
3467 if (GET_CODE (addr) == REG)
3468 {
3469 register rtx y;
3470 register int size = GET_MODE_SIZE (GET_MODE (x));
3471 rtx use;
3472 rtx incr;
3473 int regno = REGNO (addr);
3474
3475 /* Is the next use an increment that might make auto-increment? */
3476 if ((incr = reg_next_use[regno]) != 0
3477 && (set = single_set (incr)) != 0
3478 && GET_CODE (set) == SET
3479 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
3480 /* Can't add side effects to jumps; if reg is spilled and
3481 reloaded, there's no way to store back the altered value. */
3482 && GET_CODE (insn) != JUMP_INSN
3483 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
3484 && XEXP (y, 0) == addr
3485 && GET_CODE (XEXP (y, 1)) == CONST_INT
3486 && ((HAVE_POST_INCREMENT
3487 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
3488 || (HAVE_POST_DECREMENT
3489 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
3490 || (HAVE_PRE_INCREMENT
3491 && (INTVAL (XEXP (y, 1)) == size && offset == size))
3492 || (HAVE_PRE_DECREMENT
3493 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
3494 /* Make sure this reg appears only once in this insn. */
3495 && (use = find_use_as_address (PATTERN (insn), addr, offset),
3496 use != 0 && use != (rtx) 1))
3497 {
3498 rtx q = SET_DEST (set);
3499 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
3500 ? (offset ? PRE_INC : POST_INC)
3501 : (offset ? PRE_DEC : POST_DEC));
3502
3503 if (dead_or_set_p (incr, addr))
3504 {
3505 /* This is the simple case. Try to make the auto-inc. If
3506 we can't, we are done. Otherwise, we will do any
3507 needed updates below. */
3508 if (! validate_change (insn, &XEXP (x, 0),
3509 gen_rtx_fmt_e (inc_code, Pmode, addr),
3510 0))
3511 return;
3512 }
3513 else if (GET_CODE (q) == REG
3514 /* PREV_INSN used here to check the semi-open interval
3515 [insn,incr). */
3516 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
3517 /* We must also check for sets of q as q may be
3518 a call clobbered hard register and there may
3519 be a call between PREV_INSN (insn) and incr. */
3520 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
3521 {
3522 /* We have *p followed sometime later by q = p+size.
3523 Both p and q must be live afterward,
3524 and q is not used between INSN and its assignment.
3525 Change it to q = p, ...*q..., q = q+size.
3526 Then fall into the usual case. */
3527 rtx insns, temp;
3528 basic_block bb;
3529
3530 start_sequence ();
3531 emit_move_insn (q, addr);
3532 insns = get_insns ();
3533 end_sequence ();
3534
3535 bb = BLOCK_FOR_INSN (insn);
3536 for (temp = insns; temp; temp = NEXT_INSN (temp))
3537 set_block_for_insn (temp, bb);
3538
3539 /* If we can't make the auto-inc, or can't make the
3540 replacement into Y, exit. There's no point in making
3541 the change below if we can't do the auto-inc and doing
3542 so is not correct in the pre-inc case. */
3543
3544 validate_change (insn, &XEXP (x, 0),
3545 gen_rtx_fmt_e (inc_code, Pmode, q),
3546 1);
3547 validate_change (incr, &XEXP (y, 0), q, 1);
3548 if (! apply_change_group ())
3549 return;
3550
3551 /* We now know we'll be doing this change, so emit the
3552 new insn(s) and do the updates. */
3553 emit_insns_before (insns, insn);
3554
3555 if (BLOCK_FOR_INSN (insn)->head == insn)
3556 BLOCK_FOR_INSN (insn)->head = insns;
3557
3558 /* INCR will become a NOTE and INSN won't contain a
3559 use of ADDR. If a use of ADDR was just placed in
3560 the insn before INSN, make that the next use.
3561 Otherwise, invalidate it. */
3562 if (GET_CODE (PREV_INSN (insn)) == INSN
3563 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
3564 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
3565 reg_next_use[regno] = PREV_INSN (insn);
3566 else
3567 reg_next_use[regno] = 0;
3568
3569 addr = q;
3570 regno = REGNO (q);
3571
3572 /* REGNO is now used in INCR which is below INSN, but
3573 it previously wasn't live here. If we don't mark
3574 it as needed, we'll put a REG_DEAD note for it
3575 on this insn, which is incorrect. */
3576 SET_REGNO_REG_SET (needed, regno);
3577
3578 /* If there are any calls between INSN and INCR, show
3579 that REGNO now crosses them. */
3580 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
3581 if (GET_CODE (temp) == CALL_INSN)
3582 REG_N_CALLS_CROSSED (regno)++;
3583 }
3584 else
3585 return;
3586
3587 /* If we haven't returned, it means we were able to make the
3588 auto-inc, so update the status. First, record that this insn
3589 has an implicit side effect. */
3590
3591 REG_NOTES (insn)
3592 = gen_rtx_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
3593
3594 /* Modify the old increment-insn to simply copy
3595 the already-incremented value of our register. */
3596 if (! validate_change (incr, &SET_SRC (set), addr, 0))
3597 abort ();
3598
3599 /* If that makes it a no-op (copying the register into itself) delete
3600 it so it won't appear to be a "use" and a "set" of this
3601 register. */
3602 if (SET_DEST (set) == addr)
3603 {
3604 PUT_CODE (incr, NOTE);
3605 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
3606 NOTE_SOURCE_FILE (incr) = 0;
3607 }
3608
3609 if (regno >= FIRST_PSEUDO_REGISTER)
3610 {
3611 /* Count an extra reference to the reg. When a reg is
3612 incremented, spilling it is worse, so we want to make
3613 that less likely. */
3614 REG_N_REFS (regno) += loop_depth;
3615
3616 /* Count the increment as a setting of the register,
3617 even though it isn't a SET in rtl. */
3618 REG_N_SETS (regno)++;
3619 }
3620 }
3621 }
3622 }
3623 #endif /* AUTO_INC_DEC */
3624 \f
3625 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
3626 This is done assuming the registers needed from X
3627 are those that have 1-bits in NEEDED.
3628
3629 On the final pass, FINAL is 1. This means try for autoincrement
3630 and count the uses and deaths of each pseudo-reg.
3631
3632 INSN is the containing instruction. If INSN is dead, this function is not
3633 called. */
3634
3635 static void
3636 mark_used_regs (needed, live, x, final, insn)
3637 regset needed;
3638 regset live;
3639 rtx x;
3640 int final;
3641 rtx insn;
3642 {
3643 register RTX_CODE code;
3644 register int regno;
3645 int i;
3646
3647 retry:
3648 code = GET_CODE (x);
3649 switch (code)
3650 {
3651 case LABEL_REF:
3652 case SYMBOL_REF:
3653 case CONST_INT:
3654 case CONST:
3655 case CONST_DOUBLE:
3656 case PC:
3657 case ADDR_VEC:
3658 case ADDR_DIFF_VEC:
3659 return;
3660
3661 #ifdef HAVE_cc0
3662 case CC0:
3663 cc0_live = 1;
3664 return;
3665 #endif
3666
3667 case CLOBBER:
3668 /* If we are clobbering a MEM, mark any registers inside the address
3669 as being used. */
3670 if (GET_CODE (XEXP (x, 0)) == MEM)
3671 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), final, insn);
3672 return;
3673
3674 case MEM:
3675 /* Invalidate the data for the last MEM stored, but only if MEM is
3676 something that can be stored into. */
3677 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3678 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3679 ; /* needn't clear the memory set list */
3680 else
3681 {
3682 rtx temp = mem_set_list;
3683 rtx prev = NULL_RTX;
3684
3685 while (temp)
3686 {
3687 if (anti_dependence (XEXP (temp, 0), x))
3688 {
3689 /* Splice temp out of the list. */
3690 if (prev)
3691 XEXP (prev, 1) = XEXP (temp, 1);
3692 else
3693 mem_set_list = XEXP (temp, 1);
3694 }
3695 else
3696 prev = temp;
3697 temp = XEXP (temp, 1);
3698 }
3699 }
3700
3701      /* If the memory reference had embedded side effects (autoincrement
3702         address modes), then we may need to kill some entries on the
3703         memory set list. */
3704 if (insn)
3705 invalidate_mems_from_autoinc (insn);
3706
3707 #ifdef AUTO_INC_DEC
3708 if (final)
3709 find_auto_inc (needed, x, insn);
3710 #endif
3711 break;
3712
3713 case SUBREG:
3714 if (GET_CODE (SUBREG_REG (x)) == REG
3715 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
3716 && (GET_MODE_SIZE (GET_MODE (x))
3717 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3718 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
3719
3720 /* While we're here, optimize this case. */
3721 x = SUBREG_REG (x);
3722
3723 /* In case the SUBREG is not of a register, don't optimize */
3724 if (GET_CODE (x) != REG)
3725 {
3726 mark_used_regs (needed, live, x, final, insn);
3727 return;
3728 }
3729
3730 /* ... fall through ... */
3731
3732 case REG:
3733       /* A register is being used other than being set
3734          => mark it as needed. */
3735
3736 regno = REGNO (x);
3737 {
3738 int some_needed = REGNO_REG_SET_P (needed, regno);
3739 int some_not_needed = ! some_needed;
3740
3741 SET_REGNO_REG_SET (live, regno);
3742
3743 /* A hard reg in a wide mode may really be multiple registers.
3744 If so, mark all of them just like the first. */
3745 if (regno < FIRST_PSEUDO_REGISTER)
3746 {
3747 int n;
3748
3749 /* For stack ptr or fixed arg pointer,
3750 nothing below can be necessary, so waste no more time. */
3751 if (regno == STACK_POINTER_REGNUM
3752 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3753 || (regno == HARD_FRAME_POINTER_REGNUM
3754 && (! reload_completed || frame_pointer_needed))
3755 #endif
3756 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3757 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3758 #endif
3759 || (regno == FRAME_POINTER_REGNUM
3760 && (! reload_completed || frame_pointer_needed)))
3761 {
3762 /* If this is a register we are going to try to eliminate,
3763 don't mark it live here. If we are successful in
3764 eliminating it, it need not be live unless it is used for
3765 pseudos, in which case it will have been set live when
3766 it was allocated to the pseudos. If the register will not
3767 be eliminated, reload will set it live at that point. */
3768
3769 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
3770 regs_ever_live[regno] = 1;
3771 return;
3772 }
3773 /* No death notes for global register variables;
3774 their values are live after this function exits. */
3775 if (global_regs[regno])
3776 {
3777 if (final)
3778 reg_next_use[regno] = insn;
3779 return;
3780 }
3781
3782 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3783 while (--n > 0)
3784 {
3785 int regno_n = regno + n;
3786 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3787
3788 SET_REGNO_REG_SET (live, regno_n);
3789 some_needed |= needed_regno;
3790 some_not_needed |= ! needed_regno;
3791 }
3792 }
3793 if (final)
3794 {
3795 /* Record where each reg is used, so when the reg
3796 is set we know the next insn that uses it. */
3797
3798 reg_next_use[regno] = insn;
3799
3800 if (regno < FIRST_PSEUDO_REGISTER)
3801 {
3802 /* If a hard reg is being used,
3803 record that this function does use it. */
3804
3805 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
3806 if (i == 0)
3807 i = 1;
3808 do
3809 regs_ever_live[regno + --i] = 1;
3810 while (i > 0);
3811 }
3812 else
3813 {
3814 /* Keep track of which basic block each reg appears in. */
3815
3816 register int blocknum = BLOCK_NUM (insn);
3817
3818 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
3819 REG_BASIC_BLOCK (regno) = blocknum;
3820 else if (REG_BASIC_BLOCK (regno) != blocknum)
3821 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3822
3823 /* Count (weighted) number of uses of each reg. */
3824
3825 REG_N_REFS (regno) += loop_depth;
3826 }
3827
3828 /* Record and count the insns in which a reg dies.
3829 If it is used in this insn and was dead below the insn
3830 then it dies in this insn. If it was set in this insn,
3831 we do not make a REG_DEAD note; likewise if we already
3832 made such a note. */
3833
3834 if (some_not_needed
3835 && ! dead_or_set_p (insn, x)
3836 #if 0
3837 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
3838 #endif
3839 )
3840 {
3841 /* Check for the case where the register dying partially
3842 overlaps the register set by this insn. */
3843 if (regno < FIRST_PSEUDO_REGISTER
3844 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
3845 {
3846 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3847 while (--n >= 0)
3848 some_needed |= dead_or_set_regno_p (insn, regno + n);
3849 }
3850
3851 /* If none of the words in X is needed, make a REG_DEAD
3852 note. Otherwise, we must make partial REG_DEAD notes. */
3853 if (! some_needed)
3854 {
3855 REG_NOTES (insn)
3856 = gen_rtx_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
3857 REG_N_DEATHS (regno)++;
3858 }
3859 else
3860 {
3861 int i;
3862
3863 /* Don't make a REG_DEAD note for a part of a register
3864 that is set in the insn. */
3865
3866 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
3867 i >= 0; i--)
3868 if (!REGNO_REG_SET_P (needed, regno + i)
3869 && ! dead_or_set_regno_p (insn, regno + i))
3870 REG_NOTES (insn)
3871 = gen_rtx_EXPR_LIST (REG_DEAD,
3872 gen_rtx_REG (reg_raw_mode[regno + i],
3873 regno + i),
3874 REG_NOTES (insn));
3875 }
3876 }
3877 }
3878 }
3879 return;
3880
3881 case SET:
3882 {
3883 register rtx testreg = SET_DEST (x);
3884 int mark_dest = 0;
3885
3886 /* If storing into MEM, don't show it as being used. But do
3887 show the address as being used. */
3888 if (GET_CODE (testreg) == MEM)
3889 {
3890 #ifdef AUTO_INC_DEC
3891 if (final)
3892 find_auto_inc (needed, testreg, insn);
3893 #endif
3894 mark_used_regs (needed, live, XEXP (testreg, 0), final, insn);
3895 mark_used_regs (needed, live, SET_SRC (x), final, insn);
3896 return;
3897 }
3898
3899 /* Storing in STRICT_LOW_PART is like storing in a reg
3900            in that this SET might be dead, so ignore it in TESTREG,
3901 but in some other ways it is like using the reg.
3902
3903 Storing in a SUBREG or a bit field is like storing the entire
3904 register in that if the register's value is not used
3905 then this SET is not needed. */
3906 while (GET_CODE (testreg) == STRICT_LOW_PART
3907 || GET_CODE (testreg) == ZERO_EXTRACT
3908 || GET_CODE (testreg) == SIGN_EXTRACT
3909 || GET_CODE (testreg) == SUBREG)
3910 {
3911 if (GET_CODE (testreg) == SUBREG
3912 && GET_CODE (SUBREG_REG (testreg)) == REG
3913 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
3914 && (GET_MODE_SIZE (GET_MODE (testreg))
3915 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
3916 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
3917
3918 /* Modifying a single register in an alternate mode
3919 does not use any of the old value. But these other
3920 ways of storing in a register do use the old value. */
3921 if (GET_CODE (testreg) == SUBREG
3922 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
3923 ;
3924 else
3925 mark_dest = 1;
3926
3927 testreg = XEXP (testreg, 0);
3928 }
3929
3930 /* If this is a store into a register,
3931 recursively scan the value being stored. */
3932
3933 if ((GET_CODE (testreg) == PARALLEL
3934 && GET_MODE (testreg) == BLKmode)
3935 || (GET_CODE (testreg) == REG
3936 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
3937 && (! reload_completed || frame_pointer_needed)))
3938 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3939 && ! (regno == HARD_FRAME_POINTER_REGNUM
3940 && (! reload_completed || frame_pointer_needed))
3941 #endif
3942 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3943 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3944 #endif
3945 ))
3946 /* We used to exclude global_regs here, but that seems wrong.
3947 Storing in them is like storing in mem. */
3948 {
3949 mark_used_regs (needed, live, SET_SRC (x), final, insn);
3950 if (mark_dest)
3951 mark_used_regs (needed, live, SET_DEST (x), final, insn);
3952 return;
3953 }
3954 }
3955 break;
3956
3957 case RETURN:
3958 /* If exiting needs the right stack value, consider this insn as
3959 using the stack pointer. In any event, consider it as using
3960 all global registers and all registers used by return. */
3961 if (! EXIT_IGNORE_STACK
3962 || (! FRAME_POINTER_REQUIRED
3963 && ! current_function_calls_alloca
3964 && flag_omit_frame_pointer)
3965 || current_function_sp_is_unchanging)
3966 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3967
3968 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3969 if (global_regs[i]
3970 #ifdef EPILOGUE_USES
3971 || EPILOGUE_USES (i)
3972 #endif
3973 )
3974 SET_REGNO_REG_SET (live, i);
3975 break;
3976
3977 case ASM_OPERANDS:
3978 case UNSPEC_VOLATILE:
3979 case TRAP_IF:
3980 case ASM_INPUT:
3981 {
3982 /* Traditional and volatile asm instructions must be considered to use
3983 and clobber all hard registers, all pseudo-registers and all of
3984 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
3985
3986 Consider for instance a volatile asm that changes the fpu rounding
3987 mode. An insn should not be moved across this even if it only uses
3988 pseudo-regs because it might give an incorrectly rounded result.
3989
3990 ?!? Unfortunately, marking all hard registers as live causes massive
3991 problems for the register allocator and marking all pseudos as live
3992 creates mountains of uninitialized variable warnings.
3993
3994 So for now, just clear the memory set list and mark any regs
3995 we can find in ASM_OPERANDS as used. */
3996 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
3997 mem_set_list = NULL_RTX;
3998
3999 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4000            We cannot just fall through here, since then we would be confused
4001            by the ASM_INPUT rtx inside ASM_OPERANDS, which, unlike its usual
4002            role, does not indicate a traditional asm. */
4003 if (code == ASM_OPERANDS)
4004 {
4005 int j;
4006
4007 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4008 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4009 final, insn);
4010 }
4011 break;
4012 }
4013
4014
4015 default:
4016 break;
4017 }
4018
4019 /* Recursively scan the operands of this expression. */
4020
4021 {
4022 register const char *fmt = GET_RTX_FORMAT (code);
4023 register int i;
4024
4025 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4026 {
4027 if (fmt[i] == 'e')
4028 {
4029 /* Tail recursive case: save a function call level. */
4030 if (i == 0)
4031 {
4032 x = XEXP (x, 0);
4033 goto retry;
4034 }
4035 mark_used_regs (needed, live, XEXP (x, i), final, insn);
4036 }
4037 else if (fmt[i] == 'E')
4038 {
4039 register int j;
4040 for (j = 0; j < XVECLEN (x, i); j++)
4041 mark_used_regs (needed, live, XVECEXP (x, i, j), final, insn);
4042 }
4043 }
4044 }
4045 }
4046 \f
4047 #ifdef AUTO_INC_DEC
4048
4049 static int
4050 try_pre_increment_1 (insn)
4051 rtx insn;
4052 {
4053 /* Find the next use of this reg. If in same basic block,
4054 make it do pre-increment or pre-decrement if appropriate. */
4055 rtx x = single_set (insn);
4056 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4057 * INTVAL (XEXP (SET_SRC (x), 1)));
4058 int regno = REGNO (SET_DEST (x));
4059 rtx y = reg_next_use[regno];
4060 if (y != 0
4061 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4062 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4063 mode would be better. */
4064 && ! dead_or_set_p (y, SET_DEST (x))
4065 && try_pre_increment (y, SET_DEST (x), amount))
4066 {
4067 /* We have found a suitable auto-increment
4068 and already changed insn Y to do it.
4069 So flush this increment-instruction. */
4070 PUT_CODE (insn, NOTE);
4071 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4072 NOTE_SOURCE_FILE (insn) = 0;
4073 /* Count a reference to this reg for the increment
4074         insn we are deleting.  When a reg is incremented,
4075 spilling it is worse, so we want to make that
4076 less likely. */
4077 if (regno >= FIRST_PSEUDO_REGISTER)
4078 {
4079 REG_N_REFS (regno) += loop_depth;
4080 REG_N_SETS (regno)++;
4081 }
4082 return 1;
4083 }
4084 return 0;
4085 }
4086
4087 /* Try to change INSN so that it does pre-increment or pre-decrement
4088 addressing on register REG in order to add AMOUNT to REG.
4089 AMOUNT is negative for pre-decrement.
4090 Returns 1 if the change could be made.
4091    This checks all aspects of the validity of the result of modifying INSN. */
4092
4093 static int
4094 try_pre_increment (insn, reg, amount)
4095 rtx insn, reg;
4096 HOST_WIDE_INT amount;
4097 {
4098 register rtx use;
4099
4100 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4101 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4102 int pre_ok = 0;
4103 /* Nonzero if we can try to make a post-increment or post-decrement.
4104 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4105 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4106 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4107 int post_ok = 0;
4108
4109 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4110 int do_post = 0;
4111
4112 /* From the sign of increment, see which possibilities are conceivable
4113 on this target machine. */
4114 if (HAVE_PRE_INCREMENT && amount > 0)
4115 pre_ok = 1;
4116 if (HAVE_POST_INCREMENT && amount > 0)
4117 post_ok = 1;
4118
4119 if (HAVE_PRE_DECREMENT && amount < 0)
4120 pre_ok = 1;
4121 if (HAVE_POST_DECREMENT && amount < 0)
4122 post_ok = 1;
4123
4124 if (! (pre_ok || post_ok))
4125 return 0;
4126
4127 /* It is not safe to add a side effect to a jump insn
4128 because if the incremented register is spilled and must be reloaded
4129 there would be no way to store the incremented value back in memory. */
4130
4131 if (GET_CODE (insn) == JUMP_INSN)
4132 return 0;
4133
4134 use = 0;
4135 if (pre_ok)
4136 use = find_use_as_address (PATTERN (insn), reg, 0);
4137 if (post_ok && (use == 0 || use == (rtx) 1))
4138 {
4139 use = find_use_as_address (PATTERN (insn), reg, -amount);
4140 do_post = 1;
4141 }
4142
4143 if (use == 0 || use == (rtx) 1)
4144 return 0;
4145
4146 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4147 return 0;
4148
4149 /* See if this combination of instruction and addressing mode exists. */
4150 if (! validate_change (insn, &XEXP (use, 0),
4151 gen_rtx_fmt_e (amount > 0
4152 ? (do_post ? POST_INC : PRE_INC)
4153 : (do_post ? POST_DEC : PRE_DEC),
4154 Pmode, reg), 0))
4155 return 0;
4156
4157 /* Record that this insn now has an implicit side effect on X. */
4158 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4159 return 1;
4160 }
4161
4162 #endif /* AUTO_INC_DEC */
4163 \f
4164 /* Find the place in the rtx X where REG is used as a memory address.
4165 Return the MEM rtx that so uses it.
4166 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4167 (plus REG (const_int PLUSCONST)).
4168
4169 If such an address does not appear, return 0.
4170 If REG appears more than once, or is used other than in such an address,
4171 return (rtx)1. */
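/* A hypothetical example: for X = (set (reg 70) (mem:SI (plus (reg 65)
   (const_int 4)))), find_use_as_address (X, reg65_rtx, 4) returns the MEM,
   while find_use_as_address (X, reg65_rtx, 0) returns (rtx) 1, because the
   register appears but not as a plain (mem (reg 65)) address.  */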
4172
4173 rtx
4174 find_use_as_address (x, reg, plusconst)
4175 register rtx x;
4176 rtx reg;
4177 HOST_WIDE_INT plusconst;
4178 {
4179 enum rtx_code code = GET_CODE (x);
4180 const char *fmt = GET_RTX_FORMAT (code);
4181 register int i;
4182 register rtx value = 0;
4183 register rtx tem;
4184
4185 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4186 return x;
4187
4188 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4189 && XEXP (XEXP (x, 0), 0) == reg
4190 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4191 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4192 return x;
4193
4194 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4195 {
4196 /* If REG occurs inside a MEM used in a bit-field reference,
4197 that is unacceptable. */
4198 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4199 return (rtx) (HOST_WIDE_INT) 1;
4200 }
4201
4202 if (x == reg)
4203 return (rtx) (HOST_WIDE_INT) 1;
4204
4205 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4206 {
4207 if (fmt[i] == 'e')
4208 {
4209 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4210 if (value == 0)
4211 value = tem;
4212 else if (tem != 0)
4213 return (rtx) (HOST_WIDE_INT) 1;
4214 }
4215 if (fmt[i] == 'E')
4216 {
4217 register int j;
4218 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4219 {
4220 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4221 if (value == 0)
4222 value = tem;
4223 else if (tem != 0)
4224 return (rtx) (HOST_WIDE_INT) 1;
4225 }
4226 }
4227 }
4228
4229 return value;
4230 }
4231 \f
4232 /* Write information about registers and basic blocks into FILE.
4233 This is part of making a debugging dump. */
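/* For pseudos, a dumped line might look like (illustrative only):

     Register 68 used 5 times across 40 insns in block 2; set 1 time; user var.

   followed by one "Basic block N" stanza per block listing its
   predecessors, successors and the registers live at its start and end.  */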
4234
4235 void
4236 dump_flow_info (file)
4237 FILE *file;
4238 {
4239 register int i;
4240 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4241
4242 fprintf (file, "%d registers.\n", max_regno);
4243 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4244 if (REG_N_REFS (i))
4245 {
4246 enum reg_class class, altclass;
4247 fprintf (file, "\nRegister %d used %d times across %d insns",
4248 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4249 if (REG_BASIC_BLOCK (i) >= 0)
4250 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4251 if (REG_N_SETS (i))
4252 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4253 (REG_N_SETS (i) == 1) ? "" : "s");
4254 if (REG_USERVAR_P (regno_reg_rtx[i]))
4255 fprintf (file, "; user var");
4256 if (REG_N_DEATHS (i) != 1)
4257 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4258 if (REG_N_CALLS_CROSSED (i) == 1)
4259 fprintf (file, "; crosses 1 call");
4260 else if (REG_N_CALLS_CROSSED (i))
4261 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4262 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4263 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4264 class = reg_preferred_class (i);
4265 altclass = reg_alternate_class (i);
4266 if (class != GENERAL_REGS || altclass != ALL_REGS)
4267 {
4268 if (altclass == ALL_REGS || class == ALL_REGS)
4269 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4270 else if (altclass == NO_REGS)
4271 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4272 else
4273 fprintf (file, "; pref %s, else %s",
4274 reg_class_names[(int) class],
4275 reg_class_names[(int) altclass]);
4276 }
4277 if (REGNO_POINTER_FLAG (i))
4278 fprintf (file, "; pointer");
4279 fprintf (file, ".\n");
4280 }
4281
4282 fprintf (file, "\n%d basic blocks.\n", n_basic_blocks);
4283 for (i = 0; i < n_basic_blocks; i++)
4284 {
4285 register basic_block bb = BASIC_BLOCK (i);
4286 register int regno;
4287 register edge e;
4288
4289 fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
4290 i, INSN_UID (bb->head), INSN_UID (bb->end));
4291
4292 fprintf (file, "Predecessors: ");
4293 for (e = bb->pred; e ; e = e->pred_next)
4294 dump_edge_info (file, e, 0);
4295
4296 fprintf (file, "\nSuccessors: ");
4297 for (e = bb->succ; e ; e = e->succ_next)
4298 dump_edge_info (file, e, 1);
4299
4300 fprintf (file, "\nRegisters live at start:");
4301 if (bb->global_live_at_start)
4302 {
4303 for (regno = 0; regno < max_regno; regno++)
4304 if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
4305 fprintf (file, " %d", regno);
4306 }
4307 else
4308 fprintf (file, " n/a");
4309
4310 fprintf (file, "\nRegisters live at end:");
4311 if (bb->global_live_at_end)
4312 {
4313 for (regno = 0; regno < max_regno; regno++)
4314 if (REGNO_REG_SET_P (bb->global_live_at_end, regno))
4315 fprintf (file, " %d", regno);
4316 }
4317 else
4318 fprintf (file, " n/a");
4319
4320 putc('\n', file);
4321 }
4322
4323 putc('\n', file);
4324 }
4325
4326 static void
4327 dump_edge_info (file, e, do_succ)
4328 FILE *file;
4329 edge e;
4330 int do_succ;
4331 {
4332 basic_block side = (do_succ ? e->dest : e->src);
4333
4334 if (side == ENTRY_BLOCK_PTR)
4335 fputs (" ENTRY", file);
4336 else if (side == EXIT_BLOCK_PTR)
4337 fputs (" EXIT", file);
4338 else
4339 fprintf (file, " %d", side->index);
4340
4341 if (e->flags)
4342 {
4343 static const char * const bitnames[] = {
4344 "fallthru", "crit", "ab", "abcall", "eh", "fake"
4345 };
4346 int comma = 0;
4347 int i, flags = e->flags;
4348
4349 fputc (' ', file);
4350 fputc ('(', file);
4351 for (i = 0; flags; i++)
4352 if (flags & (1 << i))
4353 {
4354 flags &= ~(1 << i);
4355
4356 if (comma)
4357 fputc (',', file);
4358 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
4359 fputs (bitnames[i], file);
4360 else
4361 fprintf (file, "%d", i);
4362 comma = 1;
4363 }
4364 fputc (')', file);
4365 }
4366 }
4367
4368 \f
4369 /* Like print_rtl, but also print out live information for the start of each
4370 basic block. */
4371
4372 void
4373 print_rtl_with_bb (outf, rtx_first)
4374 FILE *outf;
4375 rtx rtx_first;
4376 {
4377 register rtx tmp_rtx;
4378
4379 if (rtx_first == 0)
4380 fprintf (outf, "(nil)\n");
4381 else
4382 {
4383 int i;
4384 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
4385 int max_uid = get_max_uid ();
4386 basic_block *start = (basic_block *)
4387 alloca (max_uid * sizeof (basic_block));
4388 basic_block *end = (basic_block *)
4389 alloca (max_uid * sizeof (basic_block));
4390 enum bb_state *in_bb_p = (enum bb_state *)
4391 alloca (max_uid * sizeof (enum bb_state));
4392
4393 memset (start, 0, max_uid * sizeof (basic_block));
4394 memset (end, 0, max_uid * sizeof (basic_block));
4395 memset (in_bb_p, 0, max_uid * sizeof (enum bb_state));
4396
4397 for (i = n_basic_blocks - 1; i >= 0; i--)
4398 {
4399 basic_block bb = BASIC_BLOCK (i);
4400 rtx x;
4401
4402 start[INSN_UID (bb->head)] = bb;
4403 end[INSN_UID (bb->end)] = bb;
4404 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
4405 {
4406 enum bb_state state = IN_MULTIPLE_BB;
4407 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
4408 state = IN_ONE_BB;
4409 in_bb_p[INSN_UID(x)] = state;
4410
4411 if (x == bb->end)
4412 break;
4413 }
4414 }
4415
4416 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
4417 {
4418 int did_output;
4419 basic_block bb;
4420
4421 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
4422 {
4423 fprintf (outf, ";; Start of basic block %d, registers live:",
4424 bb->index);
4425
4426 EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
4427 {
4428 fprintf (outf, " %d", i);
4429 if (i < FIRST_PSEUDO_REGISTER)
4430 fprintf (outf, " [%s]",
4431 reg_names[i]);
4432 });
4433 putc ('\n', outf);
4434 }
4435
4436 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
4437 && GET_CODE (tmp_rtx) != NOTE
4438 && GET_CODE (tmp_rtx) != BARRIER
4439 && ! obey_regdecls)
4440 fprintf (outf, ";; Insn is not within a basic block\n");
4441 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
4442 fprintf (outf, ";; Insn is in multiple basic blocks\n");
4443
4444 did_output = print_rtl_single (outf, tmp_rtx);
4445
4446 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
4447 fprintf (outf, ";; End of basic block %d\n", bb->index);
4448
4449 if (did_output)
4450 putc ('\n', outf);
4451 }
4452 }
4453 }
4454
4455 \f
4456 /* Integer list support. */
4457
4458 /* Allocate a node from list *HEAD_PTR. */
4459
4460 static int_list_ptr
4461 alloc_int_list_node (head_ptr)
4462 int_list_block **head_ptr;
4463 {
4464 struct int_list_block *first_blk = *head_ptr;
4465
4466 if (first_blk == NULL || first_blk->nodes_left <= 0)
4467 {
4468 first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
4469 first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
4470 first_blk->next = *head_ptr;
4471 *head_ptr = first_blk;
4472 }
4473
4474 first_blk->nodes_left--;
4475 return &first_blk->nodes[first_blk->nodes_left];
4476 }
4477
4478 /* Pointer to head of predecessor/successor block list. */
4479 static int_list_block *pred_int_list_blocks;
4480
4481 /* Add a new node to integer list LIST with value VAL.
4482 LIST is a pointer to a list object to allow for different implementations.
4483 If *LIST is initially NULL, the list is empty.
4484 The caller must not care whether the element is added to the front or
4485 to the end of the list (to allow for different implementations). */
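/* A minimal usage sketch (hypothetical caller, not part of this file):

     int_list_ptr succs = NULL;
     add_int_list_node (&pred_int_list_blocks, &succs, bb->index);

   Nodes are carved out of INT_LIST_NODES_IN_BLK-sized blocks, so they are
   never freed one at a time; free_int_list releases whole blocks.  */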
4486
4487 static int_list_ptr
4488 add_int_list_node (blk_list, list, val)
4489 int_list_block **blk_list;
4490 int_list **list;
4491 int val;
4492 {
4493 int_list_ptr p = alloc_int_list_node (blk_list);
4494
4495 p->val = val;
4496 p->next = *list;
4497 *list = p;
4498 return p;
4499 }
4500
4501 /* Free the blocks of lists at BLK_LIST. */
4502
4503 void
4504 free_int_list (blk_list)
4505 int_list_block **blk_list;
4506 {
4507 int_list_block *p, *next;
4508
4509 for (p = *blk_list; p != NULL; p = next)
4510 {
4511 next = p->next;
4512 free (p);
4513 }
4514
4515 /* Mark list as empty for the next function we compile. */
4516 *blk_list = NULL;
4517 }
4518 \f
4519 /* Predecessor/successor computation. */
4520
4521 /* Mark PRED_BB a predecessor of SUCC_BB,
4522 and conversely SUCC_BB a successor of PRED_BB. */
4523
4524 static void
4525 add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
4526 int pred_bb;
4527 int succ_bb;
4528 int_list_ptr *s_preds;
4529 int_list_ptr *s_succs;
4530 int *num_preds;
4531 int *num_succs;
4532 {
4533 if (succ_bb != EXIT_BLOCK)
4534 {
4535 add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
4536 num_preds[succ_bb]++;
4537 }
4538 if (pred_bb != ENTRY_BLOCK)
4539 {
4540 add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
4541 num_succs[pred_bb]++;
4542 }
4543 }
4544
4545 /* Convert edge lists into pred/succ lists for backward compatibility. */
4546
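/* A hypothetical caller allocates the four output arrays with
   n_basic_blocks elements each and then calls

     compute_preds_succs (s_preds, s_succs, num_preds, num_succs);

   ENTRY_BLOCK and EXIT_BLOCK may appear as values in the lists, but no
   array slot is reserved for them.  */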
4547 void
4548 compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
4549 int_list_ptr *s_preds;
4550 int_list_ptr *s_succs;
4551 int *num_preds;
4552 int *num_succs;
4553 {
4554 int i, n = n_basic_blocks;
4555 edge e;
4556
4557 memset (s_preds, 0, n_basic_blocks * sizeof (int_list_ptr));
4558 memset (s_succs, 0, n_basic_blocks * sizeof (int_list_ptr));
4559 memset (num_preds, 0, n_basic_blocks * sizeof (int));
4560 memset (num_succs, 0, n_basic_blocks * sizeof (int));
4561
4562 for (i = 0; i < n; ++i)
4563 {
4564 basic_block bb = BASIC_BLOCK (i);
4565
4566 for (e = bb->succ; e ; e = e->succ_next)
4567 add_pred_succ (i, e->dest->index, s_preds, s_succs,
4568 num_preds, num_succs);
4569 }
4570
4571 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
4572 add_pred_succ (ENTRY_BLOCK, e->dest->index, s_preds, s_succs,
4573 num_preds, num_succs);
4574 }
4575
4576 void
4577 dump_bb_data (file, preds, succs, live_info)
4578 FILE *file;
4579 int_list_ptr *preds;
4580 int_list_ptr *succs;
4581 int live_info;
4582 {
4583 int bb;
4584 int_list_ptr p;
4585
4586 fprintf (file, "BB data\n\n");
4587 for (bb = 0; bb < n_basic_blocks; bb++)
4588 {
4589 fprintf (file, "BB %d, start %d, end %d\n", bb,
4590 INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
4591 fprintf (file, " preds:");
4592 for (p = preds[bb]; p != NULL; p = p->next)
4593 {
4594 int pred_bb = INT_LIST_VAL (p);
4595 if (pred_bb == ENTRY_BLOCK)
4596 fprintf (file, " entry");
4597 else
4598 fprintf (file, " %d", pred_bb);
4599 }
4600 fprintf (file, "\n");
4601 fprintf (file, " succs:");
4602 for (p = succs[bb]; p != NULL; p = p->next)
4603 {
4604 int succ_bb = INT_LIST_VAL (p);
4605 if (succ_bb == EXIT_BLOCK)
4606 fprintf (file, " exit");
4607 else
4608 fprintf (file, " %d", succ_bb);
4609 }
4610 if (live_info)
4611 {
4612 int regno;
4613 fprintf (file, "\nRegisters live at start:");
4614 for (regno = 0; regno < max_regno; regno++)
4615 if (REGNO_REG_SET_P (BASIC_BLOCK (bb)->global_live_at_start, regno))
4616 fprintf (file, " %d", regno);
4617 fprintf (file, "\n");
4618 }
4619 fprintf (file, "\n");
4620 }
4621 fprintf (file, "\n");
4622 }
4623
4624 /* Free basic block data storage. */
4625
4626 void
4627 free_bb_mem ()
4628 {
4629 free_int_list (&pred_int_list_blocks);
4630 }
4631
4632 /* Compute dominator relationships. */
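/* In effect this iterates dom(b) = {b} union (intersection of dom(p) over
   all predecessors p of b), and dually for post-dominators over
   successors, until no set changes.  For the simple chain 0 -> 1 -> 2 the
   first pass already yields dom(1) = {0,1} and dom(2) = {0,1,2}.  */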
4633 void
4634 compute_dominators (dominators, post_dominators, s_preds, s_succs)
4635 sbitmap *dominators;
4636 sbitmap *post_dominators;
4637 int_list_ptr *s_preds;
4638 int_list_ptr *s_succs;
4639 {
4640 int bb, changed, passes;
4641 sbitmap *temp_bitmap;
4642
4643 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4644 sbitmap_vector_ones (dominators, n_basic_blocks);
4645 sbitmap_vector_ones (post_dominators, n_basic_blocks);
4646 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
4647
4648 sbitmap_zero (dominators[0]);
4649 SET_BIT (dominators[0], 0);
4650
4651 sbitmap_zero (post_dominators[n_basic_blocks - 1]);
4652 SET_BIT (post_dominators[n_basic_blocks - 1], 0);
4653
4654 passes = 0;
4655 changed = 1;
4656 while (changed)
4657 {
4658 changed = 0;
4659 for (bb = 1; bb < n_basic_blocks; bb++)
4660 {
4661 sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
4662 bb, s_preds);
4663 SET_BIT (temp_bitmap[bb], bb);
4664 changed |= sbitmap_a_and_b (dominators[bb],
4665 dominators[bb],
4666 temp_bitmap[bb]);
4667 sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
4668 bb, s_succs);
4669 SET_BIT (temp_bitmap[bb], bb);
4670 changed |= sbitmap_a_and_b (post_dominators[bb],
4671 post_dominators[bb],
4672 temp_bitmap[bb]);
4673 }
4674 passes++;
4675 }
4676
4677 free (temp_bitmap);
4678 }
4679
4680 /* Compute dominator relationships using new flow graph structures. */
4681 void
4682 compute_flow_dominators (dominators, post_dominators)
4683 sbitmap *dominators;
4684 sbitmap *post_dominators;
4685 {
4686 int bb, changed, passes;
4687 sbitmap *temp_bitmap;
4688
4689 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4690 sbitmap_vector_ones (dominators, n_basic_blocks);
4691 sbitmap_vector_ones (post_dominators, n_basic_blocks);
4692 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
4693
4694 sbitmap_zero (dominators[0]);
4695 SET_BIT (dominators[0], 0);
4696
4697 sbitmap_zero (post_dominators[n_basic_blocks - 1]);
4698 SET_BIT (post_dominators[n_basic_blocks - 1], 0);
4699
4700 passes = 0;
4701 changed = 1;
4702 while (changed)
4703 {
4704 changed = 0;
4705 for (bb = 1; bb < n_basic_blocks; bb++)
4706 {
4707 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
4708 SET_BIT (temp_bitmap[bb], bb);
4709 changed |= sbitmap_a_and_b (dominators[bb],
4710 dominators[bb],
4711 temp_bitmap[bb]);
4712 sbitmap_intersection_of_succs (temp_bitmap[bb], post_dominators, bb);
4713 SET_BIT (temp_bitmap[bb], bb);
4714 changed |= sbitmap_a_and_b (post_dominators[bb],
4715 post_dominators[bb],
4716 temp_bitmap[bb]);
4717 }
4718 passes++;
4719 }
4720
4721 free (temp_bitmap);
4722 }
4723
4724 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
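/* An illustrative walk-through for the chain 0 -> 1 -> 2, where
   dom(1) = {0,1} and dom(2) = {0,1,2}: tmp(2) starts as {0,1}; subtracting
   tmp(1) = {0} leaves {1}, so idom(2) = 1.  Likewise tmp(1) = {0} gives
   idom(1) = 0.  */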
4725
4726 void
4727 compute_immediate_dominators (idom, dominators)
4728 int *idom;
4729 sbitmap *dominators;
4730 {
4731 sbitmap *tmp;
4732 int b;
4733
4734 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4735
4736 /* Begin with tmp(n) = dom(n) - { n }. */
4737 for (b = n_basic_blocks; --b >= 0; )
4738 {
4739 sbitmap_copy (tmp[b], dominators[b]);
4740 RESET_BIT (tmp[b], b);
4741 }
4742
4743 /* Subtract out all of our dominator's dominators. */
4744 for (b = n_basic_blocks; --b >= 0; )
4745 {
4746 sbitmap tmp_b = tmp[b];
4747 int s;
4748
4749 for (s = n_basic_blocks; --s >= 0; )
4750 if (TEST_BIT (tmp_b, s))
4751 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
4752 }
4753
4754 /* Find the one bit set in the bitmap and put it in the output array. */
4755 for (b = n_basic_blocks; --b >= 0; )
4756 {
4757 int t;
4758 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
4759 }
4760
4761 sbitmap_vector_free (tmp);
4762 }
4763
4764 /* Count for a single SET rtx, X. */
4765
4766 static void
4767 count_reg_sets_1 (x)
4768 rtx x;
4769 {
4770 register int regno;
4771 register rtx reg = SET_DEST (x);
4772
4773 /* Find the register that's set/clobbered. */
4774 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
4775 || GET_CODE (reg) == SIGN_EXTRACT
4776 || GET_CODE (reg) == STRICT_LOW_PART)
4777 reg = XEXP (reg, 0);
4778
4779 if (GET_CODE (reg) == PARALLEL
4780 && GET_MODE (reg) == BLKmode)
4781 {
4782 register int i;
4783 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4784 count_reg_sets_1 (XVECEXP (reg, 0, i));
4785 return;
4786 }
4787
4788 if (GET_CODE (reg) == REG)
4789 {
4790 regno = REGNO (reg);
4791 if (regno >= FIRST_PSEUDO_REGISTER)
4792 {
4793 /* Count (weighted) references, stores, etc. This counts a
4794 register twice if it is modified, but that is correct. */
4795 REG_N_SETS (regno)++;
4796
4797 REG_N_REFS (regno) += loop_depth;
4798 }
4799 }
4800 }
4801
4802 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
4803 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
4804
4805 static void
4806 count_reg_sets (x)
4807 rtx x;
4808 {
4809 register RTX_CODE code = GET_CODE (x);
4810
4811 if (code == SET || code == CLOBBER)
4812 count_reg_sets_1 (x);
4813 else if (code == PARALLEL)
4814 {
4815 register int i;
4816 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4817 {
4818 code = GET_CODE (XVECEXP (x, 0, i));
4819 if (code == SET || code == CLOBBER)
4820 count_reg_sets_1 (XVECEXP (x, 0, i));
4821 }
4822 }
4823 }
4824
4825 /* Increment REG_N_REFS by the current loop depth for each register
4826    reference found in X. */
4827
4828 static void
4829 count_reg_references (x)
4830 rtx x;
4831 {
4832 register RTX_CODE code;
4833
4834 retry:
4835 code = GET_CODE (x);
4836 switch (code)
4837 {
4838 case LABEL_REF:
4839 case SYMBOL_REF:
4840 case CONST_INT:
4841 case CONST:
4842 case CONST_DOUBLE:
4843 case PC:
4844 case ADDR_VEC:
4845 case ADDR_DIFF_VEC:
4846 case ASM_INPUT:
4847 return;
4848
4849 #ifdef HAVE_cc0
4850 case CC0:
4851 return;
4852 #endif
4853
4854 case CLOBBER:
4855 /* If we are clobbering a MEM, mark any registers inside the address
4856 as being used. */
4857 if (GET_CODE (XEXP (x, 0)) == MEM)
4858 count_reg_references (XEXP (XEXP (x, 0), 0));
4859 return;
4860
4861 case SUBREG:
4862 /* While we're here, optimize this case. */
4863 x = SUBREG_REG (x);
4864
4865 /* In case the SUBREG is not of a register, don't optimize */
4866 if (GET_CODE (x) != REG)
4867 {
4868 count_reg_references (x);
4869 return;
4870 }
4871
4872 /* ... fall through ... */
4873
4874 case REG:
4875 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
4876 REG_N_REFS (REGNO (x)) += loop_depth;
4877 return;
4878
4879 case SET:
4880 {
4881 register rtx testreg = SET_DEST (x);
4882 int mark_dest = 0;
4883
4884 /* If storing into MEM, don't show it as being used. But do
4885 show the address as being used. */
4886 if (GET_CODE (testreg) == MEM)
4887 {
4888 count_reg_references (XEXP (testreg, 0));
4889 count_reg_references (SET_SRC (x));
4890 return;
4891 }
4892
4893 /* Storing in STRICT_LOW_PART is like storing in a reg
4894            in that this SET might be dead, so ignore it in TESTREG,
4895 but in some other ways it is like using the reg.
4896
4897 Storing in a SUBREG or a bit field is like storing the entire
4898 register in that if the register's value is not used
4899 then this SET is not needed. */
4900 while (GET_CODE (testreg) == STRICT_LOW_PART
4901 || GET_CODE (testreg) == ZERO_EXTRACT
4902 || GET_CODE (testreg) == SIGN_EXTRACT
4903 || GET_CODE (testreg) == SUBREG)
4904 {
4905 /* Modifying a single register in an alternate mode
4906 does not use any of the old value. But these other
4907 ways of storing in a register do use the old value. */
4908 if (GET_CODE (testreg) == SUBREG
4909 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4910 ;
4911 else
4912 mark_dest = 1;
4913
4914 testreg = XEXP (testreg, 0);
4915 }
4916
4917 /* If this is a store into a register,
4918 recursively scan the value being stored. */
4919
4920 if ((GET_CODE (testreg) == PARALLEL
4921 && GET_MODE (testreg) == BLKmode)
4922 || GET_CODE (testreg) == REG)
4923 {
4924 count_reg_references (SET_SRC (x));
4925 if (mark_dest)
4926 count_reg_references (SET_DEST (x));
4927 return;
4928 }
4929 }
4930 break;
4931
4932 default:
4933 break;
4934 }
4935
4936 /* Recursively scan the operands of this expression. */
4937
4938 {
4939 register const char *fmt = GET_RTX_FORMAT (code);
4940 register int i;
4941
4942 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4943 {
4944 if (fmt[i] == 'e')
4945 {
4946 /* Tail recursive case: save a function call level. */
4947 if (i == 0)
4948 {
4949 x = XEXP (x, 0);
4950 goto retry;
4951 }
4952 count_reg_references (XEXP (x, i));
4953 }
4954 else if (fmt[i] == 'E')
4955 {
4956 register int j;
4957 for (j = 0; j < XVECLEN (x, i); j++)
4958 count_reg_references (XVECEXP (x, i, j));
4959 }
4960 }
4961 }
4962 }
4963
4964 /* Recompute register set/reference counts immediately prior to register
4965 allocation.
4966
4967 This avoids problems with set/reference counts changing to/from values
4968 which have special meanings to the register allocators.
4969
4970 Additionally, the reference counts are the primary component used by the
4971 register allocators to prioritize pseudos for allocation to hard regs.
4972 More accurate reference counts generally lead to better register allocation.
4973
4974 F is the first insn to be scanned.
4975 LOOP_STEP denotes how much loop_depth should be incremented per
4976 loop nesting level in order to increase the ref count more for references
4977 in a loop.
4978
4979 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4980 possibly other information which is used by the register allocators. */
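/* For example (purely illustrative): with LOOP_STEP == 1, loop_depth is 1
   outside any loop and 2 inside a single loop, so a pseudo referenced once
   at top level and once inside that loop ends up with REG_N_REFS == 3.  */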
4981
4982 void
4983 recompute_reg_usage (f, loop_step)
4984 rtx f;
4985 int loop_step;
4986 {
4987 rtx insn;
4988 int i, max_reg;
4989
4990 /* Clear out the old data. */
4991 max_reg = max_reg_num ();
4992 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
4993 {
4994 REG_N_SETS (i) = 0;
4995 REG_N_REFS (i) = 0;
4996 }
4997
4998 /* Scan each insn in the chain and count how many times each register is
4999 set/used. */
5000 loop_depth = 1;
5001 for (insn = f; insn; insn = NEXT_INSN (insn))
5002 {
5003 /* Keep track of loop depth. */
5004 if (GET_CODE (insn) == NOTE)
5005 {
5006 /* Look for loop boundaries. */
5007 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
5008 loop_depth -= loop_step;
5009 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
5010 loop_depth += loop_step;
5011
5012 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
5013 Abort now rather than setting register status incorrectly. */
5014 if (loop_depth == 0)
5015 abort ();
5016 }
5017 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5018 {
5019 rtx links;
5020
5021 /* This call will increment REG_N_SETS for each SET or CLOBBER
5022 of a register in INSN. It will also increment REG_N_REFS
5023 by the loop depth for each set of a register in INSN. */
5024 count_reg_sets (PATTERN (insn));
5025
5026 /* count_reg_sets does not detect autoincrement address modes, so
5027 detect them here by looking at the notes attached to INSN. */
5028 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5029 {
5030 if (REG_NOTE_KIND (links) == REG_INC)
5031 /* Count (weighted) references, stores, etc. This counts a
5032 register twice if it is modified, but that is correct. */
5033 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5034 }
5035
5036 /* This call will increment REG_N_REFS by the current loop depth for
5037 each reference to a register in INSN. */
5038 count_reg_references (PATTERN (insn));
5039
5040 /* count_reg_references will not include counts for arguments to
5041 function calls, so detect them here by examining the
5042 CALL_INSN_FUNCTION_USAGE data. */
5043 if (GET_CODE (insn) == CALL_INSN)
5044 {
5045 rtx note;
5046
5047 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5048 note;
5049 note = XEXP (note, 1))
5050 if (GET_CODE (XEXP (note, 0)) == USE)
5051 count_reg_references (XEXP (XEXP (note, 0), 0));
5052 }
5053 }
5054 }
5055 }
5056
5057 /* Record INSN's block as BB. */
5058
5059 void
5060 set_block_for_insn (insn, bb)
5061 rtx insn;
5062 basic_block bb;
5063 {
5064 size_t uid = INSN_UID (insn);
5065 if (uid >= basic_block_for_insn->num_elements)
5066 {
5067 int new_size;
5068
5069 /* Add one-eighth the size so we don't keep calling xrealloc. */
5070 new_size = uid + (uid + 7) / 8;
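      /* For instance, a UID of 800 grows the table to 800 + 807/8 == 900
         elements.  */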
5071
5072 VARRAY_GROW (basic_block_for_insn, new_size);
5073 }
5074 VARRAY_BB (basic_block_for_insn, uid) = bb;
5075 }
5076
5077 /* Record INSN's block number as BB. */
5078 /* ??? This has got to go. */
5079
5080 void
5081 set_block_num (insn, bb)
5082 rtx insn;
5083 int bb;
5084 {
5085 set_block_for_insn (insn, BASIC_BLOCK (bb));
5086 }
5087 \f
5088 /* Unlink a chain of insns between START and FINISH inclusive, leaving notes
5089 that must be paired, and return the new chain. */
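/* Put differently (an illustrative restatement): insns between START and
   FINISH are threaded onto the returned chain, except that notes for which
   can_delete_note_p is false are re-inserted into the original stream
   right after the insn that preceded START.  */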
5090
5091 rtx
5092 unlink_insn_chain (start, finish)
5093 rtx start, finish;
5094 {
5095 rtx insert_point = PREV_INSN (start);
5096 rtx chain = NULL_RTX, curr;
5097
5098 /* Unchain the insns one by one. It would be quicker to delete all
5099 of these with a single unchaining, rather than one at a time, but
5100      we need to keep the NOTEs. */
5101
5102 while (1)
5103 {
5104 rtx next = NEXT_INSN (start);
5105
5106 remove_insn (start);
5107
5108 /* ??? Despite the fact that we're patching out the insn, it's
5109 still referenced in LOG_LINKS. Rather than try and track
5110 them all down and remove them, just mark the insn deleted. */
5111 INSN_DELETED_P (start) = 1;
5112
5113 if (GET_CODE (start) == NOTE && ! can_delete_note_p (start))
5114 {
5115 add_insn_after (start, insert_point);
5116 insert_point = start;
5117 }
5118 else
5119 {
5120 if (chain != NULL)
5121 {
5122 NEXT_INSN (curr) = start;
5123 PREV_INSN (start) = curr;
5124 curr = start;
5125 }
5126 else
5127 {
5128 chain = start;
5129 curr = start;
5130 PREV_INSN (chain) = NULL_RTX;
5131 }
5132 }
5133
5134 if (start == finish)
5135 break;
5136 start = next;
5137 }
5138
5139 if (chain != NULL_RTX)
5140 NEXT_INSN (curr) = NULL_RTX;
5141
5142 return chain;
5143 }
5144
5145 /* Subroutine of update_life_info. Determines whether multiple
5146 REG_NOTEs need to be distributed for the hard register mentioned in
5147 NOTE. This can happen if a reference to a hard register in the
5148 original insns was split into several smaller hard register
5149 references in the new insns. */
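/* For instance (hypothetical), a REG_DEAD note for a two-word hard
   register (reg:DI 2) on the replaced insn may have to be distributed as
   separate REG_DEAD notes for (reg:SI 2) and (reg:SI 3) on whichever new
   insns last reference those word-sized pieces.  */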
5150
5151 static void
5152 split_hard_reg_notes (curr_insn, note, first, last)
5153 rtx curr_insn, note, first, last;
5154 {
5155 rtx reg, temp, link;
5156 rtx insn;
5157 int n_regs, i, new_reg;
5158
5159 reg = XEXP (note, 0);
5160
5161 if (REG_NOTE_KIND (note) != REG_DEAD
5162 || GET_CODE (reg) != REG
5163 || REGNO (reg) >= FIRST_PSEUDO_REGISTER
5164 || HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) == 1)
5165 {
5166 XEXP (note, 1) = REG_NOTES (curr_insn);
5167 REG_NOTES (curr_insn) = note;
5168 return;
5169 }
5170
5171 n_regs = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5172
5173 for (i = 0; i < n_regs; i++)
5174 {
5175 new_reg = REGNO (reg) + i;
5176
5177 /* Check for references to new_reg in the split insns. */
5178 for (insn = last; ; insn = PREV_INSN (insn))
5179 {
5180 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
5181 && (temp = regno_use_in (new_reg, PATTERN (insn))))
5182 {
5183 /* Create a new reg dead note here. */
5184 link = rtx_alloc (EXPR_LIST);
5185 PUT_REG_NOTE_KIND (link, REG_DEAD);
5186 XEXP (link, 0) = temp;
5187 XEXP (link, 1) = REG_NOTES (insn);
5188 REG_NOTES (insn) = link;
5189
5190 /* If killed multiple registers here, then add in the excess. */
5191 i += HARD_REGNO_NREGS (REGNO (temp), GET_MODE (temp)) - 1;
5192
5193 break;
5194 }
5195 /* It isn't mentioned anywhere, so no new reg note is needed for
5196 this register. */
5197 if (insn == first)
5198 break;
5199 }
5200 }
5201 }
5202
5203 /* SET_INSN kills REG; add a REG_DEAD note mentioning REG to the last
5204 use of REG in the insns after SET_INSN and before or including
5205 LAST, if necessary.
5206
5207 A non-zero value is returned if we added a REG_DEAD note, or if we
5208    determined that a REG_DEAD note was not necessary for this
5209    particular SET. */
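/* For example (hypothetical): if SET_INSN sets (reg 70) and the only
   later insn up to LAST that mentions it is
   (set (reg 71) (plus (reg 70) (const_int 1))), that insn receives a
   REG_DEAD note for (reg 70) and the function returns 1.  */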
5210
5211 static int
5212 maybe_add_dead_note (reg, set_insn, last)
5213 rtx reg, set_insn, last;
5214 {
5215 rtx insn;
5216
5217 for (insn = last; insn != set_insn; insn = PREV_INSN (insn))
5218 {
5219 rtx set;
5220
5221 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
5222 && reg_overlap_mentioned_p (reg, PATTERN (insn))
5223 && (set = single_set (insn)))
5224 {
5225 rtx insn_dest = SET_DEST (set);
5226
5227 while (GET_CODE (insn_dest) == ZERO_EXTRACT
5228 || GET_CODE (insn_dest) == SUBREG
5229 || GET_CODE (insn_dest) == STRICT_LOW_PART
5230 || GET_CODE (insn_dest) == SIGN_EXTRACT)
5231 insn_dest = XEXP (insn_dest, 0);
5232
5233 if (! rtx_equal_p (insn_dest, reg))
5234 {
5235 /* Use the same scheme as combine.c, don't put both REG_DEAD
5236 and REG_UNUSED notes on the same insn. */
5237 if (! find_regno_note (insn, REG_UNUSED, REGNO (reg))
5238 && ! find_regno_note (insn, REG_DEAD, REGNO (reg)))
5239 {
5240 rtx note = rtx_alloc (EXPR_LIST);
5241 PUT_REG_NOTE_KIND (note, REG_DEAD);
5242 XEXP (note, 0) = reg;
5243 XEXP (note, 1) = REG_NOTES (insn);
5244 REG_NOTES (insn) = note;
5245 }
5246 return 1;
5247 }
5248 else if (reg_overlap_mentioned_p (reg, SET_SRC (set)))
5249 {
5250 /* We found an instruction that both uses the register and
5251 sets it, so no new REG_NOTE is needed for the previous
5252 set. */
5253 return 0;
5254 }
5255 }
5256 }
5257 return 0;
5258 }
5259
5260 static int
5261 maybe_add_dead_note_use (insn, dest)
5262 rtx insn, dest;
5263 {
5264 rtx set;
5265
5266 /* We need to add a REG_DEAD note to the last place DEST is
5267 referenced. */
5268
5269 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
5270 && reg_mentioned_p (dest, PATTERN (insn))
5271 && (set = single_set (insn)))
5272 {
5273 rtx insn_dest = SET_DEST (set);
5274
5275 while (GET_CODE (insn_dest) == ZERO_EXTRACT
5276 || GET_CODE (insn_dest) == SUBREG
5277 || GET_CODE (insn_dest) == STRICT_LOW_PART
5278 || GET_CODE (insn_dest) == SIGN_EXTRACT)
5279 insn_dest = XEXP (insn_dest, 0);
5280
5281 if (! rtx_equal_p (insn_dest, dest))
5282 {
5283 /* Use the same scheme as combine.c, don't put both REG_DEAD
5284 and REG_UNUSED notes on the same insn. */
5285 if (! find_regno_note (insn, REG_UNUSED, REGNO (dest))
5286 && ! find_regno_note (insn, REG_DEAD, REGNO (dest)))
5287 {
5288 rtx note = rtx_alloc (EXPR_LIST);
5289 PUT_REG_NOTE_KIND (note, REG_DEAD);
5290 XEXP (note, 0) = dest;
5291 XEXP (note, 1) = REG_NOTES (insn);
5292 REG_NOTES (insn) = note;
5293 }
5294 return 1;
5295 }
5296 }
5297 return 0;
5298 }
5299
5300 /* Find the first insn in the set of insns from FIRST to LAST inclusive
5301 that contains the note NOTE. */
5302 rtx
5303 find_insn_with_note (note, first, last)
5304 rtx note, first, last;
5305 {
5306 rtx insn;
5307
5308 for (insn = first; insn != NULL_RTX; insn = NEXT_INSN (insn))
5309 {
5310 rtx temp = find_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
5311 if (temp == note)
5312 {
5313 return insn;
5314 }
5315 if (insn == last)
5316 {
5317 break;
5318 }
5319 }
5320 return NULL_RTX;
5321 }
5322
5323 /* Subroutine of update_life_info. Determines whether a SET or
5324 CLOBBER in an insn created by splitting needs a REG_DEAD or
5325 REG_UNUSED note added. */
5326
5327 static void
5328 new_insn_dead_notes (pat, insn, first, last, orig_first_insn, orig_last_insn)
5329 rtx pat, insn, first, last, orig_first_insn, orig_last_insn;
5330 {
5331 rtx dest, tem;
5332
5333 if (GET_CODE (pat) != CLOBBER && GET_CODE (pat) != SET)
5334 abort ();
5335
5336 dest = XEXP (pat, 0);
5337
5338 while (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SUBREG
5339 || GET_CODE (dest) == STRICT_LOW_PART
5340 || GET_CODE (dest) == SIGN_EXTRACT)
5341 dest = XEXP (dest, 0);
5342
5343 if (GET_CODE (dest) == REG)
5344 {
5345 /* If the original insns already used this register, we may not
5346 add new notes for it. One example for a replacement that
5347 needs this test is when a multi-word memory access with
5348 register-indirect addressing is changed into multiple memory
5349 accesses with auto-increment and one adjusting add
5350 instruction for the address register.
5351
5352 However, there is a problem with this code. We're assuming
5353 that any registers that are set in the new insns are either
5354 set/referenced in the old insns (and thus "inherit" the
5355 liveness of the old insns), or are registers that are dead
5356 before we enter this part of the stream (and thus should be
5357 dead when we leave).
5358
5359 To do this absolutely correctly, we must determine the actual
5360 liveness of the registers before we go randomly adding
5361 REG_DEAD notes. This can probably be accurately done by
5362 calling mark_referenced_resources() on the old stream before
5363 replacing the old insns. */
5364
5365 for (tem = orig_first_insn; tem != NULL_RTX; tem = NEXT_INSN (tem))
5366 {
5367 if (GET_RTX_CLASS (GET_CODE (tem)) == 'i'
5368 && reg_referenced_p (dest, PATTERN (tem)))
5369 return;
5370 if (tem == orig_last_insn)
5371 break;
5372 }
5373
5374 /* So it's a new register, presumably only used within this
5375 group of insns. Find the last insn in the set of new insns
5376 that DEST is referenced in, and add a dead note to it. */
5377 if (! maybe_add_dead_note (dest, insn, last))
5378 {
5379 /* If this is a set, it must die somewhere, unless it is the
5380 dest of the original insn, and thus is live after the
5381 original insn. Abort if it isn't supposed to be live after
5382 the original insn.
5383
5384 If this is a clobber, then just add a REG_UNUSED note. */
5385 if (GET_CODE (pat) == CLOBBER)
5386 {
5387 rtx note = rtx_alloc (EXPR_LIST);
5388 PUT_REG_NOTE_KIND (note, REG_UNUSED);
5389 XEXP (note, 0) = dest;
5390 XEXP (note, 1) = REG_NOTES (insn);
5391 REG_NOTES (insn) = note;
5392 return;
5393 }
5394 else
5395 {
5396 rtx curr;
5397 int got_set = 0;
5398
5399 for (curr = orig_first_insn; curr; curr = NEXT_INSN (curr))
5400 {
5401 got_set = sets_reg_or_subreg (curr, dest);
5402 if (got_set)
5403 break;
5404 if (curr == orig_last_insn)
5405 break;
5406 }
5407
5408                  /* If the reg was not used later, it is a dead store;
5409                     add a REG_UNUSED note. */
5410 if (! got_set)
5411 {
5412 rtx note = rtx_alloc (EXPR_LIST);
5413 PUT_REG_NOTE_KIND (note, REG_UNUSED);
5414 XEXP (note, 0) = dest;
5415 XEXP (note, 1) = REG_NOTES (insn);
5416 REG_NOTES (insn) = note;
5417 return;
5418 }
5419 }
5420 }
5421
5422 if (insn != first)
5423 {
5424 rtx set = single_set (insn);
5425
5426 /* If this is a set, scan backwards for a previous
5427 reference, and attach a REG_DEAD note to it. But we don't
5428 want to do it if the insn is both using and setting the
5429 register.
5430
5431 Global registers are always live. */
5432 if (set && ! reg_overlap_mentioned_p (dest, SET_SRC (pat))
5433 && (REGNO (dest) >= FIRST_PSEUDO_REGISTER
5434 || ! global_regs[REGNO (dest)]))
5435 {
5436 for (tem = PREV_INSN (insn);
5437 tem != NULL_RTX; tem = PREV_INSN (tem))
5438 {
5439 if (maybe_add_dead_note_use (tem, dest))
5440 break;
5441 if (tem == first)
5442 break;
5443 }
5444 }
5445 }
5446 }
5447 }
5448
5449 /* Subroutine of update_life_info. Update the value of reg_n_sets for all
5450 registers modified by X. INC is -1 if the containing insn is being deleted,
5451 and is 1 if the containing insn is a newly generated insn. */
5452
5453 static void
5454 update_n_sets (x, inc)
5455 rtx x;
5456 int inc;
5457 {
5458 rtx dest = SET_DEST (x);
5459
5460 while (GET_CODE (dest) == STRICT_LOW_PART || GET_CODE (dest) == SUBREG
5461 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
5462 dest = SUBREG_REG (dest);
5463
5464 if (GET_CODE (dest) == REG)
5465 {
5466 int regno = REGNO (dest);
5467
5468 if (regno < FIRST_PSEUDO_REGISTER)
5469 {
5470 register int i;
5471 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (dest));
5472
5473 for (i = regno; i < endregno; i++)
5474 REG_N_SETS (i) += inc;
5475 }
5476 else
5477 REG_N_SETS (regno) += inc;
5478 }
5479 }
5480
5481 /* Scan INSN for a SET that sets REG. If it sets REG via a SUBREG,
5482 then return 2. If it sets REG directly, return 1. Otherwise, return
5483 0. */
5484
5485 static int sets_reg_or_subreg_ret;
5486 static rtx sets_reg_or_subreg_rtx;
5487
5488 static void
5489 sets_reg_or_subreg_1 (x, set)
5490 rtx x, set;
5491 {
5492 if (rtx_equal_p (x, sets_reg_or_subreg_rtx))
5493 {
5494 if (x == XEXP (set, 0))
5495 sets_reg_or_subreg_ret = 1;
5496 else if (GET_CODE (XEXP (set, 0)) == SUBREG)
5497 sets_reg_or_subreg_ret = 2;
5498 }
5499 }
5500
5501 static int
5502 sets_reg_or_subreg (insn, reg)
5503 rtx insn;
5504 rtx reg;
5505 {
5506 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
5507 return 0;
5508
5509 sets_reg_or_subreg_ret = 0;
5510 sets_reg_or_subreg_rtx = reg;
5511 note_stores (PATTERN (insn), sets_reg_or_subreg_1);
5512 return sets_reg_or_subreg_ret;
5513 }
5514
5515 /* If a replaced SET_INSN (which is part of the insns between
5516 OLD_FIRST_INSN and OLD_LAST_INSN inclusive) is modifying a multiple
5517 register target, and the original dest is now set in the new insns
5518 (between FIRST_INSN and LAST_INSN inclusive) by one or more subreg
5519 sets, then the new insns no longer kill the destination of the
5520 original insn.
5521
5522 We may also be directly using the register in the new insns before
5523 setting it.
5524
5525 In either case, if there exists an instruction in the same basic
5526 block before the replaced insns which uses the original dest (and
5527 contains a corresponding REG_DEAD note), then we must remove this
5528 REG_DEAD note.
5529
5530 SET_INSN is the insn that contains the SET; it may be a PARALLEL
5531 containing the SET insn.
5532
5533 SET is the actual SET insn proper. */
5534
5535 static void
5536 maybe_remove_dead_notes (set_insn, set, first_insn, last_insn,
5537 old_first_insn, old_last_insn)
5538 rtx set_insn, set;
5539 rtx first_insn, last_insn;
5540 rtx old_first_insn, old_last_insn;
5541 {
5542 rtx insn;
5543 rtx stop_insn = NEXT_INSN (last_insn);
5544 int set_type = 0;
5545 rtx set_dest;
5546 rtx set_pattern;
5547
5548 if (GET_RTX_CLASS (GET_CODE (set)) != 'i')
5549 return;
5550
5551 set_pattern = PATTERN (set);
5552
5553 if (GET_CODE (set_pattern) == PARALLEL)
5554 {
5555 int i;
5556
5557 for (i = 0; i < XVECLEN (set_pattern, 0); i++)
5558 {
5559 maybe_remove_dead_notes (set_insn, XVECEXP (set_pattern, 0, i),
5560 first_insn, last_insn,
5561 old_first_insn, old_last_insn);
5562 }
5563 return;
5564 }
5565
5566 if (GET_CODE (set_pattern) != SET)
5567 {
5568 return;
5569 }
5570
5571 set_dest = SET_DEST (set_pattern);
5572
5573 if (GET_CODE (set_dest) != REG)
5574 {
5575 return;
5576 }
5577
5578 /* We have a set of a REG. First we need to determine if this set is
5579 both using and setting the register. (FIXME: if this is in a
5580 PARALLEL, we will have to check the other exprs as well.) */
5581 if (reg_overlap_mentioned_p (set_dest, SET_SRC (set_pattern)))
5582 {
5583 return;
5584 }
5585
5586 /* Now determine if we used or set the register in the old insns
5587 previous to this one. */
5588
5589 for (insn = old_first_insn; insn != set_insn; insn = NEXT_INSN (insn))
5590 {
5591 if (reg_overlap_mentioned_p (set_dest, insn))
5592 {
5593 return;
5594 }
5595 }
5596
5597 /* Now determine if we're setting it in the new insns, or using
5598 it. */
5599 for (insn = first_insn; insn != stop_insn; insn = NEXT_INSN (insn))
5600 {
5601 set_type = sets_reg_or_subreg (insn, set_dest);
5602 if (set_type != 0)
5603 {
5604 break;
5605 }
5606 else if (reg_overlap_mentioned_p (set_dest, insn))
5607 {
5608 /* Is the reg now used in this new insn? -- This is probably an
5609 error. */
5610 set_type = 2;
5611 break;
5612 }
5613 }
5614 if (set_type == 2)
5615 {
5616 /* The register is being set via a SUBREG or is being used in
5617 some other way, so it's no longer dead.
5618
5619 Search backwards from first_insn, looking for the first insn
5620 that uses the original dest. Stop if we pass a CODE_LABEL or
5621 a JUMP_INSN.
5622
5623 If we find such an insn and it has a REG_DEAD note referring
5624 to the original dest, then delete the note. */
5625
5626 for (insn = first_insn; insn != NULL_RTX; insn = PREV_INSN (insn))
5627 {
5628 if (GET_CODE (insn) == CODE_LABEL
5629 || GET_CODE (insn) == JUMP_INSN)
5630 break;
5631 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
5632 && reg_mentioned_p (set_dest, insn))
5633 {
5634 rtx note = find_regno_note (insn, REG_DEAD, REGNO (set_dest));
5635 if (note != NULL_RTX)
5636 {
5637 remove_note (insn, note);
5638 }
5639 /* ??? -- Is this right? */
5640 break;
5641 }
5642 }
5643 }
5644 else if (set_type == 0)
5645 {
5646 /* The reg is not being set or used in the new insns at all. */
5647 int i, regno;
5648
5649 /* Should never reach here for a pseudo reg. */
5650 if (REGNO (set_dest) >= FIRST_PSEUDO_REGISTER)
5651 abort ();
5652
5653 /* This can happen for a hard register if the new insns simply
5654 omit instructions that would have been no-ops referring to the
5655 old registers.
5656
5657 We try to verify that this is the case by checking that the
5658 original instruction uses all of the registers that it sets.
5659 That case is OK, because deleting a no-op cannot affect
5660 REG_DEAD notes on other insns. If this is not the case, then
5661 abort. */
5662
5663 regno = REGNO (set_dest);
5664 for (i = HARD_REGNO_NREGS (regno, GET_MODE (set_dest)) - 1;
5665 i >= 0; i--)
5666 {
5667 if (! refers_to_regno_p (regno + i, regno + i + 1, set,
5668 NULL_PTR))
5669 break;
5670 }
5671 if (i >= 0)
5672 abort ();
5673 }
5674 }
5675
5676 /* Update all flow-analysis related quantities (including REG_NOTES) for
5677 the insns from FIRST to LAST inclusive that were created by replacing
5678 the insns from ORIG_FIRST_INSN to ORIG_LAST_INSN inclusive. NOTES
5679 is the chain of original REG_NOTES. */
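/* A minimal usage sketch, mirroring the call made by replace_insns
further down in this file:

update_life_info (notes, first_new, last_new, first, last);

FIRST_NEW and LAST_NEW delimit the replacement insns already placed in
the insn chain; FIRST and LAST delimit the original insns they replaced;
NOTES is the chain of REG_NOTES collected from the original insns. */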
5680
5681 void
5682 update_life_info (notes, first, last, orig_first_insn, orig_last_insn)
5683 rtx notes;
5684 rtx first, last;
5685 rtx orig_first_insn, orig_last_insn;
5686 {
5687 rtx insn, note;
5688 rtx next;
5689 rtx orig_dest, temp;
5690 rtx orig_insn;
5691 rtx tem;
5692
5693 /* Get and save the destination set by the original insn, if there
5694 was only one insn replaced. */
5695
5696 if (orig_first_insn == orig_last_insn)
5697 {
5698 orig_insn = orig_first_insn;
5699 orig_dest = single_set (orig_insn);
5700 if (orig_dest)
5701 orig_dest = SET_DEST (orig_dest);
5702 }
5703 else
5704 {
5705 orig_insn = NULL_RTX;
5706 orig_dest = NULL_RTX;
5707 }
5708
5709 /* Move REG_NOTES from the original insns to where they now belong. */
5710
5711 for (note = notes; note; note = next)
5712 {
5713 next = XEXP (note, 1);
5714 switch (REG_NOTE_KIND (note))
5715 {
5716 case REG_DEAD:
5717 case REG_UNUSED:
5718 /* Move these notes from the original insn to the last new
5719 insn where the register is mentioned. */
5720
5721 for (insn = last; ; insn = PREV_INSN (insn))
5722 {
5723 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
5724 && reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
5725 {
5726 /* Sometimes need to convert REG_UNUSED notes to
5727 REG_DEAD notes. */
5728 if (REG_NOTE_KIND (note) == REG_UNUSED
5729 && GET_CODE (XEXP (note, 0)) == REG
5730 && ! dead_or_set_p (insn, XEXP (note, 0)))
5731 {
5732 PUT_REG_NOTE_KIND (note, REG_DEAD);
5733 }
5734 split_hard_reg_notes (insn, note, first, last);
5735 /* The reg only dies in one insn, the last one that uses
5736 it. */
5737 break;
5738 }
5739 /* It must die somewhere; fail if we couldn't find where it died.
5740
5741 We abort because otherwise the register will be live
5742 longer than it should, and we'll probably take an
5743 abort later. What we should do instead is search back
5744 and find the appropriate places to insert the note. */
5745 if (insn == first)
5746 {
5747 if (REG_NOTE_KIND (note) == REG_DEAD)
5748 {
5749 abort ();
5750 }
5751 break;
5752 }
5753 }
5754 break;
5755
5756 case REG_WAS_0:
5757 {
5758 rtx note_dest;
5759
5760 /* If the insn that set the register to 0 was deleted, this
5761 note cannot be relied on any longer. The destination might
5762 even have been moved to memory.
5763 This was observed for SH4 with execute/920501-6.c compilation,
5764 -O2 -fomit-frame-pointer -finline-functions. */
5765
5766 if (GET_CODE (XEXP (note, 0)) == NOTE
5767 || INSN_DELETED_P (XEXP (note, 0)))
5768 break;
5769 if (orig_insn != NULL_RTX)
5770 {
5771 note_dest = orig_dest;
5772 }
5773 else
5774 {
5775 note_dest = find_insn_with_note (note, orig_first_insn,
5776 orig_last_insn);
5777 if (note_dest != NULL_RTX)
5778 {
5779 note_dest = single_set (note_dest);
5780 if (note_dest != NULL_RTX)
5781 note_dest = SET_DEST (note_dest);
5782 }
5783 }
5784 /* This note applies to the dest of the original insn. Find the
5785 first new insn that now has the same dest, and move the note
5786 there. */
5787
5788 if (! note_dest)
5789 break;
5790
5791 for (insn = first; ; insn = NEXT_INSN (insn))
5792 {
5793 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
5794 && (temp = single_set (insn))
5795 && rtx_equal_p (SET_DEST (temp), note_dest))
5796 {
5797 XEXP (note, 1) = REG_NOTES (insn);
5798 REG_NOTES (insn) = note;
5799 /* The reg is only zero before one insn, the first that
5800 uses it. */
5801 break;
5802 }
5803 /* If this note refers to a multiple word hard
5804 register, it may have been split into several smaller
5805 hard register references. We could split the notes,
5806 but simply dropping them is good enough. */
5807 if (GET_CODE (note_dest) == REG
5808 && REGNO (note_dest) < FIRST_PSEUDO_REGISTER
5809 && HARD_REGNO_NREGS (REGNO (note_dest),
5810 GET_MODE (note_dest)) > 1)
5811 break;
5812
5813 /* It must be set somewhere; abort if we couldn't find
5814 where it was set. */
 if (insn == last)
 abort ();
5815 }
5816 }
5817 break;
5818
5819 case REG_EQUAL:
5820 case REG_EQUIV:
5821 /* A REG_EQUIV or REG_EQUAL note on an insn with more than one
5822 set is meaningless; just drop it. Otherwise fall through, since
 like the notes below it applies to the dest of the original insn. */
5823 if (! orig_dest)
5824 break;
5825
5826 case REG_NO_CONFLICT:
5827 case REG_NOALIAS:
5828 /* These notes apply to the dest of the original insn. Find the last
5829 new insn that now has the same dest, and move the note there.
5830
5831 If we are replacing multiple insns, just drop the note. */
5832
5833 if (! orig_insn)
5834 break;
5835
5836 if (! orig_dest)
5837 abort ();
5838
5839 for (insn = last; ; insn = PREV_INSN (insn))
5840 {
5841 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
5842 && (temp = single_set (insn))
5843 && rtx_equal_p (SET_DEST (temp), orig_dest))
5844 {
5845 XEXP (note, 1) = REG_NOTES (insn);
5846 REG_NOTES (insn) = note;
5847 /* Only put this note on one of the new insns. */
5848 break;
5849 }
5850
5851 /* The original dest must still be set someplace. Abort if we
5852 couldn't find it. */
5853 if (insn == first)
5854 {
5855 /* However, if this note refers to a multiple word hard
5856 register, it may have been split into several smaller
5857 hard register references. We could split the notes,
5858 but simply dropping them is good enough. */
5859 if (GET_CODE (orig_dest) == REG
5860 && REGNO (orig_dest) < FIRST_PSEUDO_REGISTER
5861 && HARD_REGNO_NREGS (REGNO (orig_dest),
5862 GET_MODE (orig_dest)) > 1)
5863 break;
5864 /* Likewise for multi-word memory references. */
5865 if (GET_CODE (orig_dest) == MEM
5866 && GET_MODE_SIZE (GET_MODE (orig_dest)) > MOVE_MAX)
5867 break;
5868 abort ();
5869 }
5870 }
5871 break;
5872
5873 case REG_LIBCALL:
5874 /* Move a REG_LIBCALL note to the first insn created, and update
5875 the corresponding REG_RETVAL note. */
5876 XEXP (note, 1) = REG_NOTES (first);
5877 REG_NOTES (first) = note;
5878
5879 insn = XEXP (note, 0);
5880 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
5881 if (note)
5882 XEXP (note, 0) = first;
5883 break;
5884
5885 case REG_EXEC_COUNT:
5886 /* Move a REG_EXEC_COUNT note to the first insn created. */
5887 XEXP (note, 1) = REG_NOTES (first);
5888 REG_NOTES (first) = note;
5889 break;
5890
5891 case REG_RETVAL:
5892 /* Move a REG_RETVAL note to the last insn created, and update
5893 the corresponding REG_LIBCALL note. */
5894 XEXP (note, 1) = REG_NOTES (last);
5895 REG_NOTES (last) = note;
5896
5897 insn = XEXP (note, 0);
5898 note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
5899 if (note)
5900 XEXP (note, 0) = last;
5901 break;
5902
5903 case REG_NONNEG:
5904 case REG_BR_PROB:
5905 /* This should be moved to whichever instruction is a JUMP_INSN. */
5906
5907 for (insn = last; ; insn = PREV_INSN (insn))
5908 {
5909 if (GET_CODE (insn) == JUMP_INSN)
5910 {
5911 XEXP (note, 1) = REG_NOTES (insn);
5912 REG_NOTES (insn) = note;
5913 /* Only put this note on one of the new insns. */
5914 break;
5915 }
5916 /* Fail if we couldn't find a JUMP_INSN. */
5917 if (insn == first)
5918 abort ();
5919 }
5920 break;
5921
5922 case REG_INC:
5923 /* reload sometimes leaves obsolete REG_INC notes around. */
5924 if (reload_completed)
5925 break;
5926 /* This should be moved to whichever instruction now has the
5927 increment operation. */
5928 abort ();
5929
5930 case REG_LABEL:
5931 /* Should be moved to the new insn(s) which use the label. */
5932 for (insn = first; insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
5933 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
5934 && reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
5935 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL,
5936 XEXP (note, 0),
5937 REG_NOTES (insn));
5938 break;
5939
5940 case REG_CC_SETTER:
5941 case REG_CC_USER:
5942 /* These two notes will never appear until after reorg, so we don't
5943 have to handle them here. */
5944 default:
5945 abort ();
5946 }
5947 }
5948
5949 /* Each new insn created has a new set. If the destination is a
5950 register, then this reg is now live across several insns, whereas
5951 previously the dest reg was born and died within the same insn.
5952 To reflect this, we now need a REG_DEAD note on the insn where
5953 this dest reg dies.
5954
5955 Similarly, the new insns may have clobbers that need REG_UNUSED
5956 notes. */
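/* A made-up example: if the single original insn

(set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 8)))

was replaced by

(set (reg:SI 103) (plus:SI (reg:SI 101) (const_int 8)))
(set (reg:SI 100) (reg:SI 103))

then (reg:SI 103) is born in the first new insn and dies in the
second, so the second new insn needs a REG_DEAD note for it. */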
5957
5958 for (insn = first; ; insn = NEXT_INSN (insn))
5959 {
5960 rtx pat;
5961 int i;
5962
5963 pat = PATTERN (insn);
5964 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
5965 new_insn_dead_notes (pat, insn, first, last,
5966 orig_first_insn, orig_last_insn);
5967 else if (GET_CODE (pat) == PARALLEL)
5968 {
5969 for (i = 0; i < XVECLEN (pat, 0); i++)
5970 {
5971 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
5972 || GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER)
5973 {
5974 rtx parpat = XVECEXP (pat, 0, i);
5975
5976 new_insn_dead_notes (parpat, insn, first, last,
5977 orig_first_insn, orig_last_insn);
5978 }
5979 }
5980 }
5981 if (insn == last)
5982 {
5983 break;
5984 }
5985 }
5986
5987 /* Check to see if we have any REG_DEAD notes on insns previous to
5988 the new ones that are now incorrect and need to be removed. */
5989
5990 for (insn = orig_first_insn; ; insn = NEXT_INSN (insn))
5991 {
5992 maybe_remove_dead_notes (insn, insn, first, last,
5993 orig_first_insn, orig_last_insn);
5994
5995 if (insn == orig_last_insn)
5996 break;
5997 }
5998
5999 /* Update reg_n_sets. This is necessary to prevent local alloc from
6000 converting REG_EQUAL notes to REG_EQUIV when the new insns are setting
6001 a reg multiple times instead of once. */
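/* For example (purely illustrative): if a single original
(set (reg:SI 100) ...) was replaced by two new insns that each set
(reg:SI 100), the recorded number of sets of register 100 must go from
one to two, or local alloc could wrongly promote a REG_EQUAL note on
one of the new insns to REG_EQUIV. */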
6002
6003 for (tem = orig_first_insn; tem != NULL_RTX; tem = NEXT_INSN (tem))
6004 {
6005 rtx x;
6006 RTX_CODE code;
6007
6008 if (GET_RTX_CLASS (GET_CODE (tem)) != 'i')
6009 continue;
6010
6011 x = PATTERN (tem);
6012 code = GET_CODE (x);
6013 if (code == SET || code == CLOBBER)
6014 update_n_sets (x, -1);
6015 else if (code == PARALLEL)
6016 {
6017 int i;
6018 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6019 {
6020 code = GET_CODE (XVECEXP (x, 0, i));
6021 if (code == SET || code == CLOBBER)
6022 update_n_sets (XVECEXP (x, 0, i), -1);
6023 }
6024 }
6025 if (tem == orig_last_insn)
6026 break;
6027 }
6028
6029 for (insn = first; ; insn = NEXT_INSN (insn))
6030 {
6031 rtx x = PATTERN (insn);
6032 RTX_CODE code = GET_CODE (x);
6033
6034 if (code == SET || code == CLOBBER)
6035 update_n_sets (x, 1);
6036 else if (code == PARALLEL)
6037 {
6038 int i;
6039 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6040 {
6041 code = GET_CODE (XVECEXP (x, 0, i));
6042 if (code == SET || code == CLOBBER)
6043 update_n_sets (XVECEXP (x, 0, i), 1);
6044 }
6045 }
6046
6047 if (insn == last)
6048 break;
6049 }
6050 }
6051 \f
6052 /* Prepends the set of REG_NOTES in NEW to NOTES, and returns NEW. */
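/* For instance (illustrative only): if NEW is
(expr_list:REG_DEAD (reg:SI 1) (expr_list:REG_UNUSED (reg:SI 2) (nil)))
and NOTES is (expr_list:REG_EQUAL (const_int 0) (nil)), the NOTES chain
is spliced onto the tail of NEW and NEW is returned as the head of the
combined list. */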
6053 static rtx
6054 prepend_reg_notes (notes, new)
6055 rtx notes, new;
6056 {
6057 rtx end;
6058
6059 if (new == NULL_RTX)
6060 {
6061 return notes;
6062 }
6063 if (notes == NULL_RTX)
6064 {
6065 return new;
6066 }
6067 end = new;
6068 while (XEXP (end, 1) != NULL_RTX)
6069 {
6070 end = XEXP (end, 1);
6071 }
6072 XEXP (end, 1) = notes;
6073 return new;
6074 }
6075 \f
6076 /* Replace the insns from FIRST to LAST inclusive with the insns starting
6077 at FIRST_NEW, and update the life analysis info accordingly. NOTES is
 the chain of the original REG_NOTES, or NULL_RTX to have them collected
 from the replaced insns. */
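/* A minimal usage sketch (hypothetical caller): a pass that has built a
replacement sequence SEQ for the insns FIRST through LAST might call

replace_insns (first, last, seq, NULL_RTX);

passing NULL_RTX for NOTES so that the REG_NOTES of the replaced insns
are collected automatically below. */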
6078 void
6079 replace_insns (first, last, first_new, notes)
6080 rtx first, last, first_new, notes;
6081 {
6082 rtx stop = NEXT_INSN (last);
6083 rtx prev = PREV_INSN (first);
6084 rtx last_new, curr;
6085 int i;
6086
6087 if (notes == NULL_RTX)
6088 {
6089 for (curr = first; curr != stop; curr = NEXT_INSN (curr))
6090 if (GET_RTX_CLASS (GET_CODE (curr)) == 'i')
6091 notes = prepend_reg_notes (notes, REG_NOTES (curr));
6092 }
6093
6094 last_new = emit_insn_after (first_new, prev);
6095 first_new = NEXT_INSN (prev);
6096
6097 for (i = 0; i < n_basic_blocks; i++)
6098 {
6099 if (BLOCK_HEAD (i) == first)
6100 BLOCK_HEAD (i) = first_new;
6101 if (BLOCK_END (i) == last)
6102 BLOCK_END (i) = last_new;
6103 }
6104 /* This is probably bogus. */
6105 if (first_new == last_new)
6106 {
6107 if (GET_CODE (first_new) == SEQUENCE)
6108 {
6109 first_new = XVECEXP (first_new, 0, 0);
6110 last_new = XVECEXP (last_new, 0, XVECLEN (last_new, 0) - 1);
6111 }
6112 }
6113 update_life_info (notes, first_new, last_new, first, last);
6114 flow_delete_insn_chain (first, last);
6115 }
6116 \f
6117 /* Verify CFG consistency. This function checks some CFG invariants and
6118 aborts when something is wrong. The hope is that it will make it easier
6119 to convert many optimization passes to preserve a consistent CFG.
6120
6121 Currently it does the following checks:
6122
6123 - test head/end pointers
6124 - overlapping of basic blocks
6125 - edge list correctness
6126 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6127 - tails of basic blocks (ensure that the boundary is necessary)
6128 - scans body of the basic block for JUMP_INSN, CODE_LABEL
6129 and NOTE_INSN_BASIC_BLOCK
6130 - check that all insns are in the basic blocks
6131 (except the switch handling code, barriers and notes)
6132
6133 In the future it can be extended to check a lot of other things as well
6134 (reachability of basic blocks, life information, etc.). */
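/* A typical (hypothetical) use: a pass that rearranges the insn chain or
the edge lists can call

verify_flow_info ();

when it finishes, so that any corruption causes an abort near the pass
that introduced it rather than much later. */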
6135
6136 void
6137 verify_flow_info ()
6138 {
6139 const int max_uid = get_max_uid ();
6140 const rtx rtx_first = get_insns ();
6141 basic_block *bb_info;
6142 rtx x;
6143 int i;
6144
6145 bb_info = (basic_block *) alloca (max_uid * sizeof (basic_block));
6146 memset (bb_info, 0, max_uid * sizeof (basic_block));
6147
6148 /* First pass check head/end pointers and set bb_info array used by
6149 later passes. */
6150 for (i = n_basic_blocks - 1; i >= 0; i--)
6151 {
6152 basic_block bb = BASIC_BLOCK (i);
6153
6154 /* Check the head pointer and make sure that it is pointing into
6155 insn list. */
6156 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
6157 if (x == bb->head)
6158 break;
6159 if (!x)
6160 {
6161 error ("Head insn %d for block %d not found in the insn stream.",
6162 INSN_UID (bb->head), bb->index);
6163 abort ();
6164 }
6165
6166 /* Check the end pointer and make sure that it is pointing into
6167 insn list. */
6168 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6169 {
6170 if (bb_info[INSN_UID (x)] != NULL)
6171 {
6172 error ("Insn %d is in multiple basic blocks (%d and %d)",
6173 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6174 abort ();
6175 }
6176 bb_info[INSN_UID (x)] = bb;
6177
6178 if (x == bb->end)
6179 break;
6180 }
6181 if (!x)
6182 {
6183 error ("End insn %d for block %d not found in the insn stream.",
6184 INSN_UID (bb->end), bb->index);
6185 abort ();
6186 }
6187 }
6188
6189 /* Now check the basic blocks (boundaries etc.) */
6190 for (i = n_basic_blocks - 1; i >= 0; i--)
6191 {
6192 basic_block bb = BASIC_BLOCK (i);
6193 /* Check correctness of edge lists. */
6194 edge e;
6195
6196 e = bb->succ;
6197 while (e)
6198 {
6199 if (e->src != bb)
6200 {
6201 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6202 bb->index);
6203 fprintf (stderr, "Predecessor: ");
6204 dump_edge_info (stderr, e, 0);
6205 fprintf (stderr, "\nSuccessor: ");
6206 dump_edge_info (stderr, e, 1);
6207 fflush (stderr);
6208 abort ();
6209 }
6210 if (e->dest != EXIT_BLOCK_PTR)
6211 {
6212 edge e2 = e->dest->pred;
6213 while (e2 && e2 != e)
6214 e2 = e2->pred_next;
6215 if (!e2)
6216 {
6217 error ("Basic block %i edge lists are corrupted", bb->index);
6218 abort ();
6219 }
6220 }
6221 e = e->succ_next;
6222 }
6223
6224 e = bb->pred;
6225 while (e)
6226 {
6227 if (e->dest != bb)
6228 {
6229 error ("Basic block %d pred edge is corrupted", bb->index);
6230 fputs ("Predecessor: ", stderr);
6231 dump_edge_info (stderr, e, 0);
6232 fputs ("\nSuccessor: ", stderr);
6233 dump_edge_info (stderr, e, 1);
6234 fputc ('\n', stderr);
6235 abort ();
6236 }
6237 if (e->src != ENTRY_BLOCK_PTR)
6238 {
6239 edge e2 = e->src->succ;
6240 while (e2 && e2 != e)
6241 e2 = e2->succ_next;
6242 if (!e2)
6243 {
6244 error ("Basic block %i edge lists are corrupted", bb->index);
6245 abort ();
6246 }
6247 }
6248 e = e->pred_next;
6249 }
6250
6251 /* OK, the pointers are correct. Now check the header of the basic
6252 block. It ought to contain an optional CODE_LABEL followed
6253 by a NOTE_INSN_BASIC_BLOCK note. */
6254 x = bb->head;
6255 if (GET_CODE (x) == CODE_LABEL)
6256 {
6257 if (bb->end == x)
6258 {
6259 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6260 bb->index);
6261 abort ();
6262 }
6263 x = NEXT_INSN (x);
6264 }
6265 if (GET_CODE (x) != NOTE
6266 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6267 || NOTE_BASIC_BLOCK (x) != bb)
6268 {
6269 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6270 bb->index);
6271 abort ();
6272 }
6273
6274 if (bb->end == x)
6275 {
6276 /* Do checks for empty blocks here */
6277 }
6278 else
6279 {
6280 x = NEXT_INSN (x);
6281 while (x)
6282 {
6283 if (GET_CODE (x) == NOTE
6284 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6285 {
6286 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6287 INSN_UID (x), bb->index);
6288 abort ();
6289 }
6290
6291 if (x == bb->end)
6292 break;
6293
6294 if (GET_CODE (x) == JUMP_INSN
6295 || GET_CODE (x) == CODE_LABEL
6296 || GET_CODE (x) == BARRIER)
6297 {
6298 error ("In basic block %d:", bb->index);
6299 fatal_insn ("Flow control insn inside a basic block", x);
6300 }
6301
6302 x = NEXT_INSN (x);
6303 }
6304 }
6305 }
6306
6307 x = rtx_first;
6308 while (x)
6309 {
6310 if (!bb_info[INSN_UID (x)])
6311 {
6312 switch (GET_CODE (x))
6313 {
6314 case BARRIER:
6315 case NOTE:
6316 break;
6317
6318 case CODE_LABEL:
6319 /* An addr_vec is placed outside any basic block. */
6320 if (NEXT_INSN (x)
6321 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6322 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6323 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6324 {
6325 x = NEXT_INSN (x);
6326 }
6327
6328 /* But in any case, non-deletable labels can appear anywhere. */
6329 break;
6330
6331 default:
6332 fatal_insn ("Insn outside basic block", x);
6333 }
6334 }
6335
6336 x = NEXT_INSN (x);
6337 }
6338 }
6339 \f
6340 /* Functions to access an edge list with a vector representation.
6341 Enough data is kept such that given an index number, the
6342 pred and succ that edge represents can be determined, or
6343 given a pred and a succ, its index number can be returned.
6344 This allows algorithms which consume a lot of memory to
6345 represent the normally full matrix of edge (pred,succ) with a
6346 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6347 wasted space in the client code due to sparse flow graphs. */
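/* A small usage sketch (hypothetical client code):

struct edge_list *elist = create_edge_list ();
int ix = EDGE_INDEX (elist, bb->index, EXIT_BLOCK);

if (ix != EDGE_INDEX_NO_EDGE)
... the edge from BB to the exit block has index IX ...

free_edge_list (elist);

EDGE_INDEX, EDGE_INDEX_NO_EDGE, NUM_EDGES, INDEX_EDGE_PRED_BB and
INDEX_EDGE_SUCC_BB are the accessors used by the functions below. */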
6348
6349 /* This function initializes the edge list. Basically the entire
6350 flowgraph is processed, and all edges are assigned a number,
6351 and the data structure is filled in. */
6352 struct edge_list *
6353 create_edge_list ()
6354 {
6355 struct edge_list *elist;
6356 edge e;
6357 int num_edges;
6358 int x;
6359 int block_count;
6360
6361 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6362
6363 num_edges = 0;
6364
6365 /* Determine the number of edges in the flow graph by counting successor
6366 edges on each basic block. */
6367 for (x = 0; x < n_basic_blocks; x++)
6368 {
6369 basic_block bb = BASIC_BLOCK (x);
6370
6371 for (e = bb->succ; e; e = e->succ_next)
6372 num_edges++;
6373 }
6374 /* Don't forget successors of the entry block. */
6375 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6376 num_edges++;
6377
6378 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6379 elist->num_blocks = block_count;
6380 elist->num_edges = num_edges;
6381 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6382
6383 num_edges = 0;
6384
6385 /* Follow successors of the entry block, and register these edges. */
6386 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6387 {
6388 elist->index_to_edge[num_edges] = e;
6389 num_edges++;
6390 }
6391
6392 for (x = 0; x < n_basic_blocks; x++)
6393 {
6394 basic_block bb = BASIC_BLOCK (x);
6395
6396 /* Follow all successors of blocks, and register these edges. */
6397 for (e = bb->succ; e; e = e->succ_next)
6398 {
6399 elist->index_to_edge[num_edges] = e;
6400 num_edges++;
6401 }
6402 }
6403 return elist;
6404 }
6405
6406 /* This function frees the memory associated with an edge list. */
6407 void
6408 free_edge_list (elist)
6409 struct edge_list *elist;
6410 {
6411 if (elist)
6412 {
6413 free (elist->index_to_edge);
6414 free (elist);
6415 }
6416 }
6417
6418 /* This function provides debug output showing an edge list. */
6419 void
6420 print_edge_list (f, elist)
6421 FILE *f;
6422 struct edge_list *elist;
6423 {
6424 int x;
6425 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6426 elist->num_blocks - 2, elist->num_edges);
6427
6428 for (x = 0; x < elist->num_edges; x++)
6429 {
6430 fprintf (f, " %-4d - edge(", x);
6431 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6432 fprintf (f,"entry,");
6433 else
6434 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6435
6436 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6437 fprintf (f,"exit)\n");
6438 else
6439 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6440 }
6441 }
6442
6443 /* This function provides an internal consistency check of an edge list,
6444 verifying that all edges are present, and that there are no
6445 extra edges. */
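/* For example (illustrative): after building an edge list a pass might
call

verify_edge_list (stderr, elist);

and look for the "*p*" / "***" lines in the output; this function only
reports mismatches, it does not abort. */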
6446 void
6447 verify_edge_list (f, elist)
6448 FILE *f;
6449 struct edge_list *elist;
6450 {
6451 int x, pred, succ, index;
6452 edge e;
6453
6454 for (x = 0; x < n_basic_blocks; x++)
6455 {
6456 basic_block bb = BASIC_BLOCK (x);
6457
6458 for (e = bb->succ; e; e = e->succ_next)
6459 {
6460 pred = e->src->index;
6461 succ = e->dest->index;
6462 index = EDGE_INDEX (elist, pred, succ);
6463 if (index == EDGE_INDEX_NO_EDGE)
6464 {
6465 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6466 continue;
6467 }
6468 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6469 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6470 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6471 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6472 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6473 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6474 }
6475 }
6476 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6477 {
6478 pred = e->src->index;
6479 succ = e->dest->index;
6480 index = EDGE_INDEX (elist, pred, succ);
6481 if (index == EDGE_INDEX_NO_EDGE)
6482 {
6483 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6484 continue;
6485 }
6486 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6487 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6488 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6489 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6490 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6491 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6492 }
6493 /* We've verified that all the edges are in the list; now let's make sure
6494 there are no spurious edges in the list. */
6495
6496 for (pred = 0 ; pred < n_basic_blocks; pred++)
6497 for (succ = 0 ; succ < n_basic_blocks; succ++)
6498 {
6499 basic_block p = BASIC_BLOCK (pred);
6500 basic_block s = BASIC_BLOCK (succ);
6501
6502 int found_edge = 0;
6503
6504 for (e = p->succ; e; e = e->succ_next)
6505 if (e->dest == s)
6506 {
6507 found_edge = 1;
6508 break;
6509 }
6510 for (e = s->pred; e; e = e->pred_next)
6511 if (e->src == p)
6512 {
6513 found_edge = 1;
6514 break;
6515 }
6516 if (EDGE_INDEX (elist, pred, succ) == EDGE_INDEX_NO_EDGE
6517 && found_edge != 0)
6518 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6519 pred, succ);
6520 if (EDGE_INDEX (elist, pred, succ) != EDGE_INDEX_NO_EDGE
6521 && found_edge == 0)
6522 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6523 pred, succ, EDGE_INDEX (elist, pred, succ));
6524 }
6525 for (succ = 0 ; succ < n_basic_blocks; succ++)
6526 {
6527 basic_block p = ENTRY_BLOCK_PTR;
6528 basic_block s = BASIC_BLOCK (succ);
6529
6530 int found_edge = 0;
6531
6532 for (e = p->succ; e; e = e->succ_next)
6533 if (e->dest == s)
6534 {
6535 found_edge = 1;
6536 break;
6537 }
6538 for (e = s->pred; e; e = e->pred_next)
6539 if (e->src == p)
6540 {
6541 found_edge = 1;
6542 break;
6543 }
6544 if (EDGE_INDEX (elist, ENTRY_BLOCK, succ) == EDGE_INDEX_NO_EDGE
6545 && found_edge != 0)
6546 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6547 succ);
6548 if (EDGE_INDEX (elist, ENTRY_BLOCK, succ) != EDGE_INDEX_NO_EDGE
6549 && found_edge == 0)
6550 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6551 succ, EDGE_INDEX (elist, ENTRY_BLOCK, succ));
6552 }
6553 for (pred = 0 ; pred < n_basic_blocks; pred++)
6554 {
6555 basic_block p = BASIC_BLOCK (pred);
6556 basic_block s = EXIT_BLOCK_PTR;
6557
6558 int found_edge = 0;
6559
6560 for (e = p->succ; e; e = e->succ_next)
6561 if (e->dest == s)
6562 {
6563 found_edge = 1;
6564 break;
6565 }
6566 for (e = s->pred; e; e = e->pred_next)
6567 if (e->src == p)
6568 {
6569 found_edge = 1;
6570 break;
6571 }
6572 if (EDGE_INDEX (elist, pred, EXIT_BLOCK) == EDGE_INDEX_NO_EDGE
6573 && found_edge != 0)
6574 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6575 pred);
6576 if (EDGE_INDEX (elist, pred, EXIT_BLOCK) != EDGE_INDEX_NO_EDGE
6577 && found_edge == 0)
6578 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6579 pred, EDGE_INDEX (elist, pred, EXIT_BLOCK));
6580 }
6581 }
6582
6583 /* This routine will determine what, if any, edge there is between
6584 a specified predecessor and successor. */
6585
6586 int
6587 find_edge_index (edge_list, pred, succ)
6588 struct edge_list *edge_list;
6589 int pred, succ;
6590 {
6591 int x;
6592 for (x = 0; x < NUM_EDGES (edge_list); x++)
6593 {
6594 if (INDEX_EDGE_PRED_BB (edge_list, x)->index == pred
6595 && INDEX_EDGE_SUCC_BB (edge_list, x)->index == succ)
6596 return x;
6597 }
6598 return (EDGE_INDEX_NO_EDGE);
6599 }
6600