1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information that tells combine_instructions which
24 insns to consider combining, and that controls register allocation.
25
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
29
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
33
34 ** find_basic_blocks **
35
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
40
41 find_basic_blocks also finds any unreachable loops and deletes them.
42
43 ** life_analysis **
44
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
48
49 ** live-register info **
50
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
53
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
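
   For example (an illustrative sketch; REGNO_REG_SET_P is the regset
   test macro from basic-block.h at this revision), a pass asking
   whether pseudo 100 is live on entry to block 3 would test

	REGNO_REG_SET_P (BASIC_BLOCK (3)->global_live_at_start, 100)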
58
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
65
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
75 REG_DEAD notes.
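
   For example, an insn that reads pseudo 100 for the last time might
   appear in an rtl dump roughly as (a hypothetical sketch)

	(insn 30 ... (set (reg:SI 101)
			  (plus:SI (reg:SI 100) (const_int 4)))
	      ... (expr_list:REG_DEAD (reg:SI 100) (nil)))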
76
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
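
   For example, if an insn sets a DImode value living in hard registers
   0 and 1 and later insns read only one of the two words, the unused
   word gets a note of the form (a sketch)

	(expr_list:REG_UNUSED (reg:SI 1) (nil))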
83
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
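
   In outline (a sketch only; the real scan in propagate_block runs
   backward and handles many more cases):

	live = copy of bb->global_live_at_start;
	for (insn = bb->head; ; insn = NEXT_INSN (insn))
	  {
	    clear bits in LIVE for registers in REG_DEAD notes;
	    set bits in LIVE for registers the insn sets;
	    clear bits in LIVE for registers in REG_UNUSED notes;
	    if (insn == bb->end)
	      break;
	  }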
87
88 ** Other actions of life_analysis **
89
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
92
93 life_analysis deletes insns whose only effect is to store a value
94 that is never used.
95
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
101
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
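
   For example (a sketch), the pair

	(set (mem:SI (reg:SI 100)) (reg:SI 101))
	(set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))

   is rewritten so that the store itself performs the increment,

	(set (mem:SI (post_inc:SI (reg:SI 100))) (reg:SI 101))

   and the surviving insn gains a REG_INC note for (reg 100).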
104
105 life_analysis fills in certain vectors containing information about
106 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
107 reg_n_calls_crossed and reg_basic_block.
108
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
111
112 /* TODO:
113
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
117 - log links creation
118 - pre/post modify transformation
119 */
120 \f
121 #include "config.h"
122 #include "system.h"
123 #include "tree.h"
124 #include "rtl.h"
125 #include "tm_p.h"
126 #include "basic-block.h"
127 #include "insn-config.h"
128 #include "regs.h"
129 #include "hard-reg-set.h"
130 #include "flags.h"
131 #include "output.h"
132 #include "function.h"
133 #include "except.h"
134 #include "toplev.h"
135 #include "recog.h"
136 #include "insn-flags.h"
137
138 #include "obstack.h"
139
140 #define obstack_chunk_alloc xmalloc
141 #define obstack_chunk_free free
142
143
144 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
145 the stack pointer does not matter. The value is tested only in
146 functions that have frame pointers.
147 No definition is equivalent to always zero. */
148 #ifndef EXIT_IGNORE_STACK
149 #define EXIT_IGNORE_STACK 0
150 #endif
151
152 #ifndef HAVE_epilogue
153 #define HAVE_epilogue 0
154 #endif
155
156 #ifndef HAVE_prologue
157 #define HAVE_prologue 0
158 #endif
159
160 /* The contents of the current function definition are allocated
161 in this obstack, and all are freed at the end of the function.
162 For top-level functions, this is temporary_obstack.
163 Separate obstacks are made for nested functions. */
164
165 extern struct obstack *function_obstack;
166
167 /* Number of basic blocks in the current function. */
168
169 int n_basic_blocks;
170
171 /* Number of edges in the current function. */
172
173 int n_edges;
174
175 /* The basic block array. */
176
177 varray_type basic_block_info;
178
179 /* The special entry and exit blocks. */
180
181 struct basic_block_def entry_exit_blocks[2] =
182 {
183 {
184 NULL, /* head */
185 NULL, /* end */
186 NULL, /* pred */
187 NULL, /* succ */
188 NULL, /* local_set */
189 NULL, /* global_live_at_start */
190 NULL, /* global_live_at_end */
191 NULL, /* aux */
192 ENTRY_BLOCK, /* index */
193 0, /* loop_depth */
194 -1, -1 /* eh_beg, eh_end */
195 },
196 {
197 NULL, /* head */
198 NULL, /* end */
199 NULL, /* pred */
200 NULL, /* succ */
201 NULL, /* local_set */
202 NULL, /* global_live_at_start */
203 NULL, /* global_live_at_end */
204 NULL, /* aux */
205 EXIT_BLOCK, /* index */
206 0, /* loop_depth */
207 -1, -1 /* eh_beg, eh_end */
208 }
209 };
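
/* ENTRY_BLOCK_PTR and EXIT_BLOCK_PTR in basic-block.h point at these two
   elements.  A sketch of walking the out-edges of the function entry
   (illustrative only; delete_unreachable_blocks below does just this):

	edge e;
	for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
	  ... e->dest is a block the function may enter at ...  */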
210
211 /* Nonzero if the second flow pass has completed. */
212 int flow2_completed;
213
214 /* Maximum register number used in this function, plus one. */
215
216 int max_regno;
217
218 /* Indexed by N, giving various register information. */
219
220 varray_type reg_n_info;
221
222 /* Size of the reg_n_info table. */
223
224 unsigned int reg_n_max;
225
226 /* Element N is the next insn that uses (hard or pseudo) register number N
227 within the current basic block; or zero, if there is no such insn.
228 This is valid only during the final backward scan in propagate_block. */
229
230 static rtx *reg_next_use;
231
232 /* Size of a regset for the current function,
233 in (1) bytes and (2) elements. */
234
235 int regset_bytes;
236 int regset_size;
237
238 /* Regset of regs live when calls to `setjmp'-like functions happen. */
239 /* ??? Does this exist only for the setjmp-clobbered warning message? */
240
241 regset regs_live_at_setjmp;
242
243 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
244 that have to go in the same hard reg.
245 The first two regs in the list are a pair, and the next two
246 are another pair, etc. */
247 rtx regs_may_share;
248
249 /* Depth within loops of basic block being scanned for lifetime analysis,
250 plus one. This is the weight attached to references to registers. */
251
252 static int loop_depth;
253
254 /* During propagate_block, this is non-zero if the value of CC0 is live. */
255
256 static int cc0_live;
257
258 /* During propagate_block, this contains a list of all the MEMs we are
259 tracking for dead store elimination. */
260
261 static rtx mem_set_list;
262
263 /* Set of registers that may be eliminable. These are handled specially
264 in updating regs_ever_live. */
265
266 static HARD_REG_SET elim_reg_set;
267
268 /* The basic block structure for every insn, indexed by uid. */
269
270 varray_type basic_block_for_insn;
271
272 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
273 /* ??? Should probably be using LABEL_NUSES instead. It would take a
274 bit of surgery to be able to use or co-opt the routines in jump. */
275
276 static rtx label_value_list;
277
278 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
279
280 #define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
281 #define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
282 static bitmap uid_volatile;
283
284 /* Forward declarations */
285 static int count_basic_blocks PROTO((rtx));
286 static rtx find_basic_blocks_1 PROTO((rtx));
287 static void create_basic_block PROTO((int, rtx, rtx, rtx));
288 static void clear_edges PROTO((void));
289 static void make_edges PROTO((rtx));
290 static void make_edge PROTO((sbitmap *, basic_block,
291 basic_block, int));
292 static void make_label_edge PROTO((sbitmap *, basic_block,
293 rtx, int));
294 static void make_eh_edge PROTO((sbitmap *, eh_nesting_info *,
295 basic_block, rtx, int));
296 static void mark_critical_edges PROTO((void));
297 static void move_stray_eh_region_notes PROTO((void));
298 static void record_active_eh_regions PROTO((rtx));
299
300 static void commit_one_edge_insertion PROTO((edge));
301
302 static void delete_unreachable_blocks PROTO((void));
303 static void delete_eh_regions PROTO((void));
304 static int can_delete_note_p PROTO((rtx));
305 static int delete_block PROTO((basic_block));
306 static void expunge_block PROTO((basic_block));
307 static rtx flow_delete_insn PROTO((rtx));
308 static int can_delete_label_p PROTO((rtx));
309 static int merge_blocks_move_predecessor_nojumps PROTO((basic_block,
310 basic_block));
311 static int merge_blocks_move_successor_nojumps PROTO((basic_block,
312 basic_block));
313 static void merge_blocks_nomove PROTO((basic_block, basic_block));
314 static int merge_blocks PROTO((edge, basic_block, basic_block));
315 static void try_merge_blocks PROTO((void));
316 static void tidy_fallthru_edge PROTO((edge, basic_block, basic_block));
317 static void calculate_loop_depth PROTO((rtx));
318
319 static int verify_wide_reg_1 PROTO((rtx *, void *));
320 static void verify_wide_reg PROTO((int, rtx, rtx));
321 static void verify_local_live_at_start PROTO((regset, basic_block));
322 static int set_noop_p PROTO((rtx));
323 static int noop_move_p PROTO((rtx));
324 static void notice_stack_pointer_modification PROTO ((rtx, rtx, void *));
325 static void record_volatile_insns PROTO((rtx));
326 static void mark_reg PROTO((regset, rtx));
327 static void mark_regs_live_at_end PROTO((regset));
328 static void life_analysis_1 PROTO((rtx, int, int));
329 static void calculate_global_regs_live PROTO((sbitmap, sbitmap, int));
330 static void propagate_block PROTO((regset, rtx, rtx,
331 regset, int, int));
332 static int insn_dead_p PROTO((rtx, regset, int, rtx));
333 static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
334 static void mark_set_regs PROTO((regset, regset, rtx,
335 rtx, regset, int));
336 static void mark_set_1 PROTO((regset, regset, rtx,
337 rtx, regset, int));
338 #ifdef AUTO_INC_DEC
339 static void find_auto_inc PROTO((regset, rtx, rtx));
340 static int try_pre_increment_1 PROTO((rtx));
341 static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
342 #endif
343 static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
344 void dump_flow_info PROTO((FILE *));
345 void debug_flow_info PROTO((void));
346 static void dump_edge_info PROTO((FILE *, edge, int));
347
348 static void count_reg_sets_1 PROTO ((rtx));
349 static void count_reg_sets PROTO ((rtx));
350 static void count_reg_references PROTO ((rtx));
351 static void invalidate_mems_from_autoinc PROTO ((rtx));
352 static void remove_edge PROTO ((edge));
353 static void remove_fake_successors PROTO ((basic_block));
354
355 /* This function is always defined so it can be called from the
356 debugger, and it is declared extern so we don't get warnings about
357 it being unused. */
358 void verify_flow_info PROTO ((void));
359
360 \f
361 /* Find basic blocks of the current function.
362 F is the first insn of the function and NREGS the number of register
363 numbers in use. */
364
365 void
366 find_basic_blocks (f, nregs, file, do_cleanup)
367 rtx f;
368 int nregs ATTRIBUTE_UNUSED;
369 FILE *file ATTRIBUTE_UNUSED;
370 int do_cleanup;
371 {
372 int max_uid;
373
374 /* Flush out existing data. */
375 if (basic_block_info != NULL)
376 {
377 int i;
378
379 clear_edges ();
380
381 /* Clear bb->aux on all extant basic blocks. We'll use this as a
382 tag for reuse during create_basic_block, just in case some pass
383 copies around basic block notes improperly. */
384 for (i = 0; i < n_basic_blocks; ++i)
385 BASIC_BLOCK (i)->aux = NULL;
386
387 VARRAY_FREE (basic_block_info);
388 }
389
390 n_basic_blocks = count_basic_blocks (f);
391
392 /* Size the basic block table. The actual structures will be allocated
393 by find_basic_blocks_1, since we want to keep the structure pointers
394 stable across calls to find_basic_blocks. */
395 /* ??? This whole issue would be much simpler if we called find_basic_blocks
396 exactly once, and thereafter we don't have a single long chain of
397 instructions at all until close to the end of compilation when we
398 actually lay them out. */
399
400 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
401
402 label_value_list = find_basic_blocks_1 (f);
403
404 /* Record the block to which an insn belongs. */
405 /* ??? This should be done another way, by which (perhaps) a label is
406 tagged directly with the basic block that it starts. It is used for
407 more than that currently, but IMO that is the only valid use. */
408
409 max_uid = get_max_uid ();
410 #ifdef AUTO_INC_DEC
411 /* Leave space for insns life_analysis makes in some cases for auto-inc.
412 These cases are rare, so we don't need too much space. */
413 max_uid += max_uid / 10;
414 #endif
415
416 compute_bb_for_insn (max_uid);
417
418 /* Discover the edges of our cfg. */
419
420 record_active_eh_regions (f);
421 make_edges (label_value_list);
422
423 /* Delete unreachable blocks, then merge blocks when possible. */
424
425 if (do_cleanup)
426 {
427 delete_unreachable_blocks ();
428 move_stray_eh_region_notes ();
429 record_active_eh_regions (f);
430 try_merge_blocks ();
431 }
432
433 /* Mark critical edges. */
434
435 mark_critical_edges ();
436
437 /* Discover the loop depth at the start of each basic block to aid
438 register allocation. */
439 calculate_loop_depth (f);
440
441 /* Kill the data we won't maintain. */
442 label_value_list = NULL_RTX;
443
444 #ifdef ENABLE_CHECKING
445 verify_flow_info ();
446 #endif
447 }
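
/* A caller's-eye sketch (illustrative; the real call site is in toplev.c
   and its arguments there may differ):

	find_basic_blocks (get_insns (), max_reg_num (), dump_file, 1);

   where DUMP_FILE is whatever FILE * the caller dumps to, or NULL.  */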
448
449 /* Count the basic blocks of the function. */
450
451 static int
452 count_basic_blocks (f)
453 rtx f;
454 {
455 register rtx insn;
456 register RTX_CODE prev_code;
457 register int count = 0;
458 int eh_region = 0;
459 int call_had_abnormal_edge = 0;
460 rtx prev_call = NULL_RTX;
461
462 prev_code = JUMP_INSN;
463 for (insn = f; insn; insn = NEXT_INSN (insn))
464 {
465 register RTX_CODE code = GET_CODE (insn);
466
467 if (code == CODE_LABEL
468 || (GET_RTX_CLASS (code) == 'i'
469 && (prev_code == JUMP_INSN
470 || prev_code == BARRIER
471 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
472 {
473 count++;
474
475 /* If the previous insn was a call that did not create an
476 abnormal edge, we want to add a nop so that the CALL_INSN
477 itself is not at basic_block_end. This allows us to
478 easily distinguish between normal calls and those which
479 create abnormal edges in the flow graph. */
480
481 if (count > 0 && prev_call != 0 && !call_had_abnormal_edge)
482 {
483 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
484 emit_insn_after (nop, prev_call);
485 }
486 }
487
488 /* Record whether this call created an edge. */
489 if (code == CALL_INSN)
490 {
491 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
492 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
493 prev_call = insn;
494 call_had_abnormal_edge = 0;
495
496 /* If there is a specified EH region, we have an edge. */
497 if (eh_region && region > 0)
498 call_had_abnormal_edge = 1;
499 else
500 {
501 /* If there is a nonlocal goto label and the specified
502 region number isn't -1, we have an edge. (0 means
503 no throw, but might have a nonlocal goto). */
504 if (nonlocal_goto_handler_labels && region >= 0)
505 call_had_abnormal_edge = 1;
506 }
507 }
508 else if (code != NOTE)
509 prev_call = NULL_RTX;
510
511 if (code != NOTE)
512 prev_code = code;
513 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
514 ++eh_region;
515 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
516 --eh_region;
517
518 }
519
520 /* The rest of the compiler works a bit more smoothly when we don't have
521 to check for the edge case of do-nothing functions with no basic blocks. */
522 if (count == 0)
523 {
524 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
525 count = 1;
526 }
527
528 return count;
529 }
530
531 /* Find all basic blocks of the function whose first insn is F.
532
533 Collect and return a list of labels whose addresses are taken. This
534 will be used in make_edges for use with computed gotos. */
535
536 static rtx
537 find_basic_blocks_1 (f)
538 rtx f;
539 {
540 register rtx insn, next;
541 int call_has_abnormal_edge = 0;
542 int i = 0;
543 rtx bb_note = NULL_RTX;
544 rtx eh_list = NULL_RTX;
545 rtx label_value_list = NULL_RTX;
546 rtx head = NULL_RTX;
547 rtx end = NULL_RTX;
548
549 /* We process the instructions in a slightly different way than we did
550 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
551 closed out the previous block, so that it gets attached at the proper
552 place. Since this form should be equivalent to the previous,
553 count_basic_blocks continues to use the old form as a check. */
554
555 for (insn = f; insn; insn = next)
556 {
557 enum rtx_code code = GET_CODE (insn);
558
559 next = NEXT_INSN (insn);
560
561 if (code == CALL_INSN)
562 {
563 /* Record whether this call created an edge. */
564 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
565 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
566 call_has_abnormal_edge = 0;
567
568 /* If there is an EH region, we have an edge. */
569 if (eh_list && region > 0)
570 call_has_abnormal_edge = 1;
571 else
572 {
573 /* If there is a nonlocal goto label and the specified
574 region number isn't -1, we have an edge. (0 means
575 no throw, but might have a nonlocal goto). */
576 if (nonlocal_goto_handler_labels && region >= 0)
577 call_has_abnormal_edge = 1;
578 }
579 }
580
581 switch (code)
582 {
583 case NOTE:
584 {
585 int kind = NOTE_LINE_NUMBER (insn);
586
587 /* Keep a LIFO list of the currently active exception notes. */
588 if (kind == NOTE_INSN_EH_REGION_BEG)
589 eh_list = alloc_INSN_LIST (insn, eh_list);
590 else if (kind == NOTE_INSN_EH_REGION_END)
591 {
592 rtx t = eh_list;
593 eh_list = XEXP (eh_list, 1);
594 free_INSN_LIST_node (t);
595 }
596
597 /* Look for basic block notes with which to keep the
598 basic_block_info pointers stable. Unthread the note now;
599 we'll put it back at the right place in create_basic_block.
600 Or not at all if we've already found a note in this block. */
601 else if (kind == NOTE_INSN_BASIC_BLOCK)
602 {
603 if (bb_note == NULL_RTX)
604 bb_note = insn;
605 next = flow_delete_insn (insn);
606 }
607
608 break;
609 }
610
611 case CODE_LABEL:
612 /* A basic block starts at a label. If we've closed one off due
613 to a barrier or some such, no need to do it again. */
614 if (head != NULL_RTX)
615 {
616 /* While we now have edge lists with which other portions of
617 the compiler might determine a call ending a basic block
618 does not imply an abnormal edge, it will be a bit before
619 everything can be updated. So continue to emit a noop at
620 the end of such a block. */
621 if (GET_CODE (end) == CALL_INSN)
622 {
623 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
624 end = emit_insn_after (nop, end);
625 }
626
627 create_basic_block (i++, head, end, bb_note);
628 bb_note = NULL_RTX;
629 }
630 head = end = insn;
631 break;
632
633 case JUMP_INSN:
634 /* A basic block ends at a jump. */
635 if (head == NULL_RTX)
636 head = insn;
637 else
638 {
639 /* ??? Make a special check for table jumps. The way this
640 happens is truly and amazingly gross. We are about to
641 create a basic block that contains just a code label and
642 an addr*vec jump insn. Worse, an addr_diff_vec creates
643 its own natural loop.
644
645 Prevent this bit of brain damage, pasting things together
646 correctly in make_edges.
647
648 The correct solution involves emitting the table directly
649 on the tablejump instruction as a note, or JUMP_LABEL. */
650
651 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
652 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
653 {
654 head = end = NULL;
655 n_basic_blocks--;
656 break;
657 }
658 }
659 end = insn;
660 goto new_bb_inclusive;
661
662 case BARRIER:
663 /* A basic block ends at a barrier. It may be that an unconditional
664 jump already closed the basic block -- no need to do it again. */
665 if (head == NULL_RTX)
666 break;
667
668 /* While we now have edge lists with which other portions of the
669 compiler might determine a call ending a basic block does not
670 imply an abnormal edge, it will be a bit before everything can
671 be updated. So continue to emit a noop at the end of such a
672 block. */
673 if (GET_CODE (end) == CALL_INSN)
674 {
675 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
676 end = emit_insn_after (nop, end);
677 }
678 goto new_bb_exclusive;
679
680 case CALL_INSN:
681 /* A basic block ends at a call that can either throw or
682 do a non-local goto. */
683 if (call_has_abnormal_edge)
684 {
685 new_bb_inclusive:
686 if (head == NULL_RTX)
687 head = insn;
688 end = insn;
689
690 new_bb_exclusive:
691 create_basic_block (i++, head, end, bb_note);
692 head = end = NULL_RTX;
693 bb_note = NULL_RTX;
694 break;
695 }
696 /* FALLTHRU */
697
698 default:
699 if (GET_RTX_CLASS (code) == 'i')
700 {
701 if (head == NULL_RTX)
702 head = insn;
703 end = insn;
704 }
705 break;
706 }
707
708 if (GET_RTX_CLASS (code) == 'i')
709 {
710 rtx note;
711
712 /* Make a list of all labels referred to other than by jumps
713 (which just don't have the REG_LABEL notes).
714
715 Make a special exception for labels followed by an ADDR*VEC,
716 as this would be a part of the tablejump setup code.
717
718 Make a special exception for the eh_return_stub_label, which
719 we know isn't part of any otherwise visible control flow. */
720
721 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
722 if (REG_NOTE_KIND (note) == REG_LABEL)
723 {
724 rtx lab = XEXP (note, 0), next;
725
726 if (lab == eh_return_stub_label)
727 ;
728 else if ((next = next_nonnote_insn (lab)) != NULL
729 && GET_CODE (next) == JUMP_INSN
730 && (GET_CODE (PATTERN (next)) == ADDR_VEC
731 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
732 ;
733 else
734 label_value_list
735 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
736 }
737 }
738 }
739
740 if (head != NULL_RTX)
741 create_basic_block (i++, head, end, bb_note);
742
743 if (i != n_basic_blocks)
744 abort ();
745
746 return label_value_list;
747 }
748
749 /* Create a new basic block consisting of the instructions between
750 HEAD and END inclusive. Reuses the note and basic block struct
751 in BB_NOTE, if any. */
752
753 static void
754 create_basic_block (index, head, end, bb_note)
755 int index;
756 rtx head, end, bb_note;
757 {
758 basic_block bb;
759
760 if (bb_note
761 && ! RTX_INTEGRATED_P (bb_note)
762 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
763 && bb->aux == NULL)
764 {
765 /* If we found an existing note, thread it back onto the chain. */
766
767 if (GET_CODE (head) == CODE_LABEL)
768 add_insn_after (bb_note, head);
769 else
770 {
771 add_insn_before (bb_note, head);
772 head = bb_note;
773 }
774 }
775 else
776 {
777 /* Otherwise we must create a note and a basic block structure.
778 Since we allow basic block structs in rtl, give the struct
779 the same lifetime by allocating it off the function obstack
780 rather than using malloc. */
781
782 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
783 memset (bb, 0, sizeof (*bb));
784
785 if (GET_CODE (head) == CODE_LABEL)
786 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
787 else
788 {
789 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
790 head = bb_note;
791 }
792 NOTE_BASIC_BLOCK (bb_note) = bb;
793 }
794
795 /* Always include the bb note in the block. */
796 if (NEXT_INSN (end) == bb_note)
797 end = bb_note;
798
799 bb->head = head;
800 bb->end = end;
801 bb->index = index;
802 BASIC_BLOCK (index) = bb;
803
804 /* Tag the block so that we know it has been used when considering
805 other basic block notes. */
806 bb->aux = bb;
807 }
808 \f
809 /* Records the basic block struct in basic_block_for_insn for every
810 instruction, indexed by INSN_UID. MAX is the size of the array. */
811
812 void
813 compute_bb_for_insn (max)
814 int max;
815 {
816 int i;
817
818 if (basic_block_for_insn)
819 VARRAY_FREE (basic_block_for_insn);
820 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
821
822 for (i = 0; i < n_basic_blocks; ++i)
823 {
824 basic_block bb = BASIC_BLOCK (i);
825 rtx insn, end;
826
827 end = bb->end;
828 insn = bb->head;
829 while (1)
830 {
831 int uid = INSN_UID (insn);
832 if (uid < max)
833 VARRAY_BB (basic_block_for_insn, uid) = bb;
834 if (insn == end)
835 break;
836 insn = NEXT_INSN (insn);
837 }
838 }
839 }
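
/* Once the table is filled in, BLOCK_FOR_INSN maps an insn back to its
   block, and set_block_for_insn updates an entry when insns move; e.g.
   (a sketch)

	basic_block bb = BLOCK_FOR_INSN (insn);  */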
840
841 /* Free the memory associated with the edge structures. */
842
843 static void
844 clear_edges ()
845 {
846 int i;
847 edge n, e;
848
849 for (i = 0; i < n_basic_blocks; ++i)
850 {
851 basic_block bb = BASIC_BLOCK (i);
852
853 for (e = bb->succ; e ; e = n)
854 {
855 n = e->succ_next;
856 free (e);
857 }
858
859 bb->succ = 0;
860 bb->pred = 0;
861 }
862
863 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
864 {
865 n = e->succ_next;
866 free (e);
867 }
868
869 ENTRY_BLOCK_PTR->succ = 0;
870 EXIT_BLOCK_PTR->pred = 0;
871
872 n_edges = 0;
873 }
874
875 /* Identify the edges between basic blocks.
876
877 LABEL_VALUE_LIST is a list of the labels whose addresses are taken in
878 non-jump rtl; with forced_labels it supplies the possible targets of
879 computed jumps, so blocks that are otherwise unreachable may still
880 acquire incoming edges.  The exception regions active at each block
881 boundary are read from bb->eh_beg and bb->eh_end. */
882
883 static void
884 make_edges (label_value_list)
885 rtx label_value_list;
886 {
887 int i;
888 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
889 sbitmap *edge_cache = NULL;
890
891 /* Assume no computed jump; revise as we create edges. */
892 current_function_has_computed_jump = 0;
893
894 /* Heavy use of computed goto in machine-generated code can lead to
895 nearly fully-connected CFGs. In that case we spend a significant
896 amount of time searching the edge lists for duplicates. */
897 if (forced_labels || label_value_list)
898 {
899 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
900 sbitmap_vector_zero (edge_cache, n_basic_blocks);
901 }
902
903 /* By nature of the way these get numbered, block 0 is always the entry. */
904 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
905
906 for (i = 0; i < n_basic_blocks; ++i)
907 {
908 basic_block bb = BASIC_BLOCK (i);
909 rtx insn, x;
910 enum rtx_code code;
911 int force_fallthru = 0;
912
913 /* Examine the last instruction of the block, and discover the
914 ways we can leave the block. */
915
916 insn = bb->end;
917 code = GET_CODE (insn);
918
919 /* A branch. */
920 if (code == JUMP_INSN)
921 {
922 rtx tmp;
923
924 /* ??? Recognize a tablejump and do the right thing. */
925 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
926 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
927 && GET_CODE (tmp) == JUMP_INSN
928 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
929 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
930 {
931 rtvec vec;
932 int j;
933
934 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
935 vec = XVEC (PATTERN (tmp), 0);
936 else
937 vec = XVEC (PATTERN (tmp), 1);
938
939 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
940 make_label_edge (edge_cache, bb,
941 XEXP (RTVEC_ELT (vec, j), 0), 0);
942
943 /* Some targets (e.g., ARM) emit a conditional jump that also
944 contains the out-of-range target. Scan for these and
945 add an edge if necessary. */
946 if ((tmp = single_set (insn)) != NULL
947 && SET_DEST (tmp) == pc_rtx
948 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
949 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
950 make_label_edge (edge_cache, bb,
951 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
952
953 #ifdef CASE_DROPS_THROUGH
954 /* Silly VAXen. The ADDR_VEC is going to be in the way of
955 us naturally detecting fallthru into the next block. */
956 force_fallthru = 1;
957 #endif
958 }
959
960 /* If this is a computed jump, then mark it as reaching
961 everything on the label_value_list and forced_labels list. */
962 else if (computed_jump_p (insn))
963 {
964 current_function_has_computed_jump = 1;
965
966 for (x = label_value_list; x; x = XEXP (x, 1))
967 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
968
969 for (x = forced_labels; x; x = XEXP (x, 1))
970 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
971 }
972
973 /* A return insn creates an edge to the exit block. */
974 else if (returnjump_p (insn))
975 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
976
977 /* Otherwise, we have a plain conditional or unconditional jump. */
978 else
979 {
980 if (! JUMP_LABEL (insn))
981 abort ();
982 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
983 }
984 }
985
986 /* If this is a CALL_INSN, then mark it as reaching the active EH
987 handler for this CALL_INSN. If we're handling asynchronous
988 exceptions then any insn can reach any of the active handlers.
989
990 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
991
992 if (code == CALL_INSN || asynchronous_exceptions)
993 {
994 /* If there's an EH region active at the end of a block,
995 add the appropriate edges. */
996 if (bb->eh_end >= 0)
997 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
998
999 /* If we have asynchronous exceptions, do the same for *all*
1000 exception regions active in the block. */
1001 if (asynchronous_exceptions
1002 && bb->eh_beg != bb->eh_end)
1003 {
1004 if (bb->eh_beg >= 0)
1005 make_eh_edge (edge_cache, eh_nest_info, bb,
1006 NULL_RTX, bb->eh_beg);
1007
1008 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1009 if (GET_CODE (x) == NOTE
1010 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1011 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1012 {
1013 int region = NOTE_EH_HANDLER (x);
1014 make_eh_edge (edge_cache, eh_nest_info, bb,
1015 NULL_RTX, region);
1016 }
1017 }
1018
1019 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1020 {
1021 /* ??? This could be made smarter: in some cases it's possible
1022 to tell that certain calls will not do a nonlocal goto.
1023
1024 For example, if the nested functions that do the nonlocal
1025 gotos do not have their addresses taken, then only calls to
1026 those functions or to other nested functions that use them
1027 could possibly do nonlocal gotos. */
1028 /* We do know that a REG_EH_REGION note with a value less
1029 than 0 is guaranteed not to perform a non-local goto. */
1030 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1031 if (!note || XINT (XEXP (note, 0), 0) >= 0)
1032 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1033 make_label_edge (edge_cache, bb, XEXP (x, 0),
1034 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1035 }
1036 }
1037
1038 /* We know something about the structure of the function __throw in
1039 libgcc2.c. It is the only function that ever contains eh_stub
1040 labels. It modifies its return address so that the last block
1041 returns to one of the eh_stub labels within it. So we have to
1042 make additional edges in the flow graph. */
1043 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1044 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1045
1046 /* Find out if we can drop through to the next block. */
1047 insn = next_nonnote_insn (insn);
1048 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1049 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1050 else if (i + 1 < n_basic_blocks)
1051 {
1052 rtx tmp = BLOCK_HEAD (i + 1);
1053 if (GET_CODE (tmp) == NOTE)
1054 tmp = next_nonnote_insn (tmp);
1055 if (force_fallthru || insn == tmp)
1056 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1057 }
1058 }
1059
1060 free_eh_nesting_info (eh_nest_info);
1061 if (edge_cache)
1062 sbitmap_vector_free (edge_cache);
1063 }
1064
1065 /* Create an edge between two basic blocks.  FLAGS is auxiliary edge
1066 information, accumulated across repeated calls for the same pair. */
1067
1068 static void
1069 make_edge (edge_cache, src, dst, flags)
1070 sbitmap *edge_cache;
1071 basic_block src, dst;
1072 int flags;
1073 {
1074 int use_edge_cache;
1075 edge e;
1076
1077 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1078 many edges to them, and we didn't allocate memory for it. */
1079 use_edge_cache = (edge_cache
1080 && src != ENTRY_BLOCK_PTR
1081 && dst != EXIT_BLOCK_PTR);
1082
1083 /* Make sure we don't add duplicate edges. */
1084 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1085 for (e = src->succ; e ; e = e->succ_next)
1086 if (e->dest == dst)
1087 {
1088 e->flags |= flags;
1089 return;
1090 }
1091
1092 e = (edge) xcalloc (1, sizeof (*e));
1093 n_edges++;
1094
1095 e->succ_next = src->succ;
1096 e->pred_next = dst->pred;
1097 e->src = src;
1098 e->dest = dst;
1099 e->flags = flags;
1100
1101 src->succ = e;
1102 dst->pred = e;
1103
1104 if (use_edge_cache)
1105 SET_BIT (edge_cache[src->index], dst->index);
1106 }
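
/* For example (a sketch): a conditional jump to the textually next block
   yields two calls here for the same (src, dst) pair; the second finds
   the existing edge and merely ORs in its flags,

	make_edge (edge_cache, bb, next_bb, 0);
	make_edge (edge_cache, bb, next_bb, EDGE_FALLTHRU);

   leaving a single edge whose flags include EDGE_FALLTHRU.  */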
1107
1108 /* Create an edge from a basic block to a label. */
1109
1110 static void
1111 make_label_edge (edge_cache, src, label, flags)
1112 sbitmap *edge_cache;
1113 basic_block src;
1114 rtx label;
1115 int flags;
1116 {
1117 if (GET_CODE (label) != CODE_LABEL)
1118 abort ();
1119
1120 /* If the label was never emitted, this insn is junk, but avoid a
1121 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1122 as a result of a syntax error and a diagnostic has already been
1123 printed. */
1124
1125 if (INSN_UID (label) == 0)
1126 return;
1127
1128 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1129 }
1130
1131 /* Create the edges generated by INSN in REGION. */
1132
1133 static void
1134 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1135 sbitmap *edge_cache;
1136 eh_nesting_info *eh_nest_info;
1137 basic_block src;
1138 rtx insn;
1139 int region;
1140 {
1141 handler_info **handler_list;
1142 int num, is_call;
1143
1144 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1145 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1146 while (--num >= 0)
1147 {
1148 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1149 EDGE_ABNORMAL | EDGE_EH | is_call);
1150 }
1151 }
1152
1153 /* EH_REGION notes appearing between basic blocks are ambiguous, and
1154 even dangerous if we intend to move basic blocks around. Move such
1155 notes into the following block. */
1156
1157 static void
1158 move_stray_eh_region_notes ()
1159 {
1160 int i;
1161 basic_block b1, b2;
1162
1163 if (n_basic_blocks < 2)
1164 return;
1165
1166 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1167 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1168 {
1169 rtx insn, next, list = NULL_RTX;
1170
1171 b1 = BASIC_BLOCK (i);
1172 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1173 {
1174 next = NEXT_INSN (insn);
1175 if (GET_CODE (insn) == NOTE
1176 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1177 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1178 {
1179 /* Unlink from the insn chain. */
1180 NEXT_INSN (PREV_INSN (insn)) = next;
1181 PREV_INSN (next) = PREV_INSN (insn);
1182
1183 /* Queue it. */
1184 NEXT_INSN (insn) = list;
1185 list = insn;
1186 }
1187 }
1188
1189 if (list == NULL_RTX)
1190 continue;
1191
1192 /* Find where to insert these things. */
1193 insn = b2->head;
1194 if (GET_CODE (insn) == CODE_LABEL)
1195 insn = NEXT_INSN (insn);
1196
1197 while (list)
1198 {
1199 next = NEXT_INSN (list);
1200 add_insn_after (list, insn);
1201 list = next;
1202 }
1203 }
1204 }
1205
1206 /* Recompute eh_beg/eh_end for each basic block. */
1207
1208 static void
1209 record_active_eh_regions (f)
1210 rtx f;
1211 {
1212 rtx insn, eh_list = NULL_RTX;
1213 int i = 0;
1214 basic_block bb = BASIC_BLOCK (0);
1215
1216 for (insn = f; insn ; insn = NEXT_INSN (insn))
1217 {
1218 if (bb->head == insn)
1219 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1220
1221 if (GET_CODE (insn) == NOTE)
1222 {
1223 int kind = NOTE_LINE_NUMBER (insn);
1224 if (kind == NOTE_INSN_EH_REGION_BEG)
1225 eh_list = alloc_INSN_LIST (insn, eh_list);
1226 else if (kind == NOTE_INSN_EH_REGION_END)
1227 {
1228 rtx t = XEXP (eh_list, 1);
1229 free_INSN_LIST_node (eh_list);
1230 eh_list = t;
1231 }
1232 }
1233
1234 if (bb->end == insn)
1235 {
1236 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1237 i += 1;
1238 if (i == n_basic_blocks)
1239 break;
1240 bb = BASIC_BLOCK (i);
1241 }
1242 }
1243 }
1244
1245 /* Identify critical edges and set the bits appropriately. */
1246
1247 static void
1248 mark_critical_edges ()
1249 {
1250 int i, n = n_basic_blocks;
1251 basic_block bb;
1252
1253 /* We begin with the entry block. This is not terribly important now,
1254 but could be if a front end (Fortran) implemented alternate entry
1255 points. */
1256 bb = ENTRY_BLOCK_PTR;
1257 i = -1;
1258
1259 while (1)
1260 {
1261 edge e;
1262
1263 /* (1) Critical edges must have a source with multiple successors. */
1264 if (bb->succ && bb->succ->succ_next)
1265 {
1266 for (e = bb->succ; e ; e = e->succ_next)
1267 {
1268 /* (2) Critical edges must have a destination with multiple
1269 predecessors. Note that we know there is at least one
1270 predecessor -- the edge we followed to get here. */
1271 if (e->dest->pred->pred_next)
1272 e->flags |= EDGE_CRITICAL;
1273 else
1274 e->flags &= ~EDGE_CRITICAL;
1275 }
1276 }
1277 else
1278 {
1279 for (e = bb->succ; e ; e = e->succ_next)
1280 e->flags &= ~EDGE_CRITICAL;
1281 }
1282
1283 if (++i >= n)
1284 break;
1285 bb = BASIC_BLOCK (i);
1286 }
1287 }
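
/* A predicate form of the above, for reference (a sketch; flow.c caches
   the result in e->flags rather than recomputing it):  */
#if 0
static int
edge_is_critical_p (e)
     edge e;
{
  return (e->src->succ->succ_next != NULL
	  && e->dest->pred->pred_next != NULL);
}
#endif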
1288 \f
1289 /* Split a (typically critical) edge. Return the new block.
1290 Abort on abnormal edges.
1291
1292 ??? The code generally expects to be called on critical edges.
1293 The case of a block ending in an unconditional jump to a
1294 block with multiple predecessors is not handled optimally. */
1295
1296 basic_block
1297 split_edge (edge_in)
1298 edge edge_in;
1299 {
1300 basic_block old_pred, bb, old_succ;
1301 edge edge_out;
1302 rtx bb_note;
1303 int i, j;
1304
1305 /* Abnormal edges cannot be split. */
1306 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1307 abort ();
1308
1309 old_pred = edge_in->src;
1310 old_succ = edge_in->dest;
1311
1312 /* Remove the existing edge from the destination's pred list. */
1313 {
1314 edge *pp;
1315 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1316 continue;
1317 *pp = edge_in->pred_next;
1318 edge_in->pred_next = NULL;
1319 }
1320
1321 /* Create the new structures. */
1322 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1323 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1324 n_edges++;
1325
1326 memset (bb, 0, sizeof (*bb));
1327 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1328 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1329
1330 /* ??? This info is likely going to be out of date very soon. */
1331 if (old_succ->global_live_at_start)
1332 {
1333 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1334 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1335 }
1336 else
1337 {
1338 CLEAR_REG_SET (bb->global_live_at_start);
1339 CLEAR_REG_SET (bb->global_live_at_end);
1340 }
1341
1342 /* Wire them up. */
1343 bb->pred = edge_in;
1344 bb->succ = edge_out;
1345
1346 edge_in->dest = bb;
1347 edge_in->flags &= ~EDGE_CRITICAL;
1348
1349 edge_out->pred_next = old_succ->pred;
1350 edge_out->succ_next = NULL;
1351 edge_out->src = bb;
1352 edge_out->dest = old_succ;
1353 edge_out->flags = EDGE_FALLTHRU;
1354 edge_out->probability = REG_BR_PROB_BASE;
1355
1356 old_succ->pred = edge_out;
1357
1358 /* Tricky case -- if there existed a fallthru into the successor
1359 (and we're not it) we must add a new unconditional jump around
1360 the new block we're actually interested in.
1361
1362 Further, if that edge is critical, this means a second new basic
1363 block must be created to hold it. In order to simplify correct
1364 insn placement, do this before we touch the existing basic block
1365 ordering for the block we were really wanting. */
1366 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1367 {
1368 edge e;
1369 for (e = edge_out->pred_next; e ; e = e->pred_next)
1370 if (e->flags & EDGE_FALLTHRU)
1371 break;
1372
1373 if (e)
1374 {
1375 basic_block jump_block;
1376 rtx pos;
1377
1378 if ((e->flags & EDGE_CRITICAL) == 0)
1379 {
1380 /* Non critical -- we can simply add a jump to the end
1381 of the existing predecessor. */
1382 jump_block = e->src;
1383 }
1384 else
1385 {
1386 /* We need a new block to hold the jump. The simplest
1387 way to do the bulk of the work here is to recursively
1388 call ourselves. */
1389 jump_block = split_edge (e);
1390 e = jump_block->succ;
1391 }
1392
1393 /* Now add the jump insn ... */
1394 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1395 jump_block->end);
1396 jump_block->end = pos;
1397 emit_barrier_after (pos);
1398
1399 /* ... let jump know that label is in use, ... */
1400 JUMP_LABEL (pos) = old_succ->head;
1401 ++LABEL_NUSES (old_succ->head);
1402
1403 /* ... and clear fallthru on the outgoing edge. */
1404 e->flags &= ~EDGE_FALLTHRU;
1405
1406 /* Continue splitting the interesting edge. */
1407 }
1408 }
1409
1410 /* Place the new block just in front of the successor. */
1411 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1412 if (old_succ == EXIT_BLOCK_PTR)
1413 j = n_basic_blocks - 1;
1414 else
1415 j = old_succ->index;
1416 for (i = n_basic_blocks - 1; i > j; --i)
1417 {
1418 basic_block tmp = BASIC_BLOCK (i - 1);
1419 BASIC_BLOCK (i) = tmp;
1420 tmp->index = i;
1421 }
1422 BASIC_BLOCK (i) = bb;
1423 bb->index = i;
1424
1425 /* Create the basic block note.
1426
1427 Where we place the note can have a noticeable impact on the generated
1428 code. Consider this cfg:
1429
1430
1431              E
1432              |
1433              0
1434             / \
1435         +->1-->2--->E
1436         |  |
1437         +--+
1438
1439 If we need to insert an insn on the edge from block 0 to block 1,
1440 we want to ensure the instructions we insert are outside of any
1441 loop notes that physically sit between block 0 and block 1. Otherwise
1442 we confuse the loop optimizer into thinking the loop is a phony. */
1443 if (old_succ != EXIT_BLOCK_PTR
1444 && PREV_INSN (old_succ->head)
1445 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1446 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1447 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1448 PREV_INSN (old_succ->head));
1449 else if (old_succ != EXIT_BLOCK_PTR)
1450 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1451 else
1452 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1453 NOTE_BASIC_BLOCK (bb_note) = bb;
1454 bb->head = bb->end = bb_note;
1455
1456 /* Not quite simple -- for non-fallthru edges, we must adjust the
1457 predecessor's jump instruction to target our new block. */
1458 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1459 {
1460 rtx tmp, insn = old_pred->end;
1461 rtx old_label = old_succ->head;
1462 rtx new_label = gen_label_rtx ();
1463
1464 if (GET_CODE (insn) != JUMP_INSN)
1465 abort ();
1466
1467 /* ??? Recognize a tablejump and adjust all matching cases. */
1468 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1469 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1470 && GET_CODE (tmp) == JUMP_INSN
1471 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1472 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1473 {
1474 rtvec vec;
1475 int j;
1476
1477 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1478 vec = XVEC (PATTERN (tmp), 0);
1479 else
1480 vec = XVEC (PATTERN (tmp), 1);
1481
1482 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1483 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1484 {
1485 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1486 --LABEL_NUSES (old_label);
1487 ++LABEL_NUSES (new_label);
1488 }
1489
1490 /* Handle casesi dispatch insns. */
1491 if ((tmp = single_set (insn)) != NULL
1492 && SET_DEST (tmp) == pc_rtx
1493 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1494 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1495 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1496 {
1497 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1498 new_label);
1499 --LABEL_NUSES (old_label);
1500 ++LABEL_NUSES (new_label);
1501 }
1502 }
1503 else
1504 {
1505 /* This would have indicated an abnormal edge. */
1506 if (computed_jump_p (insn))
1507 abort ();
1508
1509 /* A return instruction can't be redirected. */
1510 if (returnjump_p (insn))
1511 abort ();
1512
1513 /* If the insn doesn't go where we think, we're confused. */
1514 if (JUMP_LABEL (insn) != old_label)
1515 abort ();
1516
1517 redirect_jump (insn, new_label);
1518 }
1519
1520 emit_label_before (new_label, bb_note);
1521 bb->head = new_label;
1522 }
1523
1524 return bb;
1525 }
1526
1527 /* Queue instructions for insertion on an edge between two basic blocks.
1528 The new instructions and basic blocks (if any) will not appear in the
1529 CFG until commit_edge_insertions is called. */
1530
1531 void
1532 insert_insn_on_edge (pattern, e)
1533 rtx pattern;
1534 edge e;
1535 {
1536 /* We cannot insert instructions on an abnormal critical edge.
1537 It will be easier to find the culprit if we die now. */
1538 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1539 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1540 abort ();
1541
1542 if (e->insns == NULL_RTX)
1543 start_sequence ();
1544 else
1545 push_to_sequence (e->insns);
1546
1547 emit_insn (pattern);
1548
1549 e->insns = get_insns ();
1550 end_sequence ();
1551 }
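
/* Usage sketch (hypothetical; no such pass exists in this file): to
   materialize a zero in REG whenever control passes along edge E, a
   pass would queue the move and flush all queues once at the end.  */
#if 0
  insert_insn_on_edge (gen_move_insn (reg, const0_rtx), e);
  /* ... after every edge has been considered ... */
  commit_edge_insertions ();
#endif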
1552
1553 /* Update the CFG for the instructions queued on edge E. */
1554
1555 static void
1556 commit_one_edge_insertion (e)
1557 edge e;
1558 {
1559 rtx before = NULL_RTX, after = NULL_RTX, tmp;
1560 basic_block bb;
1561
1562 /* Figure out where to put these things. If the destination has
1563 one predecessor, insert there. Except for the exit block. */
1564 if (e->dest->pred->pred_next == NULL
1565 && e->dest != EXIT_BLOCK_PTR)
1566 {
1567 bb = e->dest;
1568
1569 /* Get the location correct wrt a code label, and "nice" wrt
1570 a basic block note, and before everything else. */
1571 tmp = bb->head;
1572 if (GET_CODE (tmp) == CODE_LABEL)
1573 tmp = NEXT_INSN (tmp);
1574 if (GET_CODE (tmp) == NOTE
1575 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1576 tmp = NEXT_INSN (tmp);
1577 if (tmp == bb->head)
1578 before = tmp;
1579 else
1580 after = PREV_INSN (tmp);
1581 }
1582
1583 /* If the source has one successor and the edge is not abnormal,
1584 insert there. Except for the entry block. */
1585 else if ((e->flags & EDGE_ABNORMAL) == 0
1586 && e->src->succ->succ_next == NULL
1587 && e->src != ENTRY_BLOCK_PTR)
1588 {
1589 bb = e->src;
1590 if (GET_CODE (bb->end) == JUMP_INSN)
1591 {
1592 /* ??? Is it possible to wind up with non-simple jumps? Perhaps
1593 a jump with delay slots already filled? */
1594 if (! simplejump_p (bb->end))
1595 abort ();
1596
1597 before = bb->end;
1598 }
1599 else
1600 {
1601 /* We'd better be fallthru, or we've lost track of what's what. */
1602 if ((e->flags & EDGE_FALLTHRU) == 0)
1603 abort ();
1604
1605 after = bb->end;
1606 }
1607 }
1608
1609 /* Otherwise we must split the edge. */
1610 else
1611 {
1612 bb = split_edge (e);
1613 after = bb->end;
1614 }
1615
1616 /* Now that we've found the spot, do the insertion. */
1617 tmp = e->insns;
1618 e->insns = NULL_RTX;
1619
1620 /* Set the new block number for these insns, if the structure is allocated. */
1621 if (basic_block_for_insn)
1622 {
1623 rtx i;
1624 for (i = tmp; i != NULL_RTX; i = NEXT_INSN (i))
1625 set_block_for_insn (i, bb);
1626 }
1627
1628 if (before)
1629 {
1630 emit_insns_before (tmp, before);
1631 if (before == bb->head)
1632 bb->head = tmp;
1633 }
1634 else
1635 {
1636 tmp = emit_insns_after (tmp, after);
1637 if (after == bb->end)
1638 bb->end = tmp;
1639 }
1640 }
1641
1642 /* Update the CFG for all queued instructions. */
1643
1644 void
1645 commit_edge_insertions ()
1646 {
1647 int i;
1648 basic_block bb;
1649
1650 i = -1;
1651 bb = ENTRY_BLOCK_PTR;
1652 while (1)
1653 {
1654 edge e, next;
1655
1656 for (e = bb->succ; e ; e = next)
1657 {
1658 next = e->succ_next;
1659 if (e->insns)
1660 commit_one_edge_insertion (e);
1661 }
1662
1663 if (++i >= n_basic_blocks)
1664 break;
1665 bb = BASIC_BLOCK (i);
1666 }
1667 }
1668 \f
1669 /* Delete all unreachable basic blocks. */
1670
1671 static void
1672 delete_unreachable_blocks ()
1673 {
1674 basic_block *worklist, *tos;
1675 int deleted_handler;
1676 edge e;
1677 int i, n;
1678
1679 n = n_basic_blocks;
1680 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1681
1682 /* Use basic_block->aux as a marker. Clear them all. */
1683
1684 for (i = 0; i < n; ++i)
1685 BASIC_BLOCK (i)->aux = NULL;
1686
1687 /* Add our starting points to the worklist. Almost always there will
1688 be only one. It isn't inconceivable that we might one day directly
1689 support Fortran alternate entry points. */
1690
1691 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1692 {
1693 *tos++ = e->dest;
1694
1695 /* Mark the block with a handy non-null value. */
1696 e->dest->aux = e;
1697 }
1698
1699 /* Iterate: find everything reachable from what we've already seen. */
1700
1701 while (tos != worklist)
1702 {
1703 basic_block b = *--tos;
1704
1705 for (e = b->succ; e ; e = e->succ_next)
1706 if (!e->dest->aux)
1707 {
1708 *tos++ = e->dest;
1709 e->dest->aux = e;
1710 }
1711 }
1712
1713 /* Delete all unreachable basic blocks. Count down so that we don't
1714 interfere with the block renumbering that happens in delete_block. */
1715
1716 deleted_handler = 0;
1717
1718 for (i = n - 1; i >= 0; --i)
1719 {
1720 basic_block b = BASIC_BLOCK (i);
1721
1722 if (b->aux != NULL)
1723 /* This block was found. Tidy up the mark. */
1724 b->aux = NULL;
1725 else
1726 deleted_handler |= delete_block (b);
1727 }
1728
1729 /* Fix up edges that now fall through, or rather should now fall through
1730 but previously required a jump around now deleted blocks. Simplify
1731 the search by only examining blocks numerically adjacent, since this
1732 is how find_basic_blocks created them. */
1733
1734 for (i = 1; i < n_basic_blocks; ++i)
1735 {
1736 basic_block b = BASIC_BLOCK (i - 1);
1737 basic_block c = BASIC_BLOCK (i);
1738 edge s;
1739
1740 /* We care about simple conditional or unconditional jumps with
1741 a single successor.
1742
1743 If we had a conditional branch to the next instruction when
1744 find_basic_blocks was called, then there will only be one
1745 out edge for the block which ended with the conditional
1746 branch (since we do not create duplicate edges).
1747
1748 Furthermore, the edge will be marked as a fallthru because we
1749 merge the flags for the duplicate edges. So we do not want to
1750 check that the edge is not a FALLTHRU edge. */
1751 if ((s = b->succ) != NULL
1752 && s->succ_next == NULL
1753 && s->dest == c
1754 /* If the jump insn has side effects, we can't tidy the edge. */
1755 && (GET_CODE (b->end) != JUMP_INSN
1756 || onlyjump_p (b->end)))
1757 tidy_fallthru_edge (s, b, c);
1758 }
1759
1760 /* If we deleted an exception handler, we may have EH region begin/end
1761 blocks to remove as well. */
1762 if (deleted_handler)
1763 delete_eh_regions ();
1764
1765 free (worklist);
1766 }
1767
1768 /* Find EH regions for which there is no longer a handler, and delete them. */
1769
1770 static void
1771 delete_eh_regions ()
1772 {
1773 rtx insn;
1774
1775 update_rethrow_references ();
1776
1777 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1778 if (GET_CODE (insn) == NOTE)
1779 {
1780 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1781 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1782 {
1783 int num = NOTE_EH_HANDLER (insn);
1784 /* A NULL handler indicates a region is no longer needed,
1785 as long as it isn't the target of a rethrow. */
1786 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1787 {
1788 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1789 NOTE_SOURCE_FILE (insn) = 0;
1790 }
1791 }
1792 }
1793 }
1794
1795 /* Return true if NOTE is not one of the ones that must be kept paired,
1796 so that we may simply delete them. */
1797
1798 static int
1799 can_delete_note_p (note)
1800 rtx note;
1801 {
1802 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1803 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1804 }
1805
1806 /* Unlink a chain of insns between START and FINISH, leaving notes
1807 that must be paired. */
1808
1809 void
1810 flow_delete_insn_chain (start, finish)
1811 rtx start, finish;
1812 {
1813 /* Unchain the insns one by one. It would be quicker to delete all
1814 of these with a single unchaining, rather than one at a time, but
1815 we need to keep the NOTEs. */
1816
1817 rtx next;
1818
1819 while (1)
1820 {
1821 next = NEXT_INSN (start);
1822 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1823 ;
1824 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1825 ;
1826 else
1827 next = flow_delete_insn (start);
1828
1829 if (start == finish)
1830 break;
1831 start = next;
1832 }
1833 }
1834
1835 /* Delete the insns in a (non-live) block. We physically delete every
1836 non-deleted-note insn, and update the flow graph appropriately.
1837
1838 Return nonzero if we deleted an exception handler. */
1839
1840 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1841 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1842
1843 static int
1844 delete_block (b)
1845 basic_block b;
1846 {
1847 int deleted_handler = 0;
1848 rtx insn, end;
1849
1850 /* If the head of this block is a CODE_LABEL, then it might be the
1851 label for an exception handler which can't be reached.
1852
1853 We need to remove the label from the exception_handler_label list
1854 and remove the associated NOTE_INSN_EH_REGION_BEG and
1855 NOTE_INSN_EH_REGION_END notes. */
1856
1857 insn = b->head;
1858
1859 never_reached_warning (insn);
1860
1861 if (GET_CODE (insn) == CODE_LABEL)
1862 {
1863 rtx x, *prev = &exception_handler_labels;
1864
1865 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1866 {
1867 if (XEXP (x, 0) == insn)
1868 {
1869 /* Found a match, splice this label out of the EH label list. */
1870 *prev = XEXP (x, 1);
1871 XEXP (x, 1) = NULL_RTX;
1872 XEXP (x, 0) = NULL_RTX;
1873
1874 /* Remove the handler from all regions. */
1875 remove_handler (insn);
1876 deleted_handler = 1;
1877 break;
1878 }
1879 prev = &XEXP (x, 1);
1880 }
1881
1882 /* This label may be referenced by code solely for its value, or
1883 referenced by static data, or something. We have determined
1884 that it is not reachable, but cannot delete the label itself.
1885 Save code space and continue to delete the balance of the block,
1886 along with properly updating the cfg. */
1887 if (!can_delete_label_p (insn))
1888 {
1889 /* If the label is the only insn in the block, skip the
1890 insn-deleting step entirely. */
1891 if (insn == b->end)
1892 goto no_delete_insns;
1893 insn = NEXT_INSN (insn);
1894 }
1895 }
1896
1897 /* Selectively unlink the insn chain. Include any BARRIER that may
1898 follow the basic block. */
1899 end = next_nonnote_insn (b->end);
1900 if (!end || GET_CODE (end) != BARRIER)
1901 end = b->end;
1902 flow_delete_insn_chain (insn, end);
1903
1904 no_delete_insns:
1905
1906 /* Remove the edges into and out of this block. Note that there may
1907 indeed be edges in, if we are removing an unreachable loop. */
1908 {
1909 edge e, next, *q;
1910
1911 for (e = b->pred; e ; e = next)
1912 {
1913 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1914 continue;
1915 *q = e->succ_next;
1916 next = e->pred_next;
1917 n_edges--;
1918 free (e);
1919 }
1920 for (e = b->succ; e ; e = next)
1921 {
1922 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1923 continue;
1924 *q = e->pred_next;
1925 next = e->succ_next;
1926 n_edges--;
1927 free (e);
1928 }
1929
1930 b->pred = NULL;
1931 b->succ = NULL;
1932 }
1933
1934 /* Remove the basic block from the array, and compact behind it. */
1935 expunge_block (b);
1936
1937 return deleted_handler;
1938 }
1939
1940 /* Remove block B from the basic block array and compact behind it. */
1941
1942 static void
1943 expunge_block (b)
1944 basic_block b;
1945 {
1946 int i, n = n_basic_blocks;
1947
1948 for (i = b->index; i + 1 < n; ++i)
1949 {
1950 basic_block x = BASIC_BLOCK (i + 1);
1951 BASIC_BLOCK (i) = x;
1952 x->index = i;
1953 }
1954
1955 basic_block_info->num_elements--;
1956 n_basic_blocks--;
1957 }
1958
1959 /* Delete INSN by patching it out. Return the next insn. */
1960
1961 static rtx
1962 flow_delete_insn (insn)
1963 rtx insn;
1964 {
1965 rtx prev = PREV_INSN (insn);
1966 rtx next = NEXT_INSN (insn);
1967
1968 PREV_INSN (insn) = NULL_RTX;
1969 NEXT_INSN (insn) = NULL_RTX;
1970
1971 if (prev)
1972 NEXT_INSN (prev) = next;
1973 if (next)
1974 PREV_INSN (next) = prev;
1975 else
1976 set_last_insn (prev);
1977
1978 if (GET_CODE (insn) == CODE_LABEL)
1979 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1980
1981 /* If deleting a jump, decrement the use count of the label. Deleting
1982 the label itself should happen in the normal course of block merging. */
1983 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1984 LABEL_NUSES (JUMP_LABEL (insn))--;
1985
1986 return next;
1987 }
1988
1989 /* True if a given label can be deleted. */
1990
1991 static int
1992 can_delete_label_p (label)
1993 rtx label;
1994 {
1995 rtx x;
1996
1997 if (LABEL_PRESERVE_P (label))
1998 return 0;
1999
2000 for (x = forced_labels; x ; x = XEXP (x, 1))
2001 if (label == XEXP (x, 0))
2002 return 0;
2003 for (x = label_value_list; x ; x = XEXP (x, 1))
2004 if (label == XEXP (x, 0))
2005 return 0;
2006 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2007 if (label == XEXP (x, 0))
2008 return 0;
2009
2010 /* User-declared labels must be preserved. */
2011 if (LABEL_NAME (label) != 0)
2012 return 0;
2013
2014 return 1;
2015 }
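
/* A user-level illustration of the checks above (hypothetical GNU C
   source).  With computed goto, a label's address escapes, so the
   label is expected to appear on forced_labels:

	void *p = &&out;
	...
	goto *p;
      out:
	...

   Deleting OUT would leave P dangling; can_delete_label_p therefore
   answers 0 for it, just as it does for labels on label_value_list,
   EH handler labels, and user-named (LABEL_NAME) labels.  */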
2016
2017 /* Blocks A and B are to be merged into a single block. A has no incoming
2018 fallthru edge, so it can be moved before B without adding or modifying
2019 any jumps (aside from the jump from A to B). */
2020
2021 static int
2022 merge_blocks_move_predecessor_nojumps (a, b)
2023 basic_block a, b;
2024 {
2025 rtx start, end, barrier;
2026 int index;
2027
2028 start = a->head;
2029 end = a->end;
2030
2031 /* We want to delete the BARRIER after the end of the insns we are
2032 going to move. If we don't find a BARRIER, then do nothing. This
2033 can happen in some cases if we have labels we cannot delete.
2034
2035 Similarly, do nothing if we cannot delete the label at the start
2036 of the target block. */
2037 barrier = next_nonnote_insn (end);
2038 if (barrier == NULL_RTX || GET_CODE (barrier) != BARRIER
2039 || (GET_CODE (b->head) == CODE_LABEL
2040 && ! can_delete_label_p (b->head)))
2041 return 0;
2042 else
2043 flow_delete_insn (barrier);
2044
2045 /* Move block and loop notes out of the chain so that we do not
2046 disturb their order.
2047
2048 ??? A better solution would be to squeeze out all the non-nested notes
2049 and adjust the block trees appropriately. Even better would be to have
2050 a tighter connection between block trees and rtl so that this is not
2051 necessary. */
2052 start = squeeze_notes (start, end);
2053
2054 /* Scramble the insn chain. */
2055 if (end != PREV_INSN (b->head))
2056 reorder_insns (start, end, PREV_INSN (b->head));
2057
2058 if (rtl_dump_file)
2059 {
2060 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2061 a->index, b->index);
2062 }
2063
2064 /* Swap the records for the two blocks around. Although we are deleting B,
2065 A is now where B was and we want to compact the BB array from where
2066 A used to be. */
2067 BASIC_BLOCK (a->index) = b;
2068 BASIC_BLOCK (b->index) = a;
2069 index = a->index;
2070 a->index = b->index;
2071 b->index = index;
2072
2073 /* Now blocks A and B are contiguous. Merge them. */
2074 merge_blocks_nomove (a, b);
2075
2076 return 1;
2077 }
2078
2079 /* Blocks A and B are to be merged into a single block. B has no outgoing
2080 fallthru edge, so it can be moved after A without adding or modifying
2081 any jumps (aside from the jump from A to B). */
2082
2083 static int
2084 merge_blocks_move_successor_nojumps (a, b)
2085 basic_block a, b;
2086 {
2087 rtx start, end, barrier;
2088
2089 start = b->head;
2090 end = b->end;
2091
2092 /* We want to delete the BARRIER after the end of the insns we are
2093 going to move. If we don't find a BARRIER, then do nothing. This
2094 can happen in some cases if we have labels we cannot delete.
2095
2096 Similarly, do nothing if we cannot delete the label at the start
2097 of the target block. */
2098 barrier = next_nonnote_insn (end);
2099 if (barrier == NULL_RTX || GET_CODE (barrier) != BARRIER
2100 || (GET_CODE (b->head) == CODE_LABEL
2101 && ! can_delete_label_p (b->head)))
2102 return 0;
2103 else
2104 flow_delete_insn (barrier);
2105
2106 /* Move block and loop notes out of the chain so that we do not
2107 disturb their order.
2108
2109 ??? A better solution would be to squeeze out all the non-nested notes
2110 and adjust the block trees appropriately. Even better would be to have
2111 a tighter connection between block trees and rtl so that this is not
2112 necessary. */
2113 start = squeeze_notes (start, end);
2114
2115 /* Scramble the insn chain. */
2116 reorder_insns (start, end, a->end);
2117
2118 /* Now blocks A and B are contiguous. Merge them. */
2119 merge_blocks_nomove (a, b);
2120
2121 if (rtl_dump_file)
2122 {
2123 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2124 b->index, a->index);
2125 }
2126
2127 return 1;
2128 }
2129
2130 /* Blocks A and B are to be merged into a single block. The insns
2131 are already contiguous, hence `nomove'. */
2132
2133 static void
2134 merge_blocks_nomove (a, b)
2135 basic_block a, b;
2136 {
2137 edge e;
2138 rtx b_head, b_end, a_end;
2139 int b_empty = 0;
2140
2141 /* If there was a CODE_LABEL beginning B, delete it. */
2142 b_head = b->head;
2143 b_end = b->end;
2144 if (GET_CODE (b_head) == CODE_LABEL)
2145 {
2146 /* Detect basic blocks with nothing but a label. This can happen
2147 in particular at the end of a function. */
2148 if (b_head == b_end)
2149 b_empty = 1;
2150 b_head = flow_delete_insn (b_head);
2151 }
2152
2153 /* Delete the basic block note. */
2154 if (GET_CODE (b_head) == NOTE
2155 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2156 {
2157 if (b_head == b_end)
2158 b_empty = 1;
2159 b_head = flow_delete_insn (b_head);
2160 }
2161
2162 /* If there was a jump out of A, delete it. */
2163 a_end = a->end;
2164 if (GET_CODE (a_end) == JUMP_INSN)
2165 {
2166 rtx prev;
2167
2168 prev = prev_nonnote_insn (a_end);
2169 if (!prev)
2170 prev = a->head;
2171
2172 #ifdef HAVE_cc0
2173 /* If this was a conditional jump, we need to also delete
2174 the insn that set cc0. */
2175
2176 if (prev && sets_cc0_p (prev))
2177 {
2178 rtx tmp = prev;
2179 prev = prev_nonnote_insn (prev);
2180 if (!prev)
2181 prev = a->head;
2182 flow_delete_insn (tmp);
2183 }
2184 #endif
2185
2186 /* Note that a->head != a->end, since we should have at least a
2187 bb note plus the jump, so prev != a_end. */
2188 flow_delete_insn (a_end);
2189 a_end = prev;
2190 }
2191
2192 /* By definition, there should only be one successor of A, and that is
2193 B. Free that edge struct. */
2194 n_edges--;
2195 free (a->succ);
2196
2197 /* Adjust the edges out of B for the new owner. */
2198 for (e = b->succ; e ; e = e->succ_next)
2199 e->src = a;
2200 a->succ = b->succ;
2201
2202 /* Reassociate the insns of B with A. */
2203 if (!b_empty)
2204 {
2205 BLOCK_FOR_INSN (b_head) = a;
2206 while (b_head != b_end)
2207 {
2208 b_head = NEXT_INSN (b_head);
2209 BLOCK_FOR_INSN (b_head) = a;
2210 }
2211 a_end = b_head;
2212 }
2213 a->end = a_end;
2214
2215 /* Compact the basic block array. */
2216 expunge_block (b);
2217 }
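
/* Schematically (hypothetical insns), merge_blocks_nomove turns

	A:  insn-1			A:  insn-1
	    insn-2			    insn-2
	    (jump_insn -> L)	  ==>	    insn-3
	B:  L: (code_label)
	    (note basic_block)
	    insn-3

   into the single block on the right: the label, the basic block
   note, and the jump out of A are deleted, and B's remaining insns
   are reassociated with A through BLOCK_FOR_INSN.  */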
2218
2219 /* Attempt to merge basic blocks that are potentially non-adjacent.
2220 Return true iff the attempt succeeded. */
2221
2222 static int
2223 merge_blocks (e, b, c)
2224 edge e;
2225 basic_block b, c;
2226 {
2227 /* If B has a fallthru edge to C, no need to move anything. */
2228 if (e->flags & EDGE_FALLTHRU)
2229 {
2230 /* If a label still appears somewhere and we cannot delete the label,
2231 then we cannot merge the blocks. The edge was tidied already. */
2232
2233 rtx insn, stop = NEXT_INSN (c->head);
2234 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2235 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2236 return 0;
2237
2238 merge_blocks_nomove (b, c);
2239
2240 if (rtl_dump_file)
2241 {
2242 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2243 b->index, c->index);
2244 }
2245
2246 return 1;
2247 }
2248 else
2249 {
2250 edge tmp_edge;
2251 basic_block d;
2252 int c_has_outgoing_fallthru;
2253 int b_has_incoming_fallthru;
2254
2255 /* We must make sure to not munge nesting of exception regions,
2256 lexical blocks, and loop notes.
2257
2258 The first is taken care of by requiring that the active eh
2259 region at the end of one block always matches the active eh
2260 region at the beginning of the next block.
2261
2262 The latter two are taken care of by squeezing out all the notes. */
2263
2264 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2265 executed, so we may want to treat blocks which have two out
2266 edges, one normal and one abnormal, as having only one edge for
2267 block merging purposes. */
2268
2269 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2270 if (tmp_edge->flags & EDGE_FALLTHRU)
2271 break;
2272 c_has_outgoing_fallthru = (tmp_edge != NULL);
2273
2274 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2275 if (tmp_edge->flags & EDGE_FALLTHRU)
2276 break;
2277 b_has_incoming_fallthru = (tmp_edge != NULL);
2278
2279 /* If B does not have an incoming fallthru, and the exception regions
2280 match, then it can be moved immediately before C without introducing
2281 or modifying jumps.
2282
2283 C cannot be the first block, so we do not have to worry about
2284 accessing a non-existent block. */
2285 d = BASIC_BLOCK (c->index - 1);
2286 if (! b_has_incoming_fallthru
2287 && d->eh_end == b->eh_beg
2288 && b->eh_end == c->eh_beg)
2289 return merge_blocks_move_predecessor_nojumps (b, c);
2290
2291 /* Otherwise, we're going to try to move C after B. Make sure the
2292 exception regions match.
2293
2294 If B is the last basic block, then we must not try to access the
2295 block structure for block B + 1. Luckily in that case we do not
2296 need to worry about matching exception regions. */
2297 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2298 if (b->eh_end == c->eh_beg
2299 && (d == NULL || c->eh_end == d->eh_beg))
2300 {
2301 /* If C does not have an outgoing fallthru, then it can be moved
2302 immediately after B without introducing or modifying jumps. */
2303 if (! c_has_outgoing_fallthru)
2304 return merge_blocks_move_successor_nojumps (b, c);
2305
2306 /* Otherwise, we'll need to insert an extra jump, and possibly
2307 a new block to contain it. */
2308 /* ??? Not implemented yet. */
2309 }
2310
2311 return 0;
2312 }
2313 }
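
/* A schematic of the first exception-region test above (hypothetical
   region numbers).  To move B immediately before C, where D is the
   block physically preceding C, merge_blocks requires

	d->eh_end == b->eh_beg	 and   b->eh_end == c->eh_beg

   i.e. the EH region open at the end of D matches the one open at
   the start of B, and the one open at the end of B matches the start
   of C, so B slots in between without interleaving
   NOTE_INSN_EH_REGION_BEG/END notes or breaking their nesting.  */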
2314
2315 /* Top level driver for merge_blocks. */
2316
2317 static void
2318 try_merge_blocks ()
2319 {
2320 int i;
2321
2322 /* Attempt to merge blocks as made possible by edge removal. If a block
2323 has only one successor, and the successor has only one predecessor,
2324 they may be combined. */
2325
2326 for (i = 0; i < n_basic_blocks; )
2327 {
2328 basic_block c, b = BASIC_BLOCK (i);
2329 edge s;
2330
2331 /* A loop because chains of blocks might be combinable. */
2332 while ((s = b->succ) != NULL
2333 && s->succ_next == NULL
2334 && (s->flags & EDGE_EH) == 0
2335 && (c = s->dest) != EXIT_BLOCK_PTR
2336 && c->pred->pred_next == NULL
2337 /* If the jump insn has side effects, we can't kill the edge. */
2338 && (GET_CODE (b->end) != JUMP_INSN
2339 || onlyjump_p (b->end))
2340 && merge_blocks (s, b, c))
2341 continue;
2342
2343 /* Don't get confused by the index shift caused by deleting blocks. */
2344 i = b->index + 1;
2345 }
2346 }
2347
2348 /* The given edge should potentially be a fallthru edge. If that is in
2349 fact true, delete the unconditional jump and barriers that are in
2350 the way. */
2351
2352 static void
2353 tidy_fallthru_edge (e, b, c)
2354 edge e;
2355 basic_block b, c;
2356 {
2357 rtx q;
2358
2359 /* ??? In a late-running flow pass, other folks may have deleted basic
2360 blocks by nopping out blocks, leaving multiple BARRIERs between here
2361 and the target label. They ought to be chastised and fixed.
2362
2363 We can also wind up with a sequence of undeletable labels between
2364 one block and the next.
2365
2366 So search through a sequence of barriers, labels, and notes for
2367 the head of block C and assert that we really do fall through. */
2368
2369 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2370 return;
2371
2372 /* Remove what will soon cease being the jump insn from the source block.
2373 If block B consisted only of this single jump, turn it into a deleted
2374 note. */
2375 q = b->end;
2376 if (GET_CODE (q) == JUMP_INSN)
2377 {
2378 #ifdef HAVE_cc0
2379 /* If this was a conditional jump, we need to also delete
2380 the insn that set cc0. */
2381 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2382 q = PREV_INSN (q);
2383 #endif
2384
2385 if (b->head == q)
2386 {
2387 PUT_CODE (q, NOTE);
2388 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2389 NOTE_SOURCE_FILE (q) = 0;
2390 }
2391 else
2392 b->end = q = PREV_INSN (q);
2393 }
2394
2395 /* Selectively unlink the sequence. */
2396 if (q != PREV_INSN (c->head))
2397 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2398
2399 e->flags |= EDGE_FALLTHRU;
2400 }
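
/* Schematically (hypothetical insns), tidy_fallthru_edge turns

	B:  insn-1
	    (jump_insn -> L)
	    (barrier)
	C:  L: (code_label)
	    insn-2

   into B: insn-1 simply falling through to C: insn-2; the jump is
   removed (or turned into a deleted note if it was all of B), the
   barrier and any other deletable debris are unlinked, and
   EDGE_FALLTHRU is set on E.  */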
2401
2402 /* Discover and record the loop depth at the head of each basic block. */
2403
2404 static void
2405 calculate_loop_depth (insns)
2406 rtx insns;
2407 {
2408 basic_block bb;
2409 rtx insn;
2410 int i = 0, depth = 1;
2411
2412 bb = BASIC_BLOCK (i);
2413 for (insn = insns; insn ; insn = NEXT_INSN (insn))
2414 {
2415 if (insn == bb->head)
2416 {
2417 bb->loop_depth = depth;
2418 if (++i >= n_basic_blocks)
2419 break;
2420 bb = BASIC_BLOCK (i);
2421 }
2422
2423 if (GET_CODE (insn) == NOTE)
2424 {
2425 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2426 depth++;
2427 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
2428 depth--;
2429
2430 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error. */
2431 if (depth == 0)
2432 abort ();
2433 }
2434 }
2435 }
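
/* A worked example of the bookkeeping above (hypothetical stream);
   each block's head records the depth in force when it is reached:

	insn stream			loop_depth at head
	  bb0 head ...			1
	  (note loop_beg)		depth becomes 2
	  bb1 head ...			2
	  (note loop_end)		depth becomes 1
	  bb2 head ...			1

   Depth 1 is the function body itself, so a depth of 0 can only mean
   an unmatched NOTE_INSN_LOOP_END, hence the abort.  */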
2436 \f
2437 /* Perform data flow analysis.
2438 F is the first insn of the function and NREGS the number of register numbers
2439 in use. */
2440
2441 void
2442 life_analysis (f, nregs, file, remove_dead_code)
2443 rtx f;
2444 int nregs;
2445 FILE *file;
2446 int remove_dead_code;
2447 {
2448 #ifdef ELIMINABLE_REGS
2449 register size_t i;
2450 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2451 #endif
2452 int flags;
2453
2454 /* Record which registers will be eliminated. We use this in
2455 mark_used_regs. */
2456
2457 CLEAR_HARD_REG_SET (elim_reg_set);
2458
2459 #ifdef ELIMINABLE_REGS
2460 for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
2461 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2462 #else
2463 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2464 #endif
2465
2466 /* Allocate a bitmap to be filled in by record_volatile_insns. */
2467 uid_volatile = BITMAP_XMALLOC ();
2468
2469 /* We want alias analysis information for local dead store elimination. */
2470 init_alias_analysis ();
2471
2472 flags = PROP_FINAL;
2473 if (! remove_dead_code)
2474 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2475 life_analysis_1 (f, nregs, flags);
2476
2477 if (! reload_completed)
2478 mark_constant_function ();
2479
2480 end_alias_analysis ();
2481
2482 if (file)
2483 dump_flow_info (file);
2484
2485 BITMAP_XFREE (uid_volatile);
2486 free_basic_block_vars (1);
2487 }
2488
2489 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2490 Search for REGNO. If found, abort if it is not wider than word_mode. */
2491
2492 static int
2493 verify_wide_reg_1 (px, pregno)
2494 rtx *px;
2495 void *pregno;
2496 {
2497 rtx x = *px;
2498 int regno = *(int *) pregno;
2499
2500 if (GET_CODE (x) == REG && REGNO (x) == regno)
2501 {
2502 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2503 abort ();
2504 return 1;
2505 }
2506 return 0;
2507 }
2508
2509 /* A subroutine of verify_local_live_at_start. Search through insns
2510 between HEAD and END looking for register REGNO. */
2511
2512 static void
2513 verify_wide_reg (regno, head, end)
2514 int regno;
2515 rtx head, end;
2516 {
2517 while (1)
2518 {
2519 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2520 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2521 return;
2522 if (head == end)
2523 break;
2524 head = NEXT_INSN (head);
2525 }
2526
2527 /* We didn't find the register at all. Something's way screwy. */
2528 abort ();
2529 }
2530
2531 /* A subroutine of update_life_info. Verify that there are no untoward
2532 changes in live_at_start during a local update. */
2533
2534 static void
2535 verify_local_live_at_start (new_live_at_start, bb)
2536 regset new_live_at_start;
2537 basic_block bb;
2538 {
2539 if (reload_completed)
2540 {
2541 /* After reload, there are no pseudos, nor subregs of multi-word
2542 registers. The regsets should exactly match. */
2543 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2544 abort ();
2545 }
2546 else
2547 {
2548 int i;
2549
2550 /* Find the set of changed registers. */
2551 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2552
2553 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2554 {
2555 /* No registers should die. */
2556 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2557 abort ();
2558 /* Verify that the now-live register is wider than word_mode. */
2559 verify_wide_reg (i, bb->head, bb->end);
2560 });
2561 }
2562 }
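
/* A small worked example of the XOR trick above (hypothetical regs).
   Suppose the old live_at_start was {1, 5} and the recomputed set is
   {1, 5, 8}.  XOR leaves exactly the changed bits, {8}.  Reg 8 is
   not set in the old live_at_start, so no register died; the loop
   then demands (via verify_wide_reg) that reg 8 be wider than a
   word, i.e. the one tolerated kind of new birth: a multi-word
   pseudo whose subreg kill a pre-reload splitter lost.  */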
2563
2564 /* Updates death notes starting with the basic blocks set in BLOCKS.
2565
2566 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholing,
2567 we are only expecting local modifications to basic blocks. If we find extra
2568 registers live at the beginning of a block, then we either killed
2569 useful data, or we have a broken split that wants data not provided.
2570 If we find registers removed from live_at_start, that means we have
2571 a broken peephole that is killing a register it shouldn't.
2572
2573 ??? This is not true in one situation -- when a pre-reload splitter
2574 generates subregs of a multi-word pseudo, current life analysis will
2575 lose the kill. So we _can_ have a pseudo go live. How irritating.
2576
2577 BLOCK_FOR_INSN is assumed to be correct.
2578
2579 ??? PROP_FLAGS should not contain PROP_LOG_LINKS. Need to set up
2580 reg_next_use for that. Including PROP_REG_INFO does not refresh
2581 regs_ever_live unless the caller resets it to zero. */
2582
2583 void
2584 update_life_info (blocks, extent, prop_flags)
2585 sbitmap blocks;
2586 enum update_life_extent extent;
2587 int prop_flags;
2588 {
2589 regset tmp;
2590 int i;
2591
2592 tmp = ALLOCA_REG_SET ();
2593
2594 /* For a global update, we go through the relaxation process again. */
2595 if (extent != UPDATE_LIFE_LOCAL)
2596 {
2597 calculate_global_regs_live (blocks, blocks,
2598 prop_flags & PROP_SCAN_DEAD_CODE);
2599
2600 /* If asked, remove notes from the blocks we'll update. */
2601 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2602 count_or_remove_death_notes (blocks, 1);
2603 }
2604
2605 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2606 {
2607 basic_block bb = BASIC_BLOCK (i);
2608
2609 COPY_REG_SET (tmp, bb->global_live_at_end);
2610 propagate_block (tmp, bb->head, bb->end, (regset) NULL, i,
2611 prop_flags);
2612
2613 if (extent == UPDATE_LIFE_LOCAL)
2614 verify_local_live_at_start (tmp, bb);
2615 });
2616
2617 FREE_REG_SET (tmp);
2618 }
2619
2620 /* Free the variables allocated by find_basic_blocks.
2621
2622 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2623
2624 void
2625 free_basic_block_vars (keep_head_end_p)
2626 int keep_head_end_p;
2627 {
2628 if (basic_block_for_insn)
2629 {
2630 VARRAY_FREE (basic_block_for_insn);
2631 basic_block_for_insn = NULL;
2632 }
2633
2634 if (! keep_head_end_p)
2635 {
2636 clear_edges ();
2637 VARRAY_FREE (basic_block_info);
2638 n_basic_blocks = 0;
2639
2640 ENTRY_BLOCK_PTR->aux = NULL;
2641 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2642 EXIT_BLOCK_PTR->aux = NULL;
2643 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2644 }
2645 }
2646
2647 /* Return nonzero if the destination of SET equals the source. */
2648 static int
2649 set_noop_p (set)
2650 rtx set;
2651 {
2652 rtx src = SET_SRC (set);
2653 rtx dst = SET_DEST (set);
2654 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2655 && REGNO (src) == REGNO (dst))
2656 return 1;
2657 if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
2658 || SUBREG_WORD (src) != SUBREG_WORD (dst))
2659 return 0;
2660 src = SUBREG_REG (src);
2661 dst = SUBREG_REG (dst);
2662 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2663 && REGNO (src) == REGNO (dst))
2664 return 1;
2665 return 0;
2666 }
2667
2668 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2669 value to itself. */
2670 static int
2671 noop_move_p (insn)
2672 rtx insn;
2673 {
2674 rtx pat = PATTERN (insn);
2675
2676 /* Insns carrying these notes are useful later on. */
2677 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2678 return 0;
2679
2680 if (GET_CODE (pat) == SET && set_noop_p (pat))
2681 return 1;
2682
2683 if (GET_CODE (pat) == PARALLEL)
2684 {
2685 int i;
2686 /* If nothing but SETs of registers to themselves,
2687 this insn can also be deleted. */
2688 for (i = 0; i < XVECLEN (pat, 0); i++)
2689 {
2690 rtx tem = XVECEXP (pat, 0, i);
2691
2692 if (GET_CODE (tem) == USE
2693 || GET_CODE (tem) == CLOBBER)
2694 continue;
2695
2696 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2697 return 0;
2698 }
2699
2700 return 1;
2701 }
2702 return 0;
2703 }
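
/* Examples of what the two predicates above accept and reject,
   with hypothetical register numbers:

	(set (reg:SI 58) (reg:SI 58))			   no-op
	(set (subreg:SI (reg:DI 60) 1)
	     (subreg:SI (reg:DI 60) 1))			   no-op
	(parallel [(set (reg:SI 58) (reg:SI 58))
		   (clobber (reg:CC 17))])		   no-op
	(set (reg:SI 58) (reg:SI 59))			   kept

   An insn carrying a REG_EQUAL note is kept even when its SETs are
   no-ops, since later passes mine those notes for information.  */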
2704
2705 static void
2706 notice_stack_pointer_modification (x, pat, data)
2707 rtx x;
2708 rtx pat ATTRIBUTE_UNUSED;
2709 void *data ATTRIBUTE_UNUSED;
2710 {
2711 if (x == stack_pointer_rtx
2712 /* The stack pointer is only modified indirectly as the result
2713 of a push until later in flow. See the comments in rtl.texi
2714 regarding Embedded Side-Effects on Addresses. */
2715 || (GET_CODE (x) == MEM
2716 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2717 || GET_CODE (XEXP (x, 0)) == PRE_INC
2718 || GET_CODE (XEXP (x, 0)) == POST_DEC
2719 || GET_CODE (XEXP (x, 0)) == POST_INC)
2720 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2721 current_function_sp_is_unchanging = 0;
2722 }
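
/* For example, on a STACK_GROWS_DOWNWARD target a push is written

	(set (mem:SI (pre_dec:SI (reg sp))) (reg:SI 0))

   (`sp' standing for stack_pointer_rtx, reg 0 hypothetical).
   note_stores hands this function the MEM as the thing stored into,
   so the PRE_DEC/POST_INC tests above are what catch pushes; a
   direct (set (reg sp) ...) is caught by the first test.  */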
2723
2724 /* Record which insns refer to any volatile memory
2725 or for any reason can't be deleted just because they are dead stores.
2726 Also, delete any insns that copy a register to itself.
2727 And see if the stack pointer is modified. */
2728 static void
2729 record_volatile_insns (f)
2730 rtx f;
2731 {
2732 rtx insn;
2733 for (insn = f; insn; insn = NEXT_INSN (insn))
2734 {
2735 enum rtx_code code1 = GET_CODE (insn);
2736 if (code1 == CALL_INSN)
2737 SET_INSN_VOLATILE (insn);
2738 else if (code1 == INSN || code1 == JUMP_INSN)
2739 {
2740 if (GET_CODE (PATTERN (insn)) != USE
2741 && volatile_refs_p (PATTERN (insn)))
2742 SET_INSN_VOLATILE (insn);
2743
2744 /* A SET that makes space on the stack cannot be dead.
2745 (Such SETs occur only for allocating variable-size data,
2746 so they will always have a PLUS or MINUS according to the
2747 direction of stack growth.)
2748 Even if this function never uses this stack pointer value,
2749 signal handlers do! */
2750 else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
2751 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2752 #ifdef STACK_GROWS_DOWNWARD
2753 && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
2754 #else
2755 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2756 #endif
2757 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
2758 SET_INSN_VOLATILE (insn);
2759
2760 /* Delete (in effect) any obvious no-op moves. */
2761 else if (noop_move_p (insn))
2762 {
2763 PUT_CODE (insn, NOTE);
2764 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2765 NOTE_SOURCE_FILE (insn) = 0;
2766 }
2767 }
2768
2769 /* Check if insn modifies the stack pointer. */
2770 if (current_function_sp_is_unchanging
2771 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2772 note_stores (PATTERN (insn),
2773 notice_stack_pointer_modification,
2774 NULL);
2775 }
2776 }
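
/* For instance, on a STACK_GROWS_DOWNWARD target an alloca-style
   allocation looks like (hypothetical size)

	(set (reg sp) (minus (reg sp) (const_int 64)))

   which the test above marks volatile: even if this function never
   reads the new space, a signal handler may, so the SET must not be
   deleted as a dead store.  */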
2777
2778 /* Mark a register in SET. Hard registers in large modes get all
2779 of their component registers set as well. */
2780 static void
2781 mark_reg (set, reg)
2782 regset set;
2783 rtx reg;
2784 {
2785 int regno = REGNO (reg);
2786
2787 SET_REGNO_REG_SET (set, regno);
2788 if (regno < FIRST_PSEUDO_REGISTER)
2789 {
2790 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2791 while (--n > 0)
2792 SET_REGNO_REG_SET (set, regno + n);
2793 }
2794 }
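
/* For instance, assuming a 32-bit target where
   HARD_REGNO_NREGS (0, DImode) == 2, marking (reg:DI 0) sets bits 0
   and 1 in SET, since the DImode value occupies the hard register
   pair 0/1 and both words must be recorded.  A pseudo always gets
   exactly one bit regardless of mode.  */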
2795
2796 /* Mark those regs which are needed at the end of the function as live
2797 at the end of the last basic block. */
2798 static void
2799 mark_regs_live_at_end (set)
2800 regset set;
2801 {
2802 tree type;
2803 int i;
2804
2805 /* If exiting needs the right stack value, consider the stack pointer
2806 live at the end of the function. */
2807 if ((HAVE_epilogue && reload_completed)
2808 || ! EXIT_IGNORE_STACK
2809 || (! FRAME_POINTER_REQUIRED
2810 && ! current_function_calls_alloca
2811 && flag_omit_frame_pointer)
2812 || current_function_sp_is_unchanging)
2813 {
2814 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2815 }
2816
2817 /* Mark the frame pointer if needed at the end of the function. If
2818 we end up eliminating it, it will be removed from the live list
2819 of each basic block by reload. */
2820
2821 if (! reload_completed || frame_pointer_needed)
2822 {
2823 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2824 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2825 /* If they are different, also mark the hard frame pointer as live */
2826 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2827 #endif
2828 }
2829
2830 #ifdef PIC_OFFSET_TABLE_REGNUM
2831 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2832 /* Many architectures have a GP register even without flag_pic.
2833 Assume the pic register is not in use, or will be handled by
2834 other means, if it is not fixed. */
2835 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2836 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2837 #endif
2838 #endif
2839
2840 /* Mark all global registers, and all registers used by the epilogue
2841 as being live at the end of the function since they may be
2842 referenced by our caller. */
2843 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2844 if (global_regs[i]
2845 #ifdef EPILOGUE_USES
2846 || EPILOGUE_USES (i)
2847 #endif
2848 )
2849 SET_REGNO_REG_SET (set, i);
2850
2851 /* Mark all call-saved registers that we actually used. */
2852 if (HAVE_epilogue && reload_completed)
2853 {
2854 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2855 if (! call_used_regs[i] && regs_ever_live[i])
2856 SET_REGNO_REG_SET (set, i);
2857 }
2858
2859 /* Mark function return value. */
2860 /* ??? Only do this after reload. Consider a non-void function that
2861 omits a return statement. Across that edge we'll have the return
2862 register live, and no set for it. Thus the return register will
2863 be live back through the CFG to the entry, and thus we die. A
2864 possible solution is to emit a clobber at exits without returns. */
2865
2866 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2867 if (reload_completed
2868 && type != void_type_node)
2869 {
2870 rtx outgoing;
2871
2872 if (current_function_returns_struct
2873 || current_function_returns_pcc_struct)
2874 type = build_pointer_type (type);
2875
2876 #ifdef FUNCTION_OUTGOING_VALUE
2877 outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
2878 #else
2879 outgoing = FUNCTION_VALUE (type, current_function_decl);
2880 #endif
2881
2882 if (GET_CODE (outgoing) == REG)
2883 mark_reg (set, outgoing);
2884 else if (GET_CODE (outgoing) == PARALLEL)
2885 {
2886 int len = XVECLEN (outgoing, 0);
2887
2888 /* Check for a NULL entry, used to indicate that the parameter
2889 goes on the stack and in registers. */
2890 i = (XEXP (XVECEXP (outgoing, 0, 0), 0) ? 0 : 1);
2891
2892 for ( ; i < len; ++i)
2893 {
2894 rtx r = XVECEXP (outgoing, 0, i);
2895 if (GET_CODE (r) == REG)
2896 mark_reg (set, r);
2897 }
2898 }
2899 else
2900 abort ();
2901 }
2902 }
2903
2904 /* Determine which registers are live at the start of each
2905 basic block of the function whose first insn is F.
2906 NREGS is the number of registers used in F.
2907 We allocate the vector basic_block_live_at_start
2908 and the regsets that it points to, and fill them with the data.
2909 regset_size and regset_bytes are also set here. */
2910
2911 static void
2912 life_analysis_1 (f, nregs, flags)
2913 rtx f;
2914 int nregs;
2915 int flags;
2916 {
2917 char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
2918 register int i;
2919
2920 max_regno = nregs;
2921
2922 /* Allocate and zero out many data structures
2923 that will record the data from lifetime analysis. */
2924
2925 allocate_reg_life_data ();
2926 allocate_bb_life_data ();
2927
2928 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2929
2930 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
2931 This will be cleared by record_volatile_insns if it encounters an insn
2932 which modifies the stack pointer. */
2933 current_function_sp_is_unchanging = !current_function_calls_alloca;
2934 record_volatile_insns (f);
2935
2936 /* Find the set of registers live on function exit. Do this before
2937 zeroing regs_ever_live, as we use that data post-reload. */
2938 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2939
2940 /* The post-reload life analysis must have (on a global basis) the
2941 same registers live as were computed by reload itself; otherwise
2942 register elimination offsets and such may be incorrect.
2943
2944 Reload will mark some registers as live even though they do not
2945 appear in the rtl. */
2946 if (reload_completed)
2947 memcpy (save_regs_ever_live, regs_ever_live, sizeof (regs_ever_live));
2948 memset (regs_ever_live, 0, sizeof regs_ever_live);
2949
2950 /* Compute register life at block boundaries. It'd be nice to
2951 begin with just the exit and noreturn blocks, but that set
2952 is not immediately handy. */
2953 {
2954 sbitmap blocks;
2955 blocks = sbitmap_alloc (n_basic_blocks);
2956 sbitmap_ones (blocks);
2957 calculate_global_regs_live (blocks, blocks, flags & PROP_SCAN_DEAD_CODE);
2958 sbitmap_free (blocks);
2959 }
2960
2961 /* The only pseudos that are live at the beginning of the function are
2962 those that were not set anywhere in the function. local-alloc doesn't
2963 know how to handle these correctly, so mark them as not local to any
2964 one basic block. */
2965
2966 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2967 FIRST_PSEUDO_REGISTER, i,
2968 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2969
2970 /* Now the life information is accurate. Make one more pass over each
2971 basic block to delete dead stores, create autoincrement addressing
2972 and record how many times each register is used, is set, or dies. */
2973 {
2974 regset tmp;
2975 tmp = ALLOCA_REG_SET ();
2976
2977 for (i = n_basic_blocks - 1; i >= 0; --i)
2978 {
2979 basic_block bb = BASIC_BLOCK (i);
2980
2981 COPY_REG_SET (tmp, bb->global_live_at_end);
2982 propagate_block (tmp, bb->head, bb->end, (regset) NULL, i, flags);
2983 }
2984
2985 FREE_REG_SET (tmp);
2986 }
2987
2988 /* We have a problem with any pseudoreg that lives across the setjmp.
2989 ANSI says that if a user variable does not change in value between
2990 the setjmp and the longjmp, then the longjmp preserves it. This
2991 includes longjmp from a place where the pseudo appears dead.
2992 (In principle, the value still exists if it is in scope.)
2993 If the pseudo goes in a hard reg, some other value may occupy
2994 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2995 Conclusion: such a pseudo must not go in a hard reg. */
2996 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2997 FIRST_PSEUDO_REGISTER, i,
2998 {
2999 if (regno_reg_rtx[i] != 0)
3000 {
3001 REG_LIVE_LENGTH (i) = -1;
3002 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3003 }
3004 });
3005
3006 /* Restore regs_ever_live that was provided by reload. */
3007 if (reload_completed)
3008 memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));
3009
3010 /* Clean up. */
3011 free (reg_next_use);
3012 reg_next_use = NULL;
3013 }
3014
3015 /* Propagate global life info around the graph of basic blocks. Begin
3016 considering blocks with their corresponding bit set in BLOCKS_IN.
3017 BLOCKS_OUT is set for every block that was changed. */
3018
3019 static void
3020 calculate_global_regs_live (blocks_in, blocks_out, flags)
3021 sbitmap blocks_in, blocks_out;
3022 int flags;
3023 {
3024 basic_block *queue, *qhead, *qtail, *qend;
3025 regset tmp, new_live_at_end;
3026 int i;
3027
3028 tmp = ALLOCA_REG_SET ();
3029 new_live_at_end = ALLOCA_REG_SET ();
3030
3031 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3032 more because the `head == tail' style test for an empty queue doesn't
3033 work with a full queue. */
3034 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3035 qtail = queue;
3036 qhead = qend = queue + n_basic_blocks + 2;
3037
3038 /* Clear out the garbage that might be hanging out in bb->aux. */
3039 for (i = n_basic_blocks - 1; i >= 0; --i)
3040 BASIC_BLOCK (i)->aux = NULL;
3041
3042 /* Queue the blocks set in the initial mask. Do this in reverse block
3043 number order so that the first round is more likely to do useful
3044 work. We use non-null AUX to flag that the block is queued. */
3045 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3046 {
3047 basic_block bb = BASIC_BLOCK (i);
3048 *--qhead = bb;
3049 bb->aux = bb;
3050 });
3051
3052 sbitmap_zero (blocks_out);
3053
3054 while (qhead != qtail)
3055 {
3056 int rescan, changed;
3057 basic_block bb;
3058 edge e;
3059
3060 bb = *qhead++;
3061 if (qhead == qend)
3062 qhead = queue;
3063 bb->aux = NULL;
3064
3065 /* Begin by propagating live_at_start from the successor blocks. */
3066 CLEAR_REG_SET (new_live_at_end);
3067 for (e = bb->succ; e ; e = e->succ_next)
3068 {
3069 basic_block sb = e->dest;
3070 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3071 }
3072
3073 if (bb == ENTRY_BLOCK_PTR)
3074 {
3075 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3076 continue;
3077 }
3078
3079 /* On our first pass through this block, we'll go ahead and continue.
3080 Recognize the first pass by local_set being NULL. On subsequent passes, we
3081 get to skip out early if live_at_end wouldn't have changed. */
3082
3083 if (bb->local_set == NULL)
3084 {
3085 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3086 rescan = 1;
3087 }
3088 else
3089 {
3090 /* If any bits were removed from live_at_end, we'll have to
3091 rescan the block. This wouldn't be necessary if we had
3092 precalculated local_live; however, with PROP_SCAN_DEAD_CODE
3093 local_live is really dependent on live_at_end. */
3094 CLEAR_REG_SET (tmp);
3095 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3096 new_live_at_end, BITMAP_AND_COMPL);
3097
3098 if (! rescan)
3099 {
3100 /* Find the set of changed bits. Take this opportunity
3101 to notice that this set is empty and early out. */
3102 CLEAR_REG_SET (tmp);
3103 changed = bitmap_operation (tmp, bb->global_live_at_end,
3104 new_live_at_end, BITMAP_XOR);
3105 if (! changed)
3106 continue;
3107
3108 /* If any of the changed bits overlap with local_set,
3109 we'll have to rescan the block. Detect overlap by
3110 the AND with ~local_set turning off bits. */
3111 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3112 BITMAP_AND_COMPL);
3113 }
3114 }
3115
3116 /* Let our caller know that BB changed enough to require its
3117 death notes updated. */
3118 SET_BIT (blocks_out, bb->index);
3119
3120 if (! rescan)
3121 {
3122 /* Add to live_at_start the set of all registers in
3123 new_live_at_end that aren't in the old live_at_end. */
3124
3125 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3126 BITMAP_AND_COMPL);
3127 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3128
3129 changed = bitmap_operation (bb->global_live_at_start,
3130 bb->global_live_at_start,
3131 tmp, BITMAP_IOR);
3132 if (! changed)
3133 continue;
3134 }
3135 else
3136 {
3137 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3138
3139 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3140 into live_at_start. */
3141 propagate_block (new_live_at_end, bb->head, bb->end,
3142 bb->local_set, bb->index, flags);
3143
3144 /* If live_at_start didn't change, no need to go farther. */
3145 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3146 continue;
3147
3148 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3149 }
3150
3151 /* Queue all predecessors of BB so that we may re-examine
3152 their live_at_end. */
3153 for (e = bb->pred; e ; e = e->pred_next)
3154 {
3155 basic_block pb = e->src;
3156 if (pb->aux == NULL)
3157 {
3158 *qtail++ = pb;
3159 if (qtail == qend)
3160 qtail = queue;
3161 pb->aux = pb;
3162 }
3163 }
3164 }
3165
3166 FREE_REG_SET (tmp);
3167 FREE_REG_SET (new_live_at_end);
3168
3169 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3170 {
3171 basic_block bb = BASIC_BLOCK (i);
3172 FREE_REG_SET (bb->local_set);
3173 });
3174
3175 free (queue);
3176 }
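
/* The loop above is the standard backward liveness fixed point over
   the CFG.  In equations, with local_set playing the role of def:

	live_at_end (B)   = UNION over successors S of live_at_start (S)
	live_at_start (B) = use (B) | (live_at_end (B) & ~def (B))

   propagate_block supplies the second (transfer) equation.
   Termination comes from iterating over the finite lattice of
   regsets; the careful rescan logic above exists precisely because,
   with PROP_SCAN_DEAD_CODE, bits can also be removed from
   live_at_end.  Two implementation notes: the queue is a ring
   buffer, and with n_basic_blocks + 2 slots the qhead == qtail
   emptiness test can never be mistaken for a full queue, since at
   most n_basic_blocks + 1 blocks (counting ENTRY_BLOCK) are queued
   at once; and seeding in reverse block number order tends to visit
   blocks near the exit first, which converges faster for a backward
   problem.  */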
3177 \f
3178 /* Subroutines of life analysis. */
3179
3180 /* Allocate the permanent data structures that represent the results
3181 of life analysis. Not static since used also for stupid life analysis. */
3182
3183 void
3184 allocate_bb_life_data ()
3185 {
3186 register int i;
3187
3188 for (i = 0; i < n_basic_blocks; i++)
3189 {
3190 basic_block bb = BASIC_BLOCK (i);
3191
3192 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3193 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3194 }
3195
3196 ENTRY_BLOCK_PTR->global_live_at_end
3197 = OBSTACK_ALLOC_REG_SET (function_obstack);
3198 EXIT_BLOCK_PTR->global_live_at_start
3199 = OBSTACK_ALLOC_REG_SET (function_obstack);
3200
3201 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3202 }
3203
3204 void
3205 allocate_reg_life_data ()
3206 {
3207 int i;
3208
3209 /* Recalculate the register space, in case it has grown. Old-style
3210 vector-oriented regsets would set regset_{size,bytes} here also. */
3211 allocate_reg_info (max_regno, FALSE, FALSE);
3212
3213 /* Reset all the data we'll collect in propagate_block and its
3214 subroutines. */
3215 for (i = 0; i < max_regno; i++)
3216 {
3217 REG_N_SETS (i) = 0;
3218 REG_N_REFS (i) = 0;
3219 REG_N_DEATHS (i) = 0;
3220 REG_N_CALLS_CROSSED (i) = 0;
3221 REG_LIVE_LENGTH (i) = 0;
3222 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3223 }
3224 }
3225
3226 /* Compute the registers live at the beginning of a basic block
3227 from those live at the end.
3228
3229 When called, OLD contains those live at the end.
3230 On return, it contains those live at the beginning.
3231 FIRST and LAST are the first and last insns of the basic block.
3232
3233 FLAGS is the set of PROP_* operations to perform. On the final
3234 pass (which is not for computing the life info, since that has
3235 already been done, but for acting on it) we delete dead stores,
3236 set up the logical links and dead-variables lists of instructions,
3237 and merge instructions for autoincrement and autodecrement addresses.
3238
3239 SIGNIFICANT is nonzero only the first time for each basic block.
3240 If it is nonzero, it points to a regset in which we store
3241 a 1 for each register that is set within the block.
3242
3243 BNUM is the number of the basic block. */
3244
3245 static void
3246 propagate_block (old, first, last, significant, bnum, flags)
3247 register regset old;
3248 rtx first;
3249 rtx last;
3250 regset significant;
3251 int bnum;
3252 int flags;
3253 {
3254 register rtx insn;
3255 rtx prev;
3256 regset live;
3257 regset dead;
3258
3259 /* Find the loop depth for this block. Ignore loop level changes in the
3260 middle of the basic block -- for register allocation purposes, the
3261 important uses will be in the blocks wholly contained within the loop,
3262 not in the loop pre-header or post-trailer. */
3263 loop_depth = BASIC_BLOCK (bnum)->loop_depth;
3264
3265 dead = ALLOCA_REG_SET ();
3266 live = ALLOCA_REG_SET ();
3267
3268 cc0_live = 0;
3269
3270 if (flags & PROP_REG_INFO)
3271 {
3272 register int i;
3273
3274 /* Process the regs live at the end of the block.
3275 Mark them as not local to any one basic block. */
3276 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3277 {
3278 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3279 });
3280 }
3281
3282 /* Scan the block an insn at a time from end to beginning. */
3283
3284 for (insn = last; ; insn = prev)
3285 {
3286 prev = PREV_INSN (insn);
3287
3288 if (GET_CODE (insn) == NOTE)
3289 {
3290 /* If this is a call to `setjmp' et al,
3291 warn if any non-volatile datum is live. */
3292
3293 if ((flags & PROP_REG_INFO)
3294 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3295 IOR_REG_SET (regs_live_at_setjmp, old);
3296 }
3297
3298 /* Update the life-status of regs for this insn.
3299 First DEAD gets which regs are set in this insn
3300 then LIVE gets which regs are used in this insn.
3301 Then the regs live before the insn
3302 are those live after, with DEAD regs turned off,
3303 and then LIVE regs turned on. */
3304
3305 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3306 {
3307 register int i;
3308 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3309 int insn_is_dead = 0;
3310 int libcall_is_dead = 0;
3311
3312 if (flags & PROP_SCAN_DEAD_CODE)
3313 {
3314 insn_is_dead = (insn_dead_p (PATTERN (insn), old, 0, REG_NOTES (insn))
3315 /* Don't delete something that refers to volatile storage! */
3316 && ! INSN_VOLATILE (insn));
3317 libcall_is_dead = (insn_is_dead && note != 0
3318 && libcall_dead_p (PATTERN (insn), old, note, insn));
3319 }
3320
3321 /* We almost certainly don't want to delete prologue or epilogue
3322 instructions. Warn about probable compiler lossage. */
3323 if ((flags & PROP_KILL_DEAD_CODE)
3324 && insn_is_dead
3325 && reload_completed
3326 && (HAVE_epilogue || HAVE_prologue)
3327 && prologue_epilogue_contains (insn))
3328 {
3329 warning ("ICE: would have deleted prologue/epilogue insn");
3330 debug_rtx (insn);
3331 libcall_is_dead = insn_is_dead = 0;
3332 }
3333
3334 /* If an instruction consists of just dead store(s) on final pass,
3335 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3336 We could really delete it with delete_insn, but that
3337 can cause trouble for first or last insn in a basic block. */
3338 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3339 {
3340 rtx inote;
3341 /* If the insn referred to a label, note that the label is
3342 now less used. */
3343 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3344 {
3345 if (REG_NOTE_KIND (inote) == REG_LABEL)
3346 {
3347 rtx label = XEXP (inote, 0);
3348 rtx next;
3349 LABEL_NUSES (label)--;
3350
3351 /* If this label was attached to an ADDR_VEC, it's
3352 safe to delete the ADDR_VEC. In fact, it's pretty much
3353 mandatory to delete it, because the ADDR_VEC may
3354 be referencing labels that no longer exist. */
3355 if (LABEL_NUSES (label) == 0
3356 && (next = next_nonnote_insn (label)) != NULL
3357 && GET_CODE (next) == JUMP_INSN
3358 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3359 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3360 {
3361 rtx pat = PATTERN (next);
3362 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3363 int len = XVECLEN (pat, diff_vec_p);
3364 int i;
3365 for (i = 0; i < len; i++)
3366 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3367 PUT_CODE (next, NOTE);
3368 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3369 NOTE_SOURCE_FILE (next) = 0;
3370 }
3371 }
3372 }
3373
3374 PUT_CODE (insn, NOTE);
3375 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3376 NOTE_SOURCE_FILE (insn) = 0;
3377
3378 /* CC0 is now known to be dead. Either this insn used it,
3379 in which case it doesn't anymore, or clobbered it,
3380 so the next insn can't use it. */
3381 cc0_live = 0;
3382
3383 /* If this insn is copying the return value from a library call,
3384 delete the entire library call. */
3385 if (libcall_is_dead)
3386 {
3387 rtx first = XEXP (note, 0);
3388 rtx p = insn;
3389 while (INSN_DELETED_P (first))
3390 first = NEXT_INSN (first);
3391 while (p != first)
3392 {
3393 p = PREV_INSN (p);
3394 PUT_CODE (p, NOTE);
3395 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3396 NOTE_SOURCE_FILE (p) = 0;
3397 }
3398 }
3399 goto flushed;
3400 }
3401
3402 CLEAR_REG_SET (dead);
3403 CLEAR_REG_SET (live);
3404
3405 /* See if this is an increment or decrement that can be
3406 merged into a following memory address. */
3407 #ifdef AUTO_INC_DEC
3408 {
3409 register rtx x = single_set (insn);
3410
3411 /* Does this instruction increment or decrement a register? */
3412 if (!reload_completed
3413 && (flags & PROP_AUTOINC)
3414 && x != 0
3415 && GET_CODE (SET_DEST (x)) == REG
3416 && (GET_CODE (SET_SRC (x)) == PLUS
3417 || GET_CODE (SET_SRC (x)) == MINUS)
3418 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3419 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3420 /* Ok, look for a following memory ref we can combine with.
3421 If one is found, change the memory ref to a PRE_INC
3422 or PRE_DEC, cancel this insn, and return 1.
3423 Return 0 if nothing has been done. */
3424 && try_pre_increment_1 (insn))
3425 goto flushed;
3426 }
3427 #endif /* AUTO_INC_DEC */
3428
3429 /* If this is not the final pass, and this insn is copying the
3430 value of a library call and it's dead, don't scan the
3431 insns that perform the library call, so that the call's
3432 arguments are not marked live. */
3433 if (libcall_is_dead)
3434 {
3435 /* Mark the dest reg as `significant'. */
3436 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3437 significant, flags);
3438
3439 insn = XEXP (note, 0);
3440 prev = PREV_INSN (insn);
3441 }
3442 else if (GET_CODE (PATTERN (insn)) == SET
3443 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3444 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3445 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3446 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3447 /* We have an insn to pop a constant amount off the stack.
3448 (Such insns use PLUS regardless of the direction of the stack,
3449 and any insn to adjust the stack by a constant is always a pop.)
3450 These insns, if not dead stores, have no effect on life. */
3451 ;
3452 else
3453 {
3454 /* Any regs live at the time of a call instruction
3455 must not go in a register clobbered by calls.
3456 Find all regs now live and record this for them. */
3457
3458 if (GET_CODE (insn) == CALL_INSN
3459 && (flags & PROP_REG_INFO))
3460 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3461 {
3462 REG_N_CALLS_CROSSED (i)++;
3463 });
3464
3465 /* LIVE gets the regs used in INSN;
3466 DEAD gets those set by it. Dead insns don't make anything
3467 live. */
3468
3469 mark_set_regs (old, dead, PATTERN (insn),
3470 insn, significant, flags);
3471
3472 /* If an insn doesn't use CC0, it becomes dead since we
3473 assume that every insn clobbers it. So show it dead here;
3474 mark_used_regs will set it live if it is referenced. */
3475 cc0_live = 0;
3476
3477 if (! insn_is_dead)
3478 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3479
3480 /* Sometimes we may have inserted something before INSN (such as
3481 a move) when we make an auto-inc. So ensure we will scan
3482 those insns. */
3483 #ifdef AUTO_INC_DEC
3484 prev = PREV_INSN (insn);
3485 #endif
3486
3487 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3488 {
3489 register int i;
3490
3491 rtx note;
3492
3493 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3494 note;
3495 note = XEXP (note, 1))
3496 if (GET_CODE (XEXP (note, 0)) == USE)
3497 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3498 flags, insn);
3499
3500 /* Each call clobbers all call-clobbered regs that are not
3501 global or fixed. Note that the function-value reg is a
3502 call-clobbered reg, and mark_set_regs has already had
3503 a chance to handle it. */
3504
3505 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3506 if (call_used_regs[i] && ! global_regs[i]
3507 && ! fixed_regs[i])
3508 {
3509 SET_REGNO_REG_SET (dead, i);
3510 if (significant)
3511 SET_REGNO_REG_SET (significant, i);
3512 }
3513
3514 /* The stack ptr is used (honorarily) by a CALL insn. */
3515 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3516
3517 /* Calls may also reference any of the global registers,
3518 so they are made live. */
3519 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3520 if (global_regs[i])
3521 mark_used_regs (old, live,
3522 gen_rtx_REG (reg_raw_mode[i], i),
3523 flags, insn);
3524
3525 /* Calls also clobber memory. */
3526 free_EXPR_LIST_list (&mem_set_list);
3527 }
3528
3529 /* Update OLD for the registers used or set. */
3530 AND_COMPL_REG_SET (old, dead);
3531 IOR_REG_SET (old, live);
3532
3533 }
3534
3535 /* On final pass, update counts of how many insns each reg is live
3536 at. */
3537 if (flags & PROP_REG_INFO)
3538 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3539 { REG_LIVE_LENGTH (i)++; });
3540 }
3541 flushed: ;
3542 if (insn == first)
3543 break;
3544 }
3545
3546 FREE_REG_SET (dead);
3547 FREE_REG_SET (live);
3548 free_EXPR_LIST_list (&mem_set_list);
3549 }
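
/* Per insn, the update above is, in effect,

	old = (old & ~dead) | live

   applied while scanning from LAST back to FIRST: DEAD holds the
   regs the insn sets (dead just before it), LIVE the regs it uses.
   Composed over every insn in the block this maps live_at_end to
   live_at_start, which is exactly the transfer function that
   calculate_global_regs_live iterates to a fixed point.  */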
3550 \f
3551 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3552 (SET expressions whose destinations are registers dead after the insn).
3553 NEEDED is the regset that says which regs are alive after the insn.
3554
3555 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3556
3557 If X is the entire body of an insn, NOTES contains the reg notes
3558 pertaining to the insn. */
3559
3560 static int
3561 insn_dead_p (x, needed, call_ok, notes)
3562 rtx x;
3563 regset needed;
3564 int call_ok;
3565 rtx notes ATTRIBUTE_UNUSED;
3566 {
3567 enum rtx_code code = GET_CODE (x);
3568
3569 #ifdef AUTO_INC_DEC
3570 /* If flow is invoked after reload, we must take existing AUTO_INC
3571 expressions into account. */
3572 if (reload_completed)
3573 {
3574 for ( ; notes; notes = XEXP (notes, 1))
3575 {
3576 if (REG_NOTE_KIND (notes) == REG_INC)
3577 {
3578 int regno = REGNO (XEXP (notes, 0));
3579
3580 /* Don't delete insns to set global regs. */
3581 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3582 || REGNO_REG_SET_P (needed, regno))
3583 return 0;
3584 }
3585 }
3586 }
3587 #endif
3588
3589 /* If setting something that's a reg or part of one,
3590 see if that register's altered value will be live. */
3591
3592 if (code == SET)
3593 {
3594 rtx r = SET_DEST (x);
3595
3596 /* A SET that is a subroutine call cannot be dead. */
3597 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
3598 return 0;
3599
3600 #ifdef HAVE_cc0
3601 if (GET_CODE (r) == CC0)
3602 return ! cc0_live;
3603 #endif
3604
3605 if (GET_CODE (r) == MEM && ! MEM_VOLATILE_P (r))
3606 {
3607 rtx temp;
3608 /* Walk the set of memory locations we are currently tracking
3609 and see if one is an identical match to this memory location.
3610 If so, this memory write is dead (remember, we're walking
3611 backwards from the end of the block to the start). */
3612 temp = mem_set_list;
3613 while (temp)
3614 {
3615 if (rtx_equal_p (XEXP (temp, 0), r))
3616 return 1;
3617 temp = XEXP (temp, 1);
3618 }
3619 }
3620
3621 while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
3622 || GET_CODE (r) == ZERO_EXTRACT)
3623 r = XEXP (r, 0);
3624
3625 if (GET_CODE (r) == REG)
3626 {
3627 int regno = REGNO (r);
3628
3629 /* Don't delete insns to set global regs. */
3630 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3631 /* Make sure insns to set frame pointer aren't deleted. */
3632 || (regno == FRAME_POINTER_REGNUM
3633 && (! reload_completed || frame_pointer_needed))
3634 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3635 || (regno == HARD_FRAME_POINTER_REGNUM
3636 && (! reload_completed || frame_pointer_needed))
3637 #endif
3638 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3639 /* Make sure insns to set arg pointer are never deleted
3640 (if the arg pointer isn't fixed, there will be a USE for
3641 it, so we can treat it normally). */
3642 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3643 #endif
3644 || REGNO_REG_SET_P (needed, regno))
3645 return 0;
3646
3647 /* If this is a hard register, verify that subsequent words are
3648 not needed. */
3649 if (regno < FIRST_PSEUDO_REGISTER)
3650 {
3651 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3652
3653 while (--n > 0)
3654 if (REGNO_REG_SET_P (needed, regno+n))
3655 return 0;
3656 }
3657
3658 return 1;
3659 }
3660 }
3661
3662 /* If performing several activities,
3663 insn is dead if each activity is individually dead.
3664 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
3665 that's inside a PARALLEL doesn't make the insn worth keeping. */
3666 else if (code == PARALLEL)
3667 {
3668 int i = XVECLEN (x, 0);
3669
3670 for (i--; i >= 0; i--)
3671 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3672 && GET_CODE (XVECEXP (x, 0, i)) != USE
3673 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3674 return 0;
3675
3676 return 1;
3677 }
3678
3679 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3680 is not necessarily true for hard registers. */
3681 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3682 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3683 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3684 return 1;
3685
3686 /* We do not check other CLOBBER or USE here. An insn consisting of just
3687 a CLOBBER or just a USE should not be deleted. */
3688 return 0;
3689 }
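
/* Two examples, with hypothetical pseudo 100.  Given an empty NEEDED
   set after the insn,

	(set (reg:SI 100) (const_int 0))

   is dead: reg 100 is a pseudo, is not live afterward, and none of
   the frame/arg-pointer exceptions apply.  By contrast,

	(set (mem:SI (reg:SI 100)) (const_int 0))

   is dead only if the MEM is not volatile and an identical MEM is
   already on mem_set_list, i.e. a store scanned earlier (later in
   the block) overwrites the same location.  */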
3690
3691 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3692 return 1 if the entire library call is dead.
3693 This is true if X copies a register (hard or pseudo)
3694 and if the hard return reg of the call insn is dead.
3695 (The caller should have tested the destination of X already for death.)
3696
3697 If this insn doesn't just copy a register, then we don't
3698 have an ordinary libcall. In that case, cse could not have
3699 managed to substitute the source for the dest later on,
3700 so we can assume the libcall is dead.
3701
3702 NEEDED is the bit vector of pseudoregs live before this insn.
3703 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3704
3705 static int
3706 libcall_dead_p (x, needed, note, insn)
3707 rtx x;
3708 regset needed;
3709 rtx note;
3710 rtx insn;
3711 {
3712 register RTX_CODE code = GET_CODE (x);
3713
3714 if (code == SET)
3715 {
3716 register rtx r = SET_SRC (x);
3717 if (GET_CODE (r) == REG)
3718 {
3719 rtx call = XEXP (note, 0);
3720 rtx call_pat;
3721 register int i;
3722
3723 /* Find the call insn. */
3724 while (call != insn && GET_CODE (call) != CALL_INSN)
3725 call = NEXT_INSN (call);
3726
3727 /* If there is none, do nothing special,
3728 since ordinary death handling can understand these insns. */
3729 if (call == insn)
3730 return 0;
3731
3732 /* See if the hard reg holding the value is dead.
3733 If this is a PARALLEL, find the call within it. */
3734 call_pat = PATTERN (call);
3735 if (GET_CODE (call_pat) == PARALLEL)
3736 {
3737 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3738 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3739 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3740 break;
3741
3742 /* This may be a library call that is returning a value
3743 via invisible pointer. Do nothing special, since
3744 ordinary death handling can understand these insns. */
3745 if (i < 0)
3746 return 0;
3747
3748 call_pat = XVECEXP (call_pat, 0, i);
3749 }
3750
3751 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3752 }
3753 }
3754 return 1;
3755 }
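
/* A minimal sketch (not part of the pass itself) of how a backward scan
   might consult libcall_dead_p; INSN and the NEEDED regset are assumed
   to come from the caller's propagate_block-style context.  */
#if 0
  rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
  if (note != 0
      && insn_dead_p (PATTERN (insn), needed, 0, REG_NOTES (insn))
      && libcall_dead_p (PATTERN (insn), needed, note, insn))
    {
      /* The whole sequence from the REG_LIBCALL insn to INSN can go.  */
    }
#endif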
3756
3757 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3758 live at function entry. Don't count global register variables, variables
3759 in registers that can be used for function arg passing, or variables in
3760 fixed hard registers. */
3761
3762 int
3763 regno_uninitialized (regno)
3764 int regno;
3765 {
3766 if (n_basic_blocks == 0
3767 || (regno < FIRST_PSEUDO_REGISTER
3768 && (global_regs[regno]
3769 || fixed_regs[regno]
3770 || FUNCTION_ARG_REGNO_P (regno))))
3771 return 0;
3772
3773 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3774 }
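
/* Illustrative only: a diagnostics pass (cf. uninitialized_vars_warning
   in function.c) might use this predicate roughly as follows.  DECL here
   is a hypothetical VAR_DECL whose DECL_RTL is a pseudo register.  */
#if 0
  if (GET_CODE (DECL_RTL (decl)) == REG
      && regno_uninitialized (REGNO (DECL_RTL (decl))))
    warning_with_decl (decl,
		       "`%s' might be used uninitialized in this function");
#endif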
3775
3776 /* 1 if register REGNO was alive at a place where `setjmp' was called
3777 and was set more than once or is an argument.
3778 Such regs may be clobbered by `longjmp'. */
3779
3780 int
3781 regno_clobbered_at_setjmp (regno)
3782 int regno;
3783 {
3784 if (n_basic_blocks == 0)
3785 return 0;
3786
3787 return ((REG_N_SETS (regno) > 1
3788 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3789 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3790 }
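
/* Similarly, an illustrative sketch of the companion setjmp warning
   (cf. setjmp_vars_warning in function.c); DECL again is hypothetical.  */
#if 0
  if (GET_CODE (DECL_RTL (decl)) == REG
      && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
    warning_with_decl (decl,
		       "variable `%s' might be clobbered by `longjmp' or `vfork'");
#endif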
3791 \f
3792 /* INSN references memory, possibly using autoincrement addressing modes.
3793 Find any entries on the mem_set_list that need to be invalidated due
3794 to an address change. */
3795 static void
3796 invalidate_mems_from_autoinc (insn)
3797 rtx insn;
3798 {
3799 rtx note = REG_NOTES (insn);
3800 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3801 {
3802 if (REG_NOTE_KIND (note) == REG_INC)
3803 {
3804 rtx temp = mem_set_list;
3805 rtx prev = NULL_RTX;
3806 rtx next;
3807
3808 while (temp)
3809 {
3810 next = XEXP (temp, 1);
3811 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3812 {
3813 /* Splice temp out of list. */
3814 if (prev)
3815 XEXP (prev, 1) = next;
3816 else
3817 mem_set_list = next;
3818 free_EXPR_LIST_node (temp);
3819 }
3820 else
3821 prev = temp;
3822 temp = next;
3823 }
3824 }
3825 }
3826 }
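
/* For example, if INSN contains (mem (post_inc (reg 65))), its REG_INC
   note mentions (reg 65); the loop above then splices out any tracked
   entry whose address involves that register, such as
   (mem (plus (reg 65) (const_int 4))), since the address register no
   longer has its old value.  (Register 65 is purely illustrative.)  */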
3827
3828 /* Process the registers that are set within X. Their bits are set to
3829 1 in the regset DEAD, because they are dead prior to this insn.
3830
3831 If INSN is nonzero, it is the insn being processed.
3832
3833 FLAGS is the set of operations to perform. */
3834
3835 static void
3836 mark_set_regs (needed, dead, x, insn, significant, flags)
3837 regset needed;
3838 regset dead;
3839 rtx x;
3840 rtx insn;
3841 regset significant;
3842 int flags;
3843 {
3844 register RTX_CODE code = GET_CODE (x);
3845
3846 if (code == SET || code == CLOBBER)
3847 mark_set_1 (needed, dead, x, insn, significant, flags);
3848 else if (code == PARALLEL)
3849 {
3850 register int i;
3851 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3852 {
3853 code = GET_CODE (XVECEXP (x, 0, i));
3854 if (code == SET || code == CLOBBER)
3855 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3856 significant, flags);
3857 }
3858 }
3859 }
3860
3861 /* Process a single SET rtx, X. */
3862
3863 static void
3864 mark_set_1 (needed, dead, x, insn, significant, flags)
3865 regset needed;
3866 regset dead;
3867 rtx x;
3868 rtx insn;
3869 regset significant;
3870 int flags;
3871 {
3872 register int regno = -1;
3873 register rtx reg = SET_DEST (x);
3874
3875 /* Some targets place small structures in registers for
3876 return values of functions. We have to detect this
3877 case specially here to get correct flow information. */
3878 if (GET_CODE (reg) == PARALLEL
3879 && GET_MODE (reg) == BLKmode)
3880 {
3881 register int i;
3882
3883 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3884 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3885 significant, flags);
3886 return;
3887 }
3888
3889 /* Modifying just one hardware register of a multi-reg value
3890 or just a byte field of a register
3891 does not mean the value from before this insn is now dead.
3892 But it does mean liveness of that register at the end of the block
3893 is significant.
3894
3895 Within mark_set_1, however, we treat it as if the register is
3896 indeed modified; mark_used_regs will also treat this
3897 register as being used. Thus, we treat these insns as setting a
3898 new value for the register as a function of its old value. This
3899 causes LOG_LINKS to be made appropriately and this will help combine. */
3900
3901 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3902 || GET_CODE (reg) == SIGN_EXTRACT
3903 || GET_CODE (reg) == STRICT_LOW_PART)
3904 reg = XEXP (reg, 0);
3905
3906 /* If this set is a MEM, then it kills any aliased writes.
3907 If this set is a REG, then it kills any MEMs which use the reg. */
3908 if (flags & PROP_SCAN_DEAD_CODE)
3909 {
3910 if (GET_CODE (reg) == MEM
3911 || GET_CODE (reg) == REG)
3912 {
3913 rtx temp = mem_set_list;
3914 rtx prev = NULL_RTX;
3915 rtx next;
3916
3917 while (temp)
3918 {
3919 next = XEXP (temp, 1);
3920 if ((GET_CODE (reg) == MEM
3921 && output_dependence (XEXP (temp, 0), reg))
3922 || (GET_CODE (reg) == REG
3923 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3924 {
3925 /* Splice this entry out of the list. */
3926 if (prev)
3927 XEXP (prev, 1) = next;
3928 else
3929 mem_set_list = next;
3930 free_EXPR_LIST_node (temp);
3931 }
3932 else
3933 prev = temp;
3934 temp = next;
3935 }
3936 }
3937
3938 /* If the memory reference had embedded side effects (autoincrement
3939 address modes), then we may need to kill some entries on the
3940 memory set list. */
3941 if (insn && GET_CODE (reg) == MEM)
3942 invalidate_mems_from_autoinc (insn);
3943
3944 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3945 /* We do not know the size of a BLKmode store, so we do not track
3946 them for redundant store elimination. */
3947 && GET_MODE (reg) != BLKmode
3948 /* There are no REG_INC notes for SP, so we can't assume we'll see
3949 everything that invalidates it. To be safe, don't eliminate any
3950 stores through SP; none of them should be redundant anyway. */
3951 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3952 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
3953 }
3954
3955 if (GET_CODE (reg) == REG
3956 && (regno = REGNO (reg),
3957 ! (regno == FRAME_POINTER_REGNUM
3958 && (! reload_completed || frame_pointer_needed)))
3959 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3960 && ! (regno == HARD_FRAME_POINTER_REGNUM
3961 && (! reload_completed || frame_pointer_needed))
3962 #endif
3963 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3964 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3965 #endif
3966 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3967 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3968 {
3969 int some_needed = REGNO_REG_SET_P (needed, regno);
3970 int some_not_needed = ! some_needed;
3971
3972 /* Mark it as a significant register for this basic block. */
3973 if (significant)
3974 SET_REGNO_REG_SET (significant, regno);
3975
3976 /* Mark it as dead before this insn. */
3977 SET_REGNO_REG_SET (dead, regno);
3978
3979 /* A hard reg in a wide mode may really be multiple registers.
3980 If so, mark all of them just like the first. */
3981 if (regno < FIRST_PSEUDO_REGISTER)
3982 {
3983 int n;
3984
3985 /* Nothing below is needed for the stack pointer; get out asap.
3986 Eg, log links aren't needed, since combine won't use them. */
3987 if (regno == STACK_POINTER_REGNUM)
3988 return;
3989
3990 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3991 while (--n > 0)
3992 {
3993 int regno_n = regno + n;
3994 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3995 if (significant)
3996 SET_REGNO_REG_SET (significant, regno_n);
3997
3998 SET_REGNO_REG_SET (dead, regno_n);
3999 some_needed |= needed_regno;
4000 some_not_needed |= ! needed_regno;
4001 }
4002 }
4003
4004 /* Additional data to record if this is the final pass. */
4005 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4006 | PROP_DEATH_NOTES | PROP_AUTOINC))
4007 {
4008 register rtx y;
4009 register int blocknum = BLOCK_NUM (insn);
4010
4011 y = NULL_RTX;
4012 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4013 y = reg_next_use[regno];
4014
4015 /* If this is a hard reg, record this function uses the reg. */
4016
4017 if (regno < FIRST_PSEUDO_REGISTER)
4018 {
4019 register int i;
4020 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
4021
4022 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4023 for (i = regno; i < endregno; i++)
4024 {
4025 /* The next use is no longer "next", since a store
4026 intervenes. */
4027 reg_next_use[i] = 0;
4028 }
4029
4030 if (flags & PROP_REG_INFO)
4031 for (i = regno; i < endregno; i++)
4032 {
4033 regs_ever_live[i] = 1;
4034 REG_N_SETS (i)++;
4035 }
4036 }
4037 else
4038 {
4039 /* The next use is no longer "next", since a store
4040 intervenes. */
4041 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4042 reg_next_use[regno] = 0;
4043
4044 /* Keep track of which basic blocks each reg appears in. */
4045
4046 if (flags & PROP_REG_INFO)
4047 {
4048 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4049 REG_BASIC_BLOCK (regno) = blocknum;
4050 else if (REG_BASIC_BLOCK (regno) != blocknum)
4051 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4052
4053 /* Count (weighted) references, stores, etc. This counts a
4054 register twice if it is modified, but that is correct. */
4055 REG_N_SETS (regno)++;
4056 REG_N_REFS (regno) += loop_depth;
4057
4058 /* The insns where a reg is live are normally counted
4059 elsewhere, but we want the count to include the insn
4060 where the reg is set, and the normal counting mechanism
4061 would not count it. */
4062 REG_LIVE_LENGTH (regno)++;
4063 }
4064 }
4065
4066 if (! some_not_needed)
4067 {
4068 if (flags & PROP_LOG_LINKS)
4069 {
4070 /* Make a logical link from the next following insn
4071 that uses this register, back to this insn.
4072 The following insns have already been processed.
4073
4074 We don't build a LOG_LINK for hard registers contained
4075 in ASM_OPERANDs. If these registers get replaced,
4076 we might wind up changing the semantics of the insn,
4077 even if reload can make what appear to be valid
4078 assignments later. */
4079 if (y && (BLOCK_NUM (y) == blocknum)
4080 && (regno >= FIRST_PSEUDO_REGISTER
4081 || asm_noperands (PATTERN (y)) < 0))
4082 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4083 }
4084 }
4085 else if (! some_needed)
4086 {
4087 if (flags & PROP_REG_INFO)
4088 REG_N_DEATHS (REGNO (reg))++;
4089
4090 if (flags & PROP_DEATH_NOTES)
4091 {
4092 /* Note that dead stores have already been deleted
4093 when possible. If we get here, we have found a
4094 dead store that cannot be eliminated (because the
4095 same insn does something useful). Indicate this
4096 by marking the reg being set as dying here. */
4097 REG_NOTES (insn)
4098 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4099 }
4100 }
4101 else
4102 {
4103 if (flags & PROP_DEATH_NOTES)
4104 {
4105 /* This is a case where we have a multi-word hard register
4106 and some, but not all, of the words of the register are
4107 needed in subsequent insns. Write REG_UNUSED notes
4108 for those parts that were not needed. This case should
4109 be rare. */
4110
4111 int i;
4112
4113 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4114 i >= 0; i--)
4115 if (!REGNO_REG_SET_P (needed, regno + i))
4116 REG_NOTES (insn)
4117 = (alloc_EXPR_LIST
4118 (REG_UNUSED,
4119 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4120 REG_NOTES (insn)));
4121 }
4122 }
4123 }
4124 }
4125 else if (GET_CODE (reg) == REG)
4126 {
4127 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4128 reg_next_use[regno] = 0;
4129 }
4130
4131 /* If this is the last pass and this is a SCRATCH, show it will be dying
4132 here and count it. */
4133 else if (GET_CODE (reg) == SCRATCH)
4134 {
4135 if (flags & PROP_DEATH_NOTES)
4136 REG_NOTES (insn)
4137 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4138 }
4139 }
4140 \f
4141 #ifdef AUTO_INC_DEC
4142
4143 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4144 reference. */
4145
4146 static void
4147 find_auto_inc (needed, x, insn)
4148 regset needed;
4149 rtx x;
4150 rtx insn;
4151 {
4152 rtx addr = XEXP (x, 0);
4153 HOST_WIDE_INT offset = 0;
4154 rtx set;
4155
4156 /* Here we detect use of an index register which might be good for
4157 postincrement, postdecrement, preincrement, or predecrement. */
4158
4159 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4160 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4161
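  /* Summary of the address forms accepted below, where SIZE is the
     width of the memory reference and INC is the constant added to
     ADDR by the next insn (INTVAL (XEXP (y, 1))):

	INC ==  SIZE, OFFSET ==  0     => POST_INC
	INC == -SIZE, OFFSET ==  0     => POST_DEC
	INC ==  SIZE, OFFSET ==  SIZE  => PRE_INC
	INC == -SIZE, OFFSET == -SIZE  => PRE_DEC  */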
4162 if (GET_CODE (addr) == REG)
4163 {
4164 register rtx y;
4165 register int size = GET_MODE_SIZE (GET_MODE (x));
4166 rtx use;
4167 rtx incr;
4168 int regno = REGNO (addr);
4169
4170 /* Is the next use an increment that might make auto-increment? */
4171 if ((incr = reg_next_use[regno]) != 0
4172 && (set = single_set (incr)) != 0
4173 && GET_CODE (set) == SET
4174 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4175 /* Can't add side effects to jumps; if reg is spilled and
4176 reloaded, there's no way to store back the altered value. */
4177 && GET_CODE (insn) != JUMP_INSN
4178 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4179 && XEXP (y, 0) == addr
4180 && GET_CODE (XEXP (y, 1)) == CONST_INT
4181 && ((HAVE_POST_INCREMENT
4182 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4183 || (HAVE_POST_DECREMENT
4184 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4185 || (HAVE_PRE_INCREMENT
4186 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4187 || (HAVE_PRE_DECREMENT
4188 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4189 /* Make sure this reg appears only once in this insn. */
4190 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4191 use != 0 && use != (rtx) 1))
4192 {
4193 rtx q = SET_DEST (set);
4194 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4195 ? (offset ? PRE_INC : POST_INC)
4196 : (offset ? PRE_DEC : POST_DEC));
4197
4198 if (dead_or_set_p (incr, addr))
4199 {
4200 /* This is the simple case. Try to make the auto-inc. If
4201 we can't, we are done. Otherwise, we will do any
4202 needed updates below. */
4203 if (! validate_change (insn, &XEXP (x, 0),
4204 gen_rtx_fmt_e (inc_code, Pmode, addr),
4205 0))
4206 return;
4207 }
4208 else if (GET_CODE (q) == REG
4209 /* PREV_INSN used here to check the semi-open interval
4210 [insn,incr). */
4211 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4212 /* We must also check for sets of q as q may be
4213 a call clobbered hard register and there may
4214 be a call between PREV_INSN (insn) and incr. */
4215 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4216 {
4217 /* We have *p followed sometime later by q = p+size.
4218 Both p and q must be live afterward,
4219 and q is not used between INSN and its assignment.
4220 Change it to q = p, ...*q..., q = q+size.
4221 Then fall into the usual case. */
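	     /* Concretely (register numbers purely illustrative):
		  insn:  ... (mem (reg 65)) ...
		  incr:  (set (reg 66) (plus (reg 65) (const_int 4)))
		becomes
		  new:   (set (reg 66) (reg 65))
		  insn:  ... (mem (post_inc (reg 66))) ...
		  incr:  (set (reg 66) (reg 66)), later deleted as a no-op.  */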
4222 rtx insns, temp;
4223 basic_block bb;
4224
4225 start_sequence ();
4226 emit_move_insn (q, addr);
4227 insns = get_insns ();
4228 end_sequence ();
4229
4230 bb = BLOCK_FOR_INSN (insn);
4231 for (temp = insns; temp; temp = NEXT_INSN (temp))
4232 set_block_for_insn (temp, bb);
4233
4234 /* If we can't make the auto-inc, or can't make the
4235 replacement into Y, exit. There's no point in making
4236 the change below if we can't do the auto-inc and doing
4237 so is not correct in the pre-inc case. */
4238
4239 validate_change (insn, &XEXP (x, 0),
4240 gen_rtx_fmt_e (inc_code, Pmode, q),
4241 1);
4242 validate_change (incr, &XEXP (y, 0), q, 1);
4243 if (! apply_change_group ())
4244 return;
4245
4246 /* We now know we'll be doing this change, so emit the
4247 new insn(s) and do the updates. */
4248 emit_insns_before (insns, insn);
4249
4250 if (BLOCK_FOR_INSN (insn)->head == insn)
4251 BLOCK_FOR_INSN (insn)->head = insns;
4252
4253 /* INCR will become a NOTE and INSN won't contain a
4254 use of ADDR. If a use of ADDR was just placed in
4255 the insn before INSN, make that the next use.
4256 Otherwise, invalidate it. */
4257 if (GET_CODE (PREV_INSN (insn)) == INSN
4258 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4259 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4260 reg_next_use[regno] = PREV_INSN (insn);
4261 else
4262 reg_next_use[regno] = 0;
4263
4264 addr = q;
4265 regno = REGNO (q);
4266
4267 /* REGNO is now used in INCR which is below INSN, but
4268 it previously wasn't live here. If we don't mark
4269 it as needed, we'll put a REG_DEAD note for it
4270 on this insn, which is incorrect. */
4271 SET_REGNO_REG_SET (needed, regno);
4272
4273 /* If there are any calls between INSN and INCR, show
4274 that REGNO now crosses them. */
4275 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4276 if (GET_CODE (temp) == CALL_INSN)
4277 REG_N_CALLS_CROSSED (regno)++;
4278 }
4279 else
4280 return;
4281
4282 /* If we haven't returned, it means we were able to make the
4283 auto-inc, so update the status. First, record that this insn
4284 has an implicit side effect. */
4285
4286 REG_NOTES (insn)
4287 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4288
4289 /* Modify the old increment-insn to simply copy
4290 the already-incremented value of our register. */
4291 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4292 abort ();
4293
4294 /* If that makes it a no-op (copying the register into itself) delete
4295 it so it won't appear to be a "use" and a "set" of this
4296 register. */
4297 if (SET_DEST (set) == addr)
4298 {
4299 PUT_CODE (incr, NOTE);
4300 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4301 NOTE_SOURCE_FILE (incr) = 0;
4302 }
4303
4304 if (regno >= FIRST_PSEUDO_REGISTER)
4305 {
4306 /* Count an extra reference to the reg. When a reg is
4307 incremented, spilling it is worse, so we want to make
4308 that less likely. */
4309 REG_N_REFS (regno) += loop_depth;
4310
4311 /* Count the increment as a setting of the register,
4312 even though it isn't a SET in rtl. */
4313 REG_N_SETS (regno)++;
4314 }
4315 }
4316 }
4317 }
4318 #endif /* AUTO_INC_DEC */
4319 \f
4320 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4321 This is done assuming the registers needed from X
4322 are those that have 1-bits in NEEDED.
4323
4324 FLAGS is the set of enabled operations.
4325
4326 INSN is the containing instruction. If INSN is dead, this function is not
4327 called. */
4328
4329 static void
4330 mark_used_regs (needed, live, x, flags, insn)
4331 regset needed;
4332 regset live;
4333 rtx x;
4334 int flags;
4335 rtx insn;
4336 {
4337 register RTX_CODE code;
4338 register int regno;
4339 int i;
4340
4341 retry:
4342 code = GET_CODE (x);
4343 switch (code)
4344 {
4345 case LABEL_REF:
4346 case SYMBOL_REF:
4347 case CONST_INT:
4348 case CONST:
4349 case CONST_DOUBLE:
4350 case PC:
4351 case ADDR_VEC:
4352 case ADDR_DIFF_VEC:
4353 return;
4354
4355 #ifdef HAVE_cc0
4356 case CC0:
4357 cc0_live = 1;
4358 return;
4359 #endif
4360
4361 case CLOBBER:
4362 /* If we are clobbering a MEM, mark any registers inside the address
4363 as being used. */
4364 if (GET_CODE (XEXP (x, 0)) == MEM)
4365 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4366 return;
4367
4368 case MEM:
4369 /* Don't bother watching stores to mems if this is not the
4370 final pass. We'll not be deleting dead stores this round. */
4371 if (flags & PROP_SCAN_DEAD_CODE)
4372 {
4373 /* Invalidate the data for the last MEM stored, but only if MEM is
4374 something that can be stored into. */
4375 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4376 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4377 ; /* needn't clear the memory set list */
4378 else
4379 {
4380 rtx temp = mem_set_list;
4381 rtx prev = NULL_RTX;
4382 rtx next;
4383
4384 while (temp)
4385 {
4386 next = XEXP (temp, 1);
4387 if (anti_dependence (XEXP (temp, 0), x))
4388 {
4389 /* Splice temp out of the list. */
4390 if (prev)
4391 XEXP (prev, 1) = next;
4392 else
4393 mem_set_list = next;
4394 free_EXPR_LIST_node (temp);
4395 }
4396 else
4397 prev = temp;
4398 temp = next;
4399 }
4400 }
4401
4402 /* If the memory reference had embedded side effects (autoincrement
4403 address modes), then we may need to kill some entries on the
4404 memory set list. */
4405 if (insn)
4406 invalidate_mems_from_autoinc (insn);
4407 }
4408
4409 #ifdef AUTO_INC_DEC
4410 if (flags & PROP_AUTOINC)
4411 find_auto_inc (needed, x, insn);
4412 #endif
4413 break;
4414
4415 case SUBREG:
4416 if (GET_CODE (SUBREG_REG (x)) == REG
4417 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4418 && (GET_MODE_SIZE (GET_MODE (x))
4419 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4420 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4421
4422 /* While we're here, optimize this case. */
4423 x = SUBREG_REG (x);
4424
4425 /* In case the SUBREG is not of a register, don't optimize. */
4426 if (GET_CODE (x) != REG)
4427 {
4428 mark_used_regs (needed, live, x, flags, insn);
4429 return;
4430 }
4431
4432 /* ... fall through ... */
4433
4434 case REG:
4435 /* We are seeing a register used here rather than set;
4436 mark it as needed. */
4437
4438 regno = REGNO (x);
4439 {
4440 int some_needed = REGNO_REG_SET_P (needed, regno);
4441 int some_not_needed = ! some_needed;
4442
4443 SET_REGNO_REG_SET (live, regno);
4444
4445 /* A hard reg in a wide mode may really be multiple registers.
4446 If so, mark all of them just like the first. */
4447 if (regno < FIRST_PSEUDO_REGISTER)
4448 {
4449 int n;
4450
4451 /* For stack ptr or fixed arg pointer,
4452 nothing below can be necessary, so waste no more time. */
4453 if (regno == STACK_POINTER_REGNUM
4454 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4455 || (regno == HARD_FRAME_POINTER_REGNUM
4456 && (! reload_completed || frame_pointer_needed))
4457 #endif
4458 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4459 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4460 #endif
4461 || (regno == FRAME_POINTER_REGNUM
4462 && (! reload_completed || frame_pointer_needed)))
4463 {
4464 /* If this is a register we are going to try to eliminate,
4465 don't mark it live here. If we are successful in
4466 eliminating it, it need not be live unless it is used for
4467 pseudos, in which case it will have been set live when
4468 it was allocated to the pseudos. If the register will not
4469 be eliminated, reload will set it live at that point. */
4470
4471 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
4472 regs_ever_live[regno] = 1;
4473 return;
4474 }
4475 /* No death notes for global register variables;
4476 their values are live after this function exits. */
4477 if (global_regs[regno])
4478 {
4479 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4480 reg_next_use[regno] = insn;
4481 return;
4482 }
4483
4484 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4485 while (--n > 0)
4486 {
4487 int regno_n = regno + n;
4488 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4489
4490 SET_REGNO_REG_SET (live, regno_n);
4491 some_needed |= needed_regno;
4492 some_not_needed |= ! needed_regno;
4493 }
4494 }
4495
4496 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4497 {
4498 /* Record where each reg is used, so when the reg
4499 is set we know the next insn that uses it. */
4500
4501 reg_next_use[regno] = insn;
4502 }
4503 if (flags & PROP_REG_INFO)
4504 {
4505 if (regno < FIRST_PSEUDO_REGISTER)
4506 {
4507 /* If a hard reg is being used,
4508 record that this function does use it. */
4509
4510 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4511 if (i == 0)
4512 i = 1;
4513 do
4514 regs_ever_live[regno + --i] = 1;
4515 while (i > 0);
4516 }
4517 else
4518 {
4519 /* Keep track of which basic block each reg appears in. */
4520
4521 register int blocknum = BLOCK_NUM (insn);
4522
4523 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4524 REG_BASIC_BLOCK (regno) = blocknum;
4525 else if (REG_BASIC_BLOCK (regno) != blocknum)
4526 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4527
4528 /* Count (weighted) number of uses of each reg. */
4529
4530 REG_N_REFS (regno) += loop_depth;
4531 }
4532 }
4533
4534 /* Record and count the insns in which a reg dies.
4535 If it is used in this insn and was dead below the insn
4536 then it dies in this insn. If it was set in this insn,
4537 we do not make a REG_DEAD note; likewise if we already
4538 made such a note. */
4539
4540 if (flags & PROP_DEATH_NOTES)
4541 {
4542 if (some_not_needed
4543 && ! dead_or_set_p (insn, x)
4544 #if 0
4545 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4546 #endif
4547 )
4548 {
4549 /* Check for the case where the register dying partially
4550 overlaps the register set by this insn. */
4551 if (regno < FIRST_PSEUDO_REGISTER
4552 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4553 {
4554 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4555 while (--n >= 0)
4556 some_needed |= dead_or_set_regno_p (insn, regno + n);
4557 }
4558
4559 /* If none of the words in X is needed, make a REG_DEAD
4560 note. Otherwise, we must make partial REG_DEAD notes. */
4561 if (! some_needed)
4562 {
4563 REG_NOTES (insn)
4564 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4565 REG_N_DEATHS (regno)++;
4566 }
4567 else
4568 {
4569 int i;
4570
4571 /* Don't make a REG_DEAD note for a part of a register
4572 that is set in the insn. */
4573
4574 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4575 i >= 0; i--)
4576 if (!REGNO_REG_SET_P (needed, regno + i)
4577 && ! dead_or_set_regno_p (insn, regno + i))
4578 REG_NOTES (insn)
4579 = (alloc_EXPR_LIST
4580 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4581 regno + i),
4582 REG_NOTES (insn)));
4583 }
4584 }
4585 }
4586 }
4587 return;
4588
4589 case SET:
4590 {
4591 register rtx testreg = SET_DEST (x);
4592 int mark_dest = 0;
4593
4594 /* If storing into MEM, don't show it as being used. But do
4595 show the address as being used. */
4596 if (GET_CODE (testreg) == MEM)
4597 {
4598 #ifdef AUTO_INC_DEC
4599 if (flags & PROP_AUTOINC)
4600 find_auto_inc (needed, testreg, insn);
4601 #endif
4602 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4603 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4604 return;
4605 }
4606
4607 /* Storing in STRICT_LOW_PART is like storing in a reg
4608 in that this SET might be dead, so ignore it in TESTREG,
4609 but in some other ways it is like using the reg.
4610
4611 Storing in a SUBREG or a bit field is like storing the entire
4612 register in that if the register's value is not used
4613 then this SET is not needed. */
4614 while (GET_CODE (testreg) == STRICT_LOW_PART
4615 || GET_CODE (testreg) == ZERO_EXTRACT
4616 || GET_CODE (testreg) == SIGN_EXTRACT
4617 || GET_CODE (testreg) == SUBREG)
4618 {
4619 if (GET_CODE (testreg) == SUBREG
4620 && GET_CODE (SUBREG_REG (testreg)) == REG
4621 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4622 && (GET_MODE_SIZE (GET_MODE (testreg))
4623 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4624 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4625
4626 /* Modifying a single register in an alternate mode
4627 does not use any of the old value. But these other
4628 ways of storing in a register do use the old value. */
4629 if (GET_CODE (testreg) == SUBREG
4630 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4631 ;
4632 else
4633 mark_dest = 1;
4634
4635 testreg = XEXP (testreg, 0);
4636 }
4637
4638 /* If this is a store into a register,
4639 recursively scan the value being stored. */
4640
4641 if ((GET_CODE (testreg) == PARALLEL
4642 && GET_MODE (testreg) == BLKmode)
4643 || (GET_CODE (testreg) == REG
4644 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4645 && (! reload_completed || frame_pointer_needed)))
4646 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4647 && ! (regno == HARD_FRAME_POINTER_REGNUM
4648 && (! reload_completed || frame_pointer_needed))
4649 #endif
4650 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4651 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4652 #endif
4653 ))
4654 /* We used to exclude global_regs here, but that seems wrong.
4655 Storing in them is like storing in mem. */
4656 {
4657 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4658 if (mark_dest)
4659 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4660 return;
4661 }
4662 }
4663 break;
4664
4665 case RETURN:
4666 /* ??? This info should have been gotten from mark_regs_live_at_end,
4667 as applied to the EXIT block, and propagated along the edge that
4668 connects this block to the EXIT. */
4669 break;
4670
4671 case ASM_OPERANDS:
4672 case UNSPEC_VOLATILE:
4673 case TRAP_IF:
4674 case ASM_INPUT:
4675 {
4676 /* Traditional and volatile asm instructions must be considered to use
4677 and clobber all hard registers, all pseudo-registers and all of
4678 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4679
4680 Consider for instance a volatile asm that changes the fpu rounding
4681 mode. An insn should not be moved across this even if it only uses
4682 pseudo-regs because it might give an incorrectly rounded result.
4683
4684 ?!? Unfortunately, marking all hard registers as live causes massive
4685 problems for the register allocator and marking all pseudos as live
4686 creates mountains of uninitialized variable warnings.
4687
4688 So for now, just clear the memory set list and mark any regs
4689 we can find in ASM_OPERANDS as used. */
4690 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4691 free_EXPR_LIST_list (&mem_set_list);
4692
4693 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4694 We cannot just fall through here since then we would be confused
4695 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
4696 a traditional asm, unlike its normal usage. */
4697 if (code == ASM_OPERANDS)
4698 {
4699 int j;
4700
4701 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4702 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4703 flags, insn);
4704 }
4705 break;
4706 }
4707
4708
4709 default:
4710 break;
4711 }
4712
4713 /* Recursively scan the operands of this expression. */
4714
4715 {
4716 register const char *fmt = GET_RTX_FORMAT (code);
4717 register int i;
4718
4719 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4720 {
4721 if (fmt[i] == 'e')
4722 {
4723 /* Tail recursive case: save a function call level. */
4724 if (i == 0)
4725 {
4726 x = XEXP (x, 0);
4727 goto retry;
4728 }
4729 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4730 }
4731 else if (fmt[i] == 'E')
4732 {
4733 register int j;
4734 for (j = 0; j < XVECLEN (x, i); j++)
4735 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
4736 }
4737 }
4738 }
4739 }
4740 \f
4741 #ifdef AUTO_INC_DEC
4742
4743 static int
4744 try_pre_increment_1 (insn)
4745 rtx insn;
4746 {
4747 /* Find the next use of this reg. If in same basic block,
4748 make it do pre-increment or pre-decrement if appropriate. */
4749 rtx x = single_set (insn);
4750 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4751 * INTVAL (XEXP (SET_SRC (x), 1)));
4752 int regno = REGNO (SET_DEST (x));
4753 rtx y = reg_next_use[regno];
4754 if (y != 0
4755 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4756 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4757 mode would be better. */
4758 && ! dead_or_set_p (y, SET_DEST (x))
4759 && try_pre_increment (y, SET_DEST (x), amount))
4760 {
4761 /* We have found a suitable auto-increment
4762 and already changed insn Y to do it.
4763 So flush this increment-instruction. */
4764 PUT_CODE (insn, NOTE);
4765 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4766 NOTE_SOURCE_FILE (insn) = 0;
4767 /* Count a reference to this reg for the increment
4768 insn we are deleting. When a reg is incremented,
4769 spilling it is worse, so we want to make that
4770 less likely. */
4771 if (regno >= FIRST_PSEUDO_REGISTER)
4772 {
4773 REG_N_REFS (regno) += loop_depth;
4774 REG_N_SETS (regno)++;
4775 }
4776 return 1;
4777 }
4778 return 0;
4779 }
4780
4781 /* Try to change INSN so that it does pre-increment or pre-decrement
4782 addressing on register REG in order to add AMOUNT to REG.
4783 AMOUNT is negative for pre-decrement.
4784 Returns 1 if the change could be made.
4785 This checks all about the validity of the result of modifying INSN. */
4786
4787 static int
4788 try_pre_increment (insn, reg, amount)
4789 rtx insn, reg;
4790 HOST_WIDE_INT amount;
4791 {
4792 register rtx use;
4793
4794 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4795 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4796 int pre_ok = 0;
4797 /* Nonzero if we can try to make a post-increment or post-decrement.
4798 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4799 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4800 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4801 int post_ok = 0;
4802
4803 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4804 int do_post = 0;
4805
4806 /* From the sign of increment, see which possibilities are conceivable
4807 on this target machine. */
4808 if (HAVE_PRE_INCREMENT && amount > 0)
4809 pre_ok = 1;
4810 if (HAVE_POST_INCREMENT && amount > 0)
4811 post_ok = 1;
4812
4813 if (HAVE_PRE_DECREMENT && amount < 0)
4814 pre_ok = 1;
4815 if (HAVE_POST_DECREMENT && amount < 0)
4816 post_ok = 1;
4817
4818 if (! (pre_ok || post_ok))
4819 return 0;
4820
4821 /* It is not safe to add a side effect to a jump insn
4822 because if the incremented register is spilled and must be reloaded
4823 there would be no way to store the incremented value back in memory. */
4824
4825 if (GET_CODE (insn) == JUMP_INSN)
4826 return 0;
4827
4828 use = 0;
4829 if (pre_ok)
4830 use = find_use_as_address (PATTERN (insn), reg, 0);
4831 if (post_ok && (use == 0 || use == (rtx) 1))
4832 {
4833 use = find_use_as_address (PATTERN (insn), reg, -amount);
4834 do_post = 1;
4835 }
4836
4837 if (use == 0 || use == (rtx) 1)
4838 return 0;
4839
4840 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4841 return 0;
4842
4843 /* See if this combination of instruction and addressing mode exists. */
4844 if (! validate_change (insn, &XEXP (use, 0),
4845 gen_rtx_fmt_e (amount > 0
4846 ? (do_post ? POST_INC : PRE_INC)
4847 : (do_post ? POST_DEC : PRE_DEC),
4848 Pmode, reg), 0))
4849 return 0;
4850
4851 /* Record that this insn now has an implicit side effect on X. */
4852 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4853 return 1;
4854 }
4855
4856 #endif /* AUTO_INC_DEC */
4857 \f
4858 /* Find the place in the rtx X where REG is used as a memory address.
4859 Return the MEM rtx that so uses it.
4860 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4861 (plus REG (const_int PLUSCONST)).
4862
4863 If such an address does not appear, return 0.
4864 If REG appears more than once, or is used other than in such an address,
4865 return (rtx)1. */
4866
4867 rtx
4868 find_use_as_address (x, reg, plusconst)
4869 register rtx x;
4870 rtx reg;
4871 HOST_WIDE_INT plusconst;
4872 {
4873 enum rtx_code code = GET_CODE (x);
4874 const char *fmt = GET_RTX_FORMAT (code);
4875 register int i;
4876 register rtx value = 0;
4877 register rtx tem;
4878
4879 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4880 return x;
4881
4882 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4883 && XEXP (XEXP (x, 0), 0) == reg
4884 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4885 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4886 return x;
4887
4888 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4889 {
4890 /* If REG occurs inside a MEM used in a bit-field reference,
4891 that is unacceptable. */
4892 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4893 return (rtx) (HOST_WIDE_INT) 1;
4894 }
4895
4896 if (x == reg)
4897 return (rtx) (HOST_WIDE_INT) 1;
4898
4899 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4900 {
4901 if (fmt[i] == 'e')
4902 {
4903 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4904 if (value == 0)
4905 value = tem;
4906 else if (tem != 0)
4907 return (rtx) (HOST_WIDE_INT) 1;
4908 }
4909 if (fmt[i] == 'E')
4910 {
4911 register int j;
4912 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4913 {
4914 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4915 if (value == 0)
4916 value = tem;
4917 else if (tem != 0)
4918 return (rtx) (HOST_WIDE_INT) 1;
4919 }
4920 }
4921 }
4922
4923 return value;
4924 }
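
/* Sketch of the calling convention (cf. the AUTO_INC_DEC code above):
   a null return means REG is not used as an address at all, while a
   return of (rtx) 1 means it is used in some unacceptable way.  */
#if 0
  rtx use = find_use_as_address (PATTERN (insn), reg, 0);
  if (use != 0 && use != (rtx) 1)
    {
      /* USE is the unique MEM in INSN whose address is exactly REG.  */
    }
#endif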
4925 \f
4926 /* Write information about registers and basic blocks into FILE.
4927 This is part of making a debugging dump. */
4928
4929 void
4930 dump_flow_info (file)
4931 FILE *file;
4932 {
4933 register int i;
4934 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4935
4936 fprintf (file, "%d registers.\n", max_regno);
4937 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4938 if (REG_N_REFS (i))
4939 {
4940 enum reg_class class, altclass;
4941 fprintf (file, "\nRegister %d used %d times across %d insns",
4942 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4943 if (REG_BASIC_BLOCK (i) >= 0)
4944 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4945 if (REG_N_SETS (i))
4946 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4947 (REG_N_SETS (i) == 1) ? "" : "s");
4948 if (REG_USERVAR_P (regno_reg_rtx[i]))
4949 fprintf (file, "; user var");
4950 if (REG_N_DEATHS (i) != 1)
4951 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4952 if (REG_N_CALLS_CROSSED (i) == 1)
4953 fprintf (file, "; crosses 1 call");
4954 else if (REG_N_CALLS_CROSSED (i))
4955 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4956 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4957 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4958 class = reg_preferred_class (i);
4959 altclass = reg_alternate_class (i);
4960 if (class != GENERAL_REGS || altclass != ALL_REGS)
4961 {
4962 if (altclass == ALL_REGS || class == ALL_REGS)
4963 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4964 else if (altclass == NO_REGS)
4965 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4966 else
4967 fprintf (file, "; pref %s, else %s",
4968 reg_class_names[(int) class],
4969 reg_class_names[(int) altclass]);
4970 }
4971 if (REGNO_POINTER_FLAG (i))
4972 fprintf (file, "; pointer");
4973 fprintf (file, ".\n");
4974 }
4975
4976 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4977 for (i = 0; i < n_basic_blocks; i++)
4978 {
4979 register basic_block bb = BASIC_BLOCK (i);
4980 register int regno;
4981 register edge e;
4982
4983 fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
4984 i, INSN_UID (bb->head), INSN_UID (bb->end));
4985
4986 fprintf (file, "Predecessors: ");
4987 for (e = bb->pred; e ; e = e->pred_next)
4988 dump_edge_info (file, e, 0);
4989
4990 fprintf (file, "\nSuccessors: ");
4991 for (e = bb->succ; e ; e = e->succ_next)
4992 dump_edge_info (file, e, 1);
4993
4994 fprintf (file, "\nRegisters live at start:");
4995 if (bb->global_live_at_start)
4996 {
4997 for (regno = 0; regno < max_regno; regno++)
4998 if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
4999 fprintf (file, " %d", regno);
5000 }
5001 else
5002 fprintf (file, " n/a");
5003
5004 fprintf (file, "\nRegisters live at end:");
5005 if (bb->global_live_at_end)
5006 {
5007 for (regno = 0; regno < max_regno; regno++)
5008 if (REGNO_REG_SET_P (bb->global_live_at_end, regno))
5009 fprintf (file, " %d", regno);
5010 }
5011 else
5012 fprintf (file, " n/a");
5013
5014 putc ('\n', file);
5015 }
5016
5017 putc ('\n', file);
5018 }
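
/* The dump produced above looks roughly like this (all numbers
   illustrative):

     128 registers.

     Register 101 used 5 times across 14 insns in block 2; set 2 times; user var; pref GENERAL_REGS.

     5 basic blocks, 7 edges.

     Basic block 0: first insn 11, last 24.
     Predecessors:  ENTRY (fallthru)
     Successors:  1 (fallthru)
     Registers live at start: 6 7
     Registers live at end: 6 7 101  */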
5019
5020 void
5021 debug_flow_info ()
5022 {
5023 dump_flow_info (stderr);
5024 }
5025
5026 static void
5027 dump_edge_info (file, e, do_succ)
5028 FILE *file;
5029 edge e;
5030 int do_succ;
5031 {
5032 basic_block side = (do_succ ? e->dest : e->src);
5033
5034 if (side == ENTRY_BLOCK_PTR)
5035 fputs (" ENTRY", file);
5036 else if (side == EXIT_BLOCK_PTR)
5037 fputs (" EXIT", file);
5038 else
5039 fprintf (file, " %d", side->index);
5040
5041 if (e->flags)
5042 {
5043 static const char * const bitnames[] = {
5044 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5045 };
5046 int comma = 0;
5047 int i, flags = e->flags;
5048
5049 fputc (' ', file);
5050 fputc ('(', file);
5051 for (i = 0; flags; i++)
5052 if (flags & (1 << i))
5053 {
5054 flags &= ~(1 << i);
5055
5056 if (comma)
5057 fputc (',', file);
5058 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5059 fputs (bitnames[i], file);
5060 else
5061 fprintf (file, "%d", i);
5062 comma = 1;
5063 }
5064 fputc (')', file);
5065 }
5066 }
5067
5068 \f
5069 /* Like print_rtl, but also print out live information for the start of each
5070 basic block. */
5071
5072 void
5073 print_rtl_with_bb (outf, rtx_first)
5074 FILE *outf;
5075 rtx rtx_first;
5076 {
5077 register rtx tmp_rtx;
5078
5079 if (rtx_first == 0)
5080 fprintf (outf, "(nil)\n");
5081 else
5082 {
5083 int i;
5084 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5085 int max_uid = get_max_uid ();
5086 basic_block *start = (basic_block *)
5087 xcalloc (max_uid, sizeof (basic_block));
5088 basic_block *end = (basic_block *)
5089 xcalloc (max_uid, sizeof (basic_block));
5090 enum bb_state *in_bb_p = (enum bb_state *)
5091 xcalloc (max_uid, sizeof (enum bb_state));
5092
5093 for (i = n_basic_blocks - 1; i >= 0; i--)
5094 {
5095 basic_block bb = BASIC_BLOCK (i);
5096 rtx x;
5097
5098 start[INSN_UID (bb->head)] = bb;
5099 end[INSN_UID (bb->end)] = bb;
5100 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5101 {
5102 enum bb_state state = IN_MULTIPLE_BB;
5103 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
5104 state = IN_ONE_BB;
5105 in_bb_p[INSN_UID(x)] = state;
5106
5107 if (x == bb->end)
5108 break;
5109 }
5110 }
5111
5112 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5113 {
5114 int did_output;
5115 basic_block bb;
5116
5117 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5118 {
5119 fprintf (outf, ";; Start of basic block %d, registers live:",
5120 bb->index);
5121
5122 EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
5123 {
5124 fprintf (outf, " %d", i);
5125 if (i < FIRST_PSEUDO_REGISTER)
5126 fprintf (outf, " [%s]",
5127 reg_names[i]);
5128 });
5129 putc ('\n', outf);
5130 }
5131
5132 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5133 && GET_CODE (tmp_rtx) != NOTE
5134 && GET_CODE (tmp_rtx) != BARRIER
5135 && ! obey_regdecls)
5136 fprintf (outf, ";; Insn is not within a basic block\n");
5137 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5138 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5139
5140 did_output = print_rtl_single (outf, tmp_rtx);
5141
5142 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5143 fprintf (outf, ";; End of basic block %d\n", bb->index);
5144
5145 if (did_output)
5146 putc ('\n', outf);
5147 }
5148
5149 free (start);
5150 free (end);
5151 free (in_bb_p);
5152 }
5153
5154 if (current_function_epilogue_delay_list != 0)
5155 {
5156 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5157 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5158 tmp_rtx = XEXP (tmp_rtx, 1))
5159 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5160 }
5161 }
5162
5163 /* Compute dominator relationships using new flow graph structures. */
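
/* Concretely, the sets computed below are the maximal fixed point of the
   usual dataflow equations.  Writing dom (B) for the set of blocks
   dominating block B, and taking the dominator set contributed by the
   ENTRY block to be empty:

	dom (B) = { B } union (intersection over predecessors P of dom (P))

   Every set starts out full ("everything dominates everything") and is
   pruned until nothing changes; post-dominators are computed dually,
   using successors and the EXIT block.  */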
5164 void
5165 compute_flow_dominators (dominators, post_dominators)
5166 sbitmap *dominators;
5167 sbitmap *post_dominators;
5168 {
5169 int bb;
5170 sbitmap *temp_bitmap;
5171 edge e;
5172 basic_block *worklist, *tos;
5173
5174 /* Allocate a worklist array/queue. Entries are only added to the
5175 list if they were not already on the list. So the size is
5176 bounded by the number of basic blocks. */
5177 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5178 * n_basic_blocks);
5179
5180 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5181 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5182
5183 if (dominators)
5184 {
5185 /* The optimistic setting of dominators requires us to put every
5186 block on the work list initially. */
5187 for (bb = 0; bb < n_basic_blocks; bb++)
5188 {
5189 *tos++ = BASIC_BLOCK (bb);
5190 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5191 }
5192
5193 /* We want a maximal solution, so initially assume everything dominates
5194 everything else. */
5195 sbitmap_vector_ones (dominators, n_basic_blocks);
5196
5197 /* Mark successors of the entry block so we can identify them below. */
5198 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5199 e->dest->aux = ENTRY_BLOCK_PTR;
5200
5201 /* Iterate until the worklist is empty. */
5202 while (tos != worklist)
5203 {
5204 /* Take the first entry off the worklist. */
5205 basic_block b = *--tos;
5206 bb = b->index;
5207
5208 /* Compute the intersection of the dominators of all the
5209 predecessor blocks.
5210
5211 If one of the predecessor blocks is the ENTRY block, then the
5212 intersection of the dominators of the predecessor blocks is
5213 defined as the null set. We can identify such blocks by the
5214 special value in the AUX field in the block structure. */
5215 if (b->aux == ENTRY_BLOCK_PTR)
5216 {
5217 /* Do not clear the aux field for blocks which are
5218 successors of the ENTRY block. That way we never
5219 add them to the worklist again.
5220
5221 The intersection of dominators of the preds of this block is
5222 defined as the null set. */
5223 sbitmap_zero (temp_bitmap[bb]);
5224 }
5225 else
5226 {
5227 /* Clear the aux field of this block so it can be added to
5228 the worklist again if necessary. */
5229 b->aux = NULL;
5230 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5231 }
5232
5233 /* Make sure each block always dominates itself. */
5234 SET_BIT (temp_bitmap[bb], bb);
5235
5236 /* If the out state of this block changed, then we need to
5237 add the successors of this block to the worklist if they
5238 are not already on the worklist. */
5239 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5240 {
5241 for (e = b->succ; e; e = e->succ_next)
5242 {
5243 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5244 {
5245 *tos++ = e->dest;
5246 e->dest->aux = e;
5247 }
5248 }
5249 }
5250 }
5251 }
5252
5253 if (post_dominators)
5254 {
5255 /* The optimistic setting of dominators requires us to put every
5256 block on the work list initially. */
5257 for (bb = 0; bb < n_basic_blocks; bb++)
5258 {
5259 *tos++ = BASIC_BLOCK (bb);
5260 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5261 }
5262
5263 /* We want a maximal solution, so initially assume everything post
5264 dominates everything else. */
5265 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5266
5267 /* Mark predecessors of the exit block so we can identify them below. */
5268 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5269 e->src->aux = EXIT_BLOCK_PTR;
5270
5271 /* Iterate until the worklist is empty. */
5272 while (tos != worklist)
5273 {
5274 /* Take the first entry off the worklist. */
5275 basic_block b = *--tos;
5276 bb = b->index;
5277
5278 /* Compute the intersection of the post dominators of all the
5279 successor blocks.
5280
5281 If one of the successor blocks is the EXIT block, then the
5282 intersection of the dominators of the successor blocks is
5283 defined as the null set. We can identify such blocks by the
5284 special value in the AUX field in the block structure. */
5285 if (b->aux == EXIT_BLOCK_PTR)
5286 {
5287 /* Do not clear the aux field for blocks which are
5288 predecessors of the EXIT block. That way we never
5289 add them to the worklist again.
5290
5291 The intersection of dominators of the succs of this block is
5292 defined as the null set. */
5293 sbitmap_zero (temp_bitmap[bb]);
5294 }
5295 else
5296 {
5297 /* Clear the aux field of this block so it can be added to
5298 the worklist again if necessary. */
5299 b->aux = NULL;
5300 sbitmap_intersection_of_succs (temp_bitmap[bb],
5301 post_dominators, bb);
5302 }
5303
5304 /* Make sure each block always post dominates itself. */
5305 SET_BIT (temp_bitmap[bb], bb);
5306
5307 /* If the out state of this block changed, then we need to
5308 add the predecessors of this block to the worklist if they
5309 are not already on the worklist. */
5310 if (sbitmap_a_and_b (post_dominators[bb],
5311 post_dominators[bb],
5312 temp_bitmap[bb]))
5313 {
5314 for (e = b->pred; e; e = e->pred_next)
5315 {
5316 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5317 {
5318 *tos++ = e->src;
5319 e->src->aux = e;
5320 }
5321 }
5322 }
5323 }
5324 }
5325 free (temp_bitmap);
5326 }
5327
5328 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5329
5330 void
5331 compute_immediate_dominators (idom, dominators)
5332 int *idom;
5333 sbitmap *dominators;
5334 {
5335 sbitmap *tmp;
5336 int b;
5337
5338 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5339
5340 /* Begin with tmp(n) = dom(n) - { n }. */
5341 for (b = n_basic_blocks; --b >= 0; )
5342 {
5343 sbitmap_copy (tmp[b], dominators[b]);
5344 RESET_BIT (tmp[b], b);
5345 }
5346
5347 /* Subtract out all of our dominator's dominators. */
5348 for (b = n_basic_blocks; --b >= 0; )
5349 {
5350 sbitmap tmp_b = tmp[b];
5351 int s;
5352
5353 for (s = n_basic_blocks; --s >= 0; )
5354 if (TEST_BIT (tmp_b, s))
5355 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5356 }
5357
5358 /* Find the one bit set in the bitmap and put it in the output array. */
5359 for (b = n_basic_blocks; --b >= 0; )
5360 {
5361 int t;
5362 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5363 }
5364
5365 sbitmap_vector_free (tmp);
5366 }
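
/* A tiny worked example: with dominator sets
     dom (0) = {0}, dom (1) = {0,1}, dom (2) = {0,1,2}
   the subtraction loop leaves tmp (2) = {1} (block 0 drops out because
   it also dominates dominator 1), so idom[2] = 1 and idom[1] = 0.  */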
5367
5368 /* Count for a single SET rtx, X. */
5369
5370 static void
5371 count_reg_sets_1 (x)
5372 rtx x;
5373 {
5374 register int regno;
5375 register rtx reg = SET_DEST (x);
5376
5377 /* Find the register that's set/clobbered. */
5378 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5379 || GET_CODE (reg) == SIGN_EXTRACT
5380 || GET_CODE (reg) == STRICT_LOW_PART)
5381 reg = XEXP (reg, 0);
5382
5383 if (GET_CODE (reg) == PARALLEL
5384 && GET_MODE (reg) == BLKmode)
5385 {
5386 register int i;
5387 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5388 count_reg_sets_1 (XVECEXP (reg, 0, i));
5389 return;
5390 }
5391
5392 if (GET_CODE (reg) == REG)
5393 {
5394 regno = REGNO (reg);
5395 if (regno >= FIRST_PSEUDO_REGISTER)
5396 {
5397 /* Count (weighted) references, stores, etc. This counts a
5398 register twice if it is modified, but that is correct. */
5399 REG_N_SETS (regno)++;
5400
5401 REG_N_REFS (regno) += loop_depth;
5402 }
5403 }
5404 }
5405
5406 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5407 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5408
5409 static void
5410 count_reg_sets (x)
5411 rtx x;
5412 {
5413 register RTX_CODE code = GET_CODE (x);
5414
5415 if (code == SET || code == CLOBBER)
5416 count_reg_sets_1 (x);
5417 else if (code == PARALLEL)
5418 {
5419 register int i;
5420 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5421 {
5422 code = GET_CODE (XVECEXP (x, 0, i));
5423 if (code == SET || code == CLOBBER)
5424 count_reg_sets_1 (XVECEXP (x, 0, i));
5425 }
5426 }
5427 }
5428
5429 /* Increment REG_N_REFS by the current loop depth for each register
5430 reference found in X. */
5431
5432 static void
5433 count_reg_references (x)
5434 rtx x;
5435 {
5436 register RTX_CODE code;
5437
5438 retry:
5439 code = GET_CODE (x);
5440 switch (code)
5441 {
5442 case LABEL_REF:
5443 case SYMBOL_REF:
5444 case CONST_INT:
5445 case CONST:
5446 case CONST_DOUBLE:
5447 case PC:
5448 case ADDR_VEC:
5449 case ADDR_DIFF_VEC:
5450 case ASM_INPUT:
5451 return;
5452
5453 #ifdef HAVE_cc0
5454 case CC0:
5455 return;
5456 #endif
5457
5458 case CLOBBER:
5459 /* If we are clobbering a MEM, mark any registers inside the address
5460 as being used. */
5461 if (GET_CODE (XEXP (x, 0)) == MEM)
5462 count_reg_references (XEXP (XEXP (x, 0), 0));
5463 return;
5464
5465 case SUBREG:
5466 /* While we're here, optimize this case. */
5467 x = SUBREG_REG (x);
5468
5469 /* In case the SUBREG is not of a register, don't optimize. */
5470 if (GET_CODE (x) != REG)
5471 {
5472 count_reg_references (x);
5473 return;
5474 }
5475
5476 /* ... fall through ... */
5477
5478 case REG:
5479 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5480 REG_N_REFS (REGNO (x)) += loop_depth;
5481 return;
5482
5483 case SET:
5484 {
5485 register rtx testreg = SET_DEST (x);
5486 int mark_dest = 0;
5487
5488 /* If storing into MEM, don't show it as being used. But do
5489 show the address as being used. */
5490 if (GET_CODE (testreg) == MEM)
5491 {
5492 count_reg_references (XEXP (testreg, 0));
5493 count_reg_references (SET_SRC (x));
5494 return;
5495 }
5496
5497 /* Storing in STRICT_LOW_PART is like storing in a reg
5498 in that this SET might be dead, so ignore it in TESTREG,
5499 but in some other ways it is like using the reg.
5500
5501 Storing in a SUBREG or a bit field is like storing the entire
5502 register in that if the register's value is not used
5503 then this SET is not needed. */
5504 while (GET_CODE (testreg) == STRICT_LOW_PART
5505 || GET_CODE (testreg) == ZERO_EXTRACT
5506 || GET_CODE (testreg) == SIGN_EXTRACT
5507 || GET_CODE (testreg) == SUBREG)
5508 {
5509 /* Modifying a single register in an alternate mode
5510 does not use any of the old value. But these other
5511 ways of storing in a register do use the old value. */
5512 if (GET_CODE (testreg) == SUBREG
5513 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5514 ;
5515 else
5516 mark_dest = 1;
5517
5518 testreg = XEXP (testreg, 0);
5519 }
5520
5521 /* If this is a store into a register,
5522 recursively scan the value being stored. */
5523
5524 if ((GET_CODE (testreg) == PARALLEL
5525 && GET_MODE (testreg) == BLKmode)
5526 || GET_CODE (testreg) == REG)
5527 {
5528 count_reg_references (SET_SRC (x));
5529 if (mark_dest)
5530 count_reg_references (SET_DEST (x));
5531 return;
5532 }
5533 }
5534 break;
5535
5536 default:
5537 break;
5538 }
5539
5540 /* Recursively scan the operands of this expression. */
5541
5542 {
5543 register const char *fmt = GET_RTX_FORMAT (code);
5544 register int i;
5545
5546 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5547 {
5548 if (fmt[i] == 'e')
5549 {
5550 /* Tail recursive case: save a function call level. */
5551 if (i == 0)
5552 {
5553 x = XEXP (x, 0);
5554 goto retry;
5555 }
5556 count_reg_references (XEXP (x, i));
5557 }
5558 else if (fmt[i] == 'E')
5559 {
5560 register int j;
5561 for (j = 0; j < XVECLEN (x, i); j++)
5562 count_reg_references (XVECEXP (x, i, j));
5563 }
5564 }
5565 }
5566 }
5567
5568 /* Recompute register set/reference counts immediately prior to register
5569 allocation.
5570
5571 This avoids problems with set/reference counts changing to/from values
5572 which have special meanings to the register allocators.
5573
5574 Additionally, the reference counts are the primary component used by the
5575 register allocators to prioritize pseudos for allocation to hard regs.
5576 More accurate reference counts generally lead to better register allocation.
5577
5578 F is the first insn to be scanned.
5579 LOOP_STEP denotes how much loop_depth should be incremented per
5580 loop nesting level in order to increase the ref count more for references
5581 in a loop.
5582
5583 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5584 possibly other information which is used by the register allocators. */
5585
5586 void
5587 recompute_reg_usage (f, loop_step)
5588 rtx f;
5589 int loop_step;
5590 {
5591 rtx insn;
5592 int i, max_reg;
5593
5594 /* Clear out the old data. */
5595 max_reg = max_reg_num ();
5596 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5597 {
5598 REG_N_SETS (i) = 0;
5599 REG_N_REFS (i) = 0;
5600 }
5601
5602 /* Scan each insn in the chain and count how many times each register is
5603 set/used. */
5604 loop_depth = 1;
5605 for (insn = f; insn; insn = NEXT_INSN (insn))
5606 {
5607 /* Keep track of loop depth. */
5608 if (GET_CODE (insn) == NOTE)
5609 {
5610 /* Look for loop boundaries. */
5611 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
5612 loop_depth -= loop_step;
5613 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
5614 loop_depth += loop_step;
5615
5616 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
5617 Abort now rather than setting register status incorrectly. */
5618 if (loop_depth == 0)
5619 abort ();
5620 }
5621 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5622 {
5623 rtx links;
5624
5625 /* This call will increment REG_N_SETS for each SET or CLOBBER
5626 of a register in INSN. It will also increment REG_N_REFS
5627 by the loop depth for each set of a register in INSN. */
5628 count_reg_sets (PATTERN (insn));
5629
5630 /* count_reg_sets does not detect autoincrement address modes, so
5631 detect them here by looking at the notes attached to INSN. */
5632 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5633 {
5634 if (REG_NOTE_KIND (links) == REG_INC)
5635 /* An autoincrement both uses and sets the register; the use is
5636 counted by count_reg_references below, so record the set here. */
5637 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5638 }
5639
5640 /* This call will increment REG_N_REFS by the current loop depth for
5641 each reference to a register in INSN. */
5642 count_reg_references (PATTERN (insn));
5643
5644 /* count_reg_references will not include counts for arguments to
5645 function calls, so detect them here by examining the
5646 CALL_INSN_FUNCTION_USAGE data. */
5647 if (GET_CODE (insn) == CALL_INSN)
5648 {
5649 rtx note;
5650
5651 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5652 note;
5653 note = XEXP (note, 1))
5654 if (GET_CODE (XEXP (note, 0)) == USE)
5655 count_reg_references (XEXP (XEXP (note, 0), 0));
5656 }
5657 }
5658 }
5659 }
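
/* The weighting scheme above is worth seeing in isolation.  The following
   is an illustrative sketch, not part of flow.c: a toy insn stream (the
   TOY_* markers are hypothetical) showing that a reference at loop nesting
   level N contributes 1 + N * LOOP_STEP to its weighted count, because
   loop_depth starts at 1 and is bumped by LOOP_STEP at each
   NOTE_INSN_LOOP_BEG.  Compiled out via #if 0.  */
#if 0
#include <stdio.h>

enum toy_kind { TOY_LOOP_BEG, TOY_LOOP_END, TOY_REF };

int
main (void)
{
  /* One reference outside any loop, one in a single loop, and one in
     a doubly nested loop.  */
  static const enum toy_kind stream[] = {
    TOY_REF,
    TOY_LOOP_BEG, TOY_REF,
    TOY_LOOP_BEG, TOY_REF, TOY_LOOP_END,
    TOY_LOOP_END
  };
  const int loop_step = 1;
  int depth = 1;		/* Mirrors loop_depth = 1 above.  */
  int weighted_refs = 0;
  size_t i;

  for (i = 0; i < sizeof stream / sizeof stream[0]; i++)
    switch (stream[i])
      {
      case TOY_LOOP_BEG: depth += loop_step; break;
      case TOY_LOOP_END: depth -= loop_step; break;
      case TOY_REF: weighted_refs += depth; break;
      }

  /* Prints 6 (1 + 2 + 3): deeper references carry more weight.  */
  printf ("weighted refs = %d\n", weighted_refs);
  return 0;
}
#endif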
5660
5661 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5662 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5663 of the number of registers that died. */
5664
5665 int
5666 count_or_remove_death_notes (blocks, kill)
5667 sbitmap blocks;
5668 int kill;
5669 {
5670 int i, count = 0;
5671
5672 for (i = n_basic_blocks - 1; i >= 0; --i)
5673 {
5674 basic_block bb;
5675 rtx insn;
5676
5677 if (blocks && ! TEST_BIT (blocks, i))
5678 continue;
5679
5680 bb = BASIC_BLOCK (i);
5681
5682 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5683 {
5684 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5685 {
5686 rtx *pprev = &REG_NOTES (insn);
5687 rtx link = *pprev;
5688
5689 while (link)
5690 {
5691 switch (REG_NOTE_KIND (link))
5692 {
5693 case REG_DEAD:
5694 if (GET_CODE (XEXP (link, 0)) == REG)
5695 {
5696 rtx reg = XEXP (link, 0);
5697 int n;
5698
5699 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5700 n = 1;
5701 else
5702 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5703 count += n;
5704 }
5705 /* FALLTHRU */
5706
5707 case REG_UNUSED:
5708 if (kill)
5709 {
5710 rtx next = XEXP (link, 1);
5711 free_EXPR_LIST_node (link);
5712 *pprev = link = next;
5713 break;
5714 }
5715 /* FALLTHRU */
5716
5717 default:
5718 pprev = &XEXP (link, 1);
5719 link = *pprev;
5720 break;
5721 }
5722 }
5723 }
5724
5725 if (insn == bb->end)
5726 break;
5727 }
5728 }
5729
5730 return count;
5731 }
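
/* The note-removal loop above relies on the pointer-to-pointer idiom.
   As an illustrative sketch (not part of flow.c; the node type is
   hypothetical): keeping PPREV aimed at the previous "next" field lets
   one forward pass delete arbitrary nodes from a singly linked list
   with no special case for the head.  Compiled out via #if 0.  */
#if 0
#include <stdlib.h>

struct node { int value; struct node *next; };

/* Remove every node whose value equals KILL, in place.  */
static void
delete_matching (struct node **head, int kill)
{
  struct node **pprev = head;
  struct node *link = *pprev;

  while (link)
    {
      if (link->value == kill)
	{
	  struct node *next = link->next;
	  free (link);
	  *pprev = link = next;	/* Splice out; PPREV stays put.  */
	}
      else
	{
	  pprev = &link->next;	/* Keep the node; advance PPREV.  */
	  link = *pprev;
	}
    }
}
#endif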
5732
5733 /* Record INSN's block as BB. */
5734
5735 void
5736 set_block_for_insn (insn, bb)
5737 rtx insn;
5738 basic_block bb;
5739 {
5740 size_t uid = INSN_UID (insn);
5741 if (uid >= basic_block_for_insn->num_elements)
5742 {
5743 int new_size;
5744
5745 /* Add one-eighth the size so we don't keep calling xrealloc. */
5746 new_size = uid + (uid + 7) / 8;
5747
5748 VARRAY_GROW (basic_block_for_insn, new_size);
5749 }
5750 VARRAY_BB (basic_block_for_insn, uid) = bb;
5751 }
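
/* An illustrative sketch (not part of flow.c) of why the varray grows to
   UID + (UID + 7) / 8 rather than to exactly UID + 1: proportional
   over-allocation makes the number of reallocations logarithmic in the
   final size instead of linear.  Compiled out via #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  size_t capacity = 16;
  size_t uid;
  int grows = 0;

  for (uid = 0; uid < 100000; uid++)
    if (uid >= capacity)
      {
	capacity = uid + (uid + 7) / 8;	/* Same policy as above.  */
	grows++;
      }

  /* Prints a count well under 100, nowhere near 100000.  */
  printf ("%d reallocations for 100000 uids\n", grows);
  return 0;
}
#endif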
5752
5753 /* Record INSN's block number as BB. */
5754 /* ??? This has got to go. */
5755
5756 void
5757 set_block_num (insn, bb)
5758 rtx insn;
5759 int bb;
5760 {
5761 set_block_for_insn (insn, BASIC_BLOCK (bb));
5762 }
5763 \f
5764 /* Verify CFG consistency. This function checks some CFG invariants and
5765 aborts when something is wrong. The hope is that it will help to
5766 convert many optimization passes to keep the CFG consistent.
5767
5768 Currently it performs the following checks:
5769
5770 - validity of basic block head/end pointers
5771 - overlapping of basic blocks
5772 - edge list correctness
5773 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5774 - tails of basic blocks (ensure that the block boundary is necessary)
5775 - scans the body of each basic block for stray JUMP_INSN, CODE_LABEL
5776 and NOTE_INSN_BASIC_BLOCK insns
5777 - checks that all insns belong to a basic block
5778 (except the switch handling code, barriers and notes)
5779
5780 In the future it can be extended to check a lot of other things as well
5781 (reachability of basic blocks, life information, etc.). */
5782
5783 void
5784 verify_flow_info ()
5785 {
5786 const int max_uid = get_max_uid ();
5787 const rtx rtx_first = get_insns ();
5788 basic_block *bb_info;
5789 rtx x;
5790 int i, err = 0;
5791
5792 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5793
5794 /* First pass check head/end pointers and set bb_info array used by
5795 later passes. */
5796 for (i = n_basic_blocks - 1; i >= 0; i--)
5797 {
5798 basic_block bb = BASIC_BLOCK (i);
5799
5800 /* Check the head pointer and make sure that it is pointing into
5801 insn list. */
5802 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5803 if (x == bb->head)
5804 break;
5805 if (!x)
5806 {
5807 error ("Head insn %d for block %d not found in the insn stream.",
5808 INSN_UID (bb->head), bb->index);
5809 err = 1;
5810 }
5811
5812 /* Check the end pointer and make sure that it is pointing into
5813 insn list. */
5814 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5815 {
5816 if (bb_info[INSN_UID (x)] != NULL)
5817 {
5818 error ("Insn %d is in multiple basic blocks (%d and %d)",
5819 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5820 err = 1;
5821 }
5822 bb_info[INSN_UID (x)] = bb;
5823
5824 if (x == bb->end)
5825 break;
5826 }
5827 if (!x)
5828 {
5829 error ("End insn %d for block %d not found in the insn stream.",
5830 INSN_UID (bb->end), bb->index);
5831 err = 1;
5832 }
5833 }
5834
5835 /* Now check the basic blocks (boundaries etc.) */
5836 for (i = n_basic_blocks - 1; i >= 0; i--)
5837 {
5838 basic_block bb = BASIC_BLOCK (i);
5839 /* Check correctness of edge lists. */
5840 edge e;
5841
5842 e = bb->succ;
5843 while (e)
5844 {
5845 if (e->src != bb)
5846 {
5847 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5848 bb->index);
5849 fprintf (stderr, "Predecessor: ");
5850 dump_edge_info (stderr, e, 0);
5851 fprintf (stderr, "\nSuccessor: ");
5852 dump_edge_info (stderr, e, 1);
5853 fflush (stderr);
5854 err = 1;
5855 }
5856 if (e->dest != EXIT_BLOCK_PTR)
5857 {
5858 edge e2 = e->dest->pred;
5859 while (e2 && e2 != e)
5860 e2 = e2->pred_next;
5861 if (!e2)
5862 {
5863 error ("Basic block %i edge lists are corrupted", bb->index);
5864 err = 1;
5865 }
5866 }
5867 e = e->succ_next;
5868 }
5869
5870 e = bb->pred;
5871 while (e)
5872 {
5873 if (e->dest != bb)
5874 {
5875 error ("Basic block %d pred edge is corrupted", bb->index);
5876 fputs ("Predecessor: ", stderr);
5877 dump_edge_info (stderr, e, 0);
5878 fputs ("\nSuccessor: ", stderr);
5879 dump_edge_info (stderr, e, 1);
5880 fputc ('\n', stderr);
5881 err = 1;
5882 }
5883 if (e->src != ENTRY_BLOCK_PTR)
5884 {
5885 edge e2 = e->src->succ;
5886 while (e2 && e2 != e)
5887 e2 = e2->succ_next;
5888 if (!e2)
5889 {
5890 error ("Basic block %i edge lists are corrupted", bb->index);
5891 err = 1;
5892 }
5893 }
5894 e = e->pred_next;
5895 }
5896
5897 /* OK, the pointers are correct. Now check the header of the
5898 basic block. It ought to contain an optional CODE_LABEL
5899 followed by a NOTE_INSN_BASIC_BLOCK note. */
5900 x = bb->head;
5901 if (GET_CODE (x) == CODE_LABEL)
5902 {
5903 if (bb->end == x)
5904 {
5905 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5906 bb->index);
5907 err = 1;
5908 }
5909 x = NEXT_INSN (x);
5910 }
5911 if (GET_CODE (x) != NOTE
5912 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5913 || NOTE_BASIC_BLOCK (x) != bb)
5914 {
5915 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5916 bb->index);
5917 err = 1;
5918 }
5919
5920 if (bb->end == x)
5921 {
5922 /* Do checks for empty blocks here */
5923 }
5924 else
5925 {
5926 x = NEXT_INSN (x);
5927 while (x)
5928 {
5929 if (GET_CODE (x) == NOTE
5930 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5931 {
5932 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5933 INSN_UID (x), bb->index);
5934 err = 1;
5935 }
5936
5937 if (x == bb->end)
5938 break;
5939
5940 if (GET_CODE (x) == JUMP_INSN
5941 || GET_CODE (x) == CODE_LABEL
5942 || GET_CODE (x) == BARRIER)
5943 {
5944 error ("In basic block %d:", bb->index);
5945 fatal_insn ("Flow control insn inside a basic block", x);
5946 }
5947
5948 x = NEXT_INSN (x);
5949 }
5950 }
5951 }
5952
5953 x = rtx_first;
5954 while (x)
5955 {
5956 if (!bb_info[INSN_UID (x)])
5957 {
5958 switch (GET_CODE (x))
5959 {
5960 case BARRIER:
5961 case NOTE:
5962 break;
5963
5964 case CODE_LABEL:
5965 /* An addr_vec is placed outside any basic block. */
5966 if (NEXT_INSN (x)
5967 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5968 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5969 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5970 {
5971 x = NEXT_INSN (x);
5972 }
5973
5974 /* But in any case, non-deletable labels can appear anywhere. */
5975 break;
5976
5977 default:
5978 fatal_insn ("Insn outside basic block", x);
5979 }
5980 }
5981
5982 x = NEXT_INSN (x);
5983 }
5984
5985 if (err)
5986 abort ();
5987
5988 /* Clean up. */
5989 free (bb_info);
5990 }
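
/* A hedged usage sketch (not part of flow.c): a pass that rewires the
   CFG can call verify_flow_info while debugging so corruption aborts the
   compiler immediately instead of mis-compiling later.  The pass body
   here is hypothetical.  Compiled out via #if 0.  */
#if 0
static void
example_cfg_transform (void)
{
  /* ... redirect edges, move insns between blocks, etc. ...  */

  verify_flow_info ();		/* Abort now if the CFG was broken.  */
}
#endif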
5991 \f
5992 /* Functions to access an edge list with a vector representation.
5993 Enough data is kept such that given an index number, the
5994 pred and succ that edge represents can be determined, or
5995 given a pred and a succ, its index number can be returned.
5996 This allows algorithms which consume a lot of memory to
5997 represent the normally full matrix of edge (pred,succ) with a
5998 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
5999 wasted space in the client code due to sparse flow graphs. */
6000
6001 /* This function initializes the edge list. Basically the entire
6002 flowgraph is processed, all edges are assigned a number,
6003 and the data structure is filled in. */
6004 struct edge_list *
6005 create_edge_list ()
6006 {
6007 struct edge_list *elist;
6008 edge e;
6009 int num_edges;
6010 int x;
6011 int block_count;
6012
6013 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6014
6015 num_edges = 0;
6016
6017 /* Determine the number of edges in the flow graph by counting successor
6018 edges on each basic block. */
6019 for (x = 0; x < n_basic_blocks; x++)
6020 {
6021 basic_block bb = BASIC_BLOCK (x);
6022
6023 for (e = bb->succ; e; e = e->succ_next)
6024 num_edges++;
6025 }
6026 /* Don't forget successors of the entry block. */
6027 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6028 num_edges++;
6029
6030 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6031 elist->num_blocks = block_count;
6032 elist->num_edges = num_edges;
6033 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6034
6035 num_edges = 0;
6036
6037 /* Follow successors of the entry block, and register these edges. */
6038 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6039 {
6040 elist->index_to_edge[num_edges] = e;
6041 num_edges++;
6042 }
6043
6044 for (x = 0; x < n_basic_blocks; x++)
6045 {
6046 basic_block bb = BASIC_BLOCK (x);
6047
6048 /* Follow all successors of blocks, and register these edges. */
6049 for (e = bb->succ; e; e = e->succ_next)
6050 {
6051 elist->index_to_edge[num_edges] = e;
6052 num_edges++;
6053 }
6054 }
6055 return elist;
6056 }
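
/* A hedged usage sketch (not part of flow.c) of the edge list just
   built: a client allocates dense per-edge data indexed by EDGE_INDEX
   and releases everything with free_edge_list.  The pass body is
   hypothetical and assumes the CFG has already been built.  Compiled
   out via #if 0.  */
#if 0
static void
example_edge_list_client (void)
{
  struct edge_list *elist = create_edge_list ();
  int *edge_data = (int *) xcalloc (NUM_EDGES (elist), sizeof (int));
  edge e;
  int i;

  /* Tag each successor edge of each basic block.  */
  for (i = 0; i < n_basic_blocks; i++)
    for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
      edge_data[EDGE_INDEX (elist, e->src, e->dest)]++;

  free (edge_data);
  free_edge_list (elist);
}
#endif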
6057
6058 /* This function frees the memory associated with an edge list. */
6059 void
6060 free_edge_list (elist)
6061 struct edge_list *elist;
6062 {
6063 if (elist)
6064 {
6065 free (elist->index_to_edge);
6066 free (elist);
6067 }
6068 }
6069
6070 /* This function provides debug output showing an edge list. */
6071 void
6072 print_edge_list (f, elist)
6073 FILE *f;
6074 struct edge_list *elist;
6075 {
6076 int x;
6077 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6078 elist->num_blocks - 2, elist->num_edges);
6079
6080 for (x = 0; x < elist->num_edges; x++)
6081 {
6082 fprintf (f, " %-4d - edge(", x);
6083 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6084 fprintf (f, "entry,");
6085 else
6086 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6087
6088 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6089 fprintf (f, "exit)\n");
6090 else
6091 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6092 }
6093 }
6094
6095 /* This function provides an internal consistency check of an edge list,
6096 verifying that all edges are present, and that there are no
6097 extra edges. */
6098 void
6099 verify_edge_list (f, elist)
6100 FILE *f;
6101 struct edge_list *elist;
6102 {
6103 int x, pred, succ, index;
6104 edge e;
6105
6106 for (x = 0; x < n_basic_blocks; x++)
6107 {
6108 basic_block bb = BASIC_BLOCK (x);
6109
6110 for (e = bb->succ; e; e = e->succ_next)
6111 {
6112 pred = e->src->index;
6113 succ = e->dest->index;
6114 index = EDGE_INDEX (elist, e->src, e->dest);
6115 if (index == EDGE_INDEX_NO_EDGE)
6116 {
6117 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6118 continue;
6119 }
6120 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6121 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6122 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6123 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6124 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6125 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6126 }
6127 }
6128 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6129 {
6130 pred = e->src->index;
6131 succ = e->dest->index;
6132 index = EDGE_INDEX (elist, e->src, e->dest);
6133 if (index == EDGE_INDEX_NO_EDGE)
6134 {
6135 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6136 continue;
6137 }
6138 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6139 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6140 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6141 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6142 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6143 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6144 }
6145 /* We've verified that all the edges are in the list; now let's make sure
6146 there are no spurious edges in the list. */
6147
6148 for (pred = 0 ; pred < n_basic_blocks; pred++)
6149 for (succ = 0 ; succ < n_basic_blocks; succ++)
6150 {
6151 basic_block p = BASIC_BLOCK (pred);
6152 basic_block s = BASIC_BLOCK (succ);
6153
6154 int found_edge = 0;
6155
6156 for (e = p->succ; e; e = e->succ_next)
6157 if (e->dest == s)
6158 {
6159 found_edge = 1;
6160 break;
6161 }
6162 for (e = s->pred; e; e = e->pred_next)
6163 if (e->src == p)
6164 {
6165 found_edge = 1;
6166 break;
6167 }
6168 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6169 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6170 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6171 pred, succ);
6172 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6173 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6174 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6175 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6176 BASIC_BLOCK (succ)));
6177 }
6178 for (succ = 0 ; succ < n_basic_blocks; succ++)
6179 {
6180 basic_block p = ENTRY_BLOCK_PTR;
6181 basic_block s = BASIC_BLOCK (succ);
6182
6183 int found_edge = 0;
6184
6185 for (e = p->succ; e; e = e->succ_next)
6186 if (e->dest == s)
6187 {
6188 found_edge = 1;
6189 break;
6190 }
6191 for (e = s->pred; e; e = e->pred_next)
6192 if (e->src == p)
6193 {
6194 found_edge = 1;
6195 break;
6196 }
6197 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6198 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6199 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6200 succ);
6201 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6202 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6203 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6204 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6205 BASIC_BLOCK (succ)));
6206 }
6207 for (pred = 0 ; pred < n_basic_blocks; pred++)
6208 {
6209 basic_block p = BASIC_BLOCK (pred);
6210 basic_block s = EXIT_BLOCK_PTR;
6211
6212 int found_edge = 0;
6213
6214 for (e = p->succ; e; e = e->succ_next)
6215 if (e->dest == s)
6216 {
6217 found_edge = 1;
6218 break;
6219 }
6220 for (e = s->pred; e; e = e->pred_next)
6221 if (e->src == p)
6222 {
6223 found_edge = 1;
6224 break;
6225 }
6226 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6227 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6228 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6229 pred);
6230 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6231 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6232 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6233 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6234 EXIT_BLOCK_PTR));
6235 }
6236 }
6237
6238 /* This routine will determine what, if any, edge there is between
6239 a specified predecessor and successor. */
6240
6241 int
6242 find_edge_index (edge_list, pred, succ)
6243 struct edge_list *edge_list;
6244 basic_block pred, succ;
6245 {
6246 int x;
6247 for (x = 0; x < NUM_EDGES (edge_list); x++)
6248 {
6249 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6250 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6251 return x;
6252 }
6253 return (EDGE_INDEX_NO_EDGE);
6254 }
6255
6256 /* This function will remove an edge from the flow graph. */
6257 static void
6258 remove_edge (e)
6259 edge e;
6260 {
6261 edge last_pred = NULL;
6262 edge last_succ = NULL;
6263 edge tmp;
6264 basic_block src, dest;
6265 src = e->src;
6266 dest = e->dest;
6267 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6268 last_succ = tmp;
6269
6270 if (!tmp)
6271 abort ();
6272 if (last_succ)
6273 last_succ->succ_next = e->succ_next;
6274 else
6275 src->succ = e->succ_next;
6276
6277 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6278 last_pred = tmp;
6279
6280 if (!tmp)
6281 abort ();
6282 if (last_pred)
6283 last_pred->pred_next = e->pred_next;
6284 else
6285 dest->pred = e->pred_next;
6286
6287 n_edges--;
6288 free (e);
6289 }
6290
6291 /* This routine will remove any fake successor edges for a basic block.
6292 When the edge is removed, it is also removed from whatever predecessor
6293 list it is in. */
6294 static void
6295 remove_fake_successors (bb)
6296 basic_block bb;
6297 {
6298 edge e;
6299 for (e = bb->succ; e ; )
6300 {
6301 edge tmp = e;
6302 e = e->succ_next;
6303 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6304 remove_edge (tmp);
6305 }
6306 }
6307
6308 /* This routine will remove all fake edges from the flow graph. If
6309 we remove all fake successors, it will automatically remove all
6310 fake predecessors. */
6311 void
6312 remove_fake_edges ()
6313 {
6314 int x;
6315
6316 for (x = 0; x < n_basic_blocks; x++)
6317 remove_fake_successors (BASIC_BLOCK (x));
6318
6319 /* We've handled all successors except the entry block's. */
6320 remove_fake_successors (ENTRY_BLOCK_PTR);
6321 }
6322
6323 /* This function will add a fake edge between any block which has no
6324 successors, and the exit block. Some data flow equations require these
6325 edges to exist. */
6326 void
6327 add_noreturn_fake_exit_edges ()
6328 {
6329 int x;
6330
6331 for (x = 0; x < n_basic_blocks; x++)
6332 if (BASIC_BLOCK (x)->succ == NULL)
6333 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6334 }
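
/* A hedged usage sketch (not part of flow.c) of the intended pairing:
   backward dataflow problems need every block to reach the exit, so fake
   edges are added before solving and stripped afterwards.  The solver
   named below is a hypothetical placeholder.  Compiled out via #if 0.  */
#if 0
static void
example_backward_dataflow (void)
{
  add_noreturn_fake_exit_edges ();

  /* solve_backward_problem ();  -- hypothetical solver */

  remove_fake_edges ();
}
#endif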
6335 \f
6336 /* Dump the list of basic blocks in the bitmap NODES. */
6337 static void
6338 flow_nodes_print (str, nodes, file)
6339 const char *str;
6340 const sbitmap nodes;
6341 FILE *file;
6342 {
6343 int node;
6344
6345 fprintf (file, "%s { ", str);
6346 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6347 fputs ("}\n", file);
6348 }
6349
6350
6351 /* Dump the list of exiting edges in the array EDGES. */
6352 static void
6353 flow_exits_print (str, edges, num_edges, file)
6354 const char *str;
6355 const edge *edges;
6356 int num_edges;
6357 FILE *file;
6358 {
6359 int i;
6360
6361 fprintf (file, "%s { ", str);
6362 for (i = 0; i < num_edges; i++)
6363 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6364 fputs ("}\n", file);
6365 }
6366
6367
6368 /* Dump loop related CFG information. */
6369 static void
6370 flow_loops_cfg_dump (loops, file)
6371 const struct loops *loops;
6372 FILE *file;
6373 {
6374 int i;
6375
6376 if (! loops->num || ! file || ! loops->cfg.dom)
6377 return;
6378
6379 for (i = 0; i < n_basic_blocks; i++)
6380 {
6381 edge succ;
6382
6383 fprintf (file, ";; %d succs { ", i);
6384 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6385 fprintf (file, "%d ", succ->dest->index);
6386 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6387 }
6388
6389
6390 /* Dump the DFS node order. */
6391 if (loops->cfg.dfs_order)
6392 {
6393 fputs (";; DFS order: ", file);
6394 for (i = 0; i < n_basic_blocks; i++)
6395 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6396 fputs ("\n", file);
6397 }
6398 }
6399
6400
6401 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6402 int
6403 flow_loop_nested_p (outer, loop)
6404 struct loop *outer;
6405 struct loop *loop;
6406 {
6407 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6408 }
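
/* An illustrative sketch (not part of flow.c) of the subset test that
   flow_loop_nested_p performs, reduced to plain bit masks: LOOP is
   nested in OUTER exactly when every node of LOOP is also a node of
   OUTER.  Compiled out via #if 0.  */
#if 0
#include <stdio.h>

/* Nonzero if every bit set in A is also set in B.  */
static int
mask_subset_p (unsigned a, unsigned b)
{
  return (a & ~b) == 0;
}

int
main (void)
{
  unsigned outer_nodes = 0x3e;	/* Blocks 1-5.  */
  unsigned inner_nodes = 0x0c;	/* Blocks 2-3.  */

  printf ("%d\n", mask_subset_p (inner_nodes, outer_nodes));	/* 1 */
  printf ("%d\n", mask_subset_p (outer_nodes, inner_nodes));	/* 0 */
  return 0;
}
#endif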
6409
6410
6411 /* Dump the loop information specified by LOOPS to the stream FILE. */
6412 void
6413 flow_loops_dump (loops, file, verbose)
6414 const struct loops *loops;
6415 FILE *file;
6416 int verbose;
6417 {
6418 int i;
6419 int num_loops;
6420
6421 num_loops = loops->num;
6422 if (! num_loops || ! file)
6423 return;
6424
6425 fprintf (file, ";; %d loops found\n", num_loops);
6426
6427 for (i = 0; i < num_loops; i++)
6428 {
6429 struct loop *loop = &loops->array[i];
6430
6431 fprintf (file, ";; loop %d (%d to %d):\n"
6432 ";; header %d, latch %d, pre-header %d,"
6433 " depth %d, level %d, outer %d\n",
6434 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6435 loop->header->index, loop->latch->index,
6436 loop->pre_header ? loop->pre_header->index : -1,
6437 loop->depth, loop->level,
6438 loop->outer ? (loop->outer - loops->array) : -1);
6439 fprintf (file, ";; %d", loop->num_nodes);
6440 flow_nodes_print (" nodes", loop->nodes, file);
6441 fprintf (file, ";; %d", loop->num_exits);
6442 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6443
6444 if (loop->shared)
6445 {
6446 int j;
6447
6448 for (j = 0; j < i; j++)
6449 {
6450 struct loop *oloop = &loops->array[j];
6451
6452 if (loop->header == oloop->header)
6453 {
6454 int disjoint;
6455 int smaller;
6456
6457 smaller = loop->num_nodes < oloop->num_nodes;
6458
6459 /* If the union of LOOP and OLOOP is different from
6460 the larger of LOOP and OLOOP, then LOOP and OLOOP
6461 must be disjoint. */
6462 disjoint = ! flow_loop_nested_p (smaller ? oloop : loop, smaller ? loop : oloop);
6463 fprintf (file, ";; loop header %d shared by loops %d, %d"
6464 " %s\n",
6465 loop->header->index, i, j,
6466 disjoint ? "disjoint" : "nested");
6467 }
6468 }
6469 }
6470
6471 if (verbose)
6472 {
6473 /* Print diagnostics to compare our concept of a loop with
6474 what the loop notes say. */
6475 if (GET_CODE (PREV_INSN (loop->header->head)) != NOTE
6476 || NOTE_LINE_NUMBER (PREV_INSN (loop->header->head))
6477 != NOTE_INSN_LOOP_BEG)
6478 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6479 INSN_UID (PREV_INSN (loop->header->head)));
6480 if (GET_CODE (NEXT_INSN (loop->latch->end)) != NOTE
6481 || NOTE_LINE_NUMBER (NEXT_INSN (loop->latch->end))
6482 != NOTE_INSN_LOOP_END)
6483 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6484 INSN_UID (NEXT_INSN (loop->latch->end)));
6485 }
6486 }
6487
6488 if (verbose)
6489 flow_loops_cfg_dump (loops, file);
6490 }
6491
6492
6493 /* Free all the memory allocated for LOOPS. */
6494 void
6495 flow_loops_free (loops)
6496 struct loops *loops;
6497 {
6498 if (loops->array)
6499 {
6500 int i;
6501
6502 if (! loops->num)
6503 abort ();
6504
6505 /* Free the loop descriptors. */
6506 for (i = 0; i < loops->num; i++)
6507 {
6508 struct loop *loop = &loops->array[i];
6509
6510 if (loop->nodes)
6511 sbitmap_free (loop->nodes);
6512 if (loop->exits)
6513 free (loop->exits);
6514 }
6515 free (loops->array);
6516 loops->array = NULL;
6517
6518 if (loops->cfg.dom)
6519 sbitmap_vector_free (loops->cfg.dom);
6520 if (loops->cfg.dfs_order)
6521 free (loops->cfg.dfs_order);
6522
6523 sbitmap_free (loops->shared_headers);
6524 }
6525 }
6526
6527
6528 /* Find the exits from the loop using the bitmap of loop nodes NODES
6529 and store in EXITS array. Return the number of exits from the
6530 loop. */
6531 static int
6532 flow_loop_exits_find (nodes, exits)
6533 const sbitmap nodes;
6534 edge **exits;
6535 {
6536 edge e;
6537 int node;
6538 int num_exits;
6539
6540 *exits = NULL;
6541
6542 /* Check all nodes within the loop to see if there are any
6543 successors not in the loop. Note that a node may have multiple
6544 exiting edges. */
6545 num_exits = 0;
6546 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6547 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6548 {
6549 basic_block dest = e->dest;
6550
6551 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6552 num_exits++;
6553 }
6554 });
6555
6556 if (! num_exits)
6557 return 0;
6558
6559 *exits = (edge *) xmalloc (num_exits * sizeof (edge));
6560
6561 /* Store all exiting edges into an array. */
6562 num_exits = 0;
6563 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6564 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6565 {
6566 basic_block dest = e->dest;
6567
6568 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6569 (*exits)[num_exits++] = e;
6570 }
6571 });
6572
6573 return num_exits;
6574 }
6575
6576
6577 /* Find the nodes contained within the loop with header HEADER and
6578 latch LATCH and store in NODES. Return the number of nodes within
6579 the loop. */
6580 static int
6581 flow_loop_nodes_find (header, latch, nodes)
6582 basic_block header;
6583 basic_block latch;
6584 sbitmap nodes;
6585 {
6586 basic_block *stack;
6587 int sp;
6588 int num_nodes = 0;
6589
6590 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6591 sp = 0;
6592
6593 /* Start with only the loop header in the set of loop nodes. */
6594 sbitmap_zero (nodes);
6595 SET_BIT (nodes, header->index);
6596 num_nodes++;
6597
6598 /* Push the loop latch on to the stack. */
6599 if (! TEST_BIT (nodes, latch->index))
6600 {
6601 SET_BIT (nodes, latch->index);
6602 num_nodes++;
6603 stack[sp++] = latch;
6604 }
6605
6606 while (sp)
6607 {
6608 basic_block node;
6609 edge e;
6610
6611 node = stack[--sp];
6612 for (e = node->pred; e; e = e->pred_next)
6613 {
6614 basic_block ancestor = e->src;
6615
6616 /* If this ancestor is not yet marked as part of the loop, add
6617 it to the set of loop nodes and push it onto the stack. */
6618 if (ancestor != ENTRY_BLOCK_PTR
6619 && ! TEST_BIT (nodes, ancestor->index))
6620 {
6621 SET_BIT (nodes, ancestor->index);
6622 num_nodes++;
6623 stack[sp++] = ancestor;
6624 }
6625 }
6626 }
6627 free (stack);
6628 return num_nodes;
6629 }
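
/* An illustrative sketch (not part of flow.c) of the same backward walk
   on a hypothetical six-block CFG: starting from the latch, follow
   predecessor edges, never expanding past the header; every block
   reached lies in the natural loop.  Compiled out via #if 0.  */
#if 0
#include <stdio.h>

#define N 6

/* pred[b] lists the predecessors of block B, terminated by -1.
   Block 1 is the loop header; block 4 is the latch (edge 4 -> 1).  */
static const int pred[N][N] = {
  {-1},			/* 0: entry */
  {0, 4, -1},		/* 1: header */
  {1, -1},		/* 2 */
  {2, -1},		/* 3 */
  {3, -1},		/* 4: latch */
  {1, -1}		/* 5: block after the loop */
};

int
main (void)
{
  int header = 1, latch = 4;
  int in_loop[N] = {0};
  int stack[N], sp = 0, i;

  in_loop[header] = 1;		/* The header is never expanded.  */
  in_loop[latch] = 1;
  stack[sp++] = latch;

  while (sp)
    {
      int node = stack[--sp];
      for (i = 0; pred[node][i] >= 0; i++)
	if (!in_loop[pred[node][i]])
	  {
	    in_loop[pred[node][i]] = 1;
	    stack[sp++] = pred[node][i];
	  }
    }

  for (i = 0; i < N; i++)
    if (in_loop[i])
      printf ("%d ", i);	/* Prints: 1 2 3 4  */
  printf ("\n");
  return 0;
}
#endif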
6630
6631
6632 /* Compute the depth first search order of the CFG and store it in
6633 the array DFS_ORDER, tracking visited nodes in a local bitmap.
6634 Returns the number of nodes visited. */
6635 static int
6636 flow_depth_first_order_compute (dfs_order)
6637 int *dfs_order;
6638 {
6639 edge e;
6640 edge *stack;
6641 int sp;
6642 int dfsnum = 0;
6643 sbitmap visited;
6644
6645 /* Allocate stack for back-tracking up CFG. */
6646 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6647 sp = 0;
6648
6649 /* Allocate bitmap to track nodes that have been visited. */
6650 visited = sbitmap_alloc (n_basic_blocks);
6651
6652 /* None of the nodes in the CFG have been visited yet. */
6653 sbitmap_zero (visited);
6654
6655 /* Start with the first successor edge from the entry block. */
6656 e = ENTRY_BLOCK_PTR->succ;
6657 while (e)
6658 {
6659 basic_block src = e->src;
6660 basic_block dest = e->dest;
6661
6662 /* Mark that we have visited this node. */
6663 if (src != ENTRY_BLOCK_PTR)
6664 SET_BIT (visited, src->index);
6665
6666 /* If this node has not been visited before, push the current
6667 edge on to the stack and proceed with the first successor
6668 edge of this node. */
6669 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6670 && dest->succ)
6671 {
6672 stack[sp++] = e;
6673 e = dest->succ;
6674 }
6675 else
6676 {
6677 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6678 && ! dest->succ)
6679 {
6680 /* DEST has no successors (for example, a non-returning
6681 function is called), so number it now rather than pushing
6682 the current edge, and carry on with SRC's next successor. */
6683 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6684 SET_BIT (visited, dest->index);
6685 }
6686
6687 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6688 {
6689 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6690
6691 /* Pop edge off stack. */
6692 e = stack[--sp];
6693 src = e->src;
6694 }
6695 e = e->succ_next;
6696 }
6697 }
6698 free (stack);
6699 sbitmap_free (visited);
6700
6701 /* The number of nodes visited should not be greater than
6702 n_basic_blocks. */
6703 if (dfsnum > n_basic_blocks)
6704 abort ();
6705
6706 /* If fewer nodes were visited, the rest are unreachable, which should have been eliminated already. */
6707 if (dfsnum < n_basic_blocks)
6708 abort ();
6709 return dfsnum;
6710 }
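
/* An illustrative sketch (not part of flow.c) of the numbering used
   above: assigning n_basic_blocks - ++dfsnum at the moment a node is
   *finished* yields a reverse completion order (reverse postorder), so
   a dominator is numbered before the nodes it dominates and outer loop
   headers precede inner ones.  A recursive version on a toy diamond
   CFG; the real code uses an explicit edge stack instead of recursion.
   Compiled out via #if 0.  */
#if 0
#include <stdio.h>

#define N 4

/* succ[b] lists the successors of B, terminated by -1.
   The diamond: 0 -> 1, 0 -> 2, 1 -> 3, 2 -> 3.  */
static const int succ[N][N] = {
  {1, 2, -1}, {3, -1}, {3, -1}, {-1}
};

static int visited[N], dfs_order[N], dfsnum;

static void
dfs (int b)
{
  int i;

  visited[b] = 1;
  for (i = 0; succ[b][i] >= 0; i++)
    if (!visited[succ[b][i]])
      dfs (succ[b][i]);
  dfs_order[b] = N - ++dfsnum;	/* Completion time, reversed.  */
}

int
main (void)
{
  int b;

  dfs (0);
  for (b = 0; b < N; b++)
    printf ("block %d -> position %d\n", b, dfs_order[b]);
  /* Block 0 gets position 0; the join block 3 gets position 3.  */
  return 0;
}
#endif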
6711
6712
6713 /* Return the block for the pre-header of the loop with header
6714 HEADER where DOM specifies the dominator information. Return NULL if
6715 there is no pre-header. */
6716 static basic_block
6717 flow_loop_pre_header_find (header, dom)
6718 basic_block header;
6719 const sbitmap *dom;
6720 {
6721 basic_block pre_header;
6722 edge e;
6723
6724 /* If block P is the only predecessor of the header that the
6725 header does not dominate, then P is the pre-header. */
6726 pre_header = NULL;
6727 for (e = header->pred; e; e = e->pred_next)
6728 {
6729 basic_block node = e->src;
6730
6731 if (node != ENTRY_BLOCK_PTR
6732 && ! TEST_BIT (dom[node->index], header->index))
6733 {
6734 if (pre_header == NULL)
6735 pre_header = node;
6736 else
6737 {
6738 /* There are multiple edges into the header from outside
6739 the loop so there is no pre-header block. */
6740 pre_header = NULL;
6741 break;
6742 }
6743 }
6744 }
6745 return pre_header;
6746 }
6747
6748
6749 /* Add LOOP to the loop hierarchy tree so that it is a sibling or a
6750 descendant of ROOT. */
6751 static void
6752 flow_loop_tree_node_add (root, loop)
6753 struct loop *root;
6754 struct loop *loop;
6755 {
6756 struct loop *outer;
6757
6758 if (! loop)
6759 return;
6760
6761 for (outer = root; outer; outer = outer->next)
6762 {
6763 if (flow_loop_nested_p (outer, loop))
6764 {
6765 if (outer->inner)
6766 {
6767 /* Add LOOP as a sibling or descendant of OUTER->INNER. */
6768 flow_loop_tree_node_add (outer->inner, loop);
6769 }
6770 else
6771 {
6772 /* Add LOOP as child of OUTER. */
6773 outer->inner = loop;
6774 loop->outer = outer;
6775 loop->next = NULL;
6776 }
6777 return;
6778 }
6779 }
6780 /* Add LOOP as a sibling of ROOT. */
6781 loop->next = root->next;
6782 root->next = loop;
6783 loop->outer = root->outer;
6784 }
6785
6786
6787 /* Build the loop hierarchy tree for LOOPS. */
6788 static void
6789 flow_loops_tree_build (loops)
6790 struct loops *loops;
6791 {
6792 int i;
6793 int num_loops;
6794
6795 num_loops = loops->num;
6796 if (! num_loops)
6797 return;
6798
6799 /* Root the loop hierarchy tree with the first loop found.
6800 Since we used a depth first search this should be the
6801 outermost loop. */
6802 loops->tree = &loops->array[0];
6803 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6804
6805 /* Add the remaining loops to the tree. */
6806 for (i = 1; i < num_loops; i++)
6807 flow_loop_tree_node_add (loops->tree, &loops->array[i]);
6808 }
6809
6810
6811 /* Helper function to compute loop nesting depth and enclosed loop level
6812 for the natural loop specified by LOOP at the loop depth DEPTH.
6813 Returns the loop level. */
6814 static int
6815 flow_loop_level_compute (loop, depth)
6816 struct loop *loop;
6817 int depth;
6818 {
6819 struct loop *inner;
6820 int level = 0;
6821
6822 if (! loop)
6823 return 0;
6824
6825 /* Traverse loop tree assigning depth and computing level as the
6826 maximum level of all the inner loops of this loop. The loop
6827 level is equivalent to the height of the loop in the loop tree
6828 and corresponds to the number of enclosed loop levels. */
6829 for (inner = loop->inner; inner; inner = inner->next)
6830 {
6831 int ilevel;
6832
6833 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6834
6835 if (ilevel > level)
6836 level = ilevel;
6837 }
6838 loop->level = level;
6839 loop->depth = depth;
6840 return level;
6841 }
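
/* An illustrative sketch (not part of flow.c) of depth versus level on
   a hypothetical loop nest A { B { C } }: depth counts enclosing loops
   from the root, level is the height of the subtree.  So A has depth 0
   and level 2, while the innermost C has depth 2 and level 0.  Compiled
   out via #if 0.  */
#if 0
#include <stdio.h>

struct toy_loop
{
  const char *name;
  struct toy_loop *inner, *next;
  int depth, level;
};

/* Same recursion shape as flow_loop_level_compute above.  */
static int
toy_level_compute (struct toy_loop *loop, int depth)
{
  struct toy_loop *inner;
  int level = 0;

  if (! loop)
    return 0;
  for (inner = loop->inner; inner; inner = inner->next)
    {
      int ilevel = toy_level_compute (inner, depth + 1) + 1;

      if (ilevel > level)
	level = ilevel;
    }
  loop->level = level;
  loop->depth = depth;
  return level;
}

int
main (void)
{
  struct toy_loop c = { "C", NULL, NULL, 0, 0 };
  struct toy_loop b = { "B", &c, NULL, 0, 0 };
  struct toy_loop a = { "A", &b, NULL, 0, 0 };

  toy_level_compute (&a, 0);
  printf ("%s: depth %d, level %d\n", a.name, a.depth, a.level); /* 0, 2 */
  printf ("%s: depth %d, level %d\n", c.name, c.depth, c.level); /* 2, 0 */
  return 0;
}
#endif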
6842
6843
6844 /* Compute the loop nesting depth and enclosed loop level for the loop
6845 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
6846 level. */
6847 static int
6848 flow_loops_level_compute (loops)
6849 struct loops *loops;
6850 {
6851 return flow_loop_level_compute (loops->tree, 0);
6852 }
6853
6854
6855 /* Find all the natural loops in the function and save in LOOPS structure.
6856 Return the number of natural loops found. */
6857 int
6858 flow_loops_find (loops)
6859 struct loops *loops;
6860 {
6861 int i;
6862 int b;
6863 int num_loops;
6864 edge e;
6865 sbitmap headers;
6866 sbitmap *dom;
6867 int *dfs_order;
6868
6869 loops->num = 0;
6870 loops->array = NULL;
6871 loops->tree = NULL;
6872 dfs_order = NULL;
6873
6874 /* Taking care of this degenerate case makes the rest of
6875 this code simpler. */
6876 if (n_basic_blocks == 0)
6877 return 0;
6878
6879 /* Compute the dominators. */
6880 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6881 compute_flow_dominators (dom, NULL);
6882
6883 /* Count the number of loop edges (back edges). This should be the
6884 same as the number of natural loops. */
6885 num_loops = 0;
6886 for (b = 0; b < n_basic_blocks; b++)
6887 {
6888 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
6889 {
6890 basic_block latch = e->src;
6891
6892 /* Look for back edges where a predecessor is dominated
6893 by this block. A natural loop has a single entry
6894 node (header) that dominates all the nodes in the
6895 loop. It also has a single back edge to the header
6896 from a latch node. Note that multiple natural loops
6897 may share the same header. */
6898 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
6899 num_loops++;
6900 }
6901 }
6902
6903 if (num_loops)
6904 {
6905 /* Compute depth first search order of the CFG so that outer
6906 natural loops will be found before inner natural loops. */
6907 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
6908 flow_depth_first_order_compute (dfs_order);
6909
6910 /* Allocate loop structures. */
6911 loops->array = (struct loop *)
6912 xcalloc (num_loops, sizeof (struct loop));
6913
6914 headers = sbitmap_alloc (n_basic_blocks);
6915 sbitmap_zero (headers);
6916
6917 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
6918 sbitmap_zero (loops->shared_headers);
6919
6920 /* Find and record information about all the natural loops
6921 in the CFG. */
6922 num_loops = 0;
6923 for (b = 0; b < n_basic_blocks; b++)
6924 {
6925 basic_block header;
6926
6927 /* Search the nodes of the CFG in DFS order so that we find
6928 outer loops first. */
6929 header = BASIC_BLOCK (dfs_order[b]);
6930
6931 /* Look for all the possible latch blocks for this header. */
6932 for (e = header->pred; e; e = e->pred_next)
6933 {
6934 basic_block latch = e->src;
6935
6936 /* Look for back edges where a predecessor is dominated
6937 by this block. A natural loop has a single entry
6938 node (header) that dominates all the nodes in the
6939 loop. It also has a single back edge to the header
6940 from a latch node. Note that multiple natural loops
6941 may share the same header. */
6942 if (latch != ENTRY_BLOCK_PTR
6943 && TEST_BIT (dom[latch->index], header->index))
6944 {
6945 struct loop *loop;
6946
6947 loop = loops->array + num_loops;
6948
6949 loop->header = header;
6950 loop->latch = latch;
6951
6952 /* Keep track of blocks that are loop headers so
6953 that we can tell which loops should be merged. */
6954 if (TEST_BIT (headers, header->index))
6955 SET_BIT (loops->shared_headers, header->index);
6956 SET_BIT (headers, header->index);
6957
6958 /* Find nodes contained within the loop. */
6959 loop->nodes = sbitmap_alloc (n_basic_blocks);
6960 loop->num_nodes =
6961 flow_loop_nodes_find (header, latch, loop->nodes);
6962
6963 /* Find edges which exit the loop. Note that a node
6964 may have several exit edges. */
6965 loop->num_exits
6966 = flow_loop_exits_find (loop->nodes, &loop->exits);
6967
6968 /* Look to see if the loop has a pre-header node. */
6969 loop->pre_header
6970 = flow_loop_pre_header_find (header, dom);
6971
6972 num_loops++;
6973 }
6974 }
6975 }
6976
6977 /* Natural loops with shared headers may either be disjoint or
6978 nested. Disjoint loops with shared headers cannot be inner
6979 loops and should be merged. For now just mark loops that share
6980 headers. */
6981 for (i = 0; i < num_loops; i++)
6982 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
6983 loops->array[i].shared = 1;
6984
6985 sbitmap_free (headers);
6986 }
6987
6988 loops->num = num_loops;
6989
6990 /* Save CFG derived information to avoid recomputing it. */
6991 loops->cfg.dom = dom;
6992 loops->cfg.dfs_order = dfs_order;
6993
6994 /* Build the loop hierarchy tree. */
6995 flow_loops_tree_build (loops);
6996
6997 /* Assign the loop nesting depth and enclosed loop level for each
6998 loop. */
6999 flow_loops_level_compute (loops);
7000
7001 return num_loops;
7002 }
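
/* A hedged usage sketch (not part of flow.c) of the loop analysis as a
   whole: find the loops, optionally dump them, consult the per-loop
   data, then free everything (including the dominator and DFS data
   cached in LOOPS).  DUMP_FILE is a hypothetical dump stream.  Compiled
   out via #if 0.  */
#if 0
static void
example_loop_client (FILE *dump_file)
{
  struct loops loops;
  int i;

  if (flow_loops_find (&loops) > 0)
    {
      flow_loops_dump (&loops, dump_file, /* verbose = */ 1);

      /* Loops were discovered in DFS order, so outer loops come
	 before the loops they contain.  */
      for (i = 0; i < loops.num; i++)
	if (loops.array[i].pre_header == NULL)
	  fprintf (dump_file, ";; loop %d has no pre-header\n", i);
    }
  flow_loops_free (&loops);
}
#endif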
7003
7004
7005 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7006 int
7007 flow_loop_outside_edge_p (loop, e)
7008 const struct loop *loop;
7009 edge e;
7010 {
7011 if (e->dest != loop->header)
7012 abort ();
7013 return (e->src == ENTRY_BLOCK_PTR)
7014 || ! TEST_BIT (loop->nodes, e->src->index);
7015 }