1/* Top level of GCC compilers (cc1, cc1plus, etc.)
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
21
22/* This is the top level of cc1/c++.
23 It parses command args, opens files, invokes the various passes
24 in the proper order, and counts the time used by each.
25   Error messages and the low-level interface to malloc are also handled here.  */
26
27#include "config.h"
28#undef FLOAT /* This is for hpux. They should change hpux. */
29#undef FFS /* Some systems define this in param.h. */
30#include "system.h"
31#include "coretypes.h"
32#include "tm.h"
33#include <signal.h>
34
35#ifdef HAVE_SYS_RESOURCE_H
36# include <sys/resource.h>
37#endif
38
39#ifdef HAVE_SYS_TIMES_H
40# include <sys/times.h>
41#endif
42
43#include "line-map.h"
44#include "input.h"
45#include "tree.h"
46#include "rtl.h"
47#include "tm_p.h"
48#include "flags.h"
49#include "insn-attr.h"
50#include "insn-config.h"
51#include "insn-flags.h"
52#include "hard-reg-set.h"
53#include "recog.h"
54#include "output.h"
55#include "except.h"
56#include "function.h"
57#include "toplev.h"
58#include "expr.h"
59#include "basic-block.h"
60#include "intl.h"
61#include "ggc.h"
62#include "graph.h"
63#include "loop.h"
64#include "regs.h"
65#include "timevar.h"
66#include "diagnostic.h"
67#include "params.h"
68#include "reload.h"
69#include "dwarf2asm.h"
70#include "integrate.h"
71#include "real.h"
72#include "debug.h"
73#include "target.h"
74#include "langhooks.h"
75#include "cfglayout.h"
76#include "cfgloop.h"
77#include "hosthooks.h"
78#include "cgraph.h"
79#include "opts.h"
80#include "coverage.h"
81#include "value-prof.h"
82#include "alloc-pool.h"
83
84#if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
85#include "dwarf2out.h"
86#endif
87
88#if defined(DBX_DEBUGGING_INFO) || defined(XCOFF_DEBUGGING_INFO)
89#include "dbxout.h"
90#endif
91
92#ifdef SDB_DEBUGGING_INFO
93#include "sdbout.h"
94#endif
95
96#ifdef XCOFF_DEBUGGING_INFO
97#include "xcoffout.h" /* Needed for external data
98 declarations for e.g. AIX 4.x. */
99#endif
100
101#ifndef HAVE_conditional_execution
102#define HAVE_conditional_execution 0
103#endif
104
105/* Format to use to print the dump file index value.  */
106#ifndef DUMPFILE_FORMAT
107#define DUMPFILE_FORMAT ".%02d."
108#endif
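/* For illustration only (the source file name here is hypothetical): with the
   default format ".%02d." and an input compiled from "foo.c", open_dump_file
   below composes dump names such as "foo.c.08.gcse", where the number is the
   dump's position in the dump_file_index enumeration and "gcse" is the
   extension recorded in dump_file_tbl.  */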
109
110/* Describes a dump file. */
111
112struct dump_file_info
113{
114 /* The unique extension to apply, e.g. ".jump". */
115 const char *const extension;
116
117 /* The -d<c> character that enables this dump file. */
118 char const debug_switch;
119
120 /* True if there is a corresponding graph dump file. */
121 char const graph_dump_p;
122
123 /* True if the user selected this dump. */
124 char enabled;
125
126   /* True if the files have been initialized (i.e. truncated).  */
127 char initialized;
128};
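/* As an example of how the fields above are used: the dump_file_tbl entry
   { "gcse", 'G', 1, 0, 0 } further down names the "gcse" dump, lets -dG
   enable it, and marks it as having a companion graph dump; the last two
   fields start out clear and are filled in at run time.  */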
129
130/* Enumerate the extant dump files. */
131
132enum dump_file_index
133{
134 DFI_cgraph,
135 DFI_rtl,
136 DFI_sibling,
137 DFI_eh,
138 DFI_jump,
139 DFI_null,
140 DFI_cse,
141 DFI_addressof,
142 DFI_gcse,
143 DFI_loop,
144 DFI_bypass,
145 DFI_cfg,
146 DFI_bp,
147 DFI_vpt,
148 DFI_ce1,
149 DFI_tracer,
150 DFI_loop2,
151 DFI_web,
152 DFI_cse2,
153 DFI_life,
154 DFI_combine,
155 DFI_ce2,
156 DFI_regmove,
157 DFI_sched,
158 DFI_lreg,
159 DFI_greg,
160 DFI_postreload,
161  DFI_gcse2,
162 DFI_flow2,
163 DFI_peephole2,
164 DFI_ce3,
165 DFI_rnreg,
166 DFI_bbro,
167 DFI_branch_target_load,
168 DFI_sched2,
169 DFI_stack,
170 DFI_vartrack,
171 DFI_mach,
172 DFI_dbr,
173 DFI_MAX
174};
175
176/* Describes all the dump files.  Should be kept in pass order and in
177   sync with dump_file_index above.
178
179 Remaining -d letters:
180
181 " e m q "
182  " K O Q WXY "
183*/
184
185static struct dump_file_info dump_file_tbl[DFI_MAX] =
186{
187 { "cgraph", 'U', 0, 0, 0 },
188 { "rtl", 'r', 0, 0, 0 },
189 { "sibling", 'i', 0, 0, 0 },
190 { "eh", 'h', 0, 0, 0 },
191 { "jump", 'j', 0, 0, 0 },
192 { "null", 'u', 0, 0, 0 },
193 { "cse", 's', 0, 0, 0 },
194 { "addressof", 'F', 0, 0, 0 },
195 { "gcse", 'G', 1, 0, 0 },
196 { "loop", 'L', 1, 0, 0 },
197 { "bypass", 'G', 1, 0, 0 }, /* Yes, duplicate enable switch. */
198 { "cfg", 'f', 1, 0, 0 },
199 { "bp", 'b', 1, 0, 0 },
200 { "vpt", 'V', 1, 0, 0 },
201 { "ce1", 'C', 1, 0, 0 },
202 { "tracer", 'T', 1, 0, 0 },
203 { "loop2", 'L', 1, 0, 0 },
204 { "web", 'Z', 0, 0, 0 },
205 { "cse2", 't', 1, 0, 0 },
206 { "life", 'f', 1, 0, 0 }, /* Yes, duplicate enable switch. */
207 { "combine", 'c', 1, 0, 0 },
208 { "ce2", 'C', 1, 0, 0 },
209 { "regmove", 'N', 1, 0, 0 },
210 { "sched", 'S', 1, 0, 0 },
211 { "lreg", 'l', 1, 0, 0 },
212 { "greg", 'g', 1, 0, 0 },
213 { "postreload", 'o', 1, 0, 0 },
214  { "gcse2", 'J', 0, 0, 0 },
215 { "flow2", 'w', 1, 0, 0 },
216 { "peephole2", 'z', 1, 0, 0 },
217 { "ce3", 'E', 1, 0, 0 },
218 { "rnreg", 'n', 1, 0, 0 },
219 { "bbro", 'B', 1, 0, 0 },
220 { "btl", 'd', 1, 0, 0 }, /* Yes, duplicate enable switch. */
221 { "sched2", 'R', 1, 0, 0 },
222 { "stack", 'k', 1, 0, 0 },
223 { "vartrack", 'V', 1, 0, 0 }, /* Yes, duplicate enable switch. */
224 { "mach", 'M', 1, 0, 0 },
225 { "dbr", 'd', 0, 0, 0 },
226};
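/* Note that a single -d letter may enable more than one dump: -dG, for
   instance, turns on both the "gcse" and "bypass" dumps above, and -df
   turns on both "cfg" and "life".  */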
227
228/* Routine to open a dump file. Return true if the dump file is enabled. */
229
230static int
231open_dump_file (enum dump_file_index index, tree decl)
232{
233 char *dump_name;
234 const char *open_arg;
235 char seq[16];
236
237 if (! dump_file_tbl[index].enabled)
238 return 0;
239
240 timevar_push (TV_DUMP);
241 if (dump_file != NULL)
242 fclose (dump_file);
243
244 sprintf (seq, DUMPFILE_FORMAT, index);
245
246 if (! dump_file_tbl[index].initialized)
247 {
248 /* If we've not initialized the files, do so now. */
249 if (graph_dump_format != no_graph
250 && dump_file_tbl[index].graph_dump_p)
251 {
252 dump_name = concat (seq, dump_file_tbl[index].extension, NULL);
253 clean_graph_dump_file (dump_base_name, dump_name);
254 free (dump_name);
255 }
256 dump_file_tbl[index].initialized = 1;
257 open_arg = "w";
258 }
259 else
260 open_arg = "a";
261
262 dump_name = concat (dump_base_name, seq,
263 dump_file_tbl[index].extension, NULL);
264
265 dump_file = fopen (dump_name, open_arg);
266 if (dump_file == NULL)
267 fatal_error ("can't open %s: %m", dump_name);
268
269 free (dump_name);
270
271 if (decl)
272 fprintf (dump_file, "\n;; Function %s%s\n\n",
273          lang_hooks.decl_printable_name (decl, 2),
274 cfun->function_frequency == FUNCTION_FREQUENCY_HOT
275 ? " (hot)"
276 : cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED
277 ? " (unlikely executed)"
278 : "");
279
280 timevar_pop (TV_DUMP);
281 return 1;
282}
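/* Note that a given dump file is truncated ("w") only the first time it is
   opened during a compilation; later functions reopen it in append mode
   ("a"), so each per-pass dump accumulates every function in the
   translation unit.  */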
283
284/* Routine to close a dump file. */
285
286static void
287close_dump_file (enum dump_file_index index,
288 void (*func) (FILE *, rtx),
289 rtx insns)
290{
291 if (! dump_file)
292 return;
293
294 timevar_push (TV_DUMP);
295 if (insns
296 && graph_dump_format != no_graph
297 && dump_file_tbl[index].graph_dump_p)
298 {
299 char seq[16];
300 char *suffix;
301
302 sprintf (seq, DUMPFILE_FORMAT, index);
303 suffix = concat (seq, dump_file_tbl[index].extension, NULL);
304 print_rtl_graph_with_bb (dump_base_name, suffix, insns);
305 free (suffix);
306 }
307
308 if (func && insns)
309 func (dump_file, insns);
310
311 fflush (dump_file);
312 fclose (dump_file);
313
314 dump_file = NULL;
315 timevar_pop (TV_DUMP);
316}
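/* The rest_of_handle_* wrappers below pair the two routines above in a
   fixed pattern, roughly (DFI_foo stands for whichever index applies):

     open_dump_file (DFI_foo, decl);
     ... run the pass, optionally writing details to dump_file ...
     close_dump_file (DFI_foo, print_rtl_with_bb, insns);

   close_dump_file also emits the graph dump when one was requested.  */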
317
318/* This is called from various places for FUNCTION_DECL, VAR_DECL,
319 and TYPE_DECL nodes.
320
321 This does nothing for local (non-static) variables, unless the
322 variable is a register variable with an ASMSPEC. In that case, or
323 if the variable is not an automatic, it sets up the RTL and
324 outputs any assembler code (label definition, storage allocation
325 and initialization).
326
327 DECL is the declaration. If ASMSPEC is nonzero, it specifies
328 the assembler symbol name to be used. TOP_LEVEL is nonzero
329 if this declaration is not within a function. */
330
331void
332rest_of_decl_compilation (tree decl,
333 const char *asmspec,
334 int top_level,
335 int at_end)
336{
337 /* We deferred calling assemble_alias so that we could collect
338 other attributes such as visibility. Emit the alias now. */
339 {
340 tree alias;
341 alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
342 if (alias)
343 {
344 alias = TREE_VALUE (TREE_VALUE (alias));
345 alias = get_identifier (TREE_STRING_POINTER (alias));
346 assemble_alias (decl, alias);
347 }
348 }
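  /* For example (a hypothetical declaration, not taken from this file):
       extern int f (void) __attribute__ ((alias ("f_impl")));
     only reaches the assemble_alias call above once all attributes,
     including visibility, have been collected.  */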
349
350 /* Forward declarations for nested functions are not "external",
351 but we need to treat them as if they were. */
352 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
353 || TREE_CODE (decl) == FUNCTION_DECL)
354 {
355 timevar_push (TV_VARCONST);
356
357 if (asmspec)
358 make_decl_rtl (decl, asmspec);
359
360 /* Don't output anything when a tentative file-scope definition
361 is seen. But at end of compilation, do output code for them.
362
363 We do output all variables when unit-at-a-time is active and rely on
364 callgraph code to defer them except for forward declarations
365 (see gcc.c-torture/compile/920624-1.c) */
366 if ((at_end
367 || !DECL_DEFER_OUTPUT (decl)
368 || (flag_unit_at_a_time && DECL_INITIAL (decl)))
369 && !DECL_EXTERNAL (decl))
370 {
371 if (flag_unit_at_a_time && !cgraph_global_info_ready
372 && TREE_CODE (decl) != FUNCTION_DECL && top_level)
373 cgraph_varpool_finalize_decl (decl);
374 else
375 assemble_variable (decl, top_level, at_end, 0);
376 }
377
378#ifdef ASM_FINISH_DECLARE_OBJECT
379 if (decl == last_assemble_variable_decl)
380 {
381 ASM_FINISH_DECLARE_OBJECT (asm_out_file, decl,
382 top_level, at_end);
383 }
384#endif
385
386 timevar_pop (TV_VARCONST);
387 }
388 else if (DECL_REGISTER (decl) && asmspec != 0)
389 {
390 if (decode_reg_name (asmspec) >= 0)
391 {
392 SET_DECL_RTL (decl, NULL_RTX);
393 make_decl_rtl (decl, asmspec);
394 }
395 else
396 {
397 error ("invalid register name `%s' for register variable", asmspec);
398 DECL_REGISTER (decl) = 0;
399 if (!top_level)
400 expand_decl (decl);
401 }
402 }
403 else if (TREE_CODE (decl) == TYPE_DECL)
404 {
405 timevar_push (TV_SYMOUT);
406 debug_hooks->type_decl (decl, !top_level);
407 timevar_pop (TV_SYMOUT);
408 }
409}
410
411/* Called after finishing a record, union or enumeral type. */
412
413void
414rest_of_type_compilation (tree type, int toplev)
415{
416 /* Avoid confusing the debug information machinery when there are
417 errors. */
418 if (errorcount != 0 || sorrycount != 0)
419 return;
420
421 timevar_push (TV_SYMOUT);
422 debug_hooks->type_decl (TYPE_STUB_DECL (type), !toplev);
423 timevar_pop (TV_SYMOUT);
424}
425
426/* Turn the RTL into assembly. */
427static void
428rest_of_handle_final (tree decl, rtx insns)
429{
430 timevar_push (TV_FINAL);
431 {
432 rtx x;
433 const char *fnname;
434
435 /* Get the function's name, as described by its RTL. This may be
436 different from the DECL_NAME name used in the source file. */
437
438 x = DECL_RTL (decl);
439 if (GET_CODE (x) != MEM)
440 abort ();
441 x = XEXP (x, 0);
442 if (GET_CODE (x) != SYMBOL_REF)
443 abort ();
444 fnname = XSTR (x, 0);
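    /* DECL_RTL of a function is a (mem (symbol_ref "name")), so FNNAME is the
       assembler-level name; it can differ from DECL_NAME, e.g. when the target
       prepends underscores or the symbol was renamed with __asm__ ("...").  */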
445
446 assemble_start_function (decl, fnname);
447 final_start_function (insns, asm_out_file, optimize);
448 final (insns, asm_out_file, optimize, 0);
449 final_end_function ();
450
451#ifdef IA64_UNWIND_INFO
452 /* ??? The IA-64 ".handlerdata" directive must be issued before
453 the ".endp" directive that closes the procedure descriptor. */
454 output_function_exception_table ();
455#endif
456
457 assemble_end_function (decl, fnname);
458
459#ifndef IA64_UNWIND_INFO
460 /* Otherwise, it feels unclean to switch sections in the middle. */
461 output_function_exception_table ();
462#endif
463
464 if (! quiet_flag)
465 fflush (asm_out_file);
466
467 /* Release all memory allocated by flow. */
468 free_basic_block_vars (0);
469
470 /* Release all memory held by regsets now. */
471 regset_release_memory ();
472 }
473 timevar_pop (TV_FINAL);
474
475 ggc_collect ();
476}
477
478#ifdef DELAY_SLOTS
479/* Run delay slot optimization. */
480static void
481rest_of_handle_delay_slots (tree decl, rtx insns)
482{
483 timevar_push (TV_DBR_SCHED);
484 open_dump_file (DFI_dbr, decl);
485
486 dbr_schedule (insns, dump_file);
487
488 close_dump_file (DFI_dbr, print_rtl, insns);
489 timevar_pop (TV_DBR_SCHED);
490
491 ggc_collect ();
492}
493#endif
494
495#ifdef STACK_REGS
496/* Convert register usage from flat register file usage to a stack
497 register file. */
498static void
499rest_of_handle_stack_regs (tree decl, rtx insns)
500{
501#if defined (HAVE_ATTR_length)
502  /* If flow2 creates new instructions which need splitting
503     and scheduling after reload is not done, they might not be
504     split until final, which does not allow splitting
505     if HAVE_ATTR_length is defined.  */
506#ifdef INSN_SCHEDULING
507 if (optimize && !flag_schedule_insns_after_reload)
508#else
509 if (optimize)
510#endif
511 {
512 timevar_push (TV_SHORTEN_BRANCH);
513 split_all_insns (1);
514 timevar_pop (TV_SHORTEN_BRANCH);
515 }
516#endif
517
518 timevar_push (TV_REG_STACK);
519 open_dump_file (DFI_stack, decl);
520
521 if (reg_to_stack (insns, dump_file) && optimize)
522 {
523 if (cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK
524 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0))
525 && flag_reorder_blocks)
526 {
527 reorder_basic_blocks ();
528 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK);
529 }
530 }
531
532 close_dump_file (DFI_stack, print_rtl_with_bb, insns);
533 timevar_pop (TV_REG_STACK);
534
535 ggc_collect ();
536}
537#endif
538
539/* Track the variables, i.e. compute where each variable is stored at each position in the function.  */
540static void
541rest_of_handle_variable_tracking (tree decl, rtx insns)
542{
543 timevar_push (TV_VAR_TRACKING);
544 open_dump_file (DFI_vartrack, decl);
545
546 variable_tracking_main ();
547
548 close_dump_file (DFI_vartrack, print_rtl_with_bb, insns);
549 timevar_pop (TV_VAR_TRACKING);
550}
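/* For instance, a user variable may live in a register over one part of the
   function and in a stack slot elsewhere; variable_tracking_main records such
   location changes as notes that the debug-info writers later consume.  */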
551
552/* Machine independent reorg pass. */
553static void
554rest_of_handle_machine_reorg (tree decl, rtx insns)
555{
556 timevar_push (TV_MACH_DEP);
557 open_dump_file (DFI_mach, decl);
558
559  targetm.machine_dependent_reorg ();
560
561 close_dump_file (DFI_mach, print_rtl, insns);
562 timevar_pop (TV_MACH_DEP);
563
564 ggc_collect ();
565}
566
567
568/* Run new register allocator. Return TRUE if we must exit
569 rest_of_compilation upon return. */
570static bool
571rest_of_handle_new_regalloc (tree decl, rtx insns)
572{
573 int failure;
574
575 delete_trivially_dead_insns (insns, max_reg_num ());
576 reg_alloc ();
577
578 timevar_pop (TV_LOCAL_ALLOC);
579 if (dump_file_tbl[DFI_lreg].enabled)
580 {
581 timevar_push (TV_DUMP);
582
583 close_dump_file (DFI_lreg, NULL, NULL);
584 timevar_pop (TV_DUMP);
585 }
586
587 /* XXX clean up the whole mess to bring live info in shape again. */
588 timevar_push (TV_GLOBAL_ALLOC);
589 open_dump_file (DFI_greg, decl);
590
591 build_insn_chain (insns);
592 failure = reload (insns, 0);
593
594 timevar_pop (TV_GLOBAL_ALLOC);
595
596 if (dump_file_tbl[DFI_greg].enabled)
597 {
598 timevar_push (TV_DUMP);
599
600 dump_global_regs (dump_file);
601
602 close_dump_file (DFI_greg, print_rtl_with_bb, insns);
603 timevar_pop (TV_DUMP);
604 }
605
606 if (failure)
607 return true;
608
609 reload_completed = 1;
610
611 return false;
612}
613
614/* Run old register allocator. Return TRUE if we must exit
615 rest_of_compilation upon return. */
616static bool
617rest_of_handle_old_regalloc (tree decl, rtx insns)
618{
619 int failure;
620 int rebuild_notes;
621
622 /* Allocate the reg_renumber array. */
623 allocate_reg_info (max_regno, FALSE, TRUE);
624
625 /* And the reg_equiv_memory_loc array. */
626 reg_equiv_memory_loc = xcalloc (max_regno, sizeof (rtx));
627
628 allocate_initial_values (reg_equiv_memory_loc);
629
630 regclass (insns, max_reg_num (), dump_file);
631 rebuild_notes = local_alloc ();
632
633 timevar_pop (TV_LOCAL_ALLOC);
634
635 /* Local allocation may have turned an indirect jump into a direct
636 jump. If so, we must rebuild the JUMP_LABEL fields of jumping
637 instructions. */
638 if (rebuild_notes)
639 {
640 timevar_push (TV_JUMP);
641
642 rebuild_jump_labels (insns);
643 purge_all_dead_edges (0);
644
645 timevar_pop (TV_JUMP);
646 }
647
648 if (dump_file_tbl[DFI_lreg].enabled)
649 {
650 timevar_push (TV_DUMP);
651
652 dump_flow_info (dump_file);
653 dump_local_alloc (dump_file);
654
655 close_dump_file (DFI_lreg, print_rtl_with_bb, insns);
656 timevar_pop (TV_DUMP);
657 }
658
659 ggc_collect ();
660
661 timevar_push (TV_GLOBAL_ALLOC);
662 open_dump_file (DFI_greg, decl);
663
664 /* If optimizing, allocate remaining pseudo-regs. Do the reload
665 pass fixing up any insns that are invalid. */
666
667 if (optimize)
668 failure = global_alloc (dump_file);
669 else
670 {
671 build_insn_chain (insns);
672 failure = reload (insns, 0);
673 }
674
675 timevar_pop (TV_GLOBAL_ALLOC);
676
677 if (dump_file_tbl[DFI_greg].enabled)
678 {
679 timevar_push (TV_DUMP);
680
681 dump_global_regs (dump_file);
682
683 close_dump_file (DFI_greg, print_rtl_with_bb, insns);
684 timevar_pop (TV_DUMP);
685 }
686
687 return failure;
688}
689
690/* Run the regrename and cprop passes. */
691static void
692rest_of_handle_regrename (tree decl, rtx insns)
693{
694 timevar_push (TV_RENAME_REGISTERS);
695 open_dump_file (DFI_rnreg, decl);
696
697 if (flag_rename_registers)
698 regrename_optimize ();
699 if (flag_cprop_registers)
700 copyprop_hardreg_forward ();
701
702 close_dump_file (DFI_rnreg, print_rtl_with_bb, insns);
703 timevar_pop (TV_RENAME_REGISTERS);
704}
705
706/* Reorder basic blocks. */
707static void
708rest_of_handle_reorder_blocks (tree decl, rtx insns)
709{
710 bool changed;
711 open_dump_file (DFI_bbro, decl);
712
713 /* Last attempt to optimize CFG, as scheduling, peepholing and insn
714 splitting possibly introduced more crossjumping opportunities. */
715 changed = cleanup_cfg (CLEANUP_EXPENSIVE
716 | (!HAVE_conditional_execution
717 ? CLEANUP_UPDATE_LIFE : 0));
718
719 if (flag_sched2_use_traces && flag_schedule_insns_after_reload)
720 tracer ();
721 if (flag_reorder_blocks)
722 reorder_basic_blocks ();
723 if (flag_reorder_blocks
724 || (flag_sched2_use_traces && flag_schedule_insns_after_reload))
725 changed |= cleanup_cfg (CLEANUP_EXPENSIVE
726 | (!HAVE_conditional_execution
727 ? CLEANUP_UPDATE_LIFE : 0));
728
729  /* On conditional execution targets we cannot update life information
730     cheaply, so we defer the updating until after both cleanups.  This may
731     lose some cases but should not be terribly bad.  */
732 if (changed && HAVE_conditional_execution)
733 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
734 PROP_DEATH_NOTES);
735 close_dump_file (DFI_bbro, print_rtl_with_bb, insns);
736}
737
738#ifdef INSN_SCHEDULING
739/* Run instruction scheduler. */
740static void
741rest_of_handle_sched (tree decl, rtx insns)
742{
743 timevar_push (TV_SCHED);
744
745 /* Print function header into sched dump now
746 because doing the sched analysis makes some of the dump. */
747 if (optimize > 0 && flag_schedule_insns)
748 {
749 open_dump_file (DFI_sched, decl);
750
751 /* Do control and data sched analysis,
752 and write some of the results to dump file. */
753
754 schedule_insns (dump_file);
755
756 close_dump_file (DFI_sched, print_rtl_with_bb, insns);
757 }
758 timevar_pop (TV_SCHED);
759
760 ggc_collect ();
761}
762
763/* Run second scheduling pass after reload. */
764static void
765rest_of_handle_sched2 (tree decl, rtx insns)
766{
767 timevar_push (TV_SCHED2);
768 open_dump_file (DFI_sched2, decl);
769
770 /* Do control and data sched analysis again,
771 and write some more of the results to dump file. */
772
773 split_all_insns (1);
774
775 if (flag_sched2_use_superblocks || flag_sched2_use_traces)
776 {
777 schedule_ebbs (dump_file);
778 /* No liveness updating code yet, but it should be easy to do.
779     reg-stack recomputes the liveness when needed for now. */
780 count_or_remove_death_notes (NULL, 1);
781 cleanup_cfg (CLEANUP_EXPENSIVE);
782 }
783 else
784 schedule_insns (dump_file);
785
786 close_dump_file (DFI_sched2, print_rtl_with_bb, insns);
787 timevar_pop (TV_SCHED2);
788
789 ggc_collect ();
790}
791#endif
792
793static void
794rest_of_handle_gcse2 (tree decl, rtx insns)
795{
796 open_dump_file (DFI_gcse2, decl);
797
798 gcse_after_reload_main (insns, dump_file);
799 rebuild_jump_labels (insns);
800 delete_trivially_dead_insns (insns, max_reg_num ());
801 close_dump_file (DFI_gcse2, print_rtl_with_bb, insns);
802
803 ggc_collect ();
804
805#ifdef ENABLE_CHECKING
806 verify_flow_info ();
807#endif
808}
809
810/* Register allocation pre-pass, to reduce number of moves necessary
811 for two-address machines. */
812static void
813rest_of_handle_regmove (tree decl, rtx insns)
814{
815 timevar_push (TV_REGMOVE);
816 open_dump_file (DFI_regmove, decl);
817
818 regmove_optimize (insns, max_reg_num (), dump_file);
819
820 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
821 close_dump_file (DFI_regmove, print_rtl_with_bb, insns);
822 timevar_pop (TV_REGMOVE);
823
824 ggc_collect ();
825}
826
827/* Run tracer. */
828static void
829rest_of_handle_tracer (tree decl, rtx insns)
830{
831 open_dump_file (DFI_tracer, decl);
832 if (dump_file)
833 dump_flow_info (dump_file);
834 tracer ();
835 cleanup_cfg (CLEANUP_EXPENSIVE);
836 reg_scan (insns, max_reg_num (), 0);
837 close_dump_file (DFI_tracer, print_rtl_with_bb, get_insns ());
838}
839
840/* If-conversion and CFG cleanup. */
841static void
842rest_of_handle_if_conversion (tree decl, rtx insns)
843{
844 open_dump_file (DFI_ce1, decl);
845 if (flag_if_conversion)
846 {
847 timevar_push (TV_IFCVT);
848 if (dump_file)
849 dump_flow_info (dump_file);
850 cleanup_cfg (CLEANUP_EXPENSIVE);
851 reg_scan (insns, max_reg_num (), 0);
852 if_convert (0);
853 timevar_pop (TV_IFCVT);
854 }
855 timevar_push (TV_JUMP);
856 cleanup_cfg (CLEANUP_EXPENSIVE);
857 reg_scan (insns, max_reg_num (), 0);
858 timevar_pop (TV_JUMP);
859 close_dump_file (DFI_ce1, print_rtl_with_bb, get_insns ());
860}
861
862/* Rerun if-conversion, as combine may have simplified things enough
863 to now meet sequence length restrictions. */
864static void
865rest_of_handle_if_after_combine (tree decl, rtx insns)
866{
867 timevar_push (TV_IFCVT);
868 open_dump_file (DFI_ce2, decl);
869
870 no_new_pseudos = 0;
871 if_convert (1);
872 no_new_pseudos = 1;
873
874 close_dump_file (DFI_ce2, print_rtl_with_bb, insns);
875 timevar_pop (TV_IFCVT);
876}
877
878static void
879rest_of_handle_web (tree decl, rtx insns)
880{
881 open_dump_file (DFI_web, decl);
882 timevar_push (TV_WEB);
883 web_main ();
884 delete_trivially_dead_insns (insns, max_reg_num ());
885 cleanup_cfg (CLEANUP_EXPENSIVE);
886
887 timevar_pop (TV_WEB);
888 close_dump_file (DFI_web, print_rtl_with_bb, insns);
889 reg_scan (get_insns (), max_reg_num (), 0);
890}
891
892/* Do branch profiling and static profile estimation passes. */
893static void
894rest_of_handle_branch_prob (tree decl, rtx insns)
895{
896 struct loops loops;
897
898 timevar_push (TV_BRANCH_PROB);
899 open_dump_file (DFI_bp, decl);
900
901 if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
902 branch_prob ();
903
904 /* Discover and record the loop depth at the head of each basic
905 block. The loop infrastructure does the real job for us. */
906 flow_loops_find (&loops, LOOP_TREE);
907
908 if (dump_file)
909 flow_loops_dump (&loops, dump_file, NULL, 0);
910
911 /* Estimate using heuristics if no profiling info is available. */
912 if (flag_guess_branch_prob)
913 estimate_probability (&loops);
914
915 flow_loops_free (&loops);
916 free_dominance_info (CDI_DOMINATORS);
917 close_dump_file (DFI_bp, print_rtl_with_bb, insns);
918 timevar_pop (TV_BRANCH_PROB);
919}
920
921/* Do optimizations based on expression value profiles. */
922static void
923rest_of_handle_value_profile_transformations (tree decl, rtx insns)
924{
925 open_dump_file (DFI_vpt, decl);
926 timevar_push (TV_VPT);
927
928 if (value_profile_transformations ())
929 cleanup_cfg (CLEANUP_EXPENSIVE);
930
931 timevar_pop (TV_VPT);
932 close_dump_file (DFI_vpt, print_rtl_with_bb, insns);
933}
934
935/* Do control and data flow analysis; write some of the results to the
936 dump file. */
937static void
938rest_of_handle_cfg (tree decl, rtx insns)
939{
940 open_dump_file (DFI_cfg, decl);
941 if (dump_file)
942 dump_flow_info (dump_file);
943 if (optimize)
944 cleanup_cfg (CLEANUP_EXPENSIVE
945 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
946
947  /* It may make more sense to mark constant functions after dead code is
948     eliminated by life_analysis, but we need to do it early: -fprofile-arcs
949     may insert code that makes the function non-constant, yet we must still
950     consider it constant, otherwise -fbranch-probabilities will not read the data back.
951
952     life_analysis rarely eliminates modification of external memory.
953  */
954 if (optimize)
955 {
956 /* Alias analysis depends on this information and mark_constant_function
957 depends on alias analysis. */
958 reg_scan (insns, max_reg_num (), 1);
959 mark_constant_function ();
960 }
961
962 close_dump_file (DFI_cfg, print_rtl_with_bb, insns);
963}
964
965/* Purge addressofs. */
966static void
967rest_of_handle_addressof (tree decl, rtx insns)
968{
969 open_dump_file (DFI_addressof, decl);
970
971 purge_addressof (insns);
972 if (optimize && purge_all_dead_edges (0))
973 delete_unreachable_blocks ();
974 reg_scan (insns, max_reg_num (), 1);
975
976 close_dump_file (DFI_addressof, print_rtl, insns);
977}
978
979/* We may have potential sibling or tail recursion sites. Select one
980 (of possibly multiple) methods of performing the call. */
981static void
982rest_of_handle_sibling_calls (rtx insns)
983{
984 rtx insn;
985 optimize_sibling_and_tail_recursive_calls ();
986
987 /* Recompute the CFG as sibling optimization clobbers it randomly. */
988 free_bb_for_insn ();
989 find_exception_handler_labels ();
990 rebuild_jump_labels (insns);
991 find_basic_blocks (insns, max_reg_num (), dump_file);
992
993  /* There is a pass ordering problem: we must lower NOTE_INSN_PREDICTION
994     notes before simplifying the CFG, but the lowering must be done after the
995     sibcall pass, which unhides parts of the RTL chain and cleans up the CFG.
996
997     Until sibcall is replaced by the tree-level optimizer, let's just
998     sweep away the NOTE_INSN_PREDICTION notes that leaked out. */
999 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1000 if (GET_CODE (insn) == NOTE
1001 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION)
1002 delete_insn (insn);
1003
1004 close_dump_file (DFI_sibling, print_rtl, get_insns ());
1005}
1006
1007/* Perform jump bypassing and control flow optimizations. */
1008static void
1009rest_of_handle_jump_bypass (tree decl, rtx insns)
1010{
1011 timevar_push (TV_BYPASS);
1012 open_dump_file (DFI_bypass, decl);
1013
1014 cleanup_cfg (CLEANUP_EXPENSIVE);
1015 reg_scan (insns, max_reg_num (), 1);
1016
1017 if (bypass_jumps (dump_file))
1018 {
1019 rebuild_jump_labels (insns);
1020 cleanup_cfg (CLEANUP_EXPENSIVE);
1021 delete_trivially_dead_insns (insns, max_reg_num ());
1022 }
1023
1024 close_dump_file (DFI_bypass, print_rtl_with_bb, insns);
1025 timevar_pop (TV_BYPASS);
1026
1027 ggc_collect ();
1028
1029#ifdef ENABLE_CHECKING
1030 verify_flow_info ();
1031#endif
1032}
1033
1034/* Handle inlining of functions in rest_of_compilation. Return TRUE
1035 if we must exit rest_of_compilation upon return. */
1036static bool
1037rest_of_handle_inlining (tree decl)
1038{
1039 rtx insns;
1040 int inlinable = 0;
1041 tree parent;
1042 const char *lose;
1043
1044 /* If we are reconsidering an inline function at the end of
1045 compilation, skip the stuff for making it inline. */
1046 if (cfun->rtl_inline_init)
1047 return 0;
1048 cfun->rtl_inline_init = 1;
1049
1050 /* If this is nested inside an inlined external function, pretend
1051 it was only declared. Since we cannot inline such functions,
1052 generating code for this one is not only not necessary but will
1053 confuse some debugging output writers. */
1054 for (parent = DECL_CONTEXT (current_function_decl);
1055 parent != NULL_TREE;
1056 parent = get_containing_scope (parent))
1057 if (TREE_CODE (parent) == FUNCTION_DECL
1058 && DECL_INLINE (parent) && DECL_EXTERNAL (parent))
1059 {
1060 DECL_INITIAL (decl) = 0;
1061 return true;
1062 }
1063 else if (TYPE_P (parent))
1064 /* A function in a local class should be treated normally. */
1065 break;
1066
1067 /* If requested, consider whether to make this function inline. */
1068 if ((DECL_INLINE (decl) && !flag_no_inline)
1069 || flag_inline_functions)
1070 {
1071 timevar_push (TV_INTEGRATION);
1072 lose = function_cannot_inline_p (decl);
1073 timevar_pop (TV_INTEGRATION);
1074 if (lose || ! optimize)
1075 {
1076 if (warn_inline && lose && DECL_INLINE (decl))
1077 {
1078 char *msg = concat ("%J", lose, NULL);
1079 warning (msg, decl);
1080 free (msg);
1081 }
1082 DECL_ABSTRACT_ORIGIN (decl) = 0;
1083 /* Don't really compile an extern inline function.
1084 If we can't make it inline, pretend
1085 it was only declared. */
1086 if (DECL_EXTERNAL (decl))
1087 {
1088 DECL_INITIAL (decl) = 0;
1089 return true;
1090 }
1091 }
1092 else
1093 inlinable = DECL_INLINE (decl) = 1;
1094 }
1095
1096 insns = get_insns ();
1097
1098 /* Dump the rtl code if we are dumping rtl. */
1099
1100 if (open_dump_file (DFI_rtl, decl))
1101 {
1102 if (DECL_STRUCT_FUNCTION (decl)
1103 && DECL_STRUCT_FUNCTION (decl)->saved_for_inline)
1104 fprintf (dump_file, ";; (integrable)\n\n");
1105 close_dump_file (DFI_rtl, print_rtl, insns);
1106 }
1107
1108 /* Convert from NOTE_INSN_EH_REGION style notes, and do other
1109 sorts of eh initialization. Delay this until after the
1110 initial rtl dump so that we can see the original nesting. */
1111 convert_from_eh_region_ranges ();
1112
1113 /* If function is inline, and we don't yet know whether to
1114 compile it by itself, defer decision till end of compilation.
1115 wrapup_global_declarations will (indirectly) call
1116 rest_of_compilation again for those functions that need to
1117 be output. Also defer those functions that we are supposed
1118 to defer. */
1119
1120 if (inlinable
1121 || (DECL_INLINE (decl)
1122 /* Egad. This RTL deferral test conflicts with Fortran assumptions
1123 for unreferenced symbols. See g77.f-torture/execute/980520-1.f.
1124 But removing this line from the check breaks all languages that
1125 use the call graph to output symbols. This hard-coded check is
1126 the least invasive work-around. */
1127 && (flag_inline_functions
1128 || strcmp (lang_hooks.name, "GNU F77") == 0)
1129 && ((! TREE_PUBLIC (decl) && ! TREE_ADDRESSABLE (decl)
1130 && ! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
1131 && ! flag_keep_inline_functions)
1132 || DECL_EXTERNAL (decl))))
1133 DECL_DEFER_OUTPUT (decl) = 1;
1134
1135 if (DECL_INLINE (decl))
1136 /* DWARF wants separate debugging info for abstract and
1137 concrete instances of all inline functions, including those
1138 declared inline but not inlined, and those inlined even
1139 though they weren't declared inline. Conveniently, that's
1140 what DECL_INLINE means at this point. */
1141 (*debug_hooks->deferred_inline_function) (decl);
1142
1143 if (DECL_DEFER_OUTPUT (decl))
1144 {
1145     /* If -Wreturn-type, we have to do a bit of compilation.  We just
1146        want to clean up the CFG to figure out whether or not we can
1147        fall off the end of the function; we do the minimum amount of
1148        work necessary to make that safe.  */
1149 if (warn_return_type)
1150 {
1151 int saved_optimize = optimize;
1152
1153 optimize = 0;
1154 rebuild_jump_labels (insns);
1155 find_exception_handler_labels ();
1156 find_basic_blocks (insns, max_reg_num (), dump_file);
1157 cleanup_cfg (CLEANUP_PRE_SIBCALL | CLEANUP_PRE_LOOP);
1158 optimize = saved_optimize;
1159
1160 /* CFG is no longer maintained up-to-date. */
1161 free_bb_for_insn ();
1162 }
1163
1164 set_nothrow_function_flags ();
1165 if (current_function_nothrow)
1166 /* Now we know that this can't throw; set the flag for the benefit
1167 of other functions later in this translation unit. */
1168 TREE_NOTHROW (current_function_decl) = 1;
1169
1170 timevar_push (TV_INTEGRATION);
1171 save_for_inline (decl);
1172 timevar_pop (TV_INTEGRATION);
1173 DECL_STRUCT_FUNCTION (decl)->inlinable = inlinable;
1174 return true;
1175 }
1176
1177 /* If specified extern inline but we aren't inlining it, we are
1178 done. This goes for anything that gets here with DECL_EXTERNAL
1179 set, not just things with DECL_INLINE. */
1180 return (bool) DECL_EXTERNAL (decl);
1181}
1182
1183/* Try to identify useless null pointer tests and delete them. */
1184static void
1185rest_of_handle_null_pointer (tree decl, rtx insns)
1186{
1187 open_dump_file (DFI_null, decl);
1188 if (dump_file)
1189 dump_flow_info (dump_file);
1190
1191 if (delete_null_pointer_checks (insns))
1192 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1193
1194 close_dump_file (DFI_null, print_rtl_with_bb, insns);
1195}
1196
1197/* Try combining insns through substitution. */
1198static void
1199rest_of_handle_combine (tree decl, rtx insns)
1200{
1201 int rebuild_jump_labels_after_combine = 0;
1202
1203 timevar_push (TV_COMBINE);
1204 open_dump_file (DFI_combine, decl);
1205
1206 rebuild_jump_labels_after_combine
1207 = combine_instructions (insns, max_reg_num ());
1208
1209 /* Combining insns may have turned an indirect jump into a
1210 direct jump. Rebuild the JUMP_LABEL fields of jumping
1211 instructions. */
1212 if (rebuild_jump_labels_after_combine)
1213 {
1214 timevar_push (TV_JUMP);
1215 rebuild_jump_labels (insns);
1216 timevar_pop (TV_JUMP);
1217
1218 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
1219 }
1220
1221 close_dump_file (DFI_combine, print_rtl_with_bb, insns);
1222 timevar_pop (TV_COMBINE);
1223
1224 ggc_collect ();
1225}
1226
1227/* Perform life analysis. */
1228static void
1229rest_of_handle_life (tree decl, rtx insns)
1230{
1231 open_dump_file (DFI_life, decl);
1232 regclass_init ();
1233
1234#ifdef ENABLE_CHECKING
1235 verify_flow_info ();
1236#endif
1237 life_analysis (insns, dump_file, PROP_FINAL);
1238 if (optimize)
1239 cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_UPDATE_LIFE
1240 | CLEANUP_LOG_LINKS
1241 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
1242 timevar_pop (TV_FLOW);
1243
1244 if (warn_uninitialized)
1245 {
1246 uninitialized_vars_warning (DECL_INITIAL (decl));
1247 if (extra_warnings)
1248 setjmp_args_warning ();
1249 }
1250
1251 if (optimize)
1252 {
1253 if (!flag_new_regalloc && initialize_uninitialized_subregs ())
1254 {
1255 /* Insns were inserted, and possibly pseudos created, so
1256 things might look a bit different. */
1257 insns = get_insns ();
1258 allocate_reg_life_data ();
1259 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
1260 PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
1261 }
1262 }
1263
1264 no_new_pseudos = 1;
1265
1266 close_dump_file (DFI_life, print_rtl_with_bb, insns);
1267
1268 ggc_collect ();
1269}
1270
1271/* Perform common subexpression elimination. Nonzero value from
1272 `cse_main' means that jumps were simplified and some code may now
1273 be unreachable, so do jump optimization again. */
1274static void
1275rest_of_handle_cse (tree decl, rtx insns)
1276{
1277 int tem;
1278
1279 open_dump_file (DFI_cse, decl);
1280 if (dump_file)
1281 dump_flow_info (dump_file);
1282 timevar_push (TV_CSE);
1283
1284 reg_scan (insns, max_reg_num (), 1);
1285
1286 tem = cse_main (insns, max_reg_num (), 0, dump_file);
1287 if (tem)
1288 rebuild_jump_labels (insns);
1289 if (purge_all_dead_edges (0))
1290 delete_unreachable_blocks ();
1291
1292 delete_trivially_dead_insns (insns, max_reg_num ());
1293
1294 /* If we are not running more CSE passes, then we are no longer
1295 expecting CSE to be run. But always rerun it in a cheap mode. */
1296 cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
1297
1298 if (tem || optimize > 1)
1299 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1300 /* Try to identify useless null pointer tests and delete them. */
1301 if (flag_delete_null_pointer_checks)
1302 {
1303 timevar_push (TV_JUMP);
1304
1305 if (delete_null_pointer_checks (insns))
1306 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1307 timevar_pop (TV_JUMP);
1308 }
1309
1310 /* The second pass of jump optimization is likely to have
1311 removed a bunch more instructions. */
1312 renumber_insns (dump_file);
1313
1314 timevar_pop (TV_CSE);
1315 close_dump_file (DFI_cse, print_rtl_with_bb, insns);
1316}
1317
1318/* Run second CSE pass after loop optimizations. */
1319static void
1320rest_of_handle_cse2 (tree decl, rtx insns)
1321{
1322 int tem;
1323
1324 timevar_push (TV_CSE2);
1325 open_dump_file (DFI_cse2, decl);
1326 if (dump_file)
1327 dump_flow_info (dump_file);
1328 /* CFG is no longer maintained up-to-date. */
1329 tem = cse_main (insns, max_reg_num (), 1, dump_file);
1330
1331  /* Run a pass to eliminate duplicated assignments to condition code
1332     registers.  We have to run this after bypass_jumps, because running
1333     it earlier would make it harder for that pass to determine whether
1334     a jump can be bypassed safely.  */
1335 cse_condition_code_reg ();
1336
1337 purge_all_dead_edges (0);
1338 delete_trivially_dead_insns (insns, max_reg_num ());
1339
1340 if (tem)
1341 {
1342 timevar_push (TV_JUMP);
1343 rebuild_jump_labels (insns);
1344 cleanup_cfg (CLEANUP_EXPENSIVE);
1345 timevar_pop (TV_JUMP);
1346 }
1347 reg_scan (insns, max_reg_num (), 0);
1348 close_dump_file (DFI_cse2, print_rtl_with_bb, insns);
1349 ggc_collect ();
1350 timevar_pop (TV_CSE2);
1351}
1352
1353/* Perform global cse. */
1354static void
1355rest_of_handle_gcse (tree decl, rtx insns)
1356{
1357 int save_csb, save_cfj;
1358 int tem2 = 0, tem;
1359
1360 timevar_push (TV_GCSE);
1361 open_dump_file (DFI_gcse, decl);
1362
1363 tem = gcse_main (insns, dump_file);
1364 rebuild_jump_labels (insns);
1365 delete_trivially_dead_insns (insns, max_reg_num ());
1366
1367 save_csb = flag_cse_skip_blocks;
1368 save_cfj = flag_cse_follow_jumps;
1369 flag_cse_skip_blocks = flag_cse_follow_jumps = 0;
1370
1371 /* Instantiate any remaining CONSTANT_P_RTX nodes. */
1372 if (current_function_calls_constant_p)
1373 purge_builtin_constant_p ();
1374
1375 /* If -fexpensive-optimizations, re-run CSE to clean up things done
1376 by gcse. */
1377 if (flag_expensive_optimizations)
1378 {
1379 timevar_push (TV_CSE);
1380 reg_scan (insns, max_reg_num (), 1);
1381 tem2 = cse_main (insns, max_reg_num (), 0, dump_file);
1382 purge_all_dead_edges (0);
1383 delete_trivially_dead_insns (insns, max_reg_num ());
1384 timevar_pop (TV_CSE);
1385 cse_not_expected = !flag_rerun_cse_after_loop;
1386 }
1387
1388 /* If gcse or cse altered any jumps, rerun jump optimizations to clean
1389 things up. Then possibly re-run CSE again. */
1390 while (tem || tem2)
1391 {
1392 tem = tem2 = 0;
1393 timevar_push (TV_JUMP);
1394 rebuild_jump_labels (insns);
1395 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1396 timevar_pop (TV_JUMP);
1397
1398 if (flag_expensive_optimizations)
1399 {
1400 timevar_push (TV_CSE);
1401 reg_scan (insns, max_reg_num (), 1);
1402 tem2 = cse_main (insns, max_reg_num (), 0, dump_file);
1403 purge_all_dead_edges (0);
1404 delete_trivially_dead_insns (insns, max_reg_num ());
1405 timevar_pop (TV_CSE);
1406 }
1407 }
1408
1409 close_dump_file (DFI_gcse, print_rtl_with_bb, insns);
1410 timevar_pop (TV_GCSE);
1411
1412 ggc_collect ();
1413 flag_cse_skip_blocks = save_csb;
1414 flag_cse_follow_jumps = save_cfj;
1415#ifdef ENABLE_CHECKING
1416 verify_flow_info ();
1417#endif
1418}
1419
1420/* Move constant computations out of loops. */
1421static void
1422rest_of_handle_loop_optimize (tree decl, rtx insns)
1423{
1424 int do_unroll, do_prefetch;
1425
1426 timevar_push (TV_LOOP);
1427 delete_dead_jumptables ();
1428 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1429 open_dump_file (DFI_loop, decl);
1430
1431 /* CFG is no longer maintained up-to-date. */
1432 free_bb_for_insn ();
1433
1434 if (flag_unroll_loops)
1435 do_unroll = LOOP_AUTO_UNROLL; /* Having two unrollers is useless. */
1436 else
1437 do_unroll = flag_old_unroll_loops ? LOOP_UNROLL : LOOP_AUTO_UNROLL;
1438 do_prefetch = flag_prefetch_loop_arrays ? LOOP_PREFETCH : 0;
1439
1440 if (flag_rerun_loop_opt)
1441 {
1442 cleanup_barriers ();
1443
1444 /* We only want to perform unrolling once. */
1445 loop_optimize (insns, dump_file, do_unroll);
1446 do_unroll = 0;
1447
1448 /* The first call to loop_optimize makes some instructions
1449 trivially dead. We delete those instructions now in the
1450 hope that doing so will make the heuristics in loop work
1451 better and possibly speed up compilation. */
1452 delete_trivially_dead_insns (insns, max_reg_num ());
1453
1454 /* The regscan pass is currently necessary as the alias
1455 analysis code depends on this information. */
1456 reg_scan (insns, max_reg_num (), 1);
1457 }
1458 cleanup_barriers ();
1459  loop_optimize (insns, dump_file, do_unroll | do_prefetch);
1460
1461 /* Loop can create trivially dead instructions. */
1462 delete_trivially_dead_insns (insns, max_reg_num ());
1463 close_dump_file (DFI_loop, print_rtl, insns);
1464 timevar_pop (TV_LOOP);
1465 find_basic_blocks (insns, max_reg_num (), dump_file);
1466
1467 ggc_collect ();
1468}
1469
1470/* Perform loop optimizations. It might be better to do them a bit
1471 sooner, but we want the profile feedback to work more
1472 efficiently. */
1473static void
1474rest_of_handle_loop2 (tree decl, rtx insns)
1475{
1476 struct loops *loops;
1477 basic_block bb;
1478
1479 if (!flag_unswitch_loops
1480 && !flag_peel_loops
1481 && !flag_unroll_loops
1482 && !flag_branch_on_count_reg)
1483 return;
1484
1485 timevar_push (TV_LOOP);
1486 open_dump_file (DFI_loop2, decl);
1487 if (dump_file)
1488 dump_flow_info (dump_file);
1489
1490 /* Initialize structures for layout changes. */
1491 cfg_layout_initialize ();
1492
1493 loops = loop_optimizer_init (dump_file);
1494
1495 if (loops)
1496 {
1497 /* The optimizations: */
1498 if (flag_unswitch_loops)
1499 unswitch_loops (loops);
1500
1501 if (flag_peel_loops || flag_unroll_loops)
1502 unroll_and_peel_loops (loops,
1503 (flag_peel_loops ? UAP_PEEL : 0) |
1504 (flag_unroll_loops ? UAP_UNROLL : 0) |
1505 (flag_unroll_all_loops ? UAP_UNROLL_ALL : 0));
1506
1507#ifdef HAVE_doloop_end
1508 if (flag_branch_on_count_reg && HAVE_doloop_end)
1509 doloop_optimize_loops (loops);
1510#endif /* HAVE_doloop_end */
1511
1512 loop_optimizer_finalize (loops, dump_file);
1513 }
1514
1515 /* Finalize layout changes. */
1516 FOR_EACH_BB (bb)
1517 if (bb->next_bb != EXIT_BLOCK_PTR)
1518 bb->rbi->next = bb->next_bb;
1519 cfg_layout_finalize ();
1520
1521 cleanup_cfg (CLEANUP_EXPENSIVE);
1522 delete_trivially_dead_insns (insns, max_reg_num ());
1523 reg_scan (insns, max_reg_num (), 0);
1524 if (dump_file)
1525 dump_flow_info (dump_file);
1526 close_dump_file (DFI_loop2, print_rtl_with_bb, get_insns ());
1527 timevar_pop (TV_LOOP);
1528 ggc_collect ();
1529}
1530
1531/* This is called from finish_function (within langhooks.parse_file)
1532 after each top-level definition is parsed.
1533 It is supposed to compile that function or variable
1534 and output the assembler code for it.
1535 After we return, the tree storage is freed. */
1536
1537void
1538rest_of_compilation (tree decl)
1539{
1540 rtx insns;
1541
1542 timevar_push (TV_REST_OF_COMPILATION);
1543
1544 /* Register rtl specific functions for cfg. */
1545 rtl_register_cfg_hooks ();
1546
1547 /* Now that we're out of the frontend, we shouldn't have any more
1548 CONCATs anywhere. */
1549 generating_concat_p = 0;
1550
1551 /* When processing delayed functions, prepare_function_start() won't
1552 have been run to re-initialize it. */
1553 cse_not_expected = ! optimize;
1554
1555 /* First, make sure that NOTE_BLOCK is set correctly for each
1556 NOTE_INSN_BLOCK_BEG/NOTE_INSN_BLOCK_END note. */
1557 if (!cfun->x_whole_function_mode_p)
1558 identify_blocks ();
1559
1560 /* In function-at-a-time mode, we do not attempt to keep the BLOCK
1561 tree in sensible shape. So, we just recalculate it here. */
1562 if (cfun->x_whole_function_mode_p)
1563 reorder_blocks ();
1564
1565 init_flow ();
1566
1567 if (rest_of_handle_inlining (decl))
1568 goto exit_rest_of_compilation;
1569
1570 /* If we're emitting a nested function, make sure its parent gets
1571 emitted as well. Doing otherwise confuses debug info. */
1572 {
1573 tree parent;
1574 for (parent = DECL_CONTEXT (current_function_decl);
1575 parent != NULL_TREE;
1576 parent = get_containing_scope (parent))
1577 if (TREE_CODE (parent) == FUNCTION_DECL)
1578 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
1579 }
1580
1581 /* We are now committed to emitting code for this function. Do any
1582 preparation, such as emitting abstract debug info for the inline
1583 before it gets mangled by optimization. */
1584 if (cgraph_function_possibly_inlined_p (decl))
1585 (*debug_hooks->outlining_inline_function) (decl);
1586
1587 /* Remove any notes we don't need. That will make iterating
1588 over the instruction sequence faster, and allow the garbage
1589 collector to reclaim the memory used by the notes. */
1590 remove_unnecessary_notes ();
1591 reorder_blocks ();
1592
1593 ggc_collect ();
1594
1595 /* Initialize some variables used by the optimizers. */
1596 init_function_for_compilation ();
1597
1598 if (! DECL_DEFER_OUTPUT (decl))
1599 TREE_ASM_WRITTEN (decl) = 1;
1600
1601 /* Now that integrate will no longer see our rtl, we need not
1602 distinguish between the return value of this function and the
1603 return value of called functions. Also, we can remove all SETs
1604 of subregs of hard registers; they are only here because of
1605 integrate. Also, we can now initialize pseudos intended to
1606 carry magic hard reg data throughout the function. */
1607 rtx_equal_function_value_matters = 0;
1608 purge_hard_subreg_sets (get_insns ());
1609
1610 /* Early return if there were errors. We can run afoul of our
1611 consistency checks, and there's not really much point in fixing them.
1612 Don't return yet if -Wreturn-type; we need to do cleanup_cfg. */
1613 if (((rtl_dump_and_exit || flag_syntax_only) && !warn_return_type)
1614 || errorcount || sorrycount)
1615 goto exit_rest_of_compilation;
1616
1617 timevar_push (TV_JUMP);
1618 open_dump_file (DFI_sibling, decl);
1619 insns = get_insns ();
1620 rebuild_jump_labels (insns);
1621 find_exception_handler_labels ();
1622 find_basic_blocks (insns, max_reg_num (), dump_file);
1623
1624 delete_unreachable_blocks ();
1625
1626 /* Turn NOTE_INSN_PREDICTIONs into branch predictions. */
1627 if (flag_guess_branch_prob)
1628 {
1629 timevar_push (TV_BRANCH_PROB);
1630 note_prediction_to_br_prob ();
1631 timevar_pop (TV_BRANCH_PROB);
1632 }
1633
1634 if (flag_optimize_sibling_calls)
1635 rest_of_handle_sibling_calls (insns);
1636
1637  /* We have to issue these warnings now, because CFG cleanups
1638 further down may destroy the required information. However, this
1639 must be done after the sibcall optimization pass because the barrier
1640 emitted for noreturn calls that are candidate for the optimization
1641 is folded into the CALL_PLACEHOLDER until after this pass, so the
1642 CFG is inaccurate. */
1643 check_function_return_warnings ();
1644
1645 timevar_pop (TV_JUMP);
1646
1647 insn_locators_initialize ();
1648 /* Complete generation of exception handling code. */
1649 if (doing_eh (0))
1650 {
1651 timevar_push (TV_JUMP);
1652 open_dump_file (DFI_eh, decl);
1653
1654 finish_eh_generation ();
1655
1656 close_dump_file (DFI_eh, print_rtl, get_insns ());
1657 timevar_pop (TV_JUMP);
1658 }
1659
1660 /* Delay emitting hard_reg_initial_value sets until after EH landing pad
1661 generation, which might create new sets. */
1662 emit_initial_value_sets ();
1663
1664#ifdef FINALIZE_PIC
1665 /* If we are doing position-independent code generation, now
1666 is the time to output special prologues and epilogues.
1667 We do not want to do this earlier, because it just clutters
1668 up inline functions with meaningless insns. */
1669 if (flag_pic)
1670 FINALIZE_PIC;
1671#endif
1672
1673 insns = get_insns ();
1674
1675 /* Copy any shared structure that should not be shared. */
1676 unshare_all_rtl (current_function_decl, insns);
1677
1678#ifdef SETJMP_VIA_SAVE_AREA
1679 /* This must be performed before virtual register instantiation.
1680     Please be aware that everything in the compiler that can look
1681 at the RTL up to this point must understand that REG_SAVE_AREA
1682 is just like a use of the REG contained inside. */
1683 if (current_function_calls_alloca)
1684 optimize_save_area_alloca (insns);
1685#endif
1686
1687 /* Instantiate all virtual registers. */
1688 instantiate_virtual_regs (current_function_decl, insns);
1689
1690 open_dump_file (DFI_jump, decl);
1691
1692 /* Always do one jump optimization pass to ensure that JUMP_LABEL fields
1693 are initialized and to compute whether control can drop off the end
1694 of the function. */
1695
1696 timevar_push (TV_JUMP);
1697 /* Turn NOTE_INSN_EXPECTED_VALUE into REG_BR_PROB. Do this
1698 before jump optimization switches branch directions. */
1699 if (flag_guess_branch_prob)
1700 expected_value_to_br_prob ();
1701
1702 reg_scan (insns, max_reg_num (), 0);
1703 rebuild_jump_labels (insns);
1704 find_basic_blocks (insns, max_reg_num (), dump_file);
1705 delete_trivially_dead_insns (insns, max_reg_num ());
1706 if (dump_file)
1707 dump_flow_info (dump_file);
1708 cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_PRE_LOOP
1709 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
1710
1711 if (optimize)
1712 {
1713 free_bb_for_insn ();
1714 copy_loop_headers (insns);
1715 find_basic_blocks (insns, max_reg_num (), dump_file);
1716 }
1717 purge_line_number_notes (insns);
1718
1719 timevar_pop (TV_JUMP);
1720 close_dump_file (DFI_jump, print_rtl, insns);
1721
1722 /* Now is when we stop if -fsyntax-only and -Wreturn-type. */
1723 if (rtl_dump_and_exit || flag_syntax_only || DECL_DEFER_OUTPUT (decl))
1724 goto exit_rest_of_compilation;
1725
1726 timevar_push (TV_JUMP);
1727
1728 if (optimize)
1729 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1730
1731 if (flag_delete_null_pointer_checks)
1732 rest_of_handle_null_pointer (decl, insns);
1733
1734 /* Jump optimization, and the removal of NULL pointer checks, may
1735 have reduced the number of instructions substantially. CSE, and
1736 future passes, allocate arrays whose dimensions involve the
1737 maximum instruction UID, so if we can reduce the maximum UID
1738 we'll save big on memory. */
1739 renumber_insns (dump_file);
1740 timevar_pop (TV_JUMP);
1741
1742 close_dump_file (DFI_jump, print_rtl_with_bb, insns);
1743
1744 ggc_collect ();
1745
1746 if (optimize > 0)
1747 rest_of_handle_cse (decl, insns);
1748
1749 rest_of_handle_addressof (decl, insns);
1750
1751 ggc_collect ();
1752
1753 if (optimize > 0)
1754 {
1755 if (flag_gcse)
1756 rest_of_handle_gcse (decl, insns);
1757
1758 if (flag_loop_optimize)
1759 rest_of_handle_loop_optimize (decl, insns);
1760
1761 if (flag_gcse)
1762 rest_of_handle_jump_bypass (decl, insns);
1763 }
1764
1765 timevar_push (TV_FLOW);
1766
1767 rest_of_handle_cfg (decl, insns);
1768
1769 if (optimize > 0
1770 || profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
1771 {
1772 rest_of_handle_branch_prob (decl, insns);
1773
1774 if (flag_branch_probabilities
1775 && flag_profile_values
1776 && flag_value_profile_transformations)
1777 rest_of_handle_value_profile_transformations (decl, insns);
1778
1779 /* Remove the death notes created for vpt. */
1780 if (flag_profile_values)
1781 count_or_remove_death_notes (NULL, 1);
1782 }
1783
1784 if (optimize > 0)
1785 rest_of_handle_if_conversion (decl, insns);
1786
1787 if (flag_tracer)
1788 rest_of_handle_tracer (decl, insns);
1789
1790   if (optimize > 0)
1791 rest_of_handle_loop2 (decl, insns);
1792
1793 if (flag_web)
1794 rest_of_handle_web (decl, insns);
1795
1796 if (flag_rerun_cse_after_loop)
1797 rest_of_handle_cse2 (decl, insns);
1798
1799 cse_not_expected = 1;
1800
1801 rest_of_handle_life (decl, insns);
1802
1803 if (optimize > 0)
1804 rest_of_handle_combine (decl, insns);
1805
1806 if (flag_if_conversion)
1807 rest_of_handle_if_after_combine (decl, insns);
1808
1809 if (optimize > 0 && (flag_regmove || flag_expensive_optimizations))
1810 rest_of_handle_regmove (decl, insns);
1811
1812 /* Do unconditional splitting before register allocation to allow machine
1813 description to add extra information not needed previously. */
1814 split_all_insns (1);
1815
1816#ifdef OPTIMIZE_MODE_SWITCHING
1817 timevar_push (TV_MODE_SWITCH);
1818
1819 no_new_pseudos = 0;
1820 optimize_mode_switching (NULL);
1821 no_new_pseudos = 1;
1822
1823 timevar_pop (TV_MODE_SWITCH);
1824#endif
1825
1826 /* Any of the several passes since flow1 will have munged register
1827 lifetime data a bit. We need it to be up to date for scheduling
1828 (see handling of reg_known_equiv in init_alias_analysis). */
1829 recompute_reg_usage (insns, !optimize_size);
1830
1831#ifdef INSN_SCHEDULING
1832 rest_of_handle_sched (decl, insns);
1833#endif
1834
1835 /* Determine if the current function is a leaf before running reload
1836 since this can impact optimizations done by the prologue and
1837 epilogue thus changing register elimination offsets. */
1838 current_function_is_leaf = leaf_function_p ();
1839
1840 timevar_push (TV_LOCAL_ALLOC);
1841 open_dump_file (DFI_lreg, decl);
1842
1843 if (flag_new_regalloc)
1844 {
1845 if (rest_of_handle_new_regalloc (decl, insns))
1846 goto exit_rest_of_compilation;
1847 }
1848 else
1849 {
1850 if (rest_of_handle_old_regalloc (decl, insns))
1851 goto exit_rest_of_compilation;
1852 }
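  /* -fnew-ra selects the experimental graph-coloring allocator above; the
     default path runs the classic local and global allocators followed by
     reload.  Either handler returns true when allocation or reload failed,
     in which case we bail out via exit_rest_of_compilation.  */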
1853
1854 ggc_collect ();
1855
1856 open_dump_file (DFI_postreload, decl);
1857
1858 /* Do a very simple CSE pass over just the hard registers. */
1859 if (optimize > 0)
1860 {
1861 timevar_push (TV_RELOAD_CSE_REGS);
1862 reload_cse_regs (insns);
1863 /* reload_cse_regs can eliminate potentially-trapping MEMs.
1864 Remove any EH edges associated with them. */
1865 if (flag_non_call_exceptions)
1866 purge_all_dead_edges (0);
1867 timevar_pop (TV_RELOAD_CSE_REGS);
1868 }
1869
1870 close_dump_file (DFI_postreload, print_rtl_with_bb, insns);
1871
1872 if (optimize > 0 && flag_gcse_after_reload)
1873 rest_of_handle_gcse2 (decl, insns);
1874
1875 /* Re-create the death notes which were deleted during reload. */
1876 timevar_push (TV_FLOW2);
1877 open_dump_file (DFI_flow2, decl);
1878
1879#ifdef ENABLE_CHECKING
1880 verify_flow_info ();
1881#endif
1882
1883 /* If optimizing, then go ahead and split insns now. */
1884#ifndef STACK_REGS
1885 if (optimize > 0)
1886#endif
1887 split_all_insns (0);
1888
1889 if (flag_branch_target_load_optimize)
1890 {
1891 open_dump_file (DFI_branch_target_load, decl);
1892
1893 branch_target_load_optimize (insns, false);
1894
1895 close_dump_file (DFI_branch_target_load, print_rtl_with_bb, insns);
1896
1897 ggc_collect ();
1898 }
1899
1900 if (optimize)
1901 cleanup_cfg (CLEANUP_EXPENSIVE);
1902
1903 /* On some machines, the prologue and epilogue code, or parts thereof,
1904 can be represented as RTL. Doing so lets us schedule insns between
1905 it and the rest of the code and also allows delayed branch
1906 scheduling to operate in the epilogue. */
1907 thread_prologue_and_epilogue_insns (insns);
1908 epilogue_completed = 1;
1909
1910 if (optimize)
1911 {
1912 life_analysis (insns, dump_file, PROP_POSTRELOAD);
1913 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE
1914 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
1915
1916 /* This is kind of a heuristic. We need to run combine_stack_adjustments
1917 even for machines with possibly nonzero RETURN_POPS_ARGS
1918 and ACCUMULATE_OUTGOING_ARGS. We expect that only ports having
1919 push instructions will have popping returns. */
1920#ifndef PUSH_ROUNDING
1921 if (!ACCUMULATE_OUTGOING_ARGS)
1922#endif
1923 combine_stack_adjustments ();
1924
1925 ggc_collect ();
1926 }
1927
1928 flow2_completed = 1;
1929
1930 close_dump_file (DFI_flow2, print_rtl_with_bb, insns);
1931 timevar_pop (TV_FLOW2);
1932
1933#ifdef HAVE_peephole2
1934 if (optimize > 0 && flag_peephole2)
1935 {
1936 timevar_push (TV_PEEPHOLE2);
1937 open_dump_file (DFI_peephole2, decl);
1938
1939 peephole2_optimize (dump_file);
1940
1941 close_dump_file (DFI_peephole2, print_rtl_with_bb, insns);
1942 timevar_pop (TV_PEEPHOLE2);
1943 }
1944#endif
1945
1946 open_dump_file (DFI_ce3, decl);
1947 if (optimize)
1948 /* Last attempt to optimize CFG, as scheduling, peepholing and insn
1949 splitting possibly introduced more crossjumping opportunities. */
1950 cleanup_cfg (CLEANUP_EXPENSIVE
1951 | CLEANUP_UPDATE_LIFE
1952 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
1953 if (flag_if_conversion2)
1954 {
1955 timevar_push (TV_IFCVT2);
1956
1957 if_convert (1);
1958
1959 timevar_pop (TV_IFCVT2);
1960 }
1961 close_dump_file (DFI_ce3, print_rtl_with_bb, insns);
1962
1963 if (optimize > 0)
1964 {
1965 if (flag_rename_registers || flag_cprop_registers)
1966 rest_of_handle_regrename (decl, insns);
1967
1968 rest_of_handle_reorder_blocks (decl, insns);
1969 }
1970
1971 if (flag_branch_target_load_optimize2)
1972 {
1973 /* Leave this a warning for now so that it is possible to experiment
1974 with running this pass twice. In 3.6, we should either make this
1975 an error, or use separate dump files. */
1976 if (flag_branch_target_load_optimize)
1977 warning ("branch target register load optimization is not intended "
1978 "to be run twice");
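	/* E.g. compiling with both -fbranch-target-load-optimize and
	   -fbranch-target-load-optimize2 reaches this point; the second
	   run is still performed below.  */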
1979
1980 open_dump_file (DFI_branch_target_load, decl);
1981
1982 branch_target_load_optimize (insns, true);
1983
1984 close_dump_file (DFI_branch_target_load, print_rtl_with_bb, insns);
1985
1986 ggc_collect ();
1987 }
1988
1989#ifdef INSN_SCHEDULING
1990 if (optimize > 0 && flag_schedule_insns_after_reload)
1991 rest_of_handle_sched2 (decl, insns);
1992#endif
1993
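  /* Record whether this function ended up using only registers that are
     permitted in leaf functions on the target, so that final can apply the
     target's leaf register remapping.  */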
1994#ifdef LEAF_REGISTERS
1995 current_function_uses_only_leaf_regs
1996 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
1997#endif
1998
1999#ifdef STACK_REGS
2000 rest_of_handle_stack_regs (decl, insns);
2001#endif
2002
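  /* Choose the alignment to request for labels, based on how frequently the
     code following each label is expected to be executed.  */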
2003 compute_alignments ();
2004
2005 if (flag_var_tracking)
2006 rest_of_handle_variable_tracking (decl, insns);
2007
2008 /* CFG is no longer maintained up-to-date. */
2009 free_bb_for_insn ();
2010
2011 if (targetm.machine_dependent_reorg != 0)
2012 rest_of_handle_machine_reorg (decl, insns);
2013
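  /* Tidy the insn stream for final output: drop line-number notes that no
     longer carry information and normalize the placement of barriers.  */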
2014 purge_line_number_notes (insns);
2015 cleanup_barriers ();
2016
2017#ifdef DELAY_SLOTS
2018 if (optimize > 0 && flag_delayed_branch)
2019 rest_of_handle_delay_slots (decl, insns);
2020#endif
2021
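  /* On targets with insn length attributes (and no stack registers), split
     any insns that still need it, without maintaining flow information, so
     that shorten_branches below sees the final insn sequence.  */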
2022#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
2023 timevar_push (TV_SHORTEN_BRANCH);
2024 split_all_insns_noflow ();
2025 timevar_pop (TV_SHORTEN_BRANCH);
2026#endif
2027
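  /* Convert the exception handling information into the region-range form
     used when the EH tables are emitted.  */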
2028 convert_to_eh_region_ranges ();
2029
2030 /* Shorten branches. */
2031 timevar_push (TV_SHORTEN_BRANCH);
2032 shorten_branches (get_insns ());
2033 timevar_pop (TV_SHORTEN_BRANCH);
2034
2035 set_nothrow_function_flags ();
2036 if (current_function_nothrow)
2037 /* Now we know that this can't throw; set the flag for the benefit
2038 of other functions later in this translation unit. */
2039 TREE_NOTHROW (current_function_decl) = 1;
2040
2041 rest_of_handle_final (decl, insns);
2042
2043 /* Write DBX symbols if requested. */
2044
2045 /* Note that for those inline functions where we don't initially
2046 know for certain that we will be generating an out-of-line copy,
2047 the first invocation of this routine (rest_of_compilation) will
2048 skip over this code by doing a `goto exit_rest_of_compilation;'.
2049 Later on, wrapup_global_declarations will (indirectly) call
2050 rest_of_compilation again for those inline functions that need
2051 to have out-of-line copies generated. During that call, we
2052 *will* be routed past here. */
2053
2054 timevar_push (TV_SYMOUT);
2055 (*debug_hooks->function_decl) (decl);
2056 timevar_pop (TV_SYMOUT);
2057
2058 exit_rest_of_compilation:
2059
2060 coverage_end_function ();
2061
2062 /* In case the function was not output,
2063 don't leave any temporary anonymous types
2064 queued up for sdb output. */
2065#ifdef SDB_DEBUGGING_INFO
2066 if (write_symbols == SDB_DEBUG)
2067 sdbout_types (NULL_TREE);
2068#endif
2069
2070 reload_completed = 0;
2071 epilogue_completed = 0;
2072 flow2_completed = 0;
2073 no_new_pseudos = 0;
2074
2075 timevar_push (TV_FINAL);
2076
2077 /* Clear out the insn_length contents now that they are no
2078 longer valid. */
2079 init_insn_lengths ();
2080
2081 /* Show no temporary slots allocated. */
2082 init_temp_slots ();
2083
2084 free_basic_block_vars (0);
2085 free_bb_for_insn ();
2086
2087 timevar_pop (TV_FINAL);
2088
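  /* For functions whose definition binds locally, record the stack alignment
     the incoming stack pointer is expected to have; a recursive call that
     needs more alignment than the default raises the recorded value.  */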
2089 if (targetm.binds_local_p (current_function_decl))
2090 {
2091 int pref = cfun->preferred_stack_boundary;
2092 if (cfun->recursive_call_emit
2093 && cfun->stack_alignment_needed > cfun->preferred_stack_boundary)
2094 pref = cfun->stack_alignment_needed;
2095 cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
2096 = pref;
2097 }
2098
2099 /* Make sure volatile mem refs aren't considered valid operands for
2100 arithmetic insns. We must call this here if this is a nested inline
2101 function, since the above code leaves us in the init_recog state
2102 (from final.c), and the function context push/pop code does not
2103 save/restore volatile_ok.
2104
2105 ??? Maybe it isn't necessary for expand_start_function to call this
2106 anymore if we do it here? */
2107
2108 init_recog_no_volatile ();
2109
2110 /* We're done with this function. Free up memory if we can. */
2111 free_after_parsing (cfun);
2112 if (! DECL_DEFER_OUTPUT (decl))
2113 {
2114 free_after_compilation (cfun);
2115 DECL_STRUCT_FUNCTION (decl) = 0;
2116 }
2117 cfun = 0;
2118
2119 ggc_collect ();
2120
2121 timevar_pop (TV_REST_OF_COMPILATION);
2122}
2123
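/* With -funit-at-a-time, open the cgraph dump file up front and stash it,
   since dump_file itself is reused by the per-function dumps.  */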
2124void
2125init_optimization_passes (void)
2126{
2127 if (flag_unit_at_a_time)
2128 {
2129 open_dump_file (DFI_cgraph, NULL);
2130 cgraph_dump_file = dump_file;
2131 dump_file = NULL;
2132 }
2133}
2134
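/* Emit the summary output that can only be produced once every function has
   been compiled: the final branch-probability data, the combiner's total
   statistics, the cgraph dump, and the trailers of any graph dump files.  */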
2135void
2136finish_optimization_passes (void)
2137{
2138 if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
2139 {
2140 timevar_push (TV_DUMP);
2141 open_dump_file (DFI_bp, NULL);
2142
2143 end_branch_prob ();
2144
2145 close_dump_file (DFI_bp, NULL, NULL_RTX);
2146 timevar_pop (TV_DUMP);
2147 }
2148
2149 if (optimize > 0 && open_dump_file (DFI_combine, NULL))
2150 {
2151 timevar_push (TV_DUMP);
2152 dump_combine_total_stats (dump_file);
2153 close_dump_file (DFI_combine, NULL, NULL_RTX);
2154 timevar_pop (TV_DUMP);
2155 }
2156
2157 if (flag_unit_at_a_time)
2158 {
2159 dump_file = cgraph_dump_file;
2160 cgraph_dump_file = NULL;
2161 close_dump_file (DFI_cgraph, NULL, NULL_RTX);
2162 }
2163
2164 /* Do whatever is necessary to finish printing the graphs. */
2165 if (graph_dump_format != no_graph)
2166 {
2167 int i;
2168
2169 for (i = 0; i < (int) DFI_MAX; ++i)
2170 if (dump_file_tbl[i].initialized && dump_file_tbl[i].graph_dump_p)
2171 {
2172 char seq[16];
2173 char *suffix;
2174
2175 sprintf (seq, DUMPFILE_FORMAT, i);
2176 suffix = concat (seq, dump_file_tbl[i].extension, NULL);
2177 finish_graph_dump_file (dump_base_name, suffix);
2178 free (suffix);
2179 }
2180 }
2181
2182}
2183
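/* Enable the RTL dump file selected by the -d<LETTER> debug switch; the
   letter 'a' enables every RTL dump.  Returns true if LETTER matched at
   least one entry in dump_file_tbl.  */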
2184bool
2185enable_rtl_dump_file (int letter)
2186{
2187 bool matched = false;
2188 int i;
2189
2190 if (letter == 'a')
2191 {
2192 for (i = 0; i < (int) DFI_MAX; ++i)
2193 dump_file_tbl[i].enabled = 1;
2194 matched = true;
2195 }
2196 else
2197 {
2198 for (i = 0; i < (int) DFI_MAX; ++i)
2199 if (letter == dump_file_tbl[i].debug_switch)
2200 {
2201 dump_file_tbl[i].enabled = 1;
2202 matched = true;
2203 }
2204 }
2205
2206 return matched;
2207}