]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/passes.c
eaf4de5df4123e12c68b7016190ba77be2c93316
[thirdparty/gcc.git] / gcc / passes.c
1 /* Top level of GCC compilers (cc1, cc1plus, etc.)
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This is the top level of cc1/c++.
23 It parses command args, opens files, invokes the various passes
24 in the proper order, and counts the time used by each.
25 Error messages and low-level interface to malloc also handled here. */
26
27 #include "config.h"
28 #undef FLOAT /* This is for hpux. They should change hpux. */
29 #undef FFS /* Some systems define this in param.h. */
30 #include "system.h"
31 #include "coretypes.h"
32 #include "tm.h"
33 #include <signal.h>
34
35 #ifdef HAVE_SYS_RESOURCE_H
36 # include <sys/resource.h>
37 #endif
38
39 #ifdef HAVE_SYS_TIMES_H
40 # include <sys/times.h>
41 #endif
42
43 #include "line-map.h"
44 #include "input.h"
45 #include "tree.h"
46 #include "rtl.h"
47 #include "tm_p.h"
48 #include "flags.h"
49 #include "insn-attr.h"
50 #include "insn-config.h"
51 #include "insn-flags.h"
52 #include "hard-reg-set.h"
53 #include "recog.h"
54 #include "output.h"
55 #include "except.h"
56 #include "function.h"
57 #include "toplev.h"
58 #include "expr.h"
59 #include "basic-block.h"
60 #include "intl.h"
61 #include "ggc.h"
62 #include "graph.h"
63 #include "loop.h"
64 #include "regs.h"
65 #include "timevar.h"
66 #include "diagnostic.h"
67 #include "params.h"
68 #include "reload.h"
69 #include "dwarf2asm.h"
70 #include "integrate.h"
71 #include "real.h"
72 #include "debug.h"
73 #include "target.h"
74 #include "langhooks.h"
75 #include "cfglayout.h"
76 #include "cfgloop.h"
77 #include "hosthooks.h"
78 #include "cgraph.h"
79 #include "opts.h"
80 #include "coverage.h"
81 #include "value-prof.h"
82 #include "alloc-pool.h"
83
84 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
85 #include "dwarf2out.h"
86 #endif
87
88 #if defined(DBX_DEBUGGING_INFO) || defined(XCOFF_DEBUGGING_INFO)
89 #include "dbxout.h"
90 #endif
91
92 #ifdef SDB_DEBUGGING_INFO
93 #include "sdbout.h"
94 #endif
95
96 #ifdef XCOFF_DEBUGGING_INFO
97 #include "xcoffout.h" /* Needed for external data
98 declarations for e.g. AIX 4.x. */
99 #endif
100
101 #ifndef HAVE_conditional_execution
102 #define HAVE_conditional_execution 0
103 #endif
104
105 /* Format to use to print dumpfile index value */
106 #ifndef DUMPFILE_FORMAT
107 #define DUMPFILE_FORMAT ".%02d."
108 #endif
109
/* Describes a dump file.  One entry exists per pass (see
   dump_file_index below); EXTENSION and DEBUG_SWITCH are fixed at
   compile time, while ENABLED and INITIALIZED are mutated at run time
   by option processing and open_dump_file respectively.  */

struct dump_file_info
{
  /* The unique extension to apply, e.g. ".jump".  */
  const char *const extension;

  /* The -d<c> character that enables this dump file.  */
  char const debug_switch;

  /* True if there is a corresponding graph dump file.  */
  char const graph_dump_p;

  /* True if the user selected this dump.  */
  char enabled;

  /* True if the files have been initialized (ie truncated).  */
  char initialized;
};
129
/* Enumerate the extant dump files.  The enumerators are used as
   indices into dump_file_tbl and also as the numeric component of the
   dump file name (via DUMPFILE_FORMAT), so the order here must match
   dump_file_tbl and should follow pass order.  */

enum dump_file_index
{
  DFI_cgraph,
  DFI_rtl,
  DFI_sibling,
  DFI_eh,
  DFI_jump,
  DFI_null,
  DFI_cse,
  DFI_addressof,
  DFI_gcse,
  DFI_loop,
  DFI_bypass,
  DFI_cfg,
  DFI_bp,
  DFI_vpt,
  DFI_ce1,
  DFI_tracer,
  DFI_loop2,
  DFI_web,
  DFI_cse2,
  DFI_life,
  DFI_combine,
  DFI_ce2,
  DFI_regmove,
  DFI_sched,
  DFI_lreg,
  DFI_greg,
  DFI_postreload,
  DFI_gcse2,
  DFI_flow2,
  DFI_peephole2,
  DFI_ce3,
  DFI_rnreg,
  DFI_bbro,
  DFI_branch_target_load,
  DFI_sched2,
  DFI_stack,
  DFI_vartrack,
  DFI_mach,
  DFI_dbr,
  /* Number of entries; also the size of dump_file_tbl.  */
  DFI_MAX
};
175
/* Describes all the dump files.  Should be kept in order of the
   pass and in sync with dump_file_index above.  Initializer fields
   are { extension, debug_switch, graph_dump_p, enabled, initialized }
   per struct dump_file_info; a few passes deliberately share one -d
   enable letter (marked below).

   Remaining -d letters:

	"            e        m   q         "
	"              K   O Q     WXY      "
*/

static struct dump_file_info dump_file_tbl[DFI_MAX] =
{
  { "cgraph",	'U', 0, 0, 0 },
  { "rtl",	'r', 0, 0, 0 },
  { "sibling",  'i', 0, 0, 0 },
  { "eh",	'h', 0, 0, 0 },
  { "jump",	'j', 0, 0, 0 },
  { "null",	'u', 0, 0, 0 },
  { "cse",	's', 0, 0, 0 },
  { "addressof", 'F', 0, 0, 0 },
  { "gcse",	'G', 1, 0, 0 },
  { "loop",	'L', 1, 0, 0 },
  { "bypass",   'G', 1, 0, 0 }, /* Yes, duplicate enable switch.  */
  { "cfg",	'f', 1, 0, 0 },
  { "bp",	'b', 1, 0, 0 },
  { "vpt",	'V', 1, 0, 0 },
  { "ce1",	'C', 1, 0, 0 },
  { "tracer",	'T', 1, 0, 0 },
  { "loop2",	'L', 1, 0, 0 },
  { "web",      'Z', 0, 0, 0 },
  { "cse2",	't', 1, 0, 0 },
  { "life",	'f', 1, 0, 0 },	/* Yes, duplicate enable switch.  */
  { "combine",	'c', 1, 0, 0 },
  { "ce2",	'C', 1, 0, 0 },
  { "regmove",	'N', 1, 0, 0 },
  { "sched",	'S', 1, 0, 0 },
  { "lreg",	'l', 1, 0, 0 },
  { "greg",	'g', 1, 0, 0 },
  { "postreload", 'o', 1, 0, 0 },
  { "gcse2",	'J', 0, 0, 0 },
  { "flow2",	'w', 1, 0, 0 },
  { "peephole2", 'z', 1, 0, 0 },
  { "ce3",	'E', 1, 0, 0 },
  { "rnreg",	'n', 1, 0, 0 },
  { "bbro",	'B', 1, 0, 0 },
  { "btl",	'd', 1, 0, 0 }, /* Yes, duplicate enable switch.  */
  { "sched2",	'R', 1, 0, 0 },
  { "stack",	'k', 1, 0, 0 },
  { "vartrack",	'V', 1, 0, 0 }, /* Yes, duplicate enable switch.  */
  { "mach",	'M', 1, 0, 0 },
  { "dbr",	'd', 0, 0, 0 },
};
227
228 /* Routine to open a dump file. Return true if the dump file is enabled. */
229
230 static int
231 open_dump_file (enum dump_file_index index, tree decl)
232 {
233 char *dump_name;
234 const char *open_arg;
235 char seq[16];
236
237 if (! dump_file_tbl[index].enabled)
238 return 0;
239
240 timevar_push (TV_DUMP);
241 if (dump_file != NULL)
242 fclose (dump_file);
243
244 sprintf (seq, DUMPFILE_FORMAT, index);
245
246 if (! dump_file_tbl[index].initialized)
247 {
248 /* If we've not initialized the files, do so now. */
249 if (graph_dump_format != no_graph
250 && dump_file_tbl[index].graph_dump_p)
251 {
252 dump_name = concat (seq, dump_file_tbl[index].extension, NULL);
253 clean_graph_dump_file (dump_base_name, dump_name);
254 free (dump_name);
255 }
256 dump_file_tbl[index].initialized = 1;
257 open_arg = "w";
258 }
259 else
260 open_arg = "a";
261
262 dump_name = concat (dump_base_name, seq,
263 dump_file_tbl[index].extension, NULL);
264
265 dump_file = fopen (dump_name, open_arg);
266 if (dump_file == NULL)
267 fatal_error ("can't open %s: %m", dump_name);
268
269 free (dump_name);
270
271 if (decl)
272 fprintf (dump_file, "\n;; Function %s%s\n\n",
273 lang_hooks.decl_printable_name (decl, 2),
274 cfun->function_frequency == FUNCTION_FREQUENCY_HOT
275 ? " (hot)"
276 : cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED
277 ? " (unlikely executed)"
278 : "");
279
280 timevar_pop (TV_DUMP);
281 return 1;
282 }
283
284 /* Routine to close a dump file. */
285
286 static void
287 close_dump_file (enum dump_file_index index,
288 void (*func) (FILE *, rtx),
289 rtx insns)
290 {
291 if (! dump_file)
292 return;
293
294 timevar_push (TV_DUMP);
295 if (insns
296 && graph_dump_format != no_graph
297 && dump_file_tbl[index].graph_dump_p)
298 {
299 char seq[16];
300 char *suffix;
301
302 sprintf (seq, DUMPFILE_FORMAT, index);
303 suffix = concat (seq, dump_file_tbl[index].extension, NULL);
304 print_rtl_graph_with_bb (dump_base_name, suffix, insns);
305 free (suffix);
306 }
307
308 if (func && insns)
309 func (dump_file, insns);
310
311 fflush (dump_file);
312 fclose (dump_file);
313
314 dump_file = NULL;
315 timevar_pop (TV_DUMP);
316 }
317
/* This is called from various places for FUNCTION_DECL, VAR_DECL,
   and TYPE_DECL nodes.

   This does nothing for local (non-static) variables, unless the
   variable is a register variable with an ASMSPEC.  In that case, or
   if the variable is not an automatic, it sets up the RTL and
   outputs any assembler code (label definition, storage allocation
   and initialization).

   DECL is the declaration.  If ASMSPEC is nonzero, it specifies
   the assembler symbol name to be used.  TOP_LEVEL is nonzero
   if this declaration is not within a function.  AT_END is nonzero
   when called at the end of compilation, at which point tentative
   definitions must finally be emitted.  */

void
rest_of_decl_compilation (tree decl,
			  const char *asmspec,
			  int top_level,
			  int at_end)
{
  /* We deferred calling assemble_alias so that we could collect
     other attributes such as visibility.  Emit the alias now.  */
  {
    tree alias;
    alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
    if (alias)
      {
	/* The attribute value is a string naming the alias target;
	   turn it into an identifier for assemble_alias.  */
	alias = TREE_VALUE (TREE_VALUE (alias));
	alias = get_identifier (TREE_STRING_POINTER (alias));
	assemble_alias (decl, alias);
      }
  }

  /* Forward declarations for nested functions are not "external",
     but we need to treat them as if they were.  */
  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
      || TREE_CODE (decl) == FUNCTION_DECL)
    {
      timevar_push (TV_VARCONST);

      if (asmspec)
	make_decl_rtl (decl, asmspec);

      /* Don't output anything when a tentative file-scope definition
	 is seen.  But at end of compilation, do output code for them.

	 We do output all variables when unit-at-a-time is active and rely on
	 callgraph code to defer them except for forward declarations
	 (see gcc.c-torture/compile/920624-1.c) */
      if ((at_end
	   || !DECL_DEFER_OUTPUT (decl)
	   || (flag_unit_at_a_time && DECL_INITIAL (decl)))
	  && !DECL_EXTERNAL (decl))
	{
	  /* In unit-at-a-time mode, top-level variables are handed to
	     the callgraph varpool rather than assembled immediately.  */
	  if (flag_unit_at_a_time && !cgraph_global_info_ready
	      && TREE_CODE (decl) != FUNCTION_DECL && top_level)
	    cgraph_varpool_finalize_decl (decl);
	  else
	    assemble_variable (decl, top_level, at_end, 0);
	}

#ifdef ASM_FINISH_DECLARE_OBJECT
      if (decl == last_assemble_variable_decl)
	{
	  ASM_FINISH_DECLARE_OBJECT (asm_out_file, decl,
				     top_level, at_end);
	}
#endif

      timevar_pop (TV_VARCONST);
    }
  else if (DECL_REGISTER (decl) && asmspec != 0)
    {
      /* Local register variable with an ASMSPEC: validate the register
	 name; on failure demote it to an ordinary automatic.  */
      if (decode_reg_name (asmspec) >= 0)
	{
	  SET_DECL_RTL (decl, NULL_RTX);
	  make_decl_rtl (decl, asmspec);
	}
      else
	{
	  error ("invalid register name `%s' for register variable", asmspec);
	  DECL_REGISTER (decl) = 0;
	  if (!top_level)
	    expand_decl (decl);
	}
    }
  else if (TREE_CODE (decl) == TYPE_DECL)
    {
      /* Type declarations only feed the debug-info machinery.  */
      timevar_push (TV_SYMOUT);
      debug_hooks->type_decl (decl, !top_level);
      timevar_pop (TV_SYMOUT);
    }
}
410
411 /* Called after finishing a record, union or enumeral type. */
412
413 void
414 rest_of_type_compilation (tree type, int toplev)
415 {
416 /* Avoid confusing the debug information machinery when there are
417 errors. */
418 if (errorcount != 0 || sorrycount != 0)
419 return;
420
421 timevar_push (TV_SYMOUT);
422 debug_hooks->type_decl (TYPE_STUB_DECL (type), !toplev);
423 timevar_pop (TV_SYMOUT);
424 }
425
/* Turn the RTL into assembly.  DECL is the FUNCTION_DECL being
   compiled and INSNS its insn chain; output goes to asm_out_file.
   Also emits the function's exception table and releases the
   flow/regset memory for the function.  */
static void
rest_of_handle_final (tree decl, rtx insns)
{
  timevar_push (TV_FINAL);
  {
    rtx x;
    const char *fnname;

    /* Get the function's name, as described by its RTL.  This may be
       different from the DECL_NAME name used in the source file.  */

    x = DECL_RTL (decl);
    if (GET_CODE (x) != MEM)
      abort ();
    x = XEXP (x, 0);
    if (GET_CODE (x) != SYMBOL_REF)
      abort ();
    fnname = XSTR (x, 0);

    assemble_start_function (decl, fnname);
    final_start_function (insns, asm_out_file, optimize);
    final (insns, asm_out_file, optimize, 0);
    final_end_function ();

#ifdef IA64_UNWIND_INFO
    /* ??? The IA-64 ".handlerdata" directive must be issued before
       the ".endp" directive that closes the procedure descriptor.  */
    output_function_exception_table ();
#endif

    assemble_end_function (decl, fnname);

#ifndef IA64_UNWIND_INFO
    /* Otherwise, it feels unclean to switch sections in the middle.  */
    output_function_exception_table ();
#endif

    if (! quiet_flag)
      fflush (asm_out_file);

    /* Release all memory allocated by flow.  */
    free_basic_block_vars ();

    /* Release all memory held by regsets now.  */
    regset_release_memory ();
  }
  timevar_pop (TV_FINAL);

  ggc_collect ();
}
477
#ifdef DELAY_SLOTS
/* Run delay slot optimization.  DECL is the function being compiled
   and INSNS its insn chain; dbr_schedule rewrites INSNS to fill
   branch delay slots, logging to the "dbr" dump if enabled.  */
static void
rest_of_handle_delay_slots (tree decl, rtx insns)
{
  timevar_push (TV_DBR_SCHED);
  open_dump_file (DFI_dbr, decl);

  dbr_schedule (insns, dump_file);

  close_dump_file (DFI_dbr, print_rtl, insns);
  timevar_pop (TV_DBR_SCHED);

  ggc_collect ();
}
#endif
494
#ifdef STACK_REGS
/* Convert register usage from flat register file usage to a stack
   register file.  DECL/INSNS are the current function and its insn
   chain.  When reg_to_stack reports changes and we are optimizing,
   the CFG is re-cleaned and blocks possibly reordered.  */
static void
rest_of_handle_stack_regs (tree decl, rtx insns)
{
#if defined (HAVE_ATTR_length)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
#ifdef INSN_SCHEDULING
  if (optimize && !flag_schedule_insns_after_reload)
#else
  if (optimize)
#endif
    {
      timevar_push (TV_SHORTEN_BRANCH);
      split_all_insns (1);
      timevar_pop (TV_SHORTEN_BRANCH);
    }
#endif

  timevar_push (TV_REG_STACK);
  open_dump_file (DFI_stack, decl);

  if (reg_to_stack (insns, dump_file) && optimize)
    {
      /* Only reorder blocks when the post-regstack cleanup actually
	 changed something and block reordering was requested.  */
      if (cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK
		       | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0))
	  && (flag_reorder_blocks || flag_reorder_blocks_and_partition))
	{
	  reorder_basic_blocks ();
	  cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK);
	}
    }

  close_dump_file (DFI_stack, print_rtl_with_bb, insns);
  timevar_pop (TV_REG_STACK);

  ggc_collect ();
}
#endif
538
/* Track the variables, ie. compute where the variable is stored at
   each position in function.  DECL/INSNS identify the function for
   dump purposes; the real work is done by variable_tracking_main on
   the current function's global state.  */
static void
rest_of_handle_variable_tracking (tree decl, rtx insns)
{
  timevar_push (TV_VAR_TRACKING);
  open_dump_file (DFI_vartrack, decl);

  variable_tracking_main ();

  close_dump_file (DFI_vartrack, print_rtl_with_bb, insns);
  timevar_pop (TV_VAR_TRACKING);
}
551
/* Machine dependent reorg pass: invokes the target's
   machine_dependent_reorg hook on the current function, dumping the
   result to the "mach" dump file if enabled.  (The original comment
   said "independent"; the hook called is the machine-DEPENDENT one.)  */
static void
rest_of_handle_machine_reorg (tree decl, rtx insns)
{
  timevar_push (TV_MACH_DEP);
  open_dump_file (DFI_mach, decl);

  targetm.machine_dependent_reorg ();

  close_dump_file (DFI_mach, print_rtl, insns);
  timevar_pop (TV_MACH_DEP);

  ggc_collect ();
}
566
567
/* Run new register allocator.  Return TRUE if we must exit
   rest_of_compilation upon return (i.e. when reload failed).

   NOTE(review): this pops TV_LOCAL_ALLOC and closes the DFI_lreg dump
   without pushing/opening them here — they appear to be entered by
   the caller before dispatching to this function; confirm against
   rest_of_compilation.  */
static bool
rest_of_handle_new_regalloc (tree decl, rtx insns)
{
  int failure;

  delete_trivially_dead_insns (insns, max_reg_num ());
  reg_alloc ();

  timevar_pop (TV_LOCAL_ALLOC);
  if (dump_file_tbl[DFI_lreg].enabled)
    {
      timevar_push (TV_DUMP);

      close_dump_file (DFI_lreg, NULL, NULL);
      timevar_pop (TV_DUMP);
    }

  /* XXX clean up the whole mess to bring live info in shape again.  */
  timevar_push (TV_GLOBAL_ALLOC);
  open_dump_file (DFI_greg, decl);

  build_insn_chain (insns);
  failure = reload (insns, 0);

  timevar_pop (TV_GLOBAL_ALLOC);

  if (dump_file_tbl[DFI_greg].enabled)
    {
      timevar_push (TV_DUMP);

      dump_global_regs (dump_file);

      close_dump_file (DFI_greg, print_rtl_with_bb, insns);
      timevar_pop (TV_DUMP);
    }

  if (failure)
    return true;

  /* Reload succeeded: from here on all registers are hard registers.  */
  reload_completed = 1;

  return false;
}
613
/* Run old register allocator.  Return TRUE if we must exit
   rest_of_compilation upon return (nonzero failure from global_alloc
   or reload).

   NOTE(review): like the new-regalloc path, this pops TV_LOCAL_ALLOC
   and closes DFI_lreg that were apparently entered by the caller —
   confirm against rest_of_compilation.  */
static bool
rest_of_handle_old_regalloc (tree decl, rtx insns)
{
  int failure;
  int rebuild_notes;

  /* Allocate the reg_renumber array.  */
  allocate_reg_info (max_regno, FALSE, TRUE);

  /* And the reg_equiv_memory_loc array.  */
  reg_equiv_memory_loc = xcalloc (max_regno, sizeof (rtx));

  allocate_initial_values (reg_equiv_memory_loc);

  regclass (insns, max_reg_num (), dump_file);
  rebuild_notes = local_alloc ();

  timevar_pop (TV_LOCAL_ALLOC);

  /* Local allocation may have turned an indirect jump into a direct
     jump.  If so, we must rebuild the JUMP_LABEL fields of jumping
     instructions.  */
  if (rebuild_notes)
    {
      timevar_push (TV_JUMP);

      rebuild_jump_labels (insns);
      purge_all_dead_edges (0);

      timevar_pop (TV_JUMP);
    }

  if (dump_file_tbl[DFI_lreg].enabled)
    {
      timevar_push (TV_DUMP);

      dump_flow_info (dump_file);
      dump_local_alloc (dump_file);

      close_dump_file (DFI_lreg, print_rtl_with_bb, insns);
      timevar_pop (TV_DUMP);
    }

  ggc_collect ();

  timevar_push (TV_GLOBAL_ALLOC);
  open_dump_file (DFI_greg, decl);

  /* If optimizing, allocate remaining pseudo-regs.  Do the reload
     pass fixing up any insns that are invalid.  */

  if (optimize)
    failure = global_alloc (dump_file);
  else
    {
      /* Not optimizing: skip global allocation and go straight to
	 reload, which will assign stack slots to remaining pseudos.  */
      build_insn_chain (insns);
      failure = reload (insns, 0);
    }

  timevar_pop (TV_GLOBAL_ALLOC);

  if (dump_file_tbl[DFI_greg].enabled)
    {
      timevar_push (TV_DUMP);

      dump_global_regs (dump_file);

      close_dump_file (DFI_greg, print_rtl_with_bb, insns);
      timevar_pop (TV_DUMP);
    }

  return failure;
}
689
/* Run the regrename and cprop passes.  Each sub-pass runs only when
   its flag (-frename-registers / -fcprop-registers) is set; both
   share the "rnreg" dump file.  */
static void
rest_of_handle_regrename (tree decl, rtx insns)
{
  timevar_push (TV_RENAME_REGISTERS);
  open_dump_file (DFI_rnreg, decl);

  if (flag_rename_registers)
    regrename_optimize ();
  if (flag_cprop_registers)
    copyprop_hardreg_forward ();

  close_dump_file (DFI_rnreg, print_rtl_with_bb, insns);
  timevar_pop (TV_RENAME_REGISTERS);
}
705
/* Reorder basic blocks.  Runs tracer and/or reorder_basic_blocks
   depending on the -ftracer / -freorder-blocks family of flags, with
   CFG cleanups before and after; life info is refreshed either inline
   (non-conditional-execution targets) or once at the end.  */
static void
rest_of_handle_reorder_blocks (tree decl, rtx insns)
{
  bool changed;
  open_dump_file (DFI_bbro, decl);

  /* Last attempt to optimize CFG, as scheduling, peepholing and insn
     splitting possibly introduced more crossjumping opportunities.  */
  changed = cleanup_cfg (CLEANUP_EXPENSIVE
			 | (!HAVE_conditional_execution
			    ? CLEANUP_UPDATE_LIFE : 0));

  if (flag_sched2_use_traces && flag_schedule_insns_after_reload)
    tracer ();
  if (flag_reorder_blocks || flag_reorder_blocks_and_partition)
    reorder_basic_blocks ();
  if (flag_reorder_blocks || flag_reorder_blocks_and_partition
      || (flag_sched2_use_traces && flag_schedule_insns_after_reload))
    changed |= cleanup_cfg (CLEANUP_EXPENSIVE
			    | (!HAVE_conditional_execution
			       ? CLEANUP_UPDATE_LIFE : 0));

  /* On conditional execution targets we can not update the life cheaply, so
     we deffer the updating to after both cleanups.  This may lose some cases
     but should not be terribly bad.  */
  if (changed && HAVE_conditional_execution)
    update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
		      PROP_DEATH_NOTES);
  close_dump_file (DFI_bbro, print_rtl_with_bb, insns);
}
737
#ifdef INSN_SCHEDULING
/* Run instruction scheduler (the first, pre-reload pass).  Only does
   work when optimizing and -fschedule-insns is enabled.  */
static void
rest_of_handle_sched (tree decl, rtx insns)
{
  timevar_push (TV_SCHED);

  /* Print function header into sched dump now
     because doing the sched analysis makes some of the dump.  */
  if (optimize > 0 && flag_schedule_insns)
    {
      open_dump_file (DFI_sched, decl);

      /* Do control and data sched analysis,
	 and write some of the results to dump file.  */

      schedule_insns (dump_file);

      close_dump_file (DFI_sched, print_rtl_with_bb, insns);
    }
  timevar_pop (TV_SCHED);

  ggc_collect ();
}
762
/* Run second scheduling pass after reload.  Splits all insns first;
   then schedules either by extended basic blocks / traces (when
   -fsched2-use-superblocks or -fsched2-use-traces is set) or by the
   normal region scheduler.  */
static void
rest_of_handle_sched2 (tree decl, rtx insns)
{
  timevar_push (TV_SCHED2);
  open_dump_file (DFI_sched2, decl);

  /* Do control and data sched analysis again,
     and write some more of the results to dump file.  */

  split_all_insns (1);

  if (flag_sched2_use_superblocks || flag_sched2_use_traces)
    {
      schedule_ebbs (dump_file);
      /* No liveness updating code yet, but it should be easy to do.
	 reg-stack recomputes the liveness when needed for now.  */
      count_or_remove_death_notes (NULL, 1);
      cleanup_cfg (CLEANUP_EXPENSIVE);
    }
  else
    schedule_insns (dump_file);

  close_dump_file (DFI_sched2, print_rtl_with_bb, insns);
  timevar_pop (TV_SCHED2);

  ggc_collect ();
}
#endif
792
/* Run the post-reload GCSE pass, then rebuild jump labels and drop
   trivially dead insns it exposed.  Dumps to "gcse2" if enabled.  */
static void
rest_of_handle_gcse2 (tree decl, rtx insns)
{
  open_dump_file (DFI_gcse2, decl);

  gcse_after_reload_main (insns, dump_file);
  rebuild_jump_labels (insns);
  delete_trivially_dead_insns (insns, max_reg_num ());
  close_dump_file (DFI_gcse2, print_rtl_with_bb, insns);

  ggc_collect ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
809
/* Register allocation pre-pass, to reduce number of moves necessary
   for two-address machines.  Runs regmove_optimize over INSNS and then
   cleans the CFG (updating life info) before dumping.  */
static void
rest_of_handle_regmove (tree decl, rtx insns)
{
  timevar_push (TV_REGMOVE);
  open_dump_file (DFI_regmove, decl);

  regmove_optimize (insns, max_reg_num (), dump_file);

  cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
  close_dump_file (DFI_regmove, print_rtl_with_bb, insns);
  timevar_pop (TV_REGMOVE);

  ggc_collect ();
}
826
/* Run tracer (tail duplication to enlarge superblocks), then clean
   the CFG and rescan registers since the insn stream changed.  */
static void
rest_of_handle_tracer (tree decl, rtx insns)
{
  open_dump_file (DFI_tracer, decl);
  if (dump_file)
    dump_flow_info (dump_file);
  tracer ();
  cleanup_cfg (CLEANUP_EXPENSIVE);
  /* Tracer duplicated insns, so re-derive register usage info.  */
  reg_scan (insns, max_reg_num (), 0);
  close_dump_file (DFI_tracer, print_rtl_with_bb, get_insns ());
}
839
/* If-conversion and CFG cleanup.  The first (pre-combine) ifcvt pass;
   runs only under -fif-conversion, but the trailing CFG cleanup and
   register rescan happen unconditionally.  */
static void
rest_of_handle_if_conversion (tree decl, rtx insns)
{
  open_dump_file (DFI_ce1, decl);
  if (flag_if_conversion)
    {
      timevar_push (TV_IFCVT);
      if (dump_file)
	dump_flow_info (dump_file);
      cleanup_cfg (CLEANUP_EXPENSIVE);
      reg_scan (insns, max_reg_num (), 0);
      if_convert (0);
      timevar_pop (TV_IFCVT);
    }
  timevar_push (TV_JUMP);
  cleanup_cfg (CLEANUP_EXPENSIVE);
  reg_scan (insns, max_reg_num (), 0);
  timevar_pop (TV_JUMP);
  close_dump_file (DFI_ce1, print_rtl_with_bb, get_insns ());
}
861
/* Rerun if-conversion, as combine may have simplified things enough
   to now meet sequence length restrictions.  Temporarily permits new
   pseudos so if_convert can create scratch registers.  */
static void
rest_of_handle_if_after_combine (tree decl, rtx insns)
{
  timevar_push (TV_IFCVT);
  open_dump_file (DFI_ce2, decl);

  /* Allow if_convert to create new pseudo registers, then restore
     the no-new-pseudos regime.  */
  no_new_pseudos = 0;
  if_convert (1);
  no_new_pseudos = 1;

  close_dump_file (DFI_ce2, print_rtl_with_bb, insns);
  timevar_pop (TV_IFCVT);
}
877
/* Run the web pass (splitting independent uses of pseudos into
   separate registers), then remove dead insns, clean the CFG and
   rescan registers for the renumbered pseudos.  */
static void
rest_of_handle_web (tree decl, rtx insns)
{
  open_dump_file (DFI_web, decl);
  timevar_push (TV_WEB);
  web_main ();
  delete_trivially_dead_insns (insns, max_reg_num ());
  cleanup_cfg (CLEANUP_EXPENSIVE);

  timevar_pop (TV_WEB);
  close_dump_file (DFI_web, print_rtl_with_bb, insns);
  /* Webbing created new pseudos; refresh register usage info.  */
  reg_scan (get_insns (), max_reg_num (), 0);
}
891
/* Do branch profiling and static profile estimation passes.
   Instruments or reads arc profiles when the relevant flags are set,
   then discovers loops and (with -fguess-branch-probability) fills in
   heuristic branch probabilities.  */
static void
rest_of_handle_branch_prob (tree decl, rtx insns)
{
  struct loops loops;

  timevar_push (TV_BRANCH_PROB);
  open_dump_file (DFI_bp, decl);

  if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
    branch_prob ();

  /* Discover and record the loop depth at the head of each basic
     block.  The loop infrastructure does the real job for us.  */
  flow_loops_find (&loops, LOOP_TREE);

  if (dump_file)
    flow_loops_dump (&loops, dump_file, NULL, 0);

  /* Estimate using heuristics if no profiling info is available.  */
  if (flag_guess_branch_prob)
    estimate_probability (&loops);

  flow_loops_free (&loops);
  free_dominance_info (CDI_DOMINATORS);
  close_dump_file (DFI_bp, print_rtl_with_bb, insns);
  timevar_pop (TV_BRANCH_PROB);
}
920
/* Do optimizations based on expression value profiles.  Cleans the
   CFG only when value_profile_transformations reports it changed
   something.  */
static void
rest_of_handle_value_profile_transformations (tree decl, rtx insns)
{
  open_dump_file (DFI_vpt, decl);
  timevar_push (TV_VPT);

  if (value_profile_transformations ())
    cleanup_cfg (CLEANUP_EXPENSIVE);

  timevar_pop (TV_VPT);
  close_dump_file (DFI_vpt, print_rtl_with_bb, insns);
}
934
/* Do control and data flow analysis; write some of the results to the
   dump file.  When optimizing, also cleans the CFG (with jump
   threading under -fthread-jumps) and tries to mark the function
   const/pure.  */
static void
rest_of_handle_cfg (tree decl, rtx insns)
{
  open_dump_file (DFI_cfg, decl);
  if (dump_file)
    dump_flow_info (dump_file);
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE
		 | (flag_thread_jumps ? CLEANUP_THREADING : 0));

  /* It may make more sense to mark constant functions after dead code is
     eliminated by life_analysis, but we need to do it early, as -fprofile-arcs
     may insert code making function non-constant, but we still must consider
     it as constant, otherwise -fbranch-probabilities will not read data back.

     life_analysis rarely eliminates modification of external memory.
   */
  if (optimize)
    {
      /* Alias analysis depends on this information and mark_constant_function
	 depends on alias analysis.  */
      reg_scan (insns, max_reg_num (), 1);
      mark_constant_function ();
    }

  close_dump_file (DFI_cfg, print_rtl_with_bb, insns);
}
964
/* Purge addressofs: replace (address_of ...) RTL constructs left over
   from expansion, then prune edges/blocks made dead and rescan
   registers.  */
static void
rest_of_handle_addressof (tree decl, rtx insns)
{
  open_dump_file (DFI_addressof, decl);

  purge_addressof (insns);
  /* Purging may have invalidated edges; remove any blocks that became
     unreachable (only worthwhile when optimizing).  */
  if (optimize && purge_all_dead_edges (0))
    delete_unreachable_blocks ();
  reg_scan (insns, max_reg_num (), 1);

  close_dump_file (DFI_addressof, print_rtl, insns);
}
978
/* We may have potential sibling or tail recursion sites.  Select one
   (of possibly multiple) methods of performing the call.

   NOTE(review): this closes the DFI_sibling dump but never opens it;
   the caller presumably opened it before invoking us — confirm
   against rest_of_compilation.  */
static void
rest_of_handle_sibling_calls (rtx insns)
{
  rtx insn;
  optimize_sibling_and_tail_recursive_calls ();

  /* Recompute the CFG as sibling optimization clobbers it randomly.  */
  free_bb_for_insn ();
  find_exception_handler_labels ();
  rebuild_jump_labels (insns);
  find_basic_blocks (insns, max_reg_num (), dump_file);

  /* There is pass ordering problem - we must lower NOTE_INSN_PREDICTION
     notes before simplifying cfg and we must do lowering after sibcall
     that unhides parts of RTL chain and cleans up the CFG.

     Until sibcall is replaced by tree-level optimizer, lets just
     sweep away the NOTE_INSN_PREDICTION notes that leaked out.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE
	&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION)
      delete_insn (insn);

  close_dump_file (DFI_sibling, print_rtl, get_insns ());
}
1006
/* Perform jump bypassing and control flow optimizations.  When
   bypass_jumps changes anything, jump labels are rebuilt and the CFG
   re-cleaned before dead insns are removed.  */
static void
rest_of_handle_jump_bypass (tree decl, rtx insns)
{
  timevar_push (TV_BYPASS);
  open_dump_file (DFI_bypass, decl);

  cleanup_cfg (CLEANUP_EXPENSIVE);
  reg_scan (insns, max_reg_num (), 1);

  if (bypass_jumps (dump_file))
    {
      rebuild_jump_labels (insns);
      cleanup_cfg (CLEANUP_EXPENSIVE);
      delete_trivially_dead_insns (insns, max_reg_num ());
    }

  close_dump_file (DFI_bypass, print_rtl_with_bb, insns);
  timevar_pop (TV_BYPASS);

  ggc_collect ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
1033
/* Handle inlining of functions in rest_of_compilation.  Return TRUE
   if we must exit rest_of_compilation upon return.

   DECL is the function being compiled.  This decides whether DECL is
   inlinable (setting DECL_INLINE), whether its RTL should be saved
   and its output deferred (DECL_DEFER_OUTPUT), or whether — being an
   uninlinable extern inline, or nested in an inlined external
   function — it should not be compiled at all.  */
static bool
rest_of_handle_inlining (tree decl)
{
  rtx insns;
  int inlinable = 0;
  tree parent;
  const char *lose;   /* Reason DECL cannot be inlined, or NULL.  */

  /* If we are reconsidering an inline function at the end of
     compilation, skip the stuff for making it inline.  */
  if (cfun->rtl_inline_init)
    return 0;
  cfun->rtl_inline_init = 1;

  /* If this is nested inside an inlined external function, pretend
     it was only declared.  Since we cannot inline such functions,
     generating code for this one is not only not necessary but will
     confuse some debugging output writers.  */
  for (parent = DECL_CONTEXT (current_function_decl);
       parent != NULL_TREE;
       parent = get_containing_scope (parent))
    if (TREE_CODE (parent) == FUNCTION_DECL
	&& DECL_INLINE (parent) && DECL_EXTERNAL (parent))
      {
	DECL_INITIAL (decl) = 0;
	return true;
      }
    else if (TYPE_P (parent))
      /* A function in a local class should be treated normally.  */
      break;

  /* If requested, consider whether to make this function inline.  */
  if ((DECL_INLINE (decl) && !flag_no_inline)
      || flag_inline_functions)
    {
      timevar_push (TV_INTEGRATION);
      lose = function_cannot_inline_p (decl);
      timevar_pop (TV_INTEGRATION);
      if (lose || ! optimize)
	{
	  if (warn_inline && lose && DECL_INLINE (decl))
	    {
	      /* NOTE(review): LOSE is a compiler-internal diagnostic
		 string, so passing the concatenated non-literal format
		 to warning is safe here; it must never carry user
		 input.  */
	      char *msg = concat ("%J", lose, NULL);
	      warning (msg, decl);
	      free (msg);
	    }
	  DECL_ABSTRACT_ORIGIN (decl) = 0;
	  /* Don't really compile an extern inline function.
	     If we can't make it inline, pretend
	     it was only declared.  */
	  if (DECL_EXTERNAL (decl))
	    {
	      DECL_INITIAL (decl) = 0;
	      return true;
	    }
	}
      else
	inlinable = DECL_INLINE (decl) = 1;
    }

  insns = get_insns ();

  /* Dump the rtl code if we are dumping rtl.  */

  if (open_dump_file (DFI_rtl, decl))
    {
      if (DECL_STRUCT_FUNCTION (decl)
	  && DECL_STRUCT_FUNCTION (decl)->saved_for_inline)
	fprintf (dump_file, ";; (integrable)\n\n");
      close_dump_file (DFI_rtl, print_rtl, insns);
    }

  /* Convert from NOTE_INSN_EH_REGION style notes, and do other
     sorts of eh initialization.  Delay this until after the
     initial rtl dump so that we can see the original nesting.  */
  convert_from_eh_region_ranges ();

  /* If function is inline, and we don't yet know whether to
     compile it by itself, defer decision till end of compilation.
     wrapup_global_declarations will (indirectly) call
     rest_of_compilation again for those functions that need to
     be output.  Also defer those functions that we are supposed
     to defer.  */

  if (inlinable
      || (DECL_INLINE (decl)
	  /* Egad.  This RTL deferral test conflicts with Fortran assumptions
	     for unreferenced symbols.  See g77.f-torture/execute/980520-1.f.
	     But removing this line from the check breaks all languages that
	     use the call graph to output symbols.  This hard-coded check is
	     the least invasive work-around.  */
	  && (flag_inline_functions
	      || strcmp (lang_hooks.name, "GNU F77") == 0)
	  && ((! TREE_PUBLIC (decl) && ! TREE_ADDRESSABLE (decl)
	       && ! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
	       && ! flag_keep_inline_functions)
	      || DECL_EXTERNAL (decl))))
    DECL_DEFER_OUTPUT (decl) = 1;

  if (DECL_INLINE (decl))
    /* DWARF wants separate debugging info for abstract and
       concrete instances of all inline functions, including those
       declared inline but not inlined, and those inlined even
       though they weren't declared inline.  Conveniently, that's
       what DECL_INLINE means at this point.  */
    (*debug_hooks->deferred_inline_function) (decl);

  if (DECL_DEFER_OUTPUT (decl))
    {
      /* If -Wreturn-type, we have to do a bit of compilation.  We just
	 want to call cleanup the cfg to figure out whether or not we can
	 fall off the end of the function; we do the minimum amount of
	 work necessary to make that safe.  */
      if (warn_return_type)
	{
	  /* Run the CFG build with optimization off so only the cheap
	     transformations happen; restore OPTIMIZE afterwards.  */
	  int saved_optimize = optimize;

	  optimize = 0;
	  rebuild_jump_labels (insns);
	  find_exception_handler_labels ();
	  find_basic_blocks (insns, max_reg_num (), dump_file);
	  cleanup_cfg (CLEANUP_PRE_SIBCALL | CLEANUP_PRE_LOOP);
	  optimize = saved_optimize;

	  /* CFG is no longer maintained up-to-date.  */
	  free_bb_for_insn ();
	}

      set_nothrow_function_flags ();
      if (current_function_nothrow)
	/* Now we know that this can't throw; set the flag for the benefit
	   of other functions later in this translation unit.  */
	TREE_NOTHROW (current_function_decl) = 1;

      timevar_push (TV_INTEGRATION);
      save_for_inline (decl);
      timevar_pop (TV_INTEGRATION);
      DECL_STRUCT_FUNCTION (decl)->inlinable = inlinable;
      return true;
    }

  /* If specified extern inline but we aren't inlining it, we are
     done.  This goes for anything that gets here with DECL_EXTERNAL
     set, not just things with DECL_INLINE.  */
  return (bool) DECL_EXTERNAL (decl);
}
1182
1183 /* Try to identify useless null pointer tests and delete them. */
1184 static void
1185 rest_of_handle_null_pointer (tree decl, rtx insns)
1186 {
1187 open_dump_file (DFI_null, decl);
1188 if (dump_file)
1189 dump_flow_info (dump_file);
1190
1191 if (delete_null_pointer_checks (insns))
1192 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1193
1194 close_dump_file (DFI_null, print_rtl_with_bb, insns);
1195 }
1196
1197 /* Try combining insns through substitution. */
1198 static void
1199 rest_of_handle_combine (tree decl, rtx insns)
1200 {
1201 int rebuild_jump_labels_after_combine = 0;
1202
1203 timevar_push (TV_COMBINE);
1204 open_dump_file (DFI_combine, decl);
1205
1206 rebuild_jump_labels_after_combine
1207 = combine_instructions (insns, max_reg_num ());
1208
1209 /* Combining insns may have turned an indirect jump into a
1210 direct jump. Rebuild the JUMP_LABEL fields of jumping
1211 instructions. */
1212 if (rebuild_jump_labels_after_combine)
1213 {
1214 timevar_push (TV_JUMP);
1215 rebuild_jump_labels (insns);
1216 timevar_pop (TV_JUMP);
1217
1218 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
1219 }
1220
1221 close_dump_file (DFI_combine, print_rtl_with_bb, insns);
1222 timevar_pop (TV_COMBINE);
1223
1224 ggc_collect ();
1225 }
1226
/* Perform life analysis.  Computes final register lifetime
   information for INSNS of DECL, emits -Wuninitialized warnings,
   and forbids creation of new pseudo registers afterwards.  */
static void
rest_of_handle_life (tree decl, rtx insns)
{
  open_dump_file (DFI_life, decl);
  regclass_init ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  life_analysis (insns, dump_file, PROP_FINAL);
  if (optimize)
    cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_UPDATE_LIFE
                 | CLEANUP_LOG_LINKS
                 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
  /* NOTE(review): there is no matching timevar_push (TV_FLOW) in this
     function; the pair apparently spans helper boundaries — confirm
     against the caller (rest_of_compilation pushes TV_FLOW).  */
  timevar_pop (TV_FLOW);

  /* Life information is what the uninitialized-variable warnings key
     off of, so issue them now that it is available.  */
  if (warn_uninitialized)
    {
      uninitialized_vars_warning (DECL_INITIAL (decl));
      if (extra_warnings)
        setjmp_args_warning ();
    }

  if (optimize)
    {
      if (!flag_new_regalloc && initialize_uninitialized_subregs ())
        {
          /* Insns were inserted, and possibly pseudos created, so
             things might look a bit different.  */
          insns = get_insns ();
          allocate_reg_life_data ();
          update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
                            PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
        }
    }

  /* From here on no pass may create new pseudo registers.  */
  no_new_pseudos = 1;

  close_dump_file (DFI_life, print_rtl_with_bb, insns);

  ggc_collect ();
}
1270
1271 /* Perform common subexpression elimination. Nonzero value from
1272 `cse_main' means that jumps were simplified and some code may now
1273 be unreachable, so do jump optimization again. */
1274 static void
1275 rest_of_handle_cse (tree decl, rtx insns)
1276 {
1277 int tem;
1278
1279 open_dump_file (DFI_cse, decl);
1280 if (dump_file)
1281 dump_flow_info (dump_file);
1282 timevar_push (TV_CSE);
1283
1284 reg_scan (insns, max_reg_num (), 1);
1285
1286 tem = cse_main (insns, max_reg_num (), 0, dump_file);
1287 if (tem)
1288 rebuild_jump_labels (insns);
1289 if (purge_all_dead_edges (0))
1290 delete_unreachable_blocks ();
1291
1292 delete_trivially_dead_insns (insns, max_reg_num ());
1293
1294 /* If we are not running more CSE passes, then we are no longer
1295 expecting CSE to be run. But always rerun it in a cheap mode. */
1296 cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
1297
1298 if (tem || optimize > 1)
1299 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1300 /* Try to identify useless null pointer tests and delete them. */
1301 if (flag_delete_null_pointer_checks)
1302 {
1303 timevar_push (TV_JUMP);
1304
1305 if (delete_null_pointer_checks (insns))
1306 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1307 timevar_pop (TV_JUMP);
1308 }
1309
1310 /* The second pass of jump optimization is likely to have
1311 removed a bunch more instructions. */
1312 renumber_insns (dump_file);
1313
1314 timevar_pop (TV_CSE);
1315 close_dump_file (DFI_cse, print_rtl_with_bb, insns);
1316 }
1317
1318 /* Run second CSE pass after loop optimizations. */
1319 static void
1320 rest_of_handle_cse2 (tree decl, rtx insns)
1321 {
1322 int tem;
1323
1324 timevar_push (TV_CSE2);
1325 open_dump_file (DFI_cse2, decl);
1326 if (dump_file)
1327 dump_flow_info (dump_file);
1328 /* CFG is no longer maintained up-to-date. */
1329 tem = cse_main (insns, max_reg_num (), 1, dump_file);
1330
1331 /* Run a pass to eliminate duplicated assignments to condition code
1332 registers. We have to run this after bypass_jumps, because it
1333 makes it harder for that pass to determine whether a jump can be
1334 bypassed safely. */
1335 cse_condition_code_reg ();
1336
1337 purge_all_dead_edges (0);
1338 delete_trivially_dead_insns (insns, max_reg_num ());
1339
1340 if (tem)
1341 {
1342 timevar_push (TV_JUMP);
1343 rebuild_jump_labels (insns);
1344 cleanup_cfg (CLEANUP_EXPENSIVE);
1345 timevar_pop (TV_JUMP);
1346 }
1347 reg_scan (insns, max_reg_num (), 0);
1348 close_dump_file (DFI_cse2, print_rtl_with_bb, insns);
1349 ggc_collect ();
1350 timevar_pop (TV_CSE2);
1351 }
1352
1353 /* Perform global cse. */
1354 static void
1355 rest_of_handle_gcse (tree decl, rtx insns)
1356 {
1357 int save_csb, save_cfj;
1358 int tem2 = 0, tem;
1359
1360 timevar_push (TV_GCSE);
1361 open_dump_file (DFI_gcse, decl);
1362
1363 tem = gcse_main (insns, dump_file);
1364 rebuild_jump_labels (insns);
1365 delete_trivially_dead_insns (insns, max_reg_num ());
1366
1367 save_csb = flag_cse_skip_blocks;
1368 save_cfj = flag_cse_follow_jumps;
1369 flag_cse_skip_blocks = flag_cse_follow_jumps = 0;
1370
1371 /* Instantiate any remaining CONSTANT_P_RTX nodes. */
1372 if (current_function_calls_constant_p)
1373 purge_builtin_constant_p ();
1374
1375 /* If -fexpensive-optimizations, re-run CSE to clean up things done
1376 by gcse. */
1377 if (flag_expensive_optimizations)
1378 {
1379 timevar_push (TV_CSE);
1380 reg_scan (insns, max_reg_num (), 1);
1381 tem2 = cse_main (insns, max_reg_num (), 0, dump_file);
1382 purge_all_dead_edges (0);
1383 delete_trivially_dead_insns (insns, max_reg_num ());
1384 timevar_pop (TV_CSE);
1385 cse_not_expected = !flag_rerun_cse_after_loop;
1386 }
1387
1388 /* If gcse or cse altered any jumps, rerun jump optimizations to clean
1389 things up. Then possibly re-run CSE again. */
1390 while (tem || tem2)
1391 {
1392 tem = tem2 = 0;
1393 timevar_push (TV_JUMP);
1394 rebuild_jump_labels (insns);
1395 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1396 timevar_pop (TV_JUMP);
1397
1398 if (flag_expensive_optimizations)
1399 {
1400 timevar_push (TV_CSE);
1401 reg_scan (insns, max_reg_num (), 1);
1402 tem2 = cse_main (insns, max_reg_num (), 0, dump_file);
1403 purge_all_dead_edges (0);
1404 delete_trivially_dead_insns (insns, max_reg_num ());
1405 timevar_pop (TV_CSE);
1406 }
1407 }
1408
1409 close_dump_file (DFI_gcse, print_rtl_with_bb, insns);
1410 timevar_pop (TV_GCSE);
1411
1412 ggc_collect ();
1413 flag_cse_skip_blocks = save_csb;
1414 flag_cse_follow_jumps = save_cfj;
1415 #ifdef ENABLE_CHECKING
1416 verify_flow_info ();
1417 #endif
1418 }
1419
1420 /* Move constant computations out of loops. */
1421 static void
1422 rest_of_handle_loop_optimize (tree decl, rtx insns)
1423 {
1424 int do_unroll, do_prefetch;
1425
1426 timevar_push (TV_LOOP);
1427 delete_dead_jumptables ();
1428 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1429 open_dump_file (DFI_loop, decl);
1430
1431 /* CFG is no longer maintained up-to-date. */
1432 free_bb_for_insn ();
1433
1434 if (flag_unroll_loops)
1435 do_unroll = LOOP_AUTO_UNROLL; /* Having two unrollers is useless. */
1436 else
1437 do_unroll = flag_old_unroll_loops ? LOOP_UNROLL : LOOP_AUTO_UNROLL;
1438 do_prefetch = flag_prefetch_loop_arrays ? LOOP_PREFETCH : 0;
1439
1440 if (flag_rerun_loop_opt)
1441 {
1442 cleanup_barriers ();
1443
1444 /* We only want to perform unrolling once. */
1445 loop_optimize (insns, dump_file, do_unroll);
1446 do_unroll = 0;
1447
1448 /* The first call to loop_optimize makes some instructions
1449 trivially dead. We delete those instructions now in the
1450 hope that doing so will make the heuristics in loop work
1451 better and possibly speed up compilation. */
1452 delete_trivially_dead_insns (insns, max_reg_num ());
1453
1454 /* The regscan pass is currently necessary as the alias
1455 analysis code depends on this information. */
1456 reg_scan (insns, max_reg_num (), 1);
1457 }
1458 cleanup_barriers ();
1459 loop_optimize (insns, dump_file, do_unroll | do_prefetch);
1460
1461 /* Loop can create trivially dead instructions. */
1462 delete_trivially_dead_insns (insns, max_reg_num ());
1463 close_dump_file (DFI_loop, print_rtl, insns);
1464 timevar_pop (TV_LOOP);
1465 find_basic_blocks (insns, max_reg_num (), dump_file);
1466
1467 ggc_collect ();
1468 }
1469
/* Perform loop optimizations.  It might be better to do them a bit
   sooner, but we want the profile feedback to work more
   efficiently.  Runs the new (cfg-layout based) loop passes:
   unswitching, unrolling/peeling, and doloop conversion.  */
static void
rest_of_handle_loop2 (tree decl, rtx insns)
{
  struct loops *loops;
  basic_block bb;

  /* Nothing to do unless at least one of the new loop passes was
     requested on the command line.  */
  if (!flag_unswitch_loops
      && !flag_peel_loops
      && !flag_unroll_loops
      && !flag_branch_on_count_reg)
    return;

  timevar_push (TV_LOOP);
  open_dump_file (DFI_loop2, decl);
  if (dump_file)
    dump_flow_info (dump_file);

  /* Initialize structures for layout changes.  */
  cfg_layout_initialize ();

  /* Build the loop tree; presumably NULL when no usable loop
     structure was found -- TODO confirm against loop-init.c.  */
  loops = loop_optimizer_init (dump_file);

  if (loops)
    {
      /* The optimizations: */
      if (flag_unswitch_loops)
        unswitch_loops (loops);

      if (flag_peel_loops || flag_unroll_loops)
        unroll_and_peel_loops (loops,
                               (flag_peel_loops ? UAP_PEEL : 0) |
                               (flag_unroll_loops ? UAP_UNROLL : 0) |
                               (flag_unroll_all_loops ? UAP_UNROLL_ALL : 0));

#ifdef HAVE_doloop_end
      if (flag_branch_on_count_reg && HAVE_doloop_end)
        doloop_optimize_loops (loops);
#endif /* HAVE_doloop_end */

      loop_optimizer_finalize (loops, dump_file);
    }

  /* Finalize layout changes.  Re-chain the blocks in their current
     order before leaving cfg-layout mode.  */
  FOR_EACH_BB (bb)
    if (bb->next_bb != EXIT_BLOCK_PTR)
      bb->rbi->next = bb->next_bb;
  cfg_layout_finalize ();

  cleanup_cfg (CLEANUP_EXPENSIVE);
  delete_trivially_dead_insns (insns, max_reg_num ());
  reg_scan (insns, max_reg_num (), 0);
  if (dump_file)
    dump_flow_info (dump_file);
  close_dump_file (DFI_loop2, print_rtl_with_bb, get_insns ());
  timevar_pop (TV_LOOP);
  ggc_collect ();
}
1530
/* This is called from finish_function (within langhooks.parse_file)
   after each top-level definition is parsed.
   It is supposed to compile that function or variable
   and output the assembler code for it.
   After we return, the tree storage is freed.

   This is the master driver of the RTL pass pipeline: the order of
   the calls below IS the pass ordering, and many of the helper
   passes communicate through globals (no_new_pseudos, cse_not_expected,
   epilogue_completed, ...) that are set and reset here.  */

void
rest_of_compilation (tree decl)
{
  rtx insns;

  timevar_push (TV_REST_OF_COMPILATION);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  /* Now that we're out of the frontend, we shouldn't have any more
     CONCATs anywhere.  */
  generating_concat_p = 0;

  /* When processing delayed functions, prepare_function_start() won't
     have been run to re-initialize it.  */
  cse_not_expected = ! optimize;

  /* First, make sure that NOTE_BLOCK is set correctly for each
     NOTE_INSN_BLOCK_BEG/NOTE_INSN_BLOCK_END note.  */
  if (!cfun->x_whole_function_mode_p)
    identify_blocks ();

  /* In function-at-a-time mode, we do not attempt to keep the BLOCK
     tree in sensible shape.  So, we just recalculate it here.  */
  if (cfun->x_whole_function_mode_p)
    reorder_blocks ();

  init_flow ();

  /* Returns true when the function should not (or not yet) be
     compiled further, e.g. a deferred inline.  */
  if (rest_of_handle_inlining (decl))
    goto exit_rest_of_compilation;

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (decl))
    (*debug_hooks->outlining_inline_function) (decl);

  /* Remove any notes we don't need.  That will make iterating
     over the instruction sequence faster, and allow the garbage
     collector to reclaim the memory used by the notes.  */
  remove_unnecessary_notes ();
  reorder_blocks ();

  ggc_collect ();

  /* Initialize some variables used by the optimizers.  */
  init_function_for_compilation ();

  if (! DECL_DEFER_OUTPUT (decl))
    TREE_ASM_WRITTEN (decl) = 1;

  /* Now that integrate will no longer see our rtl, we need not
     distinguish between the return value of this function and the
     return value of called functions.  Also, we can remove all SETs
     of subregs of hard registers; they are only here because of
     integrate.  Also, we can now initialize pseudos intended to
     carry magic hard reg data throughout the function.  */
  rtx_equal_function_value_matters = 0;
  purge_hard_subreg_sets (get_insns ());

  /* Early return if there were errors.  We can run afoul of our
     consistency checks, and there's not really much point in fixing them.
     Don't return yet if -Wreturn-type; we need to do cleanup_cfg.  */
  if (((rtl_dump_and_exit || flag_syntax_only) && !warn_return_type)
      || errorcount || sorrycount)
    goto exit_rest_of_compilation;

  timevar_push (TV_JUMP);
  open_dump_file (DFI_sibling, decl);
  insns = get_insns ();
  rebuild_jump_labels (insns);
  find_exception_handler_labels ();
  find_basic_blocks (insns, max_reg_num (), dump_file);

  delete_unreachable_blocks ();

  /* Turn NOTE_INSN_PREDICTIONs into branch predictions.  */
  if (flag_guess_branch_prob)
    {
      timevar_push (TV_BRANCH_PROB);
      note_prediction_to_br_prob ();
      timevar_pop (TV_BRANCH_PROB);
    }

  if (flag_optimize_sibling_calls)
    rest_of_handle_sibling_calls (insns);

  /* We have to issue these warnings now already, because CFG cleanups
     further down may destroy the required information.  However, this
     must be done after the sibcall optimization pass because the barrier
     emitted for noreturn calls that are candidate for the optimization
     is folded into the CALL_PLACEHOLDER until after this pass, so the
     CFG is inaccurate.  */
  check_function_return_warnings ();

  timevar_pop (TV_JUMP);

  insn_locators_initialize ();
  /* Complete generation of exception handling code.  */
  if (doing_eh (0))
    {
      timevar_push (TV_JUMP);
      open_dump_file (DFI_eh, decl);

      finish_eh_generation ();

      close_dump_file (DFI_eh, print_rtl, get_insns ());
      timevar_pop (TV_JUMP);
    }

  /* Delay emitting hard_reg_initial_value sets until after EH landing pad
     generation, which might create new sets.  */
  emit_initial_value_sets ();

#ifdef FINALIZE_PIC
  /* If we are doing position-independent code generation, now
     is the time to output special prologues and epilogues.
     We do not want to do this earlier, because it just clutters
     up inline functions with meaningless insns.  */
  if (flag_pic)
    FINALIZE_PIC;
#endif

  insns = get_insns ();

  /* Copy any shared structure that should not be shared.  */
  unshare_all_rtl (current_function_decl, insns);

#ifdef SETJMP_VIA_SAVE_AREA
  /* This must be performed before virtual register instantiation.
     Please be aware that everything in the compiler that can look
     at the RTL up to this point must understand that REG_SAVE_AREA
     is just like a use of the REG contained inside.  */
  if (current_function_calls_alloca)
    optimize_save_area_alloca (insns);
#endif

  /* Instantiate all virtual registers.  */
  instantiate_virtual_regs (current_function_decl, insns);

  open_dump_file (DFI_jump, decl);

  /* Always do one jump optimization pass to ensure that JUMP_LABEL fields
     are initialized and to compute whether control can drop off the end
     of the function.  */

  timevar_push (TV_JUMP);
  /* Turn NOTE_INSN_EXPECTED_VALUE into REG_BR_PROB.  Do this
     before jump optimization switches branch directions.  */
  if (flag_guess_branch_prob)
    expected_value_to_br_prob ();

  reg_scan (insns, max_reg_num (), 0);
  rebuild_jump_labels (insns);
  find_basic_blocks (insns, max_reg_num (), dump_file);
  delete_trivially_dead_insns (insns, max_reg_num ());
  if (dump_file)
    dump_flow_info (dump_file);
  cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_PRE_LOOP
               | (flag_thread_jumps ? CLEANUP_THREADING : 0));

  if (optimize)
    {
      free_bb_for_insn ();
      copy_loop_headers (insns);
      find_basic_blocks (insns, max_reg_num (), dump_file);
    }
  purge_line_number_notes (insns);

  timevar_pop (TV_JUMP);
  /* NOTE(review): DFI_jump is closed here and again further below
     after renumber_insns; there is only one open in between -- verify
     this double close is intentional.  */
  close_dump_file (DFI_jump, print_rtl, insns);

  /* Now is when we stop if -fsyntax-only and -Wreturn-type.  */
  if (rtl_dump_and_exit || flag_syntax_only || DECL_DEFER_OUTPUT (decl))
    goto exit_rest_of_compilation;

  timevar_push (TV_JUMP);

  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);

  if (flag_delete_null_pointer_checks)
    rest_of_handle_null_pointer (decl, insns);

  /* Jump optimization, and the removal of NULL pointer checks, may
     have reduced the number of instructions substantially.  CSE, and
     future passes, allocate arrays whose dimensions involve the
     maximum instruction UID, so if we can reduce the maximum UID
     we'll save big on memory.  */
  renumber_insns (dump_file);
  timevar_pop (TV_JUMP);

  close_dump_file (DFI_jump, print_rtl_with_bb, insns);

  ggc_collect ();

  if (optimize > 0)
    rest_of_handle_cse (decl, insns);

  rest_of_handle_addressof (decl, insns);

  ggc_collect ();

  if (optimize > 0)
    {
      if (flag_gcse)
        rest_of_handle_gcse (decl, insns);

      if (flag_loop_optimize)
        rest_of_handle_loop_optimize (decl, insns);

      if (flag_gcse)
        rest_of_handle_jump_bypass (decl, insns);
    }

  /* NOTE(review): TV_FLOW appears to be popped inside
     rest_of_handle_life, not in this function -- the pair spans
     several helper calls.  */
  timevar_push (TV_FLOW);

  rest_of_handle_cfg (decl, insns);

  if (optimize > 0
      || profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
    {
      rest_of_handle_branch_prob (decl, insns);

      if (flag_branch_probabilities
          && flag_profile_values
          && flag_value_profile_transformations)
        rest_of_handle_value_profile_transformations (decl, insns);

      /* Remove the death notes created for vpt.  */
      if (flag_profile_values)
        count_or_remove_death_notes (NULL, 1);
    }

  if (optimize > 0)
    rest_of_handle_if_conversion (decl, insns);

  if (flag_tracer)
    rest_of_handle_tracer (decl, insns);

  if (optimize > 0)
    rest_of_handle_loop2 (decl, insns);

  if (flag_web)
    rest_of_handle_web (decl, insns);

  if (flag_rerun_cse_after_loop)
    rest_of_handle_cse2 (decl, insns);

  cse_not_expected = 1;

  rest_of_handle_life (decl, insns);

  if (optimize > 0)
    rest_of_handle_combine (decl, insns);

  if (flag_if_conversion)
    rest_of_handle_if_after_combine (decl, insns);

  /* The optimization to partition hot/cold basic blocks into separate
     sections of the .o file does not work well with exception handling.
     Don't call it if there are exceptions.  */

  if (flag_reorder_blocks_and_partition && !flag_exceptions)
    {
      no_new_pseudos = 0;
      partition_hot_cold_basic_blocks ();
      allocate_reg_life_data ();
      update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
                        PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
      no_new_pseudos = 1;
    }

  if (optimize > 0 && (flag_regmove || flag_expensive_optimizations))
    rest_of_handle_regmove (decl, insns);

  /* Do unconditional splitting before register allocation to allow machine
     description to add extra information not needed previously.  */
  split_all_insns (1);

#ifdef OPTIMIZE_MODE_SWITCHING
  timevar_push (TV_MODE_SWITCH);

  no_new_pseudos = 0;
  optimize_mode_switching (NULL);
  no_new_pseudos = 1;

  timevar_pop (TV_MODE_SWITCH);
#endif

  /* Any of the several passes since flow1 will have munged register
     lifetime data a bit.  We need it to be up to date for scheduling
     (see handling of reg_known_equiv in init_alias_analysis).  */
  recompute_reg_usage (insns, !optimize_size);

#ifdef INSN_SCHEDULING
  rest_of_handle_sched (decl, insns);
#endif

  /* Determine if the current function is a leaf before running reload
     since this can impact optimizations done by the prologue and
     epilogue thus changing register elimination offsets.  */
  current_function_is_leaf = leaf_function_p ();

  timevar_push (TV_LOCAL_ALLOC);
  open_dump_file (DFI_lreg, decl);

  /* Register allocation; each variant returns nonzero on failure /
     early exit (e.g. errors during reload).  */
  if (flag_new_regalloc)
    {
      if (rest_of_handle_new_regalloc (decl, insns))
        goto exit_rest_of_compilation;
    }
  else
    {
      if (rest_of_handle_old_regalloc (decl, insns))
        goto exit_rest_of_compilation;
    }

  ggc_collect ();

  open_dump_file (DFI_postreload, decl);

  /* Do a very simple CSE pass over just the hard registers.  */
  if (optimize > 0)
    {
      timevar_push (TV_RELOAD_CSE_REGS);
      reload_cse_regs (insns);
      /* reload_cse_regs can eliminate potentially-trapping MEMs.
         Remove any EH edges associated with them.  */
      if (flag_non_call_exceptions)
        purge_all_dead_edges (0);
      timevar_pop (TV_RELOAD_CSE_REGS);
    }

  close_dump_file (DFI_postreload, print_rtl_with_bb, insns);

  if (optimize > 0 && flag_gcse_after_reload)
    rest_of_handle_gcse2 (decl, insns);

  /* Re-create the death notes which were deleted during reload.  */
  timevar_push (TV_FLOW2);
  open_dump_file (DFI_flow2, decl);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* If optimizing, then go ahead and split insns now.  */
#ifndef STACK_REGS
  if (optimize > 0)
#endif
    split_all_insns (0);

  if (flag_branch_target_load_optimize)
    {
      open_dump_file (DFI_branch_target_load, decl);

      branch_target_load_optimize (insns, false);

      close_dump_file (DFI_branch_target_load, print_rtl_with_bb, insns);

      ggc_collect ();
    }

  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns (insns);
  epilogue_completed = 1;

  if (optimize)
    {
      life_analysis (insns, dump_file, PROP_POSTRELOAD);
      cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE
                   | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));

      /* This is kind of a heuristic.  We need to run combine_stack_adjustments
         even for machines with possibly nonzero RETURN_POPS_ARGS
         and ACCUMULATE_OUTGOING_ARGS.  We expect that only ports having
         push instructions will have popping returns.  */
#ifndef PUSH_ROUNDING
      if (!ACCUMULATE_OUTGOING_ARGS)
#endif
        combine_stack_adjustments ();

      ggc_collect ();
    }

  flow2_completed = 1;

  close_dump_file (DFI_flow2, print_rtl_with_bb, insns);
  timevar_pop (TV_FLOW2);

#ifdef HAVE_peephole2
  if (optimize > 0 && flag_peephole2)
    {
      timevar_push (TV_PEEPHOLE2);
      open_dump_file (DFI_peephole2, decl);

      peephole2_optimize (dump_file);

      close_dump_file (DFI_peephole2, print_rtl_with_bb, insns);
      timevar_pop (TV_PEEPHOLE2);
    }
#endif

  open_dump_file (DFI_ce3, decl);
  if (optimize)
    /* Last attempt to optimize CFG, as scheduling, peepholing and insn
       splitting possibly introduced more crossjumping opportunities.  */
    cleanup_cfg (CLEANUP_EXPENSIVE
                 | CLEANUP_UPDATE_LIFE
                 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
  if (flag_if_conversion2)
    {
      timevar_push (TV_IFCVT2);

      if_convert (1);

      timevar_pop (TV_IFCVT2);
    }
  close_dump_file (DFI_ce3, print_rtl_with_bb, insns);

  if (optimize > 0)
    {
      if (flag_rename_registers || flag_cprop_registers)
        rest_of_handle_regrename (decl, insns);

      rest_of_handle_reorder_blocks (decl, insns);
    }

  if (flag_branch_target_load_optimize2)
    {
      /* Leave this a warning for now so that it is possible to experiment
         with running this pass twice.  In 3.6, we should either make this
         an error, or use separate dump files.  */
      if (flag_branch_target_load_optimize)
        warning ("branch target register load optimization is not intended "
                 "to be run twice");

      open_dump_file (DFI_branch_target_load, decl);

      branch_target_load_optimize (insns, true);

      close_dump_file (DFI_branch_target_load, print_rtl_with_bb, insns);

      ggc_collect ();
    }

#ifdef INSN_SCHEDULING
  if (optimize > 0 && flag_schedule_insns_after_reload)
    rest_of_handle_sched2 (decl, insns);
#endif

#ifdef LEAF_REGISTERS
  current_function_uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif

#ifdef STACK_REGS
  rest_of_handle_stack_regs (decl, insns);
#endif

  compute_alignments ();

  if (flag_var_tracking)
    rest_of_handle_variable_tracking (decl, insns);

  /* CFG is no longer maintained up-to-date.  */
  free_bb_for_insn ();

  if (targetm.machine_dependent_reorg != 0)
    rest_of_handle_machine_reorg (decl, insns);

  purge_line_number_notes (insns);
  cleanup_barriers ();

#ifdef DELAY_SLOTS
  if (optimize > 0 && flag_delayed_branch)
    rest_of_handle_delay_slots (decl, insns);
#endif

#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
  timevar_push (TV_SHORTEN_BRANCH);
  split_all_insns_noflow ();
  timevar_pop (TV_SHORTEN_BRANCH);
#endif

  convert_to_eh_region_ranges ();

  /* Shorten branches.  */
  timevar_push (TV_SHORTEN_BRANCH);
  shorten_branches (get_insns ());
  timevar_pop (TV_SHORTEN_BRANCH);

  set_nothrow_function_flags ();
  if (current_function_nothrow)
    /* Now we know that this can't throw; set the flag for the benefit
       of other functions later in this translation unit.  */
    TREE_NOTHROW (current_function_decl) = 1;

  rest_of_handle_final (decl, insns);

  /* Write DBX symbols if requested.  */

  /* Note that for those inline functions where we don't initially
     know for certain that we will be generating an out-of-line copy,
     the first invocation of this routine (rest_of_compilation) will
     skip over this code by doing a `goto exit_rest_of_compilation;'.
     Later on, wrapup_global_declarations will (indirectly) call
     rest_of_compilation again for those inline functions that need
     to have out-of-line copies generated.  During that call, we
     *will* be routed past here.  */

  timevar_push (TV_SYMOUT);
  (*debug_hooks->function_decl) (decl);
  timevar_pop (TV_SYMOUT);

 exit_rest_of_compilation:

  coverage_end_function ();

  /* In case the function was not output,
     don't leave any temporary anonymous types
     queued up for sdb output.  */
#ifdef SDB_DEBUGGING_INFO
  if (write_symbols == SDB_DEBUG)
    sdbout_types (NULL_TREE);
#endif

  /* Reset the pass-phase globals for the next function.  */
  reload_completed = 0;
  epilogue_completed = 0;
  flow2_completed = 0;
  no_new_pseudos = 0;

  timevar_push (TV_FINAL);

  /* Clear out the insn_length contents now that they are no
     longer valid.  */
  init_insn_lengths ();

  /* Show no temporary slots allocated.  */
  init_temp_slots ();

  free_basic_block_vars ();
  free_bb_for_insn ();

  timevar_pop (TV_FINAL);

  if (targetm.binds_local_p (current_function_decl))
    {
      int pref = cfun->preferred_stack_boundary;
      if (cfun->recursive_call_emit
          && cfun->stack_alignment_needed > cfun->preferred_stack_boundary)
        pref = cfun->stack_alignment_needed;
      cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
        = pref;
    }

  /* Make sure volatile mem refs aren't considered valid operands for
     arithmetic insns.  We must call this here if this is a nested inline
     function, since the above code leaves us in the init_recog state
     (from final.c), and the function context push/pop code does not
     save/restore volatile_ok.

     ??? Maybe it isn't necessary for expand_start_function to call this
     anymore if we do it here?  */

  init_recog_no_volatile ();

  /* We're done with this function.  Free up memory if we can.  */
  free_after_parsing (cfun);

  ggc_collect ();

  timevar_pop (TV_REST_OF_COMPILATION);
}
2131
/* Prepare the pass machinery: open the cgraph dump file and stash it
   in cgraph_dump_file, leaving dump_file clear for the per-pass
   dumps that follow.  finish_optimization_passes undoes this.  */
void
init_optimization_passes (void)
{
  open_dump_file (DFI_cgraph, NULL);
  cgraph_dump_file = dump_file;
  dump_file = NULL;
}
2139
2140 void
2141 finish_optimization_passes (void)
2142 {
2143 if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
2144 {
2145 timevar_push (TV_DUMP);
2146 open_dump_file (DFI_bp, NULL);
2147
2148 end_branch_prob ();
2149
2150 close_dump_file (DFI_bp, NULL, NULL_RTX);
2151 timevar_pop (TV_DUMP);
2152 }
2153
2154 if (optimize > 0 && open_dump_file (DFI_combine, NULL))
2155 {
2156 timevar_push (TV_DUMP);
2157 dump_combine_total_stats (dump_file);
2158 close_dump_file (DFI_combine, NULL, NULL_RTX);
2159 timevar_pop (TV_DUMP);
2160 }
2161
2162 dump_file = cgraph_dump_file;
2163 cgraph_dump_file = NULL;
2164 close_dump_file (DFI_cgraph, NULL, NULL_RTX);
2165
2166 /* Do whatever is necessary to finish printing the graphs. */
2167 if (graph_dump_format != no_graph)
2168 {
2169 int i;
2170
2171 for (i = 0; i < (int) DFI_MAX; ++i)
2172 if (dump_file_tbl[i].initialized && dump_file_tbl[i].graph_dump_p)
2173 {
2174 char seq[16];
2175 char *suffix;
2176
2177 sprintf (seq, DUMPFILE_FORMAT, i);
2178 suffix = concat (seq, dump_file_tbl[i].extension, NULL);
2179 finish_graph_dump_file (dump_base_name, suffix);
2180 free (suffix);
2181 }
2182 }
2183
2184 }
2185
2186 bool
2187 enable_rtl_dump_file (int letter)
2188 {
2189 bool matched = false;
2190 int i;
2191
2192 if (letter == 'a')
2193 {
2194 for (i = 0; i < (int) DFI_MAX; ++i)
2195 dump_file_tbl[i].enabled = 1;
2196 matched = true;
2197 }
2198 else
2199 {
2200 for (i = 0; i < (int) DFI_MAX; ++i)
2201 if (letter == dump_file_tbl[i].debug_switch)
2202 {
2203 dump_file_tbl[i].enabled = 1;
2204 matched = true;
2205 }
2206 }
2207
2208 return matched;
2209 }