1 /* Top level of GCC compilers (cc1, cc1plus, etc.)
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This is the top level of cc1/c++.
23 It parses command args, opens files, invokes the various passes
24 in the proper order, and counts the time used by each.
25 Error messages and the low-level interface to malloc are also handled here. */
26
27 #include "config.h"
28 #undef FLOAT /* This is for hpux. They should change hpux. */
29 #undef FFS /* Some systems define this in param.h. */
30 #include "system.h"
31 #include "coretypes.h"
32 #include "tm.h"
33 #include <signal.h>
34
35 #ifdef HAVE_SYS_RESOURCE_H
36 # include <sys/resource.h>
37 #endif
38
39 #ifdef HAVE_SYS_TIMES_H
40 # include <sys/times.h>
41 #endif
42
43 #include "line-map.h"
44 #include "input.h"
45 #include "tree.h"
46 #include "rtl.h"
47 #include "tm_p.h"
48 #include "flags.h"
49 #include "insn-attr.h"
50 #include "insn-config.h"
51 #include "insn-flags.h"
52 #include "hard-reg-set.h"
53 #include "recog.h"
54 #include "output.h"
55 #include "except.h"
56 #include "function.h"
57 #include "toplev.h"
58 #include "expr.h"
59 #include "basic-block.h"
60 #include "intl.h"
61 #include "ggc.h"
62 #include "graph.h"
63 #include "loop.h"
64 #include "regs.h"
65 #include "timevar.h"
66 #include "diagnostic.h"
67 #include "params.h"
68 #include "reload.h"
69 #include "dwarf2asm.h"
70 #include "integrate.h"
71 #include "real.h"
72 #include "debug.h"
73 #include "target.h"
74 #include "langhooks.h"
75 #include "cfglayout.h"
76 #include "cfgloop.h"
77 #include "hosthooks.h"
78 #include "cgraph.h"
79 #include "opts.h"
80 #include "coverage.h"
81 #include "value-prof.h"
82 #include "alloc-pool.h"
83 #include "tree-pass.h"
84
85 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
86 #include "dwarf2out.h"
87 #endif
88
89 #if defined(DBX_DEBUGGING_INFO) || defined(XCOFF_DEBUGGING_INFO)
90 #include "dbxout.h"
91 #endif
92
93 #ifdef SDB_DEBUGGING_INFO
94 #include "sdbout.h"
95 #endif
96
97 #ifdef XCOFF_DEBUGGING_INFO
98 #include "xcoffout.h" /* Needed for external data
99 declarations for e.g. AIX 4.x. */
100 #endif
101
102 #ifndef HAVE_conditional_execution
103 #define HAVE_conditional_execution 0
104 #endif
105
106 /* Format to use to print the dump file index value. */
107 #ifndef DUMPFILE_FORMAT
108 #define DUMPFILE_FORMAT ".%02d."
109 #endif
110
111 /* Describes a dump file. */
112
113 struct dump_file_info
114 {
115 /* The unique extension to apply, e.g. "jump". */
116 const char *const extension;
117
118 /* The -d<c> character that enables this dump file. */
119 char const debug_switch;
120
121 /* True if there is a corresponding graph dump file. */
122 char const graph_dump_p;
123
124 /* True if the user selected this dump. */
125 char enabled;
126
127 /* True if the files have been initialized (i.e., truncated). */
128 char initialized;
129 };
130
131 /* Enumerate the extant dump files. */
132
133 enum dump_file_index
134 {
135 DFI_cgraph,
136 DFI_rtl,
137 DFI_sibling,
138 DFI_eh,
139 DFI_jump,
140 DFI_null,
141 DFI_cse,
142 DFI_gcse,
143 DFI_loop,
144 DFI_bypass,
145 DFI_cfg,
146 DFI_bp,
147 DFI_vpt,
148 DFI_ce1,
149 DFI_tracer,
150 DFI_loop2,
151 DFI_web,
152 DFI_cse2,
153 DFI_life,
154 DFI_combine,
155 DFI_ce2,
156 DFI_regmove,
157 DFI_sms,
158 DFI_sched,
159 DFI_lreg,
160 DFI_greg,
161 DFI_postreload,
162 DFI_gcse2,
163 DFI_flow2,
164 DFI_peephole2,
165 DFI_ce3,
166 DFI_rnreg,
167 DFI_bbro,
168 DFI_branch_target_load,
169 DFI_sched2,
170 DFI_stack,
171 DFI_vartrack,
172 DFI_mach,
173 DFI_dbr,
174 DFI_MAX
175 };
176
177 /* Describes all the dump files. Should be kept in the order the
178 passes run, and in sync with dump_file_index above.
179
180 Remaining -d letters:
181
182 " e q "
183 " F K O Q WXY "
184 */
185
186 static struct dump_file_info dump_file_tbl[DFI_MAX] =
187 {
188 { "cgraph", 'U', 0, 0, 0 },
189 { "rtl", 'r', 0, 0, 0 },
190 { "sibling", 'i', 0, 0, 0 },
191 { "eh", 'h', 0, 0, 0 },
192 { "jump", 'j', 0, 0, 0 },
193 { "null", 'u', 0, 0, 0 },
194 { "cse", 's', 0, 0, 0 },
195 { "gcse", 'G', 1, 0, 0 },
196 { "loop", 'L', 1, 0, 0 },
197 { "bypass", 'G', 1, 0, 0 }, /* Yes, duplicate enable switch. */
198 { "cfg", 'f', 1, 0, 0 },
199 { "bp", 'b', 1, 0, 0 },
200 { "vpt", 'V', 1, 0, 0 },
201 { "ce1", 'C', 1, 0, 0 },
202 { "tracer", 'T', 1, 0, 0 },
203 { "loop2", 'L', 1, 0, 0 },
204 { "web", 'Z', 0, 0, 0 },
205 { "cse2", 't', 1, 0, 0 },
206 { "life", 'f', 1, 0, 0 }, /* Yes, duplicate enable switch. */
207 { "combine", 'c', 1, 0, 0 },
208 { "ce2", 'C', 1, 0, 0 },
209 { "regmove", 'N', 1, 0, 0 },
210 { "sms", 'm', 0, 0, 0 },
211 { "sched", 'S', 1, 0, 0 },
212 { "lreg", 'l', 1, 0, 0 },
213 { "greg", 'g', 1, 0, 0 },
214 { "postreload", 'o', 1, 0, 0 },
215 { "gcse2", 'J', 0, 0, 0 },
216 { "flow2", 'w', 1, 0, 0 },
217 { "peephole2", 'z', 1, 0, 0 },
218 { "ce3", 'E', 1, 0, 0 },
219 { "rnreg", 'n', 1, 0, 0 },
220 { "bbro", 'B', 1, 0, 0 },
221 { "btl", 'd', 1, 0, 0 }, /* Yes, duplicate enable switch. */
222 { "sched2", 'R', 1, 0, 0 },
223 { "stack", 'k', 1, 0, 0 },
224 { "vartrack", 'V', 1, 0, 0 }, /* Yes, duplicate enable switch. */
225 { "mach", 'M', 1, 0, 0 },
226 { "dbr", 'd', 0, 0, 0 },
227 };
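
/* For example (with an illustrative dump_base_name of "foo.c"): the
   DFI_jump entry above (index 4, extension "jump", switch 'j') combines
   with DUMPFILE_FORMAT to give the dump file name "foo.c.04.jump" in
   open_dump_file below; -dj enables just that dump, and -da enables all
   of them (see enable_rtl_dump_file at the end of this file). */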
228
229 /* Routine to open a dump file. Return true if the dump file is enabled. */
230
231 static int
232 open_dump_file (enum dump_file_index index, tree decl)
233 {
234 char *dump_name;
235 const char *open_arg;
236 char seq[16];
237
238 if (! dump_file_tbl[index].enabled)
239 return 0;
240
241 timevar_push (TV_DUMP);
242 if (dump_file != NULL)
243 fclose (dump_file);
244
245 sprintf (seq, DUMPFILE_FORMAT, index);
246
247 if (! dump_file_tbl[index].initialized)
248 {
249 /* If we've not initialized the files, do so now. */
250 if (graph_dump_format != no_graph
251 && dump_file_tbl[index].graph_dump_p)
252 {
253 dump_name = concat (seq, dump_file_tbl[index].extension, NULL);
254 clean_graph_dump_file (dump_base_name, dump_name);
255 free (dump_name);
256 }
257 dump_file_tbl[index].initialized = 1;
258 open_arg = "w";
259 }
260 else
261 open_arg = "a";
262
263 dump_name = concat (dump_base_name, seq,
264 dump_file_tbl[index].extension, NULL);
265
266 dump_file = fopen (dump_name, open_arg);
267 if (dump_file == NULL)
268 fatal_error ("can't open %s: %m", dump_name);
269
270 free (dump_name);
271
272 if (decl)
273 fprintf (dump_file, "\n;; Function %s%s\n\n",
274 lang_hooks.decl_printable_name (decl, 2),
275 cfun->function_frequency == FUNCTION_FREQUENCY_HOT
276 ? " (hot)"
277 : cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED
278 ? " (unlikely executed)"
279 : "");
280
281 timevar_pop (TV_DUMP);
282 return 1;
283 }
284
285 /* Routine to close a dump file. */
286
287 static void
288 close_dump_file (enum dump_file_index index,
289 void (*func) (FILE *, rtx),
290 rtx insns)
291 {
292 if (! dump_file)
293 return;
294
295 timevar_push (TV_DUMP);
296 if (insns
297 && graph_dump_format != no_graph
298 && dump_file_tbl[index].graph_dump_p)
299 {
300 char seq[16];
301 char *suffix;
302
303 sprintf (seq, DUMPFILE_FORMAT, index);
304 suffix = concat (seq, dump_file_tbl[index].extension, NULL);
305 print_rtl_graph_with_bb (dump_base_name, suffix, insns);
306 free (suffix);
307 }
308
309 if (func && insns)
310 func (dump_file, insns);
311
312 fflush (dump_file);
313 fclose (dump_file);
314
315 dump_file = NULL;
316 timevar_pop (TV_DUMP);
317 }
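
/* Sketch of the typical open/close pattern used by the pass wrappers
   below (DFI_foo is a placeholder; see e.g. rest_of_handle_regmove for a
   real instance):

     open_dump_file (DFI_foo, current_function_decl);
     ... run the pass ...
     close_dump_file (DFI_foo, print_rtl_with_bb, get_insns ());  */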
318
319 /* This is called from various places for FUNCTION_DECL, VAR_DECL,
320 and TYPE_DECL nodes.
321
322 This does nothing for local (non-static) variables, unless the
323 variable is a register variable with an ASMSPEC. In that case, or
324 if the variable is not an automatic, it sets up the RTL and
325 outputs any assembler code (label definition, storage allocation
326 and initialization).
327
328 DECL is the declaration. If ASMSPEC is nonzero, it specifies
329 the assembler symbol name to be used. TOP_LEVEL is nonzero
330 if this declaration is not within a function. */
331
332 void
333 rest_of_decl_compilation (tree decl,
334 const char *asmspec,
335 int top_level,
336 int at_end)
337 {
338 /* We deferred calling assemble_alias so that we could collect
339 other attributes such as visibility. Emit the alias now. */
340 {
341 tree alias;
342 alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
343 if (alias)
344 {
345 alias = TREE_VALUE (TREE_VALUE (alias));
346 alias = get_identifier (TREE_STRING_POINTER (alias));
347 assemble_alias (decl, alias);
348 }
349 }
350
351 /* Forward declarations for nested functions are not "external",
352 but we need to treat them as if they were. */
353 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
354 || TREE_CODE (decl) == FUNCTION_DECL)
355 {
356 timevar_push (TV_VARCONST);
357
358 if (asmspec)
359 make_decl_rtl (decl, asmspec);
360
361 /* Don't output anything when a tentative file-scope definition
362 is seen, but at the end of compilation do output code for such definitions.
363
364 We do output all variables when unit-at-a-time is active and rely on
365 the callgraph code to defer them, except for forward declarations
366 (see gcc.c-torture/compile/920624-1.c). */
367 if ((at_end
368 || !DECL_DEFER_OUTPUT (decl)
369 || (flag_unit_at_a_time && DECL_INITIAL (decl)))
370 && !DECL_EXTERNAL (decl))
371 {
372 if (flag_unit_at_a_time && !cgraph_global_info_ready
373 && TREE_CODE (decl) != FUNCTION_DECL && top_level)
374 cgraph_varpool_finalize_decl (decl);
375 else
376 assemble_variable (decl, top_level, at_end, 0);
377 }
378
379 #ifdef ASM_FINISH_DECLARE_OBJECT
380 if (decl == last_assemble_variable_decl)
381 {
382 ASM_FINISH_DECLARE_OBJECT (asm_out_file, decl,
383 top_level, at_end);
384 }
385 #endif
386
387 timevar_pop (TV_VARCONST);
388 }
389 else if (DECL_REGISTER (decl) && asmspec != 0)
390 {
391 if (decode_reg_name (asmspec) >= 0)
392 {
393 SET_DECL_RTL (decl, NULL_RTX);
394 make_decl_rtl (decl, asmspec);
395 }
396 else
397 {
398 error ("%Hinvalid register name `%s' for register variable",
399 &DECL_SOURCE_LOCATION (decl), asmspec);
400 DECL_REGISTER (decl) = 0;
401 if (!top_level)
402 expand_decl (decl);
403 }
404 }
405 else if (TREE_CODE (decl) == TYPE_DECL)
406 {
407 timevar_push (TV_SYMOUT);
408 debug_hooks->type_decl (decl, !top_level);
409 timevar_pop (TV_SYMOUT);
410 }
411 }
412
413 /* Called after finishing a record, union or enumeral type. */
414
415 void
416 rest_of_type_compilation (tree type, int toplev)
417 {
418 /* Avoid confusing the debug information machinery when there are
419 errors. */
420 if (errorcount != 0 || sorrycount != 0)
421 return;
422
423 timevar_push (TV_SYMOUT);
424 debug_hooks->type_decl (TYPE_STUB_DECL (type), !toplev);
425 timevar_pop (TV_SYMOUT);
426 }
427
428 /* Turn the RTL into assembly. */
429 static void
430 rest_of_handle_final (void)
431 {
432 timevar_push (TV_FINAL);
433 {
434 rtx x;
435 const char *fnname;
436
437 /* Get the function's name, as described by its RTL. This may be
438 different from the DECL_NAME name used in the source file. */
439
440 x = DECL_RTL (current_function_decl);
441 if (!MEM_P (x))
442 abort ();
443 x = XEXP (x, 0);
444 if (GET_CODE (x) != SYMBOL_REF)
445 abort ();
446 fnname = XSTR (x, 0);
447
448 assemble_start_function (current_function_decl, fnname);
449 final_start_function (get_insns (), asm_out_file, optimize);
450 final (get_insns (), asm_out_file, optimize, 0);
451 final_end_function ();
452
453 #ifdef IA64_UNWIND_INFO
454 /* ??? The IA-64 ".handlerdata" directive must be issued before
455 the ".endp" directive that closes the procedure descriptor. */
456 output_function_exception_table ();
457 #endif
458
459 assemble_end_function (current_function_decl, fnname);
460
461 #ifndef IA64_UNWIND_INFO
462 /* Otherwise, it feels unclean to switch sections in the middle. */
463 output_function_exception_table ();
464 #endif
465
466 if (! quiet_flag)
467 fflush (asm_out_file);
468
469 /* Release all memory allocated by flow. */
470 free_basic_block_vars ();
471
472 /* Release all memory held by regsets now. */
473 regset_release_memory ();
474 }
475 timevar_pop (TV_FINAL);
476
477 ggc_collect ();
478 }
479
480 #ifdef DELAY_SLOTS
481 /* Run delay slot optimization. */
482 static void
483 rest_of_handle_delay_slots (void)
484 {
485 timevar_push (TV_DBR_SCHED);
486 open_dump_file (DFI_dbr, current_function_decl);
487
488 dbr_schedule (get_insns (), dump_file);
489
490 close_dump_file (DFI_dbr, print_rtl, get_insns ());
491 timevar_pop (TV_DBR_SCHED);
492
493 ggc_collect ();
494 }
495 #endif
496
497 #ifdef STACK_REGS
498 /* Convert register usage from a flat register file to a stack
499 register file. */
500 static void
501 rest_of_handle_stack_regs (void)
502 {
503 #if defined (HAVE_ATTR_length)
504 /* If flow2 creates new instructions which need splitting, and
505 scheduling after reload is not done, they might not be split
506 until final, which does not allow splitting
507 when HAVE_ATTR_length is defined. */
508 #ifdef INSN_SCHEDULING
509 if (optimize && !flag_schedule_insns_after_reload)
510 #else
511 if (optimize)
512 #endif
513 {
514 timevar_push (TV_SHORTEN_BRANCH);
515 split_all_insns (1);
516 timevar_pop (TV_SHORTEN_BRANCH);
517 }
518 #endif
519
520 timevar_push (TV_REG_STACK);
521 open_dump_file (DFI_stack, current_function_decl);
522
523 if (reg_to_stack (dump_file) && optimize)
524 {
525 if (cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK
526 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0))
527 && (flag_reorder_blocks || flag_reorder_blocks_and_partition))
528 {
529 reorder_basic_blocks ();
530 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK);
531 }
532 }
533
534 close_dump_file (DFI_stack, print_rtl_with_bb, get_insns ());
535 timevar_pop (TV_REG_STACK);
536
537 ggc_collect ();
538 }
539 #endif
540
541 /* Track the variables, i.e. compute where each variable is stored at each position in the function. */
542 static void
543 rest_of_handle_variable_tracking (void)
544 {
545 timevar_push (TV_VAR_TRACKING);
546 open_dump_file (DFI_vartrack, current_function_decl);
547
548 variable_tracking_main ();
549
550 close_dump_file (DFI_vartrack, print_rtl_with_bb, get_insns ());
551 timevar_pop (TV_VAR_TRACKING);
552 }
553
554 /* Machine independent reorg pass. */
555 static void
556 rest_of_handle_machine_reorg (void)
557 {
558 timevar_push (TV_MACH_DEP);
559 open_dump_file (DFI_mach, current_function_decl);
560
561 targetm.machine_dependent_reorg ();
562
563 close_dump_file (DFI_mach, print_rtl, get_insns ());
564 timevar_pop (TV_MACH_DEP);
565
566 ggc_collect ();
567 }
568
569
570 /* Run new register allocator. Return TRUE if we must exit
571 rest_of_compilation upon return. */
572 static bool
573 rest_of_handle_new_regalloc (void)
574 {
575 int failure;
576
577 delete_trivially_dead_insns (get_insns (), max_reg_num ());
578 reg_alloc ();
579
580 timevar_pop (TV_LOCAL_ALLOC);
581 if (dump_file_tbl[DFI_lreg].enabled)
582 {
583 timevar_push (TV_DUMP);
584
585 close_dump_file (DFI_lreg, NULL, NULL);
586 timevar_pop (TV_DUMP);
587 }
588
589 /* XXX clean up the whole mess to bring live info into shape again. */
590 timevar_push (TV_GLOBAL_ALLOC);
591 open_dump_file (DFI_greg, current_function_decl);
592
593 build_insn_chain (get_insns ());
594 failure = reload (get_insns (), 0);
595
596 timevar_pop (TV_GLOBAL_ALLOC);
597
598 if (dump_file_tbl[DFI_greg].enabled)
599 {
600 timevar_push (TV_DUMP);
601
602 dump_global_regs (dump_file);
603
604 close_dump_file (DFI_greg, print_rtl_with_bb, get_insns ());
605 timevar_pop (TV_DUMP);
606 }
607
608 if (failure)
609 return true;
610
611 reload_completed = 1;
612
613 return false;
614 }
615
616 /* Run old register allocator. Return TRUE if we must exit
617 rest_of_compilation upon return. */
618 static bool
619 rest_of_handle_old_regalloc (void)
620 {
621 int failure;
622 int rebuild_notes;
623
624 /* Allocate the reg_renumber array. */
625 allocate_reg_info (max_regno, FALSE, TRUE);
626
627 /* And the reg_equiv_memory_loc array. */
628 VARRAY_GROW (reg_equiv_memory_loc_varray, max_regno);
629 reg_equiv_memory_loc = &VARRAY_RTX (reg_equiv_memory_loc_varray, 0);
630
631 allocate_initial_values (reg_equiv_memory_loc);
632
633 regclass (get_insns (), max_reg_num (), dump_file);
634 rebuild_notes = local_alloc ();
635
636 timevar_pop (TV_LOCAL_ALLOC);
637
638 /* Local allocation may have turned an indirect jump into a direct
639 jump. If so, we must rebuild the JUMP_LABEL fields of jumping
640 instructions. */
641 if (rebuild_notes)
642 {
643 timevar_push (TV_JUMP);
644
645 rebuild_jump_labels (get_insns ());
646 purge_all_dead_edges (0);
647
648 timevar_pop (TV_JUMP);
649 }
650
651 if (dump_file_tbl[DFI_lreg].enabled)
652 {
653 timevar_push (TV_DUMP);
654
655 dump_flow_info (dump_file);
656 dump_local_alloc (dump_file);
657
658 close_dump_file (DFI_lreg, print_rtl_with_bb, get_insns ());
659 timevar_pop (TV_DUMP);
660 }
661
662 ggc_collect ();
663
664 timevar_push (TV_GLOBAL_ALLOC);
665 open_dump_file (DFI_greg, current_function_decl);
666
667 /* If optimizing, allocate remaining pseudo-regs. Do the reload
668 pass fixing up any insns that are invalid. */
669
670 if (optimize)
671 failure = global_alloc (dump_file);
672 else
673 {
674 build_insn_chain (get_insns ());
675 failure = reload (get_insns (), 0);
676 }
677
678 timevar_pop (TV_GLOBAL_ALLOC);
679
680 if (dump_file_tbl[DFI_greg].enabled)
681 {
682 timevar_push (TV_DUMP);
683
684 dump_global_regs (dump_file);
685
686 close_dump_file (DFI_greg, print_rtl_with_bb, get_insns ());
687 timevar_pop (TV_DUMP);
688 }
689
690 return failure;
691 }
692
693 /* Run the regrename and cprop passes. */
694 static void
695 rest_of_handle_regrename (void)
696 {
697 timevar_push (TV_RENAME_REGISTERS);
698 open_dump_file (DFI_rnreg, current_function_decl);
699
700 if (flag_rename_registers)
701 regrename_optimize ();
702 if (flag_cprop_registers)
703 copyprop_hardreg_forward ();
704
705 close_dump_file (DFI_rnreg, print_rtl_with_bb, get_insns ());
706 timevar_pop (TV_RENAME_REGISTERS);
707 }
708
709 /* Reorder basic blocks. */
710 static void
711 rest_of_handle_reorder_blocks (void)
712 {
713 bool changed;
714 open_dump_file (DFI_bbro, current_function_decl);
715
716 /* Last attempt to optimize CFG, as scheduling, peepholing and insn
717 splitting possibly introduced more crossjumping opportunities. */
718 changed = cleanup_cfg (CLEANUP_EXPENSIVE
719 | (!HAVE_conditional_execution
720 ? CLEANUP_UPDATE_LIFE : 0));
721
722 if (flag_sched2_use_traces && flag_schedule_insns_after_reload)
723 tracer ();
724 if (flag_reorder_blocks || flag_reorder_blocks_and_partition)
725 reorder_basic_blocks ();
726 if (flag_reorder_blocks || flag_reorder_blocks_and_partition
727 || (flag_sched2_use_traces && flag_schedule_insns_after_reload))
728 changed |= cleanup_cfg (CLEANUP_EXPENSIVE
729 | (!HAVE_conditional_execution
730 ? CLEANUP_UPDATE_LIFE : 0));
731
732 /* On conditional execution targets we cannot update the life info cheaply, so
733 we defer the update until after both cleanups. This may lose some cases
734 but should not be terribly bad. */
735 if (changed && HAVE_conditional_execution)
736 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
737 PROP_DEATH_NOTES);
738 close_dump_file (DFI_bbro, print_rtl_with_bb, get_insns ());
739 }
740
741 #ifdef INSN_SCHEDULING
742 /* Run instruction scheduler. */
743 static void
744 rest_of_handle_sched (void)
745 {
746 timevar_push (TV_SMS);
747 if (optimize > 0 && flag_modulo_sched)
748 {
749
750 /* Perform SMS modulo scheduling. */
751 open_dump_file (DFI_sms, current_function_decl);
752
753 /* We want to be able to create new pseudos. */
754 no_new_pseudos = 0;
755 sms_schedule (dump_file);
756 close_dump_file (DFI_sms, print_rtl, get_insns ());
757
758
759 /* Update the life information, because we add pseudos. */
760 max_regno = max_reg_num ();
761 allocate_reg_info (max_regno, FALSE, FALSE);
762 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
763 (PROP_DEATH_NOTES
764 | PROP_KILL_DEAD_CODE
765 | PROP_SCAN_DEAD_CODE));
766 no_new_pseudos = 1;
767 }
768 timevar_pop (TV_SMS);
769 timevar_push (TV_SCHED);
770
771 /* Print the function header into the sched dump now,
772 because doing the sched analysis produces some of the dump. */
773 if (optimize > 0 && flag_schedule_insns)
774 {
775 open_dump_file (DFI_sched, current_function_decl);
776
777 /* Do control and data sched analysis,
778 and write some of the results to the dump file. */
779
780 schedule_insns (dump_file);
781
782 close_dump_file (DFI_sched, print_rtl_with_bb, get_insns ());
783 }
784 timevar_pop (TV_SCHED);
785
786 ggc_collect ();
787 }
788
789 /* Run second scheduling pass after reload. */
790 static void
791 rest_of_handle_sched2 (void)
792 {
793 timevar_push (TV_SCHED2);
794 open_dump_file (DFI_sched2, current_function_decl);
795
796 /* Do control and data sched analysis again,
797 and write some more of the results to the dump file. */
798
799 split_all_insns (1);
800
801 if (flag_sched2_use_superblocks || flag_sched2_use_traces)
802 {
803 schedule_ebbs (dump_file);
804 /* No liveness updating code yet, but it should be easy to do.
805 reg-stack recomputes the liveness when needed for now. */
806 count_or_remove_death_notes (NULL, 1);
807 cleanup_cfg (CLEANUP_EXPENSIVE);
808 }
809 else
810 schedule_insns (dump_file);
811
812 close_dump_file (DFI_sched2, print_rtl_with_bb, get_insns ());
813 timevar_pop (TV_SCHED2);
814
815 ggc_collect ();
816 }
817 #endif
818
819 static void
820 rest_of_handle_gcse2 (void)
821 {
822 open_dump_file (DFI_gcse2, current_function_decl);
823
824 gcse_after_reload_main (get_insns (), dump_file);
825 rebuild_jump_labels (get_insns ());
826 delete_trivially_dead_insns (get_insns (), max_reg_num ());
827 close_dump_file (DFI_gcse2, print_rtl_with_bb, get_insns ());
828
829 ggc_collect ();
830
831 #ifdef ENABLE_CHECKING
832 verify_flow_info ();
833 #endif
834 }
835
836 /* Register allocation pre-pass, to reduce the number of moves necessary
837 for two-address machines. */
838 static void
839 rest_of_handle_regmove (void)
840 {
841 timevar_push (TV_REGMOVE);
842 open_dump_file (DFI_regmove, current_function_decl);
843
844 regmove_optimize (get_insns (), max_reg_num (), dump_file);
845
846 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
847 close_dump_file (DFI_regmove, print_rtl_with_bb, get_insns ());
848 timevar_pop (TV_REGMOVE);
849
850 ggc_collect ();
851 }
852
853 /* Run tracer. */
854 static void
855 rest_of_handle_tracer (void)
856 {
857 open_dump_file (DFI_tracer, current_function_decl);
858 if (dump_file)
859 dump_flow_info (dump_file);
860 tracer ();
861 cleanup_cfg (CLEANUP_EXPENSIVE);
862 reg_scan (get_insns (), max_reg_num (), 0);
863 close_dump_file (DFI_tracer, print_rtl_with_bb, get_insns ());
864 }
865
866 /* If-conversion and CFG cleanup. */
867 static void
868 rest_of_handle_if_conversion (void)
869 {
870 open_dump_file (DFI_ce1, current_function_decl);
871 if (flag_if_conversion)
872 {
873 timevar_push (TV_IFCVT);
874 if (dump_file)
875 dump_flow_info (dump_file);
876 cleanup_cfg (CLEANUP_EXPENSIVE);
877 reg_scan (get_insns (), max_reg_num (), 0);
878 if_convert (0);
879 timevar_pop (TV_IFCVT);
880 }
881 timevar_push (TV_JUMP);
882 cleanup_cfg (CLEANUP_EXPENSIVE);
883 reg_scan (get_insns (), max_reg_num (), 0);
884 timevar_pop (TV_JUMP);
885 close_dump_file (DFI_ce1, print_rtl_with_bb, get_insns ());
886 }
887
888 /* Rerun if-conversion, as combine may have simplified things enough
889 to now meet sequence length restrictions. */
890 static void
891 rest_of_handle_if_after_combine (void)
892 {
893 timevar_push (TV_IFCVT);
894 open_dump_file (DFI_ce2, current_function_decl);
895
896 no_new_pseudos = 0;
897 if_convert (1);
898 no_new_pseudos = 1;
899
900 close_dump_file (DFI_ce2, print_rtl_with_bb, get_insns ());
901 timevar_pop (TV_IFCVT);
902 }
903
904 static void
905 rest_of_handle_web (void)
906 {
907 open_dump_file (DFI_web, current_function_decl);
908 timevar_push (TV_WEB);
909 web_main ();
910 delete_trivially_dead_insns (get_insns (), max_reg_num ());
911 cleanup_cfg (CLEANUP_EXPENSIVE);
912
913 timevar_pop (TV_WEB);
914 close_dump_file (DFI_web, print_rtl_with_bb, get_insns ());
915 reg_scan (get_insns (), max_reg_num (), 0);
916 }
917
918 /* Do branch profiling and static profile estimation passes. */
919 static void
920 rest_of_handle_branch_prob (void)
921 {
922 struct loops loops;
923 timevar_push (TV_BRANCH_PROB);
924 open_dump_file (DFI_bp, current_function_decl);
925
926 if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
927 branch_prob ();
928
929 /* Discover and record the loop depth at the head of each basic
930 block. The loop infrastructure does the real job for us. */
931 flow_loops_find (&loops, LOOP_TREE);
932
933 if (dump_file)
934 flow_loops_dump (&loops, dump_file, NULL, 0);
935
936 /* Estimate using heuristics if no profiling info is available. */
937 if (flag_guess_branch_prob)
938 estimate_probability (&loops);
939
940 flow_loops_free (&loops);
941 free_dominance_info (CDI_DOMINATORS);
942 close_dump_file (DFI_bp, print_rtl_with_bb, get_insns ());
943 timevar_pop (TV_BRANCH_PROB);
944 }
945
946 /* Do optimizations based on expression value profiles. */
947 static void
948 rest_of_handle_value_profile_transformations (void)
949 {
950 open_dump_file (DFI_vpt, current_function_decl);
951 timevar_push (TV_VPT);
952
953 if (value_profile_transformations ())
954 cleanup_cfg (CLEANUP_EXPENSIVE);
955
956 timevar_pop (TV_VPT);
957 close_dump_file (DFI_vpt, print_rtl_with_bb, get_insns ());
958 }
959
960 /* Do control and data flow analysis; write some of the results to the
961 dump file. */
962 static void
963 rest_of_handle_cfg (void)
964 {
965 open_dump_file (DFI_cfg, current_function_decl);
966 if (dump_file)
967 dump_flow_info (dump_file);
968 if (optimize)
969 cleanup_cfg (CLEANUP_EXPENSIVE
970 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
971
972 /* It may make more sense to mark constant functions after dead code is
973 eliminated by life_analysis, but we need to do it early, as -fprofile-arcs
974 may insert code that makes the function non-constant; we must still consider
975 it constant, otherwise -fbranch-probabilities will not read the data back.
976
977 life_analysis rarely eliminates modification of external memory.
978 */
979 if (optimize)
980 {
981 /* Alias analysis depends on this information and mark_constant_function
982 depends on alias analysis. */
983 reg_scan (get_insns (), max_reg_num (), 1);
984 mark_constant_function ();
985 }
986
987 close_dump_file (DFI_cfg, print_rtl_with_bb, get_insns ());
988 }
989
990 /* Perform jump bypassing and control flow optimizations. */
991 static void
992 rest_of_handle_jump_bypass (void)
993 {
994 timevar_push (TV_BYPASS);
995 open_dump_file (DFI_bypass, current_function_decl);
996
997 cleanup_cfg (CLEANUP_EXPENSIVE);
998 reg_scan (get_insns (), max_reg_num (), 1);
999
1000 if (bypass_jumps (dump_file))
1001 {
1002 rebuild_jump_labels (get_insns ());
1003 cleanup_cfg (CLEANUP_EXPENSIVE);
1004 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1005 }
1006
1007 close_dump_file (DFI_bypass, print_rtl_with_bb, get_insns ());
1008 timevar_pop (TV_BYPASS);
1009
1010 ggc_collect ();
1011
1012 #ifdef ENABLE_CHECKING
1013 verify_flow_info ();
1014 #endif
1015 }
1016
1017 /* Try combining insns through substitution. */
1018 static void
1019 rest_of_handle_combine (void)
1020 {
1021 int rebuild_jump_labels_after_combine = 0;
1022
1023 timevar_push (TV_COMBINE);
1024 open_dump_file (DFI_combine, current_function_decl);
1025
1026 rebuild_jump_labels_after_combine
1027 = combine_instructions (get_insns (), max_reg_num ());
1028
1029 /* Combining insns may have turned an indirect jump into a
1030 direct jump. Rebuild the JUMP_LABEL fields of jumping
1031 instructions. */
1032 if (rebuild_jump_labels_after_combine)
1033 {
1034 timevar_push (TV_JUMP);
1035 rebuild_jump_labels (get_insns ());
1036 timevar_pop (TV_JUMP);
1037
1038 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
1039 }
1040
1041 close_dump_file (DFI_combine, print_rtl_with_bb, get_insns ());
1042 timevar_pop (TV_COMBINE);
1043
1044 ggc_collect ();
1045 }
1046
1047 /* Perform life analysis. */
1048 static void
1049 rest_of_handle_life (void)
1050 {
1051 open_dump_file (DFI_life, current_function_decl);
1052 regclass_init ();
1053
1054 #ifdef ENABLE_CHECKING
1055 verify_flow_info ();
1056 #endif
1057 life_analysis (dump_file, PROP_FINAL);
1058 if (optimize)
1059 cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_UPDATE_LIFE
1060 | CLEANUP_LOG_LINKS
1061 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
1062 timevar_pop (TV_FLOW);
1063
1064 if (extra_warnings)
1065 {
1066 setjmp_vars_warning (DECL_INITIAL (current_function_decl));
1067 setjmp_args_warning ();
1068 }
1069
1070 if (optimize)
1071 {
1072 if (!flag_new_regalloc && initialize_uninitialized_subregs ())
1073 {
1074 /* Insns were inserted, and possibly pseudos created, so
1075 things might look a bit different. */
1076 allocate_reg_life_data ();
1077 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
1078 PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
1079 }
1080 }
1081
1082 no_new_pseudos = 1;
1083
1084 close_dump_file (DFI_life, print_rtl_with_bb, get_insns ());
1085
1086 ggc_collect ();
1087 }
1088
1089 /* Perform common subexpression elimination. Nonzero value from
1090 `cse_main' means that jumps were simplified and some code may now
1091 be unreachable, so do jump optimization again. */
1092 static void
1093 rest_of_handle_cse (void)
1094 {
1095 int tem;
1096 open_dump_file (DFI_cse, current_function_decl);
1097 if (dump_file)
1098 dump_flow_info (dump_file);
1099 timevar_push (TV_CSE);
1100
1101 reg_scan (get_insns (), max_reg_num (), 1);
1102
1103 tem = cse_main (get_insns (), max_reg_num (), 0, dump_file);
1104 if (tem)
1105 rebuild_jump_labels (get_insns ());
1106 if (purge_all_dead_edges (0))
1107 delete_unreachable_blocks ();
1108
1109 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1110
1111 /* If we are not running more CSE passes, then we are no longer
1112 expecting CSE to be run. But always rerun it in a cheap mode. */
1113 cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
1114
1115 if (tem || optimize > 1)
1116 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1117
1118 timevar_pop (TV_CSE);
1119 close_dump_file (DFI_cse, print_rtl_with_bb, get_insns ());
1120 }
1121
1122 /* Run second CSE pass after loop optimizations. */
1123 static void
1124 rest_of_handle_cse2 (void)
1125 {
1126 int tem;
1127 timevar_push (TV_CSE2);
1128 open_dump_file (DFI_cse2, current_function_decl);
1129 if (dump_file)
1130 dump_flow_info (dump_file);
1131 /* CFG is no longer maintained up-to-date. */
1132 tem = cse_main (get_insns (), max_reg_num (), 1, dump_file);
1133
1134 /* Run a pass to eliminate duplicated assignments to condition code
1135 registers. We have to run this after bypass_jumps, because it
1136 makes it harder for that pass to determine whether a jump can be
1137 bypassed safely. */
1138 cse_condition_code_reg ();
1139
1140 purge_all_dead_edges (0);
1141 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1142
1143 if (tem)
1144 {
1145 timevar_push (TV_JUMP);
1146 rebuild_jump_labels (get_insns ());
1147 cleanup_cfg (CLEANUP_EXPENSIVE);
1148 timevar_pop (TV_JUMP);
1149 }
1150 reg_scan (get_insns (), max_reg_num (), 0);
1151 close_dump_file (DFI_cse2, print_rtl_with_bb, get_insns ());
1152 ggc_collect ();
1153 timevar_pop (TV_CSE2);
1154 }
1155
1156 /* Perform global cse. */
1157 static void
1158 rest_of_handle_gcse (void)
1159 {
1160 int save_csb, save_cfj;
1161 int tem2 = 0, tem;
1162 timevar_push (TV_GCSE);
1163 open_dump_file (DFI_gcse, current_function_decl);
1164
1165 tem = gcse_main (get_insns (), dump_file);
1166 rebuild_jump_labels (get_insns ());
1167 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1168
1169 save_csb = flag_cse_skip_blocks;
1170 save_cfj = flag_cse_follow_jumps;
1171 flag_cse_skip_blocks = flag_cse_follow_jumps = 0;
1172
1173 /* If -fexpensive-optimizations, re-run CSE to clean up things done
1174 by gcse. */
1175 if (flag_expensive_optimizations)
1176 {
1177 timevar_push (TV_CSE);
1178 reg_scan (get_insns (), max_reg_num (), 1);
1179 tem2 = cse_main (get_insns (), max_reg_num (), 0, dump_file);
1180 purge_all_dead_edges (0);
1181 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1182 timevar_pop (TV_CSE);
1183 cse_not_expected = !flag_rerun_cse_after_loop;
1184 }
1185
1186 /* If gcse or cse altered any jumps, rerun jump optimizations to clean
1187 things up. Then possibly re-run CSE again. */
1188 while (tem || tem2)
1189 {
1190 tem = tem2 = 0;
1191 timevar_push (TV_JUMP);
1192 rebuild_jump_labels (get_insns ());
1193 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1194 timevar_pop (TV_JUMP);
1195
1196 if (flag_expensive_optimizations)
1197 {
1198 timevar_push (TV_CSE);
1199 reg_scan (get_insns (), max_reg_num (), 1);
1200 tem2 = cse_main (get_insns (), max_reg_num (), 0, dump_file);
1201 purge_all_dead_edges (0);
1202 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1203 timevar_pop (TV_CSE);
1204 }
1205 }
1206
1207 close_dump_file (DFI_gcse, print_rtl_with_bb, get_insns ());
1208 timevar_pop (TV_GCSE);
1209
1210 ggc_collect ();
1211 flag_cse_skip_blocks = save_csb;
1212 flag_cse_follow_jumps = save_cfj;
1213 #ifdef ENABLE_CHECKING
1214 verify_flow_info ();
1215 #endif
1216 }
1217
1218 /* Move constant computations out of loops. */
1219 static void
1220 rest_of_handle_loop_optimize (void)
1221 {
1222 int do_unroll, do_prefetch;
1223
1224 timevar_push (TV_LOOP);
1225 delete_dead_jumptables ();
1226 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1227 open_dump_file (DFI_loop, current_function_decl);
1228
1229 /* CFG is no longer maintained up-to-date. */
1230 free_bb_for_insn ();
1231
1232 if (flag_unroll_loops)
1233 do_unroll = LOOP_AUTO_UNROLL; /* Having two unrollers is useless. */
1234 else
1235 do_unroll = flag_old_unroll_loops ? LOOP_UNROLL : LOOP_AUTO_UNROLL;
1236 do_prefetch = flag_prefetch_loop_arrays ? LOOP_PREFETCH : 0;
1237
1238 if (flag_rerun_loop_opt)
1239 {
1240 cleanup_barriers ();
1241
1242 /* We only want to perform unrolling once. */
1243 loop_optimize (get_insns (), dump_file, do_unroll);
1244 do_unroll = 0;
1245
1246 /* The first call to loop_optimize makes some instructions
1247 trivially dead. We delete those instructions now in the
1248 hope that doing so will make the heuristics in loop work
1249 better and possibly speed up compilation. */
1250 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1251
1252 /* The regscan pass is currently necessary as the alias
1253 analysis code depends on this information. */
1254 reg_scan (get_insns (), max_reg_num (), 1);
1255 }
1256 cleanup_barriers ();
1257 loop_optimize (get_insns (), dump_file, do_unroll | do_prefetch);
1258
1259 /* Loop can create trivially dead instructions. */
1260 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1261 close_dump_file (DFI_loop, print_rtl, get_insns ());
1262 timevar_pop (TV_LOOP);
1263 find_basic_blocks (get_insns (), max_reg_num (), dump_file);
1264
1265 ggc_collect ();
1266 }
1267
1268 /* Perform loop optimizations. It might be better to do them a bit
1269 sooner, but we want the profile feedback to work more
1270 efficiently. */
1271 static void
1272 rest_of_handle_loop2 (void)
1273 {
1274 struct loops *loops;
1275 basic_block bb;
1276
1277 if (!flag_move_loop_invariants
1278 && !flag_unswitch_loops
1279 && !flag_peel_loops
1280 && !flag_unroll_loops
1281 && !flag_branch_on_count_reg)
1282 return;
1283
1284 timevar_push (TV_LOOP);
1285 open_dump_file (DFI_loop2, current_function_decl);
1286 if (dump_file)
1287 dump_flow_info (dump_file);
1288
1289 /* Initialize structures for layout changes. */
1290 cfg_layout_initialize ();
1291
1292 loops = loop_optimizer_init (dump_file);
1293
1294 if (loops)
1295 {
1296 /* The optimizations: */
1297 if (flag_move_loop_invariants)
1298 move_loop_invariants (loops);
1299
1300 if (flag_unswitch_loops)
1301 unswitch_loops (loops);
1302
1303 if (flag_peel_loops || flag_unroll_loops)
1304 unroll_and_peel_loops (loops,
1305 (flag_peel_loops ? UAP_PEEL : 0) |
1306 (flag_unroll_loops ? UAP_UNROLL : 0) |
1307 (flag_unroll_all_loops ? UAP_UNROLL_ALL : 0));
1308
1309 #ifdef HAVE_doloop_end
1310 if (flag_branch_on_count_reg && HAVE_doloop_end)
1311 doloop_optimize_loops (loops);
1312 #endif /* HAVE_doloop_end */
1313
1314 loop_optimizer_finalize (loops, dump_file);
1315 }
1316
1317 /* Finalize layout changes. */
1318 FOR_EACH_BB (bb)
1319 if (bb->next_bb != EXIT_BLOCK_PTR)
1320 bb->rbi->next = bb->next_bb;
1321 cfg_layout_finalize ();
1322
1323 cleanup_cfg (CLEANUP_EXPENSIVE);
1324 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1325 reg_scan (get_insns (), max_reg_num (), 0);
1326 if (dump_file)
1327 dump_flow_info (dump_file);
1328 close_dump_file (DFI_loop2, print_rtl_with_bb, get_insns ());
1329 timevar_pop (TV_LOOP);
1330 ggc_collect ();
1331 }
1332
1333 /* This is called from finish_function (within langhooks.parse_file)
1334 after each top-level definition is parsed.
1335 It is supposed to compile that function or variable
1336 and output the assembler code for it.
1337 After we return, the tree storage is freed. */
1338
1339 void
1340 rest_of_compilation (void)
1341 {
1342 /* There's no need to defer outputting this function any more; we
1343 know we want to output it. */
1344 DECL_DEFER_OUTPUT (current_function_decl) = 0;
1349
1350 /* Register rtl specific functions for cfg. */
1351 rtl_register_cfg_hooks ();
1352
1353 /* Now that we're out of the frontend, we shouldn't have any more
1354 CONCATs anywhere. */
1355 generating_concat_p = 0;
1356
1357 /* When processing delayed functions, prepare_function_start() won't
1358 have been run to re-initialize it. */
1359 cse_not_expected = ! optimize;
1360
1361 finalize_block_changes ();
1362
1363 /* Dump the rtl code if we are dumping rtl. */
1364 if (open_dump_file (DFI_rtl, current_function_decl))
1365 close_dump_file (DFI_rtl, print_rtl, get_insns ());
1366
1367 /* Convert from NOTE_INSN_EH_REGION style notes, and do other
1368 sorts of eh initialization. Delay this until after the
1369 initial rtl dump so that we can see the original nesting. */
1370 convert_from_eh_region_ranges ();
1371
1372 /* If we're emitting a nested function, make sure its parent gets
1373 emitted as well. Doing otherwise confuses debug info. */
1374 {
1375 tree parent;
1376 for (parent = DECL_CONTEXT (current_function_decl);
1377 parent != NULL_TREE;
1378 parent = get_containing_scope (parent))
1379 if (TREE_CODE (parent) == FUNCTION_DECL)
1380 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
1381 }
1382
1383 /* We are now committed to emitting code for this function. Do any
1384 preparation, such as emitting abstract debug info for the inline
1385 before it gets mangled by optimization. */
1386 if (cgraph_function_possibly_inlined_p (current_function_decl))
1387 (*debug_hooks->outlining_inline_function) (current_function_decl);
1388
1389 /* Remove any notes we don't need. That will make iterating
1390 over the instruction sequence faster, and allow the garbage
1391 collector to reclaim the memory used by the notes. */
1392 remove_unnecessary_notes ();
1393
1394 ggc_collect ();
1395
1396 /* Initialize some variables used by the optimizers. */
1397 init_function_for_compilation ();
1398
1399 TREE_ASM_WRITTEN (current_function_decl) = 1;
1400
1401 /* Now that integrate will no longer see our rtl, we need not
1402 distinguish between the return value of this function and the
1403 return value of called functions. Also, we can remove all SETs
1404 of subregs of hard registers; they are only here because of
1405 integrate. Also, we can now initialize pseudos intended to
1406 carry magic hard reg data throughout the function. */
1407 rtx_equal_function_value_matters = 0;
1408 purge_hard_subreg_sets (get_insns ());
1409
1410 /* Early return if there were errors. We can run afoul of our
1411 consistency checks, and there's not really much point in fixing them. */
1412 if (rtl_dump_and_exit || flag_syntax_only || errorcount || sorrycount)
1413 goto exit_rest_of_compilation;
1414
1415 timevar_push (TV_JUMP);
1416 open_dump_file (DFI_sibling, current_function_decl);
1417
1418 /* ??? We may get called either via tree_rest_of_compilation when the CFG
1419 is already built or directly (for instance from coverage code).
1420 The direct callers shall be updated. */
1421 if (!basic_block_info)
1422 {
1423 init_flow ();
1424 rebuild_jump_labels (get_insns ());
1425 find_exception_handler_labels ();
1426 find_basic_blocks (get_insns (), max_reg_num (), dump_file);
1427 }
1428 delete_unreachable_blocks ();
1429 #ifdef ENABLE_CHECKING
1430 verify_flow_info ();
1431 #endif
1432
1433 /* Turn NOTE_INSN_PREDICTIONs into branch predictions. */
1434 if (flag_guess_branch_prob)
1435 {
1436 timevar_push (TV_BRANCH_PROB);
1437 note_prediction_to_br_prob ();
1438 timevar_pop (TV_BRANCH_PROB);
1439 }
1440
1441 timevar_pop (TV_JUMP);
1442
1443 if (cfun->tail_call_emit)
1444 fixup_tail_calls ();
1445
1446 insn_locators_initialize ();
1447 /* Complete generation of exception handling code. */
1448 if (doing_eh (0))
1449 {
1450 timevar_push (TV_JUMP);
1451 open_dump_file (DFI_eh, current_function_decl);
1452
1453 finish_eh_generation ();
1454
1455 close_dump_file (DFI_eh, print_rtl, get_insns ());
1456 timevar_pop (TV_JUMP);
1457 }
1458
1459 /* Delay emitting hard_reg_initial_value sets until after EH landing pad
1460 generation, which might create new sets. */
1461 emit_initial_value_sets ();
1462
1463 #ifdef FINALIZE_PIC
1464 /* If we are doing position-independent code generation, now
1465 is the time to output special prologues and epilogues.
1466 We do not want to do this earlier, because it just clutters
1467 up inline functions with meaningless insns. */
1468 if (flag_pic)
1469 FINALIZE_PIC;
1470 #endif
1471
1472 /* Copy any shared structure that should not be shared. */
1473 unshare_all_rtl ();
1474
1475 #ifdef SETJMP_VIA_SAVE_AREA
1476 /* This must be performed before virtual register instantiation.
1477 Please be aware that everything in the compiler that can look
1478 at the RTL up to this point must understand that REG_SAVE_AREA
1479 is just like a use of the REG contained inside. */
1480 if (current_function_calls_alloca)
1481 optimize_save_area_alloca (get_insns ());
1482 #endif
1483
1484 /* Instantiate all virtual registers. */
1485 instantiate_virtual_regs ();
1486
1487 open_dump_file (DFI_jump, current_function_decl);
1488
1489 /* Always do one jump optimization pass to ensure that JUMP_LABEL fields
1490 are initialized and to compute whether control can drop off the end
1491 of the function. */
1492
1493 timevar_push (TV_JUMP);
1494 /* Turn NOTE_INSN_EXPECTED_VALUE into REG_BR_PROB. Do this
1495 before jump optimization switches branch directions. */
1496 if (flag_guess_branch_prob)
1497 expected_value_to_br_prob ();
1498
1499 delete_trivially_dead_insns (get_insns (), max_reg_num ());
1500 reg_scan (get_insns (), max_reg_num (), 0);
1501 if (dump_file)
1502 dump_flow_info (dump_file);
1503 cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_PRE_LOOP
1504 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
1505
1506 create_loop_notes ();
1507
1508 purge_line_number_notes (get_insns ());
1509
1510 close_dump_file (DFI_jump, print_rtl, get_insns ());
1511
1512 if (optimize)
1513 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1514
1515 /* Jump optimization, and the removal of NULL pointer checks, may
1516 have reduced the number of instructions substantially. CSE, and
1517 future passes, allocate arrays whose dimensions involve the
1518 maximum instruction UID, so if we can reduce the maximum UID
1519 we'll save big on memory. */
1520 renumber_insns (dump_file);
1521 timevar_pop (TV_JUMP);
1522
1523 close_dump_file (DFI_jump, print_rtl_with_bb, get_insns ());
1524
1525 ggc_collect ();
1526
1527 if (optimize > 0)
1528 rest_of_handle_cse ();
1529
1530 ggc_collect ();
1531
1532 if (optimize > 0)
1533 {
1534 if (flag_gcse)
1535 rest_of_handle_gcse ();
1536
1537 if (flag_loop_optimize)
1538 rest_of_handle_loop_optimize ();
1539
1540 if (flag_gcse)
1541 rest_of_handle_jump_bypass ();
1542 }
1543
1544 timevar_push (TV_FLOW);
1545
1546 rest_of_handle_cfg ();
1547
1548 if (!flag_tree_based_profiling
1549 && (optimize > 0 || profile_arc_flag
1550 || flag_test_coverage || flag_branch_probabilities))
1551 {
1552 rtl_register_profile_hooks ();
1553 rtl_register_value_prof_hooks ();
1554 rest_of_handle_branch_prob ();
1555
1556 if (flag_branch_probabilities
1557 && flag_profile_values
1558 && flag_value_profile_transformations)
1559 rest_of_handle_value_profile_transformations ();
1560
1561 /* Remove the death notes created for vpt. */
1562 if (flag_profile_values)
1563 count_or_remove_death_notes (NULL, 1);
1564 }
1565
1566 if (optimize > 0)
1567 rest_of_handle_if_conversion ();
1568
1569 if (flag_tracer)
1570 rest_of_handle_tracer ();
1571
1572 if (optimize > 0
1573 && flag_loop_optimize2)
1574 rest_of_handle_loop2 ();
1575
1576 if (flag_web)
1577 rest_of_handle_web ();
1578
1579 if (flag_rerun_cse_after_loop)
1580 rest_of_handle_cse2 ();
1581
1582 cse_not_expected = 1;
1583
1584 rest_of_handle_life ();
1585
1586 if (optimize > 0)
1587 rest_of_handle_combine ();
1588
1589 if (flag_if_conversion)
1590 rest_of_handle_if_after_combine ();
1591
1592 /* The optimization to partition hot/cold basic blocks into separate
1593 sections of the .o file does not work well with exception handling.
1594 Don't call it if there are exceptions. */
1595
1596 if (flag_reorder_blocks_and_partition && !flag_exceptions)
1597 {
1598 no_new_pseudos = 0;
1599 partition_hot_cold_basic_blocks ();
1600 allocate_reg_life_data ();
1601 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
1602 PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
1603 no_new_pseudos = 1;
1604 }
1605
1606 if (optimize > 0 && (flag_regmove || flag_expensive_optimizations))
1607 rest_of_handle_regmove ();
1608
1609 /* Do unconditional splitting before register allocation to allow machine
1610 description to add extra information not needed previously. */
1611 split_all_insns (1);
1612
1613 #ifdef OPTIMIZE_MODE_SWITCHING
1614 timevar_push (TV_MODE_SWITCH);
1615
1616 no_new_pseudos = 0;
1617 optimize_mode_switching (NULL);
1618 no_new_pseudos = 1;
1619
1620 timevar_pop (TV_MODE_SWITCH);
1621 #endif
1622
1623 /* Any of the several passes since flow1 will have munged register
1624 lifetime data a bit. We need it to be up to date for scheduling
1625 (see handling of reg_known_equiv in init_alias_analysis). */
1626 recompute_reg_usage (get_insns (), !optimize_size);
1627
1628 #ifdef INSN_SCHEDULING
1629 rest_of_handle_sched ();
1630 #endif
1631
1632 /* Determine if the current function is a leaf before running reload
1633 since this can impact optimizations done by the prologue and
1634 epilogue thus changing register elimination offsets. */
1635 current_function_is_leaf = leaf_function_p ();
1636
1637 timevar_push (TV_LOCAL_ALLOC);
1638 open_dump_file (DFI_lreg, current_function_decl);
1639
1640 if (flag_new_regalloc)
1641 {
1642 if (rest_of_handle_new_regalloc ())
1643 goto exit_rest_of_compilation;
1644 }
1645 else
1646 {
1647 if (rest_of_handle_old_regalloc ())
1648 goto exit_rest_of_compilation;
1649 }
1650
1651 ggc_collect ();
1652
1653 open_dump_file (DFI_postreload, current_function_decl);
1654
1655 /* Do a very simple CSE pass over just the hard registers. */
1656 if (optimize > 0)
1657 {
1658 timevar_push (TV_RELOAD_CSE_REGS);
1659 reload_cse_regs (get_insns ());
1660 /* reload_cse_regs can eliminate potentially-trapping MEMs.
1661 Remove any EH edges associated with them. */
1662 if (flag_non_call_exceptions)
1663 purge_all_dead_edges (0);
1664 timevar_pop (TV_RELOAD_CSE_REGS);
1665 }
1666
1667 close_dump_file (DFI_postreload, print_rtl_with_bb, get_insns ());
1668
1669 if (optimize > 0 && flag_gcse_after_reload)
1670 rest_of_handle_gcse2 ();
1671
1672 /* Re-create the death notes which were deleted during reload. */
1673 timevar_push (TV_FLOW2);
1674 open_dump_file (DFI_flow2, current_function_decl);
1675
1676 #ifdef ENABLE_CHECKING
1677 verify_flow_info ();
1678 #endif
1679
1680 /* If optimizing, then go ahead and split the insns now. */
1681 #ifndef STACK_REGS
1682 if (optimize > 0)
1683 #endif
1684 split_all_insns (0);
1685
1686 if (flag_branch_target_load_optimize)
1687 {
1688 open_dump_file (DFI_branch_target_load, current_function_decl);
1689
1690 branch_target_load_optimize (/*after_prologue_epilogue_gen=*/false);
1691
1692 close_dump_file (DFI_branch_target_load, print_rtl_with_bb, get_insns ());
1693
1694 ggc_collect ();
1695 }
1696
1697 if (! targetm.late_rtl_prologue_epilogue)
1698 {
1699 if (optimize)
1700 cleanup_cfg (CLEANUP_EXPENSIVE);
1701
1702 /* On some machines, the prologue and epilogue code, or parts thereof,
1703 can be represented as RTL. Doing so lets us schedule insns between
1704 it and the rest of the code and also allows delayed branch
1705 scheduling to operate in the epilogue. */
1706 thread_prologue_and_epilogue_insns (get_insns ());
1707 epilogue_completed = 1;
1708 }
1709
1710 if (optimize)
1711 {
1712 life_analysis (dump_file, PROP_POSTRELOAD);
1713 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE
1714 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
1715
1716 /* This is kind of a heuristic. We need to run combine_stack_adjustments
1717 even for machines with possibly nonzero RETURN_POPS_ARGS
1718 and ACCUMULATE_OUTGOING_ARGS. We expect that only ports having
1719 push instructions will have popping returns. */
1720 #ifndef PUSH_ROUNDING
1721 if (!ACCUMULATE_OUTGOING_ARGS)
1722 #endif
1723 combine_stack_adjustments ();
1724
1725 ggc_collect ();
1726 }
1727
1728 flow2_completed = 1;
1729
1730 close_dump_file (DFI_flow2, print_rtl_with_bb, get_insns ());
1731 timevar_pop (TV_FLOW2);
1732
1733 #ifdef HAVE_peephole2
1734 if (optimize > 0 && flag_peephole2)
1735 {
1736 timevar_push (TV_PEEPHOLE2);
1737 open_dump_file (DFI_peephole2, current_function_decl);
1738
1739 peephole2_optimize (dump_file);
1740
1741 close_dump_file (DFI_peephole2, print_rtl_with_bb, get_insns ());
1742 timevar_pop (TV_PEEPHOLE2);
1743 }
1744 #endif
1745
1746 open_dump_file (DFI_ce3, current_function_decl);
1747 if (optimize)
1748 /* Last attempt to optimize CFG, as scheduling, peepholing and insn
1749 splitting possibly introduced more crossjumping opportunities. */
1750 cleanup_cfg (CLEANUP_EXPENSIVE
1751 | CLEANUP_UPDATE_LIFE
1752 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
1753 if (flag_if_conversion2)
1754 {
1755 timevar_push (TV_IFCVT2);
1756
1757 if_convert (1);
1758
1759 timevar_pop (TV_IFCVT2);
1760 }
1761 close_dump_file (DFI_ce3, print_rtl_with_bb, get_insns ());
1762
1763 if (optimize > 0)
1764 {
1765 if (flag_rename_registers || flag_cprop_registers)
1766 rest_of_handle_regrename ();
1767
1768 rest_of_handle_reorder_blocks ();
1769 }
1770
1771 if (flag_branch_target_load_optimize2)
1772 {
1773 /* Leave this a warning for now so that it is possible to experiment
1774 with running this pass twice. In 3.6, we should either make this
1775 an error, or use separate dump files. */
1776 if (flag_branch_target_load_optimize)
1777 warning ("branch target register load optimization is not intended "
1778 "to be run twice");
1779
1780 open_dump_file (DFI_branch_target_load, current_function_decl);
1781
1782 branch_target_load_optimize (/*after_prologue_epilogue_gen=*/true);
1783
1784 close_dump_file (DFI_branch_target_load, print_rtl_with_bb, get_insns ());
1785
1786 ggc_collect ();
1787 }
1788
1789 #ifdef LEAF_REGISTERS
1790 current_function_uses_only_leaf_regs
1791 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
1792 #endif
1793
1794 if (targetm.late_rtl_prologue_epilogue)
1795 {
1796 /* On some machines, the prologue and epilogue code, or parts thereof,
1797 can be represented as RTL. Doing so lets us schedule insns between
1798 it and the rest of the code and also allows delayed branch
1799 scheduling to operate in the epilogue. */
1800 thread_prologue_and_epilogue_insns (get_insns ());
1801 epilogue_completed = 1;
1802 if (optimize)
1803 life_analysis (dump_file, PROP_POSTRELOAD);
1804 }
1805
1806 #ifdef INSN_SCHEDULING
1807 if (optimize > 0 && flag_schedule_insns_after_reload)
1808 rest_of_handle_sched2 ();
1809 #endif
1810
1811 #ifdef STACK_REGS
1812 rest_of_handle_stack_regs ();
1813 #endif
1814
1815 compute_alignments ();
1816
1817 if (flag_var_tracking)
1818 rest_of_handle_variable_tracking ();
1819
1820 /* CFG is no longer maintained up-to-date. */
1821 free_bb_for_insn ();
1822
1823 if (targetm.machine_dependent_reorg != 0)
1824 rest_of_handle_machine_reorg ();
1825
1826 purge_line_number_notes (get_insns ());
1827 cleanup_barriers ();
1828
1829 #ifdef DELAY_SLOTS
1830 if (optimize > 0 && flag_delayed_branch)
1831 rest_of_handle_delay_slots ();
1832 #endif
1833
1834 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
1835 timevar_push (TV_SHORTEN_BRANCH);
1836 split_all_insns_noflow ();
1837 timevar_pop (TV_SHORTEN_BRANCH);
1838 #endif
1839
1840 convert_to_eh_region_ranges ();
1841
1842 /* Shorten branches. */
1843 timevar_push (TV_SHORTEN_BRANCH);
1844 shorten_branches (get_insns ());
1845 timevar_pop (TV_SHORTEN_BRANCH);
1846
1847 set_nothrow_function_flags ();
1848 if (current_function_nothrow)
1849 /* Now we know that this can't throw; set the flag for the benefit
1850 of other functions later in this translation unit. */
1851 TREE_NOTHROW (current_function_decl) = 1;
1852
1853 rest_of_handle_final ();
1854
1855 /* Write DBX symbols if requested. */
1856
1857 /* Note that for those inline functions where we don't initially
1858 know for certain that we will be generating an out-of-line copy,
1859 the first invocation of this routine (rest_of_compilation) will
1860 skip over this code by doing a `goto exit_rest_of_compilation;'.
1861 Later on, wrapup_global_declarations will (indirectly) call
1862 rest_of_compilation again for those inline functions that need
1863 to have out-of-line copies generated. During that call, we
1864 *will* be routed past here. */
1865
1866 timevar_push (TV_SYMOUT);
1867 (*debug_hooks->function_decl) (current_function_decl);
1868 timevar_pop (TV_SYMOUT);
1869
1870 exit_rest_of_compilation:
1871
1872 coverage_end_function ();
1873
1874 /* In case the function was not output,
1875 don't leave any temporary anonymous types
1876 queued up for sdb output. */
1877 #ifdef SDB_DEBUGGING_INFO
1878 if (write_symbols == SDB_DEBUG)
1879 sdbout_types (NULL_TREE);
1880 #endif
1881
1882 reload_completed = 0;
1883 epilogue_completed = 0;
1884 flow2_completed = 0;
1885 no_new_pseudos = 0;
1886
1887 timevar_push (TV_FINAL);
1888
1889 /* Clear out the insn_length contents now that they are no
1890 longer valid. */
1891 init_insn_lengths ();
1892
1893 /* Show no temporary slots allocated. */
1894 init_temp_slots ();
1895
1896 free_basic_block_vars ();
1897 free_bb_for_insn ();
1898
1899 timevar_pop (TV_FINAL);
1900
1901 if (targetm.binds_local_p (current_function_decl))
1902 {
1903 int pref = cfun->preferred_stack_boundary;
1904 if (cfun->recursive_call_emit
1905 && cfun->stack_alignment_needed > cfun->preferred_stack_boundary)
1906 pref = cfun->stack_alignment_needed;
1907 cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
1908 = pref;
1909 }
1910
1911 /* Make sure volatile mem refs aren't considered valid operands for
1912 arithmetic insns. We must call this here if this is a nested inline
1913 function, since the above code leaves us in the init_recog state
1914 (from final.c), and the function context push/pop code does not
1915 save/restore volatile_ok.
1916
1917 ??? Maybe it isn't necessary for expand_start_function to call this
1918 anymore if we do it here? */
1919
1920 init_recog_no_volatile ();
1921
1922 /* We're done with this function. Free up memory if we can. */
1923 free_after_parsing (cfun);
1924 }
1925
1926 void
1927 init_optimization_passes (void)
1928 {
1929 open_dump_file (DFI_cgraph, NULL);
1930 cgraph_dump_file = dump_file;
1931 dump_file = NULL;
1932 }
1933
1934 void
1935 finish_optimization_passes (void)
1936 {
1937 if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
1938 {
1939 timevar_push (TV_DUMP);
1940 open_dump_file (DFI_bp, NULL);
1941
1942 end_branch_prob ();
1943
1944 close_dump_file (DFI_bp, NULL, NULL_RTX);
1945 timevar_pop (TV_DUMP);
1946 }
1947
1948 if (optimize > 0 && open_dump_file (DFI_combine, NULL))
1949 {
1950 timevar_push (TV_DUMP);
1951 dump_combine_total_stats (dump_file);
1952 close_dump_file (DFI_combine, NULL, NULL_RTX);
1953 timevar_pop (TV_DUMP);
1954 }
1955
1956 dump_file = cgraph_dump_file;
1957 cgraph_dump_file = NULL;
1958 close_dump_file (DFI_cgraph, NULL, NULL_RTX);
1959
1960 /* Do whatever is necessary to finish printing the graphs. */
1961 if (graph_dump_format != no_graph)
1962 {
1963 int i;
1964
1965 for (i = 0; i < (int) DFI_MAX; ++i)
1966 if (dump_file_tbl[i].initialized && dump_file_tbl[i].graph_dump_p)
1967 {
1968 char seq[16];
1969 char *suffix;
1970
1971 sprintf (seq, DUMPFILE_FORMAT, i);
1972 suffix = concat (seq, dump_file_tbl[i].extension, NULL);
1973 finish_graph_dump_file (dump_base_name, suffix);
1974 free (suffix);
1975 }
1976 }
1977
1978 }
1979
1980 bool
1981 enable_rtl_dump_file (int letter)
1982 {
1983 bool matched = false;
1984 int i;
1985
1986 if (letter == 'a')
1987 {
1988 for (i = 0; i < (int) DFI_MAX; ++i)
1989 dump_file_tbl[i].enabled = 1;
1990 matched = true;
1991 }
1992 else
1993 {
1994 for (i = 0; i < (int) DFI_MAX; ++i)
1995 if (letter == dump_file_tbl[i].debug_switch)
1996 {
1997 dump_file_tbl[i].enabled = 1;
1998 matched = true;
1999 }
2000 }
2001
2002 return matched;
2003 }
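
/* Usage note (a sketch based on the table of -d letters above): passing
   'G' here enables both the "gcse" and the "bypass" dumps, since those
   two entries deliberately share the same debug_switch, while passing
   'a' enables every dump in the table. */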
2004
2005 struct tree_opt_pass pass_rest_of_compilation =
2006 {
2007 "rest of compilation", /* name */
2008 NULL, /* gate */
2009 rest_of_compilation, /* execute */
2010 NULL, /* sub */
2011 NULL, /* next */
2012 0, /* static_pass_number */
2013 TV_REST_OF_COMPILATION, /* tv_id */
2014 PROP_rtl, /* properties_required */
2015 0, /* properties_provided */
2016 PROP_rtl, /* properties_destroyed */
2017 0, /* todo_flags_start */
2018 TODO_ggc_collect /* todo_flags_finish */
2019 };
2020
2021