]>
Commit | Line | Data |
---|---|---|
a49a878f | 1 | /* Top level of GCC compilers (cc1, cc1plus, etc.) |
2 | Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998, | |
3 | 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc. | |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it under | |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 2, or (at your option) any later | |
10 | version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING. If not, write to the Free | |
19 | Software Foundation, 59 Temple Place - Suite 330, Boston, MA | |
20 | 02111-1307, USA. */ | |
21 | ||
22 | /* This is the top level of cc1/c++. | |
23 | It parses command args, opens files, invokes the various passes | |
24 | in the proper order, and counts the time used by each. | |
25 | Error messages and low-level interface to malloc also handled here. */ | |
26 | ||
27 | #include "config.h" | |
28 | #undef FLOAT /* This is for hpux. They should change hpux. */ | |
29 | #undef FFS /* Some systems define this in param.h. */ | |
30 | #include "system.h" | |
31 | #include "coretypes.h" | |
32 | #include "tm.h" | |
33 | #include <signal.h> | |
34 | ||
35 | #ifdef HAVE_SYS_RESOURCE_H | |
36 | # include <sys/resource.h> | |
37 | #endif | |
38 | ||
39 | #ifdef HAVE_SYS_TIMES_H | |
40 | # include <sys/times.h> | |
41 | #endif | |
42 | ||
43 | #include "line-map.h" | |
44 | #include "input.h" | |
45 | #include "tree.h" | |
46 | #include "rtl.h" | |
47 | #include "tm_p.h" | |
48 | #include "flags.h" | |
49 | #include "insn-attr.h" | |
50 | #include "insn-config.h" | |
51 | #include "insn-flags.h" | |
52 | #include "hard-reg-set.h" | |
53 | #include "recog.h" | |
54 | #include "output.h" | |
55 | #include "except.h" | |
56 | #include "function.h" | |
57 | #include "toplev.h" | |
58 | #include "expr.h" | |
59 | #include "basic-block.h" | |
60 | #include "intl.h" | |
61 | #include "ggc.h" | |
62 | #include "graph.h" | |
63 | #include "loop.h" | |
64 | #include "regs.h" | |
65 | #include "timevar.h" | |
66 | #include "diagnostic.h" | |
67 | #include "params.h" | |
68 | #include "reload.h" | |
69 | #include "dwarf2asm.h" | |
70 | #include "integrate.h" | |
71 | #include "real.h" | |
72 | #include "debug.h" | |
73 | #include "target.h" | |
74 | #include "langhooks.h" | |
75 | #include "cfglayout.h" | |
76 | #include "cfgloop.h" | |
77 | #include "hosthooks.h" | |
78 | #include "cgraph.h" | |
79 | #include "opts.h" | |
80 | #include "coverage.h" | |
81 | #include "value-prof.h" | |
82 | #include "alloc-pool.h" | |
83 | ||
84 | #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO) | |
85 | #include "dwarf2out.h" | |
86 | #endif | |
87 | ||
88 | #if defined(DBX_DEBUGGING_INFO) || defined(XCOFF_DEBUGGING_INFO) | |
89 | #include "dbxout.h" | |
90 | #endif | |
91 | ||
92 | #ifdef SDB_DEBUGGING_INFO | |
93 | #include "sdbout.h" | |
94 | #endif | |
95 | ||
96 | #ifdef XCOFF_DEBUGGING_INFO | |
97 | #include "xcoffout.h" /* Needed for external data | |
98 | declarations for e.g. AIX 4.x. */ | |
99 | #endif | |
100 | ||
101 | #ifndef HAVE_conditional_execution | |
102 | #define HAVE_conditional_execution 0 | |
103 | #endif | |
104 | ||
105 | /* Format to use to print dumpfile index value */ | |
106 | #ifndef DUMPFILE_FORMAT | |
107 | #define DUMPFILE_FORMAT ".%02d." | |
108 | #endif | |
109 | ||
/* Describes a dump file: one entry per RTL dump, controlled by a -d<c>
   command-line letter.  The first three fields are compile-time constant;
   the last two are mutated at run time as dumps are requested and opened.  */

struct dump_file_info
{
  /* The unique extension to apply, e.g. ".jump".  */
  const char *const extension;

  /* The -d<c> character that enables this dump file.  */
  char const debug_switch;

  /* True if there is a corresponding graph dump file.  */
  char const graph_dump_p;

  /* True if the user selected this dump.  */
  char enabled;

  /* True if the files have been initialized (ie truncated).  */
  char initialized;
};
129 | ||
/* Enumerate the extant dump files.  These indices name entries of
   dump_file_tbl below and MUST be kept in the same order as that table
   (open_dump_file/close_dump_file use the index both to select the
   table entry and to build the numbered dump-file name).  */

enum dump_file_index
{
  DFI_cgraph,
  DFI_rtl,
  DFI_sibling,
  DFI_eh,
  DFI_jump,
  DFI_null,
  DFI_cse,
  DFI_addressof,
  DFI_gcse,
  DFI_loop,
  DFI_bypass,
  DFI_cfg,
  DFI_bp,
  DFI_vpt,
  DFI_ce1,
  DFI_tracer,
  DFI_loop2,
  DFI_web,
  DFI_cse2,
  DFI_life,
  DFI_combine,
  DFI_ce2,
  DFI_regmove,
  DFI_sched,
  DFI_lreg,
  DFI_greg,
  DFI_postreload,
  DFI_flow2,
  DFI_peephole2,
  DFI_ce3,
  DFI_rnreg,
  DFI_bbro,
  DFI_branch_target_load,
  DFI_sched2,
  DFI_stack,
  DFI_vartrack,
  DFI_mach,
  DFI_dbr,
  DFI_MAX   /* Number of dump files; not itself a dump.  */
};
174 | ||
/* Describes all the dump files.  Should be kept in order of the
   pass and in sync with dump_file_index above.

   Fields per entry: extension, -d enable letter, graph_dump_p,
   enabled, initialized (the last two always start as 0).

   Remaining -d letters:

	"            e        m   q         "
	"          JK   O Q          WXY    "
*/

static struct dump_file_info dump_file_tbl[DFI_MAX] =
{
  { "cgraph",     'U', 0, 0, 0 },
  { "rtl",        'r', 0, 0, 0 },
  { "sibling",    'i', 0, 0, 0 },
  { "eh",         'h', 0, 0, 0 },
  { "jump",       'j', 0, 0, 0 },
  { "null",       'u', 0, 0, 0 },
  { "cse",        's', 0, 0, 0 },
  { "addressof",  'F', 0, 0, 0 },
  { "gcse",       'G', 1, 0, 0 },
  { "loop",       'L', 1, 0, 0 },
  { "bypass",     'G', 1, 0, 0 }, /* Yes, duplicate enable switch.  */
  { "cfg",        'f', 1, 0, 0 },
  { "bp",         'b', 1, 0, 0 },
  { "vpt",        'V', 1, 0, 0 },
  { "ce1",        'C', 1, 0, 0 },
  { "tracer",     'T', 1, 0, 0 },
  { "loop2",      'L', 1, 0, 0 },
  { "web",        'Z', 0, 0, 0 },
  { "cse2",       't', 1, 0, 0 },
  { "life",       'f', 1, 0, 0 }, /* Yes, duplicate enable switch.  */
  { "combine",    'c', 1, 0, 0 },
  { "ce2",        'C', 1, 0, 0 },
  { "regmove",    'N', 1, 0, 0 },
  { "sched",      'S', 1, 0, 0 },
  { "lreg",       'l', 1, 0, 0 },
  { "greg",       'g', 1, 0, 0 },
  { "postreload", 'o', 1, 0, 0 },
  { "flow2",      'w', 1, 0, 0 },
  { "peephole2",  'z', 1, 0, 0 },
  { "ce3",        'E', 1, 0, 0 },
  { "rnreg",      'n', 1, 0, 0 },
  { "bbro",       'B', 1, 0, 0 },
  { "btl",        'd', 1, 0, 0 }, /* Yes, duplicate enable switch.  */
  { "sched2",     'R', 1, 0, 0 },
  { "stack",      'k', 1, 0, 0 },
  { "vartrack",   'V', 1, 0, 0 }, /* Yes, duplicate enable switch.  */
  { "mach",       'M', 1, 0, 0 },
  { "dbr",        'd', 0, 0, 0 },
};
225 | ||
226 | /* Routine to open a dump file. Return true if the dump file is enabled. */ | |
227 | ||
228 | static int | |
229 | open_dump_file (enum dump_file_index index, tree decl) | |
230 | { | |
231 | char *dump_name; | |
232 | const char *open_arg; | |
233 | char seq[16]; | |
234 | ||
235 | if (! dump_file_tbl[index].enabled) | |
236 | return 0; | |
237 | ||
238 | timevar_push (TV_DUMP); | |
239 | if (dump_file != NULL) | |
240 | fclose (dump_file); | |
241 | ||
242 | sprintf (seq, DUMPFILE_FORMAT, index); | |
243 | ||
244 | if (! dump_file_tbl[index].initialized) | |
245 | { | |
246 | /* If we've not initialized the files, do so now. */ | |
247 | if (graph_dump_format != no_graph | |
248 | && dump_file_tbl[index].graph_dump_p) | |
249 | { | |
250 | dump_name = concat (seq, dump_file_tbl[index].extension, NULL); | |
251 | clean_graph_dump_file (dump_base_name, dump_name); | |
252 | free (dump_name); | |
253 | } | |
254 | dump_file_tbl[index].initialized = 1; | |
255 | open_arg = "w"; | |
256 | } | |
257 | else | |
258 | open_arg = "a"; | |
259 | ||
260 | dump_name = concat (dump_base_name, seq, | |
261 | dump_file_tbl[index].extension, NULL); | |
262 | ||
263 | dump_file = fopen (dump_name, open_arg); | |
264 | if (dump_file == NULL) | |
265 | fatal_error ("can't open %s: %m", dump_name); | |
266 | ||
267 | free (dump_name); | |
268 | ||
269 | if (decl) | |
270 | fprintf (dump_file, "\n;; Function %s%s\n\n", | |
271 | (*lang_hooks.decl_printable_name) (decl, 2), | |
272 | cfun->function_frequency == FUNCTION_FREQUENCY_HOT | |
273 | ? " (hot)" | |
274 | : cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED | |
275 | ? " (unlikely executed)" | |
276 | : ""); | |
277 | ||
278 | timevar_pop (TV_DUMP); | |
279 | return 1; | |
280 | } | |
281 | ||
282 | /* Routine to close a dump file. */ | |
283 | ||
284 | static void | |
285 | close_dump_file (enum dump_file_index index, | |
286 | void (*func) (FILE *, rtx), | |
287 | rtx insns) | |
288 | { | |
289 | if (! dump_file) | |
290 | return; | |
291 | ||
292 | timevar_push (TV_DUMP); | |
293 | if (insns | |
294 | && graph_dump_format != no_graph | |
295 | && dump_file_tbl[index].graph_dump_p) | |
296 | { | |
297 | char seq[16]; | |
298 | char *suffix; | |
299 | ||
300 | sprintf (seq, DUMPFILE_FORMAT, index); | |
301 | suffix = concat (seq, dump_file_tbl[index].extension, NULL); | |
302 | print_rtl_graph_with_bb (dump_base_name, suffix, insns); | |
303 | free (suffix); | |
304 | } | |
305 | ||
306 | if (func && insns) | |
307 | func (dump_file, insns); | |
308 | ||
309 | fflush (dump_file); | |
310 | fclose (dump_file); | |
311 | ||
312 | dump_file = NULL; | |
313 | timevar_pop (TV_DUMP); | |
314 | } | |
315 | ||
/* This is called from various places for FUNCTION_DECL, VAR_DECL,
   and TYPE_DECL nodes.

   This does nothing for local (non-static) variables, unless the
   variable is a register variable with an ASMSPEC.  In that case, or
   if the variable is not an automatic, it sets up the RTL and
   outputs any assembler code (label definition, storage allocation
   and initialization).

   DECL is the declaration.  If ASMSPEC is nonzero, it specifies
   the assembler symbol name to be used.  TOP_LEVEL is nonzero
   if this declaration is not within a function.  AT_END is nonzero
   when called at the end of compilation, at which point tentative
   definitions are finally emitted.  */

void
rest_of_decl_compilation (tree decl,
			  const char *asmspec,
			  int top_level,
			  int at_end)
{
  /* We deferred calling assemble_alias so that we could collect
     other attributes such as visibility.  Emit the alias now.  */
  {
    tree alias;
    alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
    if (alias)
      {
	/* Attribute value is a string naming the alias target; intern
	   it as an identifier for assemble_alias.  */
	alias = TREE_VALUE (TREE_VALUE (alias));
	alias = get_identifier (TREE_STRING_POINTER (alias));
	assemble_alias (decl, alias);
      }
  }

  /* Forward declarations for nested functions are not "external",
     but we need to treat them as if they were.  */
  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
      || TREE_CODE (decl) == FUNCTION_DECL)
    {
      timevar_push (TV_VARCONST);

      if (asmspec)
	make_decl_rtl (decl, asmspec);

      /* Don't output anything when a tentative file-scope definition
	 is seen.  But at end of compilation, do output code for them.

	 We do output all variables when unit-at-a-time is active and rely on
	 callgraph code to defer them except for forward declarations
	 (see gcc.c-torture/compile/920624-1.c) */
      if ((at_end
	   || !DECL_DEFER_OUTPUT (decl)
	   || (flag_unit_at_a_time && DECL_INITIAL (decl)))
	  && !DECL_EXTERNAL (decl))
	{
	  /* Variables are routed through the callgraph's varpool when
	     unit-at-a-time is on and the global info is not yet final;
	     otherwise they are assembled immediately.  */
	  if (flag_unit_at_a_time && !cgraph_global_info_ready
	      && TREE_CODE (decl) != FUNCTION_DECL && top_level)
	    cgraph_varpool_finalize_decl (decl);
	  else
	    assemble_variable (decl, top_level, at_end, 0);
	}

#ifdef ASM_FINISH_DECLARE_OBJECT
      if (decl == last_assemble_variable_decl)
	{
	  ASM_FINISH_DECLARE_OBJECT (asm_out_file, decl,
				     top_level, at_end);
	}
#endif

      timevar_pop (TV_VARCONST);
    }
  else if (DECL_REGISTER (decl) && asmspec != 0)
    {
      /* Local register variable with an explicit asm register name.  */
      if (decode_reg_name (asmspec) >= 0)
	{
	  SET_DECL_RTL (decl, NULL_RTX);
	  make_decl_rtl (decl, asmspec);
	}
      else
	{
	  error ("invalid register name `%s' for register variable", asmspec);
	  /* Recover by treating it as an ordinary (stack) variable.  */
	  DECL_REGISTER (decl) = 0;
	  if (!top_level)
	    expand_decl (decl);
	}
    }
  else if (TREE_CODE (decl) == TYPE_DECL)
    {
      /* Only debug information is emitted for type declarations.  */
      timevar_push (TV_SYMOUT);
      debug_hooks->type_decl (decl, !top_level);
      timevar_pop (TV_SYMOUT);
    }
}
408 | ||
409 | /* Called after finishing a record, union or enumeral type. */ | |
410 | ||
411 | void | |
412 | rest_of_type_compilation (tree type, int toplev) | |
413 | { | |
414 | /* Avoid confusing the debug information machinery when there are | |
415 | errors. */ | |
416 | if (errorcount != 0 || sorrycount != 0) | |
417 | return; | |
418 | ||
419 | timevar_push (TV_SYMOUT); | |
420 | debug_hooks->type_decl (TYPE_STUB_DECL (type), !toplev); | |
421 | timevar_pop (TV_SYMOUT); | |
422 | } | |
423 | ||
/* Turn the RTL into assembly.  DECL is the FUNCTION_DECL being
   compiled; INSNS is its insn chain.  Writes the function's assembly
   to asm_out_file and releases per-function flow/regset memory.  */
static void
rest_of_handle_final (tree decl, rtx insns)
{
  timevar_push (TV_FINAL);
  {
    rtx x;
    const char *fnname;

    /* Get the function's name, as described by its RTL.  This may be
       different from the DECL_NAME name used in the source file.  */

    x = DECL_RTL (decl);
    if (GET_CODE (x) != MEM)
      abort ();
    x = XEXP (x, 0);
    if (GET_CODE (x) != SYMBOL_REF)
      abort ();
    fnname = XSTR (x, 0);

    assemble_start_function (decl, fnname);
    final_start_function (insns, asm_out_file, optimize);
    final (insns, asm_out_file, optimize, 0);
    final_end_function ();

#ifdef IA64_UNWIND_INFO
    /* ??? The IA-64 ".handlerdata" directive must be issued before
       the ".endp" directive that closes the procedure descriptor.  */
    output_function_exception_table ();
#endif

    assemble_end_function (decl, fnname);

#ifndef IA64_UNWIND_INFO
    /* Otherwise, it feels unclean to switch sections in the middle.  */
    output_function_exception_table ();
#endif

    if (! quiet_flag)
      fflush (asm_out_file);

    /* Release all memory allocated by flow.  */
    free_basic_block_vars (0);

    /* Release all memory held by regsets now.  */
    regset_release_memory ();
  }
  timevar_pop (TV_FINAL);

  ggc_collect ();
}
475 | ||
#ifdef DELAY_SLOTS
/* Run delay slot optimization on INSNS, dumping the result into the
   "dbr" dump file when enabled.  DECL is the function being compiled
   (used only to label the dump).  */
static void
rest_of_handle_delay_slots (tree decl, rtx insns)
{
  timevar_push (TV_DBR_SCHED);
  open_dump_file (DFI_dbr, decl);

  dbr_schedule (insns, dump_file);

  close_dump_file (DFI_dbr, print_rtl, insns);
  timevar_pop (TV_DBR_SCHED);

  ggc_collect ();
}
#endif
492 | ||
#ifdef STACK_REGS
/* Convert register usage from flat register file usage to a stack
   register file.  DECL is the function being compiled, INSNS its
   insn chain.  If reg_to_stack changed anything and we are
   optimizing, the CFG is cleaned up and blocks possibly reordered.  */
static void
rest_of_handle_stack_regs (tree decl, rtx insns)
{
#if defined (HAVE_ATTR_length)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
#ifdef INSN_SCHEDULING
  if (optimize && !flag_schedule_insns_after_reload)
#else
  if (optimize)
#endif
    {
      timevar_push (TV_SHORTEN_BRANCH);
      split_all_insns (1);
      timevar_pop (TV_SHORTEN_BRANCH);
    }
#endif

  timevar_push (TV_REG_STACK);
  open_dump_file (DFI_stack, decl);

  if (reg_to_stack (insns, dump_file) && optimize)
    {
      /* Only reorder blocks if the post-regstack cleanup actually
	 changed something and the user asked for reordering.  */
      if (cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK
		       | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0))
	  && flag_reorder_blocks)
	{
	  reorder_basic_blocks ();
	  cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK);
	}
    }

  close_dump_file (DFI_stack, print_rtl_with_bb, insns);
  timevar_pop (TV_REG_STACK);

  ggc_collect ();
}
#endif
536 | ||
/* Track the variables, i.e. compute where the variable is stored at
   each position in the function.  DECL is the function being
   compiled (used to label the "vartrack" dump), INSNS its insn
   chain.  */
static void
rest_of_handle_variable_tracking (tree decl, rtx insns)
{
  timevar_push (TV_VAR_TRACKING);
  open_dump_file (DFI_vartrack, decl);

  variable_tracking_main ();

  close_dump_file (DFI_vartrack, print_rtl_with_bb, insns);
  timevar_pop (TV_VAR_TRACKING);
}
549 | ||
/* Machine dependent reorg pass: invoke the target's
   machine_dependent_reorg hook on the current function.  DECL labels
   the "mach" dump; INSNS is the insn chain printed into it.  */
static void
rest_of_handle_machine_reorg (tree decl, rtx insns)
{
  timevar_push (TV_MACH_DEP);
  open_dump_file (DFI_mach, decl);

  (*targetm.machine_dependent_reorg) ();

  close_dump_file (DFI_mach, print_rtl, insns);
  timevar_pop (TV_MACH_DEP);

  ggc_collect ();
}
564 | ||
565 | ||
/* Run new register allocator.  Return TRUE if we must exit
   rest_of_compilation upon return (i.e. reload failed).

   NOTE(review): this function pops TV_LOCAL_ALLOC without pushing it —
   presumably the caller pushed it (and opened DFI_lreg); confirm
   against rest_of_compilation.  */
static bool
rest_of_handle_new_regalloc (tree decl, rtx insns)
{
  int failure;

  delete_trivially_dead_insns (insns, max_reg_num ());
  reg_alloc ();

  timevar_pop (TV_LOCAL_ALLOC);
  if (dump_file_tbl[DFI_lreg].enabled)
    {
      timevar_push (TV_DUMP);

      close_dump_file (DFI_lreg, NULL, NULL);
      timevar_pop (TV_DUMP);
    }

  /* XXX clean up the whole mess to bring live info in shape again.  */
  timevar_push (TV_GLOBAL_ALLOC);
  open_dump_file (DFI_greg, decl);

  build_insn_chain (insns);
  failure = reload (insns, 0);

  timevar_pop (TV_GLOBAL_ALLOC);

  if (dump_file_tbl[DFI_greg].enabled)
    {
      timevar_push (TV_DUMP);

      dump_global_regs (dump_file);

      close_dump_file (DFI_greg, print_rtl_with_bb, insns);
      timevar_pop (TV_DUMP);
    }

  if (failure)
    return true;

  /* Register allocation is done; later passes may assume this.  */
  reload_completed = 1;

  return false;
}
611 | ||
/* Run old register allocator.  Return TRUE if we must exit
   rest_of_compilation upon return (nonzero reload/global_alloc
   failure).

   NOTE(review): pops TV_LOCAL_ALLOC without pushing it — presumably
   pushed by the caller, matching rest_of_handle_new_regalloc; confirm
   against rest_of_compilation.  */
static bool
rest_of_handle_old_regalloc (tree decl, rtx insns)
{
  int failure;
  int rebuild_notes;

  /* Allocate the reg_renumber array.  */
  allocate_reg_info (max_regno, FALSE, TRUE);

  /* And the reg_equiv_memory_loc array.  */
  reg_equiv_memory_loc = xcalloc (max_regno, sizeof (rtx));

  allocate_initial_values (reg_equiv_memory_loc);

  regclass (insns, max_reg_num (), dump_file);
  rebuild_notes = local_alloc ();

  timevar_pop (TV_LOCAL_ALLOC);

  /* Local allocation may have turned an indirect jump into a direct
     jump.  If so, we must rebuild the JUMP_LABEL fields of jumping
     instructions.  */
  if (rebuild_notes)
    {
      timevar_push (TV_JUMP);

      rebuild_jump_labels (insns);
      purge_all_dead_edges (0);

      timevar_pop (TV_JUMP);
    }

  if (dump_file_tbl[DFI_lreg].enabled)
    {
      timevar_push (TV_DUMP);

      dump_flow_info (dump_file);
      dump_local_alloc (dump_file);

      close_dump_file (DFI_lreg, print_rtl_with_bb, insns);
      timevar_pop (TV_DUMP);
    }

  ggc_collect ();

  timevar_push (TV_GLOBAL_ALLOC);
  open_dump_file (DFI_greg, decl);

  /* If optimizing, allocate remaining pseudo-regs.  Do the reload
     pass fixing up any insns that are invalid.  */

  if (optimize)
    failure = global_alloc (dump_file);
  else
    {
      /* Not optimizing: skip global allocation and go straight to
	 reload.  */
      build_insn_chain (insns);
      failure = reload (insns, 0);
    }

  timevar_pop (TV_GLOBAL_ALLOC);

  if (dump_file_tbl[DFI_greg].enabled)
    {
      timevar_push (TV_DUMP);

      dump_global_regs (dump_file);

      close_dump_file (DFI_greg, print_rtl_with_bb, insns);
      timevar_pop (TV_DUMP);
    }

  return failure;
}
687 | ||
/* Run the regrename and cprop passes, each gated by its own flag.
   Renaming runs first so copy-propagation sees the renamed
   registers.  DECL labels the "rnreg" dump; INSNS is printed into
   it.  */
static void
rest_of_handle_regrename (tree decl, rtx insns)
{
  timevar_push (TV_RENAME_REGISTERS);
  open_dump_file (DFI_rnreg, decl);

  if (flag_rename_registers)
    regrename_optimize ();
  if (flag_cprop_registers)
    copyprop_hardreg_forward ();

  close_dump_file (DFI_rnreg, print_rtl_with_bb, insns);
  timevar_pop (TV_RENAME_REGISTERS);
}
703 | ||
/* Reorder basic blocks.  Runs a final CFG cleanup, optionally the
   tracer and block reordering, then fixes up life information if
   anything changed.  DECL labels the "bbro" dump.  */
static void
rest_of_handle_reorder_blocks (tree decl, rtx insns)
{
  bool changed;
  open_dump_file (DFI_bbro, decl);

  /* Last attempt to optimize CFG, as scheduling, peepholing and insn
     splitting possibly introduced more crossjumping opportunities.  */
  changed = cleanup_cfg (CLEANUP_EXPENSIVE
			 | (!HAVE_conditional_execution
			    ? CLEANUP_UPDATE_LIFE : 0));

  if (flag_sched2_use_traces && flag_schedule_insns_after_reload)
    tracer ();
  if (flag_reorder_blocks)
    reorder_basic_blocks ();
  /* Clean up again if either the tracer or the reordering ran.  */
  if (flag_reorder_blocks
      || (flag_sched2_use_traces && flag_schedule_insns_after_reload))
    changed |= cleanup_cfg (CLEANUP_EXPENSIVE
			    | (!HAVE_conditional_execution
			       ? CLEANUP_UPDATE_LIFE : 0));

  /* On conditional execution targets we cannot update the life cheaply,
     so we defer the updating to after both cleanups.  This may lose some
     cases but should not be terribly bad.  */
  if (changed && HAVE_conditional_execution)
    update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
		      PROP_DEATH_NOTES);
  close_dump_file (DFI_bbro, print_rtl_with_bb, insns);
}
735 | ||
#ifdef INSN_SCHEDULING
/* Run instruction scheduler (first, pre-reload scheduling pass).
   Only active when optimizing and -fschedule-insns is given.  DECL
   labels the "sched" dump; INSNS is printed into it.  */
static void
rest_of_handle_sched (tree decl, rtx insns)
{
  timevar_push (TV_SCHED);

  /* Print function header into sched dump now
     because doing the sched analysis makes some of the dump.  */
  if (optimize > 0 && flag_schedule_insns)
    {
      open_dump_file (DFI_sched, decl);

      /* Do control and data sched analysis,
	 and write some of the results to dump file.  */

      schedule_insns (dump_file);

      close_dump_file (DFI_sched, print_rtl_with_bb, insns);
    }
  timevar_pop (TV_SCHED);

  ggc_collect ();
}

/* Run second scheduling pass after reload.  Uses extended-basic-block
   or trace scheduling when the corresponding flags are set, plain
   block scheduling otherwise.  DECL labels the "sched2" dump.  */
static void
rest_of_handle_sched2 (tree decl, rtx insns)
{
  timevar_push (TV_SCHED2);
  open_dump_file (DFI_sched2, decl);

  /* Do control and data sched analysis again,
     and write some more of the results to dump file.  */

  split_all_insns (1);

  if (flag_sched2_use_superblocks || flag_sched2_use_traces)
    {
      schedule_ebbs (dump_file);
      /* No liveness updating code yet, but it should be easy to do.
	 reg-stack recompute the liveness when needed for now.  */
      count_or_remove_death_notes (NULL, 1);
      cleanup_cfg (CLEANUP_EXPENSIVE);
    }
  else
    schedule_insns (dump_file);

  close_dump_file (DFI_sched2, print_rtl_with_bb, insns);
  timevar_pop (TV_SCHED2);

  ggc_collect ();
}
#endif
790 | ||
/* Register allocation pre-pass, to reduce number of moves necessary
   for two-address machines.  Runs regmove_optimize and then cleans up
   the CFG.  DECL labels the "regmove" dump; INSNS is the insn
   chain.  */
static void
rest_of_handle_regmove (tree decl, rtx insns)
{
  timevar_push (TV_REGMOVE);
  open_dump_file (DFI_regmove, decl);

  regmove_optimize (insns, max_reg_num (), dump_file);

  cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
  close_dump_file (DFI_regmove, print_rtl_with_bb, insns);
  timevar_pop (TV_REGMOVE);

  ggc_collect ();
}
807 | ||
/* Run tracer, then clean up the CFG and rescan registers since the
   tracer may have duplicated code.  DECL labels the "tracer" dump;
   INSNS is the insn chain before tracing (the dump prints the
   post-tracer chain via get_insns).  */
static void
rest_of_handle_tracer (tree decl, rtx insns)
{
  open_dump_file (DFI_tracer, decl);
  if (dump_file)
    dump_flow_info (dump_file);
  tracer ();
  cleanup_cfg (CLEANUP_EXPENSIVE);
  reg_scan (insns, max_reg_num (), 0);
  close_dump_file (DFI_tracer, print_rtl_with_bb, get_insns ());
}
820 | ||
/* If-conversion and CFG cleanup.  Runs the first if-conversion pass
   (when -fif-conversion) followed by an unconditional CFG cleanup and
   register rescan.  DECL labels the "ce1" dump.  */
static void
rest_of_handle_if_conversion (tree decl, rtx insns)
{
  open_dump_file (DFI_ce1, decl);
  if (flag_if_conversion)
    {
      timevar_push (TV_IFCVT);
      if (dump_file)
	dump_flow_info (dump_file);
      cleanup_cfg (CLEANUP_EXPENSIVE);
      reg_scan (insns, max_reg_num (), 0);
      if_convert (0);
      timevar_pop (TV_IFCVT);
    }
  /* Clean up even when if-conversion itself was skipped.  */
  timevar_push (TV_JUMP);
  cleanup_cfg (CLEANUP_EXPENSIVE);
  reg_scan (insns, max_reg_num (), 0);
  timevar_pop (TV_JUMP);
  close_dump_file (DFI_ce1, print_rtl_with_bb, get_insns ());
}
842 | ||
/* Rerun if-conversion, as combine may have simplified things enough
   to now meet sequence length restrictions.  DECL labels the "ce2"
   dump; INSNS is the insn chain.  */
static void
rest_of_handle_if_after_combine (tree decl, rtx insns)
{
  timevar_push (TV_IFCVT);
  open_dump_file (DFI_ce2, decl);

  /* Temporarily allow new pseudos while if-converting; restore the
     restriction immediately afterwards.  */
  no_new_pseudos = 0;
  if_convert (1);
  no_new_pseudos = 1;

  close_dump_file (DFI_ce2, print_rtl_with_bb, insns);
  timevar_pop (TV_IFCVT);
}
858 | ||
/* Run the web pass (web_main), then delete trivially dead insns,
   clean up the CFG, and rescan registers so later passes see the
   updated pseudo usage.  DECL labels the "web" dump; INSNS is the
   insn chain.  */
static void
rest_of_handle_web (tree decl, rtx insns)
{
  open_dump_file (DFI_web, decl);
  timevar_push (TV_WEB);
  web_main ();
  delete_trivially_dead_insns (insns, max_reg_num ());
  cleanup_cfg (CLEANUP_EXPENSIVE);

  timevar_pop (TV_WEB);
  close_dump_file (DFI_web, print_rtl_with_bb, insns);
  reg_scan (get_insns (), max_reg_num (), 0);
}
872 | ||
/* Do branch profiling and static profile estimation passes.
   Instruments or reads arc profiles when the relevant flags are set,
   then discovers loops and (if -fguess-branch-probability) estimates
   branch probabilities heuristically.  DECL labels the "bp" dump.  */
static void
rest_of_handle_branch_prob (tree decl, rtx insns)
{
  struct loops loops;

  timevar_push (TV_BRANCH_PROB);
  open_dump_file (DFI_bp, decl);

  if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
    branch_prob ();

  /* Discover and record the loop depth at the head of each basic
     block.  The loop infrastructure does the real job for us.  */
  flow_loops_find (&loops, LOOP_TREE);

  if (dump_file)
    flow_loops_dump (&loops, dump_file, NULL, 0);

  /* Estimate using heuristics if no profiling info is available.  */
  if (flag_guess_branch_prob)
    estimate_probability (&loops);

  flow_loops_free (&loops);
  free_dominance_info (CDI_DOMINATORS);
  close_dump_file (DFI_bp, print_rtl_with_bb, insns);
  timevar_pop (TV_BRANCH_PROB);
}
901 | ||
/* Do optimizations based on expression value profiles.  The CFG is
   cleaned up only when the transformations actually changed
   something.  DECL labels the "vpt" dump; INSNS is the insn chain.  */
static void
rest_of_handle_value_profile_transformations (tree decl, rtx insns)
{
  open_dump_file (DFI_vpt, decl);
  timevar_push (TV_VPT);

  if (value_profile_transformations ())
    cleanup_cfg (CLEANUP_EXPENSIVE);

  timevar_pop (TV_VPT);
  close_dump_file (DFI_vpt, print_rtl_with_bb, insns);
}
915 | ||
/* Do control and data flow analysis; write some of the results to the
   dump file.  When optimizing, also cleans up the CFG (with jump
   threading if requested) and marks constant functions.  DECL labels
   the "cfg" dump; INSNS is the insn chain.  */
static void
rest_of_handle_cfg (tree decl, rtx insns)
{
  open_dump_file (DFI_cfg, decl);
  if (dump_file)
    dump_flow_info (dump_file);
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE
		 | (flag_thread_jumps ? CLEANUP_THREADING : 0));

  /* It may make more sense to mark constant functions after dead code is
     eliminated by life_analysis, but we need to do it early, as -fprofile-arcs
     may insert code making function non-constant, but we still must consider
     it as constant, otherwise -fbranch-probabilities will not read data back.

     life_analysis rarely eliminates modification of external memory.  */
  if (optimize)
    {
      /* Alias analysis depends on this information and mark_constant_function
	 depends on alias analysis.  */
      reg_scan (insns, max_reg_num (), 1);
      mark_constant_function ();
    }

  close_dump_file (DFI_cfg, print_rtl_with_bb, insns);
}
945 | ||
/* Purge addressofs: remove ADDRESSOF expressions from INSNS, then
   (when optimizing) delete any basic blocks made unreachable by dead
   edge removal, and rescan registers.  DECL labels the "addressof"
   dump.  */
static void
rest_of_handle_addressof (tree decl, rtx insns)
{
  open_dump_file (DFI_addressof, decl);

  purge_addressof (insns);
  if (optimize && purge_all_dead_edges (0))
    delete_unreachable_blocks ();
  reg_scan (insns, max_reg_num (), 1);

  close_dump_file (DFI_addressof, print_rtl, insns);
}
959 | ||
/* We may have potential sibling or tail recursion sites.  Select one
   (of possibly multiple) methods of performing the call.

   NOTE(review): closes the DFI_sibling dump without opening it here —
   presumably the caller did the open_dump_file; confirm against
   rest_of_compilation.  */
static void
rest_of_handle_sibling_calls (rtx insns)
{
  rtx insn;
  optimize_sibling_and_tail_recursive_calls ();

  /* Recompute the CFG as sibling optimization clobbers it randomly.  */
  free_bb_for_insn ();
  find_exception_handler_labels ();
  rebuild_jump_labels (insns);
  find_basic_blocks (insns, max_reg_num (), dump_file);

  /* There is pass ordering problem - we must lower NOTE_INSN_PREDICTION
     notes before simplifying cfg and we must do lowering after sibcall
     that unhides parts of RTL chain and cleans up the CFG.

     Until sibcall is replaced by tree-level optimizer, lets just
     sweep away the NOTE_INSN_PREDICTION notes that leaked out.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE
	&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION)
      delete_insn (insn);

  close_dump_file (DFI_sibling, print_rtl, get_insns ());
}
987 | ||
/* Perform jump bypassing and control flow optimizations.

   DECL labels the dump; INSNS is the insn chain.  Runs a CFG cleanup
   and register scan before bypassing, and repeats the cleanup if
   bypass_jumps actually changed anything.  */
static void
rest_of_handle_jump_bypass (tree decl, rtx insns)
{
  timevar_push (TV_BYPASS);
  open_dump_file (DFI_bypass, decl);

  cleanup_cfg (CLEANUP_EXPENSIVE);
  /* bypass_jumps relies on up-to-date register usage information.  */
  reg_scan (insns, max_reg_num (), 1);

  if (bypass_jumps (dump_file))
    {
      /* Bypassing may have redirected jumps; rebuild labels, re-clean
         the CFG, and sweep any insns made trivially dead.  */
      rebuild_jump_labels (insns);
      cleanup_cfg (CLEANUP_EXPENSIVE);
      delete_trivially_dead_insns (insns, max_reg_num ());
    }

  close_dump_file (DFI_bypass, print_rtl_with_bb, insns);
  timevar_pop (TV_BYPASS);

  ggc_collect ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
1014 | ||
/* Handle inlining of functions in rest_of_compilation.  Return TRUE
   if we must exit rest_of_compilation upon return.

   DECL is the function being compiled.  Side effects include setting
   DECL_INLINE, DECL_DEFER_OUTPUT and DECL_INITIAL on DECL, emitting
   the DFI_rtl dump, and converting EH region notes.  */
static bool
rest_of_handle_inlining (tree decl)
{
  rtx insns;
  int inlinable = 0;
  tree parent;
  const char *lose;           /* Reason inlining is impossible, or NULL.  */

  /* If we are reconsidering an inline function at the end of
     compilation, skip the stuff for making it inline.  */
  if (cfun->rtl_inline_init)
    return 0;
  cfun->rtl_inline_init = 1;

  /* If this is nested inside an inlined external function, pretend
     it was only declared.  Since we cannot inline such functions,
     generating code for this one is not only not necessary but will
     confuse some debugging output writers.  */
  for (parent = DECL_CONTEXT (current_function_decl);
       parent != NULL_TREE;
       parent = get_containing_scope (parent))
    if (TREE_CODE (parent) == FUNCTION_DECL
        && DECL_INLINE (parent) && DECL_EXTERNAL (parent))
      {
        DECL_INITIAL (decl) = 0;
        return true;
      }
    else if (TYPE_P (parent))
      /* A function in a local class should be treated normally.  */
      break;

  /* If requested, consider whether to make this function inline.  */
  if ((DECL_INLINE (decl) && !flag_no_inline)
      || flag_inline_functions)
    {
      timevar_push (TV_INTEGRATION);
      lose = function_cannot_inline_p (decl);
      timevar_pop (TV_INTEGRATION);
      if (lose || ! optimize)
        {
          if (warn_inline && lose && DECL_INLINE (decl))
            {
              /* Build a "%J<reason>" format so the warning is tagged
                 with the function's location.  */
              char *msg = concat ("%J", lose, NULL);
              warning (msg, decl);
              free (msg);
            }
          DECL_ABSTRACT_ORIGIN (decl) = 0;
          /* Don't really compile an extern inline function.
             If we can't make it inline, pretend
             it was only declared.  */
          if (DECL_EXTERNAL (decl))
            {
              DECL_INITIAL (decl) = 0;
              return true;
            }
        }
      else
        inlinable = DECL_INLINE (decl) = 1;
    }

  insns = get_insns ();

  /* Dump the rtl code if we are dumping rtl.  */

  if (open_dump_file (DFI_rtl, decl))
    {
      if (DECL_STRUCT_FUNCTION (decl)
          && DECL_STRUCT_FUNCTION (decl)->saved_for_inline)
        fprintf (dump_file, ";; (integrable)\n\n");
      close_dump_file (DFI_rtl, print_rtl, insns);
    }

  /* Convert from NOTE_INSN_EH_REGION style notes, and do other
     sorts of eh initialization.  Delay this until after the
     initial rtl dump so that we can see the original nesting.  */
  convert_from_eh_region_ranges ();

  /* If function is inline, and we don't yet know whether to
     compile it by itself, defer decision till end of compilation.
     wrapup_global_declarations will (indirectly) call
     rest_of_compilation again for those functions that need to
     be output.  Also defer those functions that we are supposed
     to defer.  */

  if (inlinable
      || (DECL_INLINE (decl)
          /* Egad.  This RTL deferral test conflicts with Fortran assumptions
             for unreferenced symbols.  See g77.f-torture/execute/980520-1.f.
             But removing this line from the check breaks all languages that
             use the call graph to output symbols.  This hard-coded check is
             the least invasive work-around.  */
          && (flag_inline_functions
              || strcmp (lang_hooks.name, "GNU F77") == 0)
          && ((! TREE_PUBLIC (decl) && ! TREE_ADDRESSABLE (decl)
               && ! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
               && ! flag_keep_inline_functions)
              || DECL_EXTERNAL (decl))))
    DECL_DEFER_OUTPUT (decl) = 1;

  if (DECL_INLINE (decl))
    /* DWARF wants separate debugging info for abstract and
       concrete instances of all inline functions, including those
       declared inline but not inlined, and those inlined even
       though they weren't declared inline.  Conveniently, that's
       what DECL_INLINE means at this point.  */
    (*debug_hooks->deferred_inline_function) (decl);

  if (DECL_DEFER_OUTPUT (decl))
    {
      /* If -Wreturn-type, we have to do a bit of compilation.  We just
         want to call cleanup the cfg to figure out whether or not we can
         fall off the end of the function; we do the minimum amount of
         work necessary to make that safe.  */
      if (warn_return_type)
        {
          /* Temporarily force optimize to 0 so cleanup_cfg does only
             the cheap work; restored below.  */
          int saved_optimize = optimize;

          optimize = 0;
          rebuild_jump_labels (insns);
          find_exception_handler_labels ();
          find_basic_blocks (insns, max_reg_num (), dump_file);
          cleanup_cfg (CLEANUP_PRE_SIBCALL | CLEANUP_PRE_LOOP);
          optimize = saved_optimize;

          /* CFG is no longer maintained up-to-date.  */
          free_bb_for_insn ();
        }

      set_nothrow_function_flags ();
      if (current_function_nothrow)
        /* Now we know that this can't throw; set the flag for the benefit
           of other functions later in this translation unit.  */
        TREE_NOTHROW (current_function_decl) = 1;

      timevar_push (TV_INTEGRATION);
      save_for_inline (decl);
      timevar_pop (TV_INTEGRATION);
      DECL_STRUCT_FUNCTION (decl)->inlinable = inlinable;
      return true;
    }

  /* If specified extern inline but we aren't inlining it, we are
     done.  This goes for anything that gets here with DECL_EXTERNAL
     set, not just things with DECL_INLINE.  */
  return (bool) DECL_EXTERNAL (decl);
}
1163 | ||
/* Try to identify useless null pointer tests and delete them.

   DECL labels the dump; INSNS is the insn chain.  If any checks were
   removed, the CFG is cleaned up since code may have become
   unreachable.  */
static void
rest_of_handle_null_pointer (tree decl, rtx insns)
{
  open_dump_file (DFI_null, decl);
  if (dump_file)
    dump_flow_info (dump_file);

  if (delete_null_pointer_checks (insns))
    cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);

  close_dump_file (DFI_null, print_rtl_with_bb, insns);
}
1177 | ||
/* Try combining insns through substitution.

   DECL labels the dump; INSNS is the insn chain.  Runs the combiner
   and, when it rewrote any jumps, rebuilds jump labels and cleans up
   the CFG (updating life information).  */
static void
rest_of_handle_combine (tree decl, rtx insns)
{
  int rebuild_jump_labels_after_combine = 0;

  timevar_push (TV_COMBINE);
  open_dump_file (DFI_combine, decl);

  rebuild_jump_labels_after_combine
    = combine_instructions (insns, max_reg_num ());

  /* Combining insns may have turned an indirect jump into a
     direct jump.  Rebuild the JUMP_LABEL fields of jumping
     instructions.  */
  if (rebuild_jump_labels_after_combine)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (insns);
      timevar_pop (TV_JUMP);

      cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
    }

  close_dump_file (DFI_combine, print_rtl_with_bb, insns);
  timevar_pop (TV_COMBINE);

  ggc_collect ();
}
1207 | ||
1208 | /* Perform life analysis. */ | |
1209 | static void | |
1210 | rest_of_handle_life (tree decl, rtx insns) | |
1211 | { | |
1212 | open_dump_file (DFI_life, decl); | |
1213 | regclass_init (); | |
1214 | ||
1215 | #ifdef ENABLE_CHECKING | |
1216 | verify_flow_info (); | |
1217 | #endif | |
1218 | life_analysis (insns, dump_file, PROP_FINAL); | |
1219 | if (optimize) | |
1220 | cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_UPDATE_LIFE | |
1221 | | CLEANUP_LOG_LINKS | |
1222 | | (flag_thread_jumps ? CLEANUP_THREADING : 0)); | |
1223 | timevar_pop (TV_FLOW); | |
1224 | ||
1225 | if (warn_uninitialized) | |
1226 | { | |
1227 | uninitialized_vars_warning (DECL_INITIAL (decl)); | |
1228 | if (extra_warnings) | |
1229 | setjmp_args_warning (); | |
1230 | } | |
1231 | ||
1232 | if (optimize) | |
1233 | { | |
1234 | if (!flag_new_regalloc && initialize_uninitialized_subregs ()) | |
1235 | { | |
1236 | /* Insns were inserted, and possibly pseudos created, so | |
1237 | things might look a bit different. */ | |
1238 | insns = get_insns (); | |
1239 | allocate_reg_life_data (); | |
1240 | update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES, | |
1241 | PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES); | |
1242 | } | |
1243 | } | |
1244 | ||
1245 | no_new_pseudos = 1; | |
1246 | ||
1247 | close_dump_file (DFI_life, print_rtl_with_bb, insns); | |
1248 | ||
1249 | ggc_collect (); | |
1250 | } | |
1251 | ||
/* Perform common subexpression elimination.  Nonzero value from
   `cse_main' means that jumps were simplified and some code may now
   be unreachable, so do jump optimization again.

   DECL labels the dump; INSNS is the insn chain.  Also sets
   cse_not_expected and renumbers insns at the end to shrink the
   maximum insn UID for later passes.  */
static void
rest_of_handle_cse (tree decl, rtx insns)
{
  int tem;                    /* Nonzero if cse_main simplified jumps.  */

  open_dump_file (DFI_cse, decl);
  if (dump_file)
    dump_flow_info (dump_file);
  timevar_push (TV_CSE);

  reg_scan (insns, max_reg_num (), 1);

  tem = cse_main (insns, max_reg_num (), 0, dump_file);
  if (tem)
    rebuild_jump_labels (insns);
  if (purge_all_dead_edges (0))
    delete_unreachable_blocks ();

  delete_trivially_dead_insns (insns, max_reg_num ());

  /* If we are not running more CSE passes, then we are no longer
     expecting CSE to be run.  But always rerun it in a cheap mode.  */
  cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;

  if (tem || optimize > 1)
    cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
  /* Try to identify useless null pointer tests and delete them.  */
  if (flag_delete_null_pointer_checks)
    {
      timevar_push (TV_JUMP);

      if (delete_null_pointer_checks (insns))
        cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
      timevar_pop (TV_JUMP);
    }

  /* The second pass of jump optimization is likely to have
     removed a bunch more instructions.  */
  renumber_insns (dump_file);

  timevar_pop (TV_CSE);
  close_dump_file (DFI_cse, print_rtl_with_bb, insns);
}
1298 | ||
/* Run second CSE pass after loop optimizations.

   DECL labels the dump; INSNS is the insn chain.  cse_main is run
   with after_loop nonzero; a nonzero result means jumps were
   simplified and the CFG must be cleaned up again.  */
static void
rest_of_handle_cse2 (tree decl, rtx insns)
{
  int tem;                    /* Nonzero if cse_main simplified jumps.  */

  timevar_push (TV_CSE2);
  open_dump_file (DFI_cse2, decl);
  if (dump_file)
    dump_flow_info (dump_file);
  /* CFG is no longer maintained up-to-date.  */
  tem = cse_main (insns, max_reg_num (), 1, dump_file);

  /* Run a pass to eliminate duplicated assignments to condition code
     registers.  We have to run this after bypass_jumps, because it
     makes it harder for that pass to determine whether a jump can be
     bypassed safely.  */
  cse_condition_code_reg ();

  purge_all_dead_edges (0);
  delete_trivially_dead_insns (insns, max_reg_num ());

  if (tem)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (insns);
      cleanup_cfg (CLEANUP_EXPENSIVE);
      timevar_pop (TV_JUMP);
    }
  reg_scan (insns, max_reg_num (), 0);
  close_dump_file (DFI_cse2, print_rtl_with_bb, insns);
  ggc_collect ();
  timevar_pop (TV_CSE2);
}
1333 | ||
/* Perform global cse.

   DECL labels the dump; INSNS is the insn chain.  Temporarily
   disables -fcse-skip-blocks / -fcse-follow-jumps for the cleanup
   CSE reruns and restores them on exit.  Iterates CFG cleanup + CSE
   until neither gcse nor cse reports further jump changes.  */
static void
rest_of_handle_gcse (tree decl, rtx insns)
{
  int save_csb, save_cfj;     /* Saved flag values, restored on exit.  */
  int tem2 = 0, tem;          /* Nonzero when gcse/cse altered jumps.  */

  timevar_push (TV_GCSE);
  open_dump_file (DFI_gcse, decl);

  tem = gcse_main (insns, dump_file);
  rebuild_jump_labels (insns);
  delete_trivially_dead_insns (insns, max_reg_num ());

  save_csb = flag_cse_skip_blocks;
  save_cfj = flag_cse_follow_jumps;
  flag_cse_skip_blocks = flag_cse_follow_jumps = 0;

  /* Instantiate any remaining CONSTANT_P_RTX nodes.  */
  if (current_function_calls_constant_p)
    purge_builtin_constant_p ();

  /* If -fexpensive-optimizations, re-run CSE to clean up things done
     by gcse.  */
  if (flag_expensive_optimizations)
    {
      timevar_push (TV_CSE);
      reg_scan (insns, max_reg_num (), 1);
      tem2 = cse_main (insns, max_reg_num (), 0, dump_file);
      purge_all_dead_edges (0);
      delete_trivially_dead_insns (insns, max_reg_num ());
      timevar_pop (TV_CSE);
      cse_not_expected = !flag_rerun_cse_after_loop;
    }

  /* If gcse or cse altered any jumps, rerun jump optimizations to clean
     things up.  Then possibly re-run CSE again.  */
  while (tem || tem2)
    {
      tem = tem2 = 0;
      timevar_push (TV_JUMP);
      rebuild_jump_labels (insns);
      cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
      timevar_pop (TV_JUMP);

      if (flag_expensive_optimizations)
        {
          timevar_push (TV_CSE);
          reg_scan (insns, max_reg_num (), 1);
          tem2 = cse_main (insns, max_reg_num (), 0, dump_file);
          purge_all_dead_edges (0);
          delete_trivially_dead_insns (insns, max_reg_num ());
          timevar_pop (TV_CSE);
        }
    }

  close_dump_file (DFI_gcse, print_rtl_with_bb, insns);
  timevar_pop (TV_GCSE);

  ggc_collect ();
  flag_cse_skip_blocks = save_csb;
  flag_cse_follow_jumps = save_cfj;
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
1400 | ||
/* Move constant computations out of loops (the classic RTL loop
   optimizer).

   DECL labels the dump; INSNS is the insn chain.  Frees the CFG
   before running loop_optimize and recomputes basic blocks at the
   end.  */
static void
rest_of_handle_loop_optimize (tree decl, rtx insns)
{
  int do_unroll, do_prefetch; /* LOOP_* flag bits for loop_optimize.  */

  timevar_push (TV_LOOP);
  delete_dead_jumptables ();
  cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
  open_dump_file (DFI_loop, decl);

  /* CFG is no longer maintained up-to-date.  */
  free_bb_for_insn ();

  if (flag_unroll_loops)
    do_unroll = LOOP_AUTO_UNROLL;       /* Having two unrollers is useless.  */
  else
    do_unroll = flag_old_unroll_loops ? LOOP_UNROLL : LOOP_AUTO_UNROLL;
  do_prefetch = flag_prefetch_loop_arrays ? LOOP_PREFETCH : 0;

  if (flag_rerun_loop_opt)
    {
      cleanup_barriers ();

      /* We only want to perform unrolling once.  */
      loop_optimize (insns, dump_file, do_unroll);
      do_unroll = 0;

      /* The first call to loop_optimize makes some instructions
         trivially dead.  We delete those instructions now in the
         hope that doing so will make the heuristics in loop work
         better and possibly speed up compilation.  */
      delete_trivially_dead_insns (insns, max_reg_num ());

      /* The regscan pass is currently necessary as the alias
         analysis code depends on this information.  */
      reg_scan (insns, max_reg_num (), 1);
    }
  cleanup_barriers ();
  loop_optimize (insns, dump_file, do_unroll | LOOP_BCT | do_prefetch);

  /* Loop can create trivially dead instructions.  */
  delete_trivially_dead_insns (insns, max_reg_num ());
  close_dump_file (DFI_loop, print_rtl, insns);
  timevar_pop (TV_LOOP);
  /* Rebuild the CFG that was freed above.  */
  find_basic_blocks (insns, max_reg_num (), dump_file);

  ggc_collect ();
}
1450 | ||
/* Perform loop optimizations.  It might be better to do them a bit
   sooner, but we want the profile feedback to work more
   efficiently.

   DECL labels the dump; INSNS is the insn chain.  Runs the new
   (CFG-layout based) loop optimizer: unswitching, unrolling and
   peeling, as selected by the corresponding -f flags.  */
static void
rest_of_handle_loop2 (tree decl, rtx insns)
{
  struct loops *loops;        /* Loop tree, or NULL if discovery failed.  */
  basic_block bb;

  timevar_push (TV_LOOP);
  open_dump_file (DFI_loop2, decl);
  if (dump_file)
    dump_flow_info (dump_file);

  /* Initialize structures for layout changes.  */
  cfg_layout_initialize ();

  loops = loop_optimizer_init (dump_file);

  if (loops)
    {
      /* The optimizations: */
      if (flag_unswitch_loops)
        unswitch_loops (loops);

      if (flag_peel_loops || flag_unroll_loops)
        unroll_and_peel_loops (loops,
                               (flag_peel_loops ? UAP_PEEL : 0) |
                               (flag_unroll_loops ? UAP_UNROLL : 0) |
                               (flag_unroll_all_loops ? UAP_UNROLL_ALL : 0));

      loop_optimizer_finalize (loops, dump_file);
    }

  /* Finalize layout changes: link each block to its successor so
     cfg_layout_finalize can restore the insn chain order.  */
  FOR_EACH_BB (bb)
    if (bb->next_bb != EXIT_BLOCK_PTR)
      bb->rbi->next = bb->next_bb;
  cfg_layout_finalize ();

  cleanup_cfg (CLEANUP_EXPENSIVE);
  delete_trivially_dead_insns (insns, max_reg_num ());
  reg_scan (insns, max_reg_num (), 0);
  if (dump_file)
    dump_flow_info (dump_file);
  /* Use get_insns (): the optimizer may have changed the chain head.  */
  close_dump_file (DFI_loop2, print_rtl_with_bb, get_insns ());
  timevar_pop (TV_LOOP);
  ggc_collect ();
}
1500 | ||
1501 | /* This is called from finish_function (within langhooks.parse_file) | |
1502 | after each top-level definition is parsed. | |
1503 | It is supposed to compile that function or variable | |
1504 | and output the assembler code for it. | |
1505 | After we return, the tree storage is freed. */ | |
1506 | ||
1507 | void | |
1508 | rest_of_compilation (tree decl) | |
1509 | { | |
1510 | rtx insns; | |
1511 | ||
1512 | timevar_push (TV_REST_OF_COMPILATION); | |
1513 | ||
1514 | /* Register rtl specific functions for cfg. */ | |
1515 | rtl_register_cfg_hooks (); | |
1516 | ||
1517 | /* Now that we're out of the frontend, we shouldn't have any more | |
1518 | CONCATs anywhere. */ | |
1519 | generating_concat_p = 0; | |
1520 | ||
1521 | /* When processing delayed functions, prepare_function_start() won't | |
1522 | have been run to re-initialize it. */ | |
1523 | cse_not_expected = ! optimize; | |
1524 | ||
1525 | /* First, make sure that NOTE_BLOCK is set correctly for each | |
1526 | NOTE_INSN_BLOCK_BEG/NOTE_INSN_BLOCK_END note. */ | |
1527 | if (!cfun->x_whole_function_mode_p) | |
1528 | identify_blocks (); | |
1529 | ||
1530 | /* In function-at-a-time mode, we do not attempt to keep the BLOCK | |
1531 | tree in sensible shape. So, we just recalculate it here. */ | |
1532 | if (cfun->x_whole_function_mode_p) | |
1533 | reorder_blocks (); | |
1534 | ||
1535 | init_flow (); | |
1536 | ||
1537 | if (rest_of_handle_inlining (decl)) | |
1538 | goto exit_rest_of_compilation; | |
1539 | ||
1540 | /* If we're emitting a nested function, make sure its parent gets | |
1541 | emitted as well. Doing otherwise confuses debug info. */ | |
1542 | { | |
1543 | tree parent; | |
1544 | for (parent = DECL_CONTEXT (current_function_decl); | |
1545 | parent != NULL_TREE; | |
1546 | parent = get_containing_scope (parent)) | |
1547 | if (TREE_CODE (parent) == FUNCTION_DECL) | |
1548 | TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1; | |
1549 | } | |
1550 | ||
1551 | /* We are now committed to emitting code for this function. Do any | |
1552 | preparation, such as emitting abstract debug info for the inline | |
1553 | before it gets mangled by optimization. */ | |
1554 | if (cgraph_function_possibly_inlined_p (decl)) | |
1555 | (*debug_hooks->outlining_inline_function) (decl); | |
1556 | ||
1557 | /* Remove any notes we don't need. That will make iterating | |
1558 | over the instruction sequence faster, and allow the garbage | |
1559 | collector to reclaim the memory used by the notes. */ | |
1560 | remove_unnecessary_notes (); | |
1561 | reorder_blocks (); | |
1562 | ||
1563 | ggc_collect (); | |
1564 | ||
1565 | /* Initialize some variables used by the optimizers. */ | |
1566 | init_function_for_compilation (); | |
1567 | ||
1568 | if (! DECL_DEFER_OUTPUT (decl)) | |
1569 | TREE_ASM_WRITTEN (decl) = 1; | |
1570 | ||
1571 | /* Now that integrate will no longer see our rtl, we need not | |
1572 | distinguish between the return value of this function and the | |
1573 | return value of called functions. Also, we can remove all SETs | |
1574 | of subregs of hard registers; they are only here because of | |
1575 | integrate. Also, we can now initialize pseudos intended to | |
1576 | carry magic hard reg data throughout the function. */ | |
1577 | rtx_equal_function_value_matters = 0; | |
1578 | purge_hard_subreg_sets (get_insns ()); | |
1579 | ||
1580 | /* Early return if there were errors. We can run afoul of our | |
1581 | consistency checks, and there's not really much point in fixing them. | |
1582 | Don't return yet if -Wreturn-type; we need to do cleanup_cfg. */ | |
1583 | if (((rtl_dump_and_exit || flag_syntax_only) && !warn_return_type) | |
1584 | || errorcount || sorrycount) | |
1585 | goto exit_rest_of_compilation; | |
1586 | ||
1587 | timevar_push (TV_JUMP); | |
1588 | open_dump_file (DFI_sibling, decl); | |
1589 | insns = get_insns (); | |
1590 | rebuild_jump_labels (insns); | |
1591 | find_exception_handler_labels (); | |
1592 | find_basic_blocks (insns, max_reg_num (), dump_file); | |
1593 | ||
1594 | delete_unreachable_blocks (); | |
1595 | ||
1596 | /* Turn NOTE_INSN_PREDICTIONs into branch predictions. */ | |
1597 | if (flag_guess_branch_prob) | |
1598 | { | |
1599 | timevar_push (TV_BRANCH_PROB); | |
1600 | note_prediction_to_br_prob (); | |
1601 | timevar_pop (TV_BRANCH_PROB); | |
1602 | } | |
1603 | ||
1604 | if (flag_optimize_sibling_calls) | |
1605 | rest_of_handle_sibling_calls (insns); | |
1606 | ||
1607 | /* We have to issue these warnings now already, because CFG cleanups | |
1608 | further down may destroy the required information. However, this | |
1609 | must be done after the sibcall optimization pass because the barrier | |
1610 | emitted for noreturn calls that are candidate for the optimization | |
1611 | is folded into the CALL_PLACEHOLDER until after this pass, so the | |
1612 | CFG is inaccurate. */ | |
1613 | check_function_return_warnings (); | |
1614 | ||
1615 | timevar_pop (TV_JUMP); | |
1616 | ||
1617 | insn_locators_initialize (); | |
1618 | /* Complete generation of exception handling code. */ | |
1619 | if (doing_eh (0)) | |
1620 | { | |
1621 | timevar_push (TV_JUMP); | |
1622 | open_dump_file (DFI_eh, decl); | |
1623 | ||
1624 | finish_eh_generation (); | |
1625 | ||
1626 | close_dump_file (DFI_eh, print_rtl, get_insns ()); | |
1627 | timevar_pop (TV_JUMP); | |
1628 | } | |
1629 | ||
1630 | /* Delay emitting hard_reg_initial_value sets until after EH landing pad | |
1631 | generation, which might create new sets. */ | |
1632 | emit_initial_value_sets (); | |
1633 | ||
1634 | #ifdef FINALIZE_PIC | |
1635 | /* If we are doing position-independent code generation, now | |
1636 | is the time to output special prologues and epilogues. | |
1637 | We do not want to do this earlier, because it just clutters | |
1638 | up inline functions with meaningless insns. */ | |
1639 | if (flag_pic) | |
1640 | FINALIZE_PIC; | |
1641 | #endif | |
1642 | ||
1643 | insns = get_insns (); | |
1644 | ||
1645 | /* Copy any shared structure that should not be shared. */ | |
1646 | unshare_all_rtl (current_function_decl, insns); | |
1647 | ||
1648 | #ifdef SETJMP_VIA_SAVE_AREA | |
1649 | /* This must be performed before virtual register instantiation. | |
1650 | Please be aware the everything in the compiler that can look | |
1651 | at the RTL up to this point must understand that REG_SAVE_AREA | |
1652 | is just like a use of the REG contained inside. */ | |
1653 | if (current_function_calls_alloca) | |
1654 | optimize_save_area_alloca (insns); | |
1655 | #endif | |
1656 | ||
1657 | /* Instantiate all virtual registers. */ | |
1658 | instantiate_virtual_regs (current_function_decl, insns); | |
1659 | ||
1660 | open_dump_file (DFI_jump, decl); | |
1661 | ||
1662 | /* Always do one jump optimization pass to ensure that JUMP_LABEL fields | |
1663 | are initialized and to compute whether control can drop off the end | |
1664 | of the function. */ | |
1665 | ||
1666 | timevar_push (TV_JUMP); | |
1667 | /* Turn NOTE_INSN_EXPECTED_VALUE into REG_BR_PROB. Do this | |
1668 | before jump optimization switches branch directions. */ | |
1669 | if (flag_guess_branch_prob) | |
1670 | expected_value_to_br_prob (); | |
1671 | ||
1672 | reg_scan (insns, max_reg_num (), 0); | |
1673 | rebuild_jump_labels (insns); | |
1674 | find_basic_blocks (insns, max_reg_num (), dump_file); | |
1675 | delete_trivially_dead_insns (insns, max_reg_num ()); | |
1676 | if (dump_file) | |
1677 | dump_flow_info (dump_file); | |
1678 | cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_PRE_LOOP | |
1679 | | (flag_thread_jumps ? CLEANUP_THREADING : 0)); | |
1680 | ||
1681 | if (optimize) | |
1682 | { | |
1683 | free_bb_for_insn (); | |
1684 | copy_loop_headers (insns); | |
1685 | find_basic_blocks (insns, max_reg_num (), dump_file); | |
1686 | } | |
1687 | purge_line_number_notes (insns); | |
1688 | ||
1689 | timevar_pop (TV_JUMP); | |
1690 | close_dump_file (DFI_jump, print_rtl, insns); | |
1691 | ||
1692 | /* Now is when we stop if -fsyntax-only and -Wreturn-type. */ | |
1693 | if (rtl_dump_and_exit || flag_syntax_only || DECL_DEFER_OUTPUT (decl)) | |
1694 | goto exit_rest_of_compilation; | |
1695 | ||
1696 | timevar_push (TV_JUMP); | |
1697 | ||
1698 | if (optimize) | |
1699 | cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP); | |
1700 | ||
1701 | if (flag_delete_null_pointer_checks) | |
1702 | rest_of_handle_null_pointer (decl, insns); | |
1703 | ||
1704 | /* Jump optimization, and the removal of NULL pointer checks, may | |
1705 | have reduced the number of instructions substantially. CSE, and | |
1706 | future passes, allocate arrays whose dimensions involve the | |
1707 | maximum instruction UID, so if we can reduce the maximum UID | |
1708 | we'll save big on memory. */ | |
1709 | renumber_insns (dump_file); | |
1710 | timevar_pop (TV_JUMP); | |
1711 | ||
1712 | close_dump_file (DFI_jump, print_rtl_with_bb, insns); | |
1713 | ||
1714 | ggc_collect (); | |
1715 | ||
1716 | if (optimize > 0) | |
1717 | rest_of_handle_cse (decl, insns); | |
1718 | ||
1719 | rest_of_handle_addressof (decl, insns); | |
1720 | ||
1721 | ggc_collect (); | |
1722 | ||
1723 | if (optimize > 0) | |
1724 | { | |
1725 | if (flag_gcse) | |
1726 | rest_of_handle_gcse (decl, insns); | |
1727 | ||
1728 | if (flag_loop_optimize) | |
1729 | rest_of_handle_loop_optimize (decl, insns); | |
1730 | ||
1731 | if (flag_gcse) | |
1732 | rest_of_handle_jump_bypass (decl, insns); | |
1733 | } | |
1734 | ||
1735 | timevar_push (TV_FLOW); | |
1736 | ||
1737 | rest_of_handle_cfg (decl, insns); | |
1738 | ||
1739 | if (optimize > 0 | |
1740 | || profile_arc_flag || flag_test_coverage || flag_branch_probabilities) | |
1741 | { | |
1742 | rest_of_handle_branch_prob (decl, insns); | |
1743 | ||
1744 | if (flag_branch_probabilities | |
1745 | && flag_profile_values | |
1746 | && flag_value_profile_transformations) | |
1747 | rest_of_handle_value_profile_transformations (decl, insns); | |
1748 | ||
1749 | /* Remove the death notes created for vpt. */ | |
1750 | if (flag_profile_values) | |
1751 | count_or_remove_death_notes (NULL, 1); | |
1752 | } | |
1753 | ||
1754 | if (optimize > 0) | |
1755 | rest_of_handle_if_conversion (decl, insns); | |
1756 | ||
1757 | if (flag_tracer) | |
1758 | rest_of_handle_tracer (decl, insns); | |
1759 | ||
1760 | if (optimize > 0 | |
1761 | && (flag_unswitch_loops | |
1762 | || flag_peel_loops | |
1763 | || flag_unroll_loops)) | |
1764 | rest_of_handle_loop2 (decl, insns); | |
1765 | ||
1766 | if (flag_web) | |
1767 | rest_of_handle_web (decl, insns); | |
1768 | ||
1769 | if (flag_rerun_cse_after_loop) | |
1770 | rest_of_handle_cse2 (decl, insns); | |
1771 | ||
1772 | cse_not_expected = 1; | |
1773 | ||
1774 | rest_of_handle_life (decl, insns); | |
1775 | ||
1776 | if (optimize > 0) | |
1777 | rest_of_handle_combine (decl, insns); | |
1778 | ||
1779 | if (flag_if_conversion) | |
1780 | rest_of_handle_if_after_combine (decl, insns); | |
1781 | ||
1782 | if (optimize > 0 && (flag_regmove || flag_expensive_optimizations)) | |
1783 | rest_of_handle_regmove (decl, insns); | |
1784 | ||
1785 | /* Do unconditional splitting before register allocation to allow machine | |
1786 | description to add extra information not needed previously. */ | |
1787 | split_all_insns (1); | |
1788 | ||
1789 | #ifdef OPTIMIZE_MODE_SWITCHING | |
1790 | timevar_push (TV_MODE_SWITCH); | |
1791 | ||
1792 | no_new_pseudos = 0; | |
1793 | optimize_mode_switching (NULL); | |
1794 | no_new_pseudos = 1; | |
1795 | ||
1796 | timevar_pop (TV_MODE_SWITCH); | |
1797 | #endif | |
1798 | ||
1799 | /* Any of the several passes since flow1 will have munged register | |
1800 | lifetime data a bit. We need it to be up to date for scheduling | |
1801 | (see handling of reg_known_equiv in init_alias_analysis). */ | |
1802 | recompute_reg_usage (insns, !optimize_size); | |
1803 | ||
1804 | #ifdef INSN_SCHEDULING | |
1805 | rest_of_handle_sched (decl, insns); | |
1806 | #endif | |
1807 | ||
1808 | /* Determine if the current function is a leaf before running reload | |
1809 | since this can impact optimizations done by the prologue and | |
1810 | epilogue thus changing register elimination offsets. */ | |
1811 | current_function_is_leaf = leaf_function_p (); | |
1812 | ||
1813 | timevar_push (TV_LOCAL_ALLOC); | |
1814 | open_dump_file (DFI_lreg, decl); | |
1815 | ||
1816 | if (flag_new_regalloc) | |
1817 | { | |
1818 | if (rest_of_handle_new_regalloc (decl, insns)) | |
1819 | goto exit_rest_of_compilation; | |
1820 | } | |
1821 | else | |
1822 | { | |
1823 | if (rest_of_handle_old_regalloc (decl, insns)) | |
1824 | goto exit_rest_of_compilation; | |
1825 | } | |
1826 | ||
1827 | ggc_collect (); | |
1828 | ||
1829 | open_dump_file (DFI_postreload, decl); | |
1830 | ||
1831 | /* Do a very simple CSE pass over just the hard registers. */ | |
1832 | if (optimize > 0) | |
1833 | { | |
1834 | timevar_push (TV_RELOAD_CSE_REGS); | |
1835 | reload_cse_regs (insns); | |
1836 | /* reload_cse_regs can eliminate potentially-trapping MEMs. | |
1837 | Remove any EH edges associated with them. */ | |
1838 | if (flag_non_call_exceptions) | |
1839 | purge_all_dead_edges (0); | |
1840 | timevar_pop (TV_RELOAD_CSE_REGS); | |
1841 | } | |
1842 | ||
1843 | close_dump_file (DFI_postreload, print_rtl_with_bb, insns); | |
1844 | ||
1845 | /* Re-create the death notes which were deleted during reload. */ | |
1846 | timevar_push (TV_FLOW2); | |
1847 | open_dump_file (DFI_flow2, decl); | |
1848 | ||
1849 | #ifdef ENABLE_CHECKING | |
1850 | verify_flow_info (); | |
1851 | #endif | |
1852 | ||
1853 | /* If optimizing, then go ahead and split insns now. */ | |
1854 | #ifndef STACK_REGS | |
1855 | if (optimize > 0) | |
1856 | #endif | |
1857 | split_all_insns (0); | |
1858 | ||
1859 | if (flag_branch_target_load_optimize) | |
1860 | { | |
1861 | open_dump_file (DFI_branch_target_load, decl); | |
1862 | ||
1863 | branch_target_load_optimize (insns, false); | |
1864 | ||
1865 | close_dump_file (DFI_branch_target_load, print_rtl_with_bb, insns); | |
1866 | ||
1867 | ggc_collect (); | |
1868 | } | |
1869 | ||
1870 | if (optimize) | |
1871 | cleanup_cfg (CLEANUP_EXPENSIVE); | |
1872 | ||
1873 | /* On some machines, the prologue and epilogue code, or parts thereof, | |
1874 | can be represented as RTL. Doing so lets us schedule insns between | |
1875 | it and the rest of the code and also allows delayed branch | |
1876 | scheduling to operate in the epilogue. */ | |
1877 | thread_prologue_and_epilogue_insns (insns); | |
1878 | epilogue_completed = 1; | |
1879 | ||
1880 | if (optimize) | |
1881 | { | |
1882 | life_analysis (insns, dump_file, PROP_POSTRELOAD); | |
1883 | cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE | |
1884 | | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0)); | |
1885 | ||
1886 | /* This is kind of a heuristic. We need to run combine_stack_adjustments | |
1887 | even for machines with possibly nonzero RETURN_POPS_ARGS | |
1888 | and ACCUMULATE_OUTGOING_ARGS. We expect that only ports having | |
1889 | push instructions will have popping returns. */ | |
1890 | #ifndef PUSH_ROUNDING | |
1891 | if (!ACCUMULATE_OUTGOING_ARGS) | |
1892 | #endif | |
1893 | combine_stack_adjustments (); | |
1894 | ||
1895 | ggc_collect (); | |
1896 | } | |
1897 | ||
1898 | flow2_completed = 1; | |
1899 | ||
1900 | close_dump_file (DFI_flow2, print_rtl_with_bb, insns); | |
1901 | timevar_pop (TV_FLOW2); | |
1902 | ||
1903 | #ifdef HAVE_peephole2 | |
1904 | if (optimize > 0 && flag_peephole2) | |
1905 | { | |
1906 | timevar_push (TV_PEEPHOLE2); | |
1907 | open_dump_file (DFI_peephole2, decl); | |
1908 | ||
1909 | peephole2_optimize (dump_file); | |
1910 | ||
1911 | close_dump_file (DFI_peephole2, print_rtl_with_bb, insns); | |
1912 | timevar_pop (TV_PEEPHOLE2); | |
1913 | } | |
1914 | #endif | |
1915 | ||
1916 | open_dump_file (DFI_ce3, decl); | |
1917 | if (optimize) | |
1918 | /* Last attempt to optimize CFG, as scheduling, peepholing and insn | |
1919 | splitting possibly introduced more crossjumping opportunities. */ | |
1920 | cleanup_cfg (CLEANUP_EXPENSIVE | |
1921 | | CLEANUP_UPDATE_LIFE | |
1922 | | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0)); | |
1923 | if (flag_if_conversion2) | |
1924 | { | |
1925 | timevar_push (TV_IFCVT2); | |
1926 | ||
1927 | if_convert (1); | |
1928 | ||
1929 | timevar_pop (TV_IFCVT2); | |
1930 | } | |
1931 | close_dump_file (DFI_ce3, print_rtl_with_bb, insns); | |
1932 | ||
1933 | if (optimize > 0) | |
1934 | { | |
1935 | if (flag_rename_registers || flag_cprop_registers) | |
1936 | rest_of_handle_regrename (decl, insns); | |
1937 | ||
1938 | rest_of_handle_reorder_blocks (decl, insns); | |
1939 | } | |
1940 | ||
1941 | if (flag_branch_target_load_optimize2) | |
1942 | { | |
1943 | /* Leave this a warning for now so that it is possible to experiment | |
1944 | with running this pass twice. In 3.6, we should either make this | |
1945 | an error, or use separate dump files. */ | |
1946 | if (flag_branch_target_load_optimize) | |
1947 | warning ("branch target register load optimization is not intended " | |
1948 | "to be run twice"); | |
1949 | ||
1950 | open_dump_file (DFI_branch_target_load, decl); | |
1951 | ||
1952 | branch_target_load_optimize (insns, true); | |
1953 | ||
1954 | close_dump_file (DFI_branch_target_load, print_rtl_with_bb, insns); | |
1955 | ||
1956 | ggc_collect (); | |
1957 | } | |
1958 | ||
1959 | #ifdef INSN_SCHEDULING | |
1960 | if (optimize > 0 && flag_schedule_insns_after_reload) | |
1961 | rest_of_handle_sched2 (decl, insns); | |
1962 | #endif | |
1963 | ||
1964 | #ifdef LEAF_REGISTERS | |
1965 | current_function_uses_only_leaf_regs | |
1966 | = optimize > 0 && only_leaf_regs_used () && leaf_function_p (); | |
1967 | #endif | |
1968 | ||
1969 | #ifdef STACK_REGS | |
1970 | rest_of_handle_stack_regs (decl, insns); | |
1971 | #endif | |
1972 | ||
1973 | compute_alignments (); | |
1974 | ||
1975 | if (flag_var_tracking) | |
1976 | rest_of_handle_variable_tracking (decl, insns); | |
1977 | ||
1978 | /* CFG is no longer maintained up-to-date. */ | |
1979 | free_bb_for_insn (); | |
1980 | ||
1981 | if (targetm.machine_dependent_reorg != 0) | |
1982 | rest_of_handle_machine_reorg (decl, insns); | |
1983 | ||
1984 | purge_line_number_notes (insns); | |
1985 | cleanup_barriers (); | |
1986 | ||
1987 | #ifdef DELAY_SLOTS | |
1988 | if (optimize > 0 && flag_delayed_branch) | |
1989 | rest_of_handle_delay_slots (decl, insns); | |
1990 | #endif | |
1991 | ||
1992 | #if defined (HAVE_ATTR_length) && !defined (STACK_REGS) | |
1993 | timevar_push (TV_SHORTEN_BRANCH); | |
1994 | split_all_insns_noflow (); | |
1995 | timevar_pop (TV_SHORTEN_BRANCH); | |
1996 | #endif | |
1997 | ||
1998 | convert_to_eh_region_ranges (); | |
1999 | ||
2000 | /* Shorten branches. */ | |
2001 | timevar_push (TV_SHORTEN_BRANCH); | |
2002 | shorten_branches (get_insns ()); | |
2003 | timevar_pop (TV_SHORTEN_BRANCH); | |
2004 | ||
2005 | set_nothrow_function_flags (); | |
2006 | if (current_function_nothrow) | |
2007 | /* Now we know that this can't throw; set the flag for the benefit | |
2008 | of other functions later in this translation unit. */ | |
2009 | TREE_NOTHROW (current_function_decl) = 1; | |
2010 | ||
2011 | rest_of_handle_final (decl, insns); | |
2012 | ||
2013 | /* Write DBX symbols if requested. */ | |
2014 | ||
2015 | /* Note that for those inline functions where we don't initially | |
2016 | know for certain that we will be generating an out-of-line copy, | |
2017 | the first invocation of this routine (rest_of_compilation) will | |
2018 | skip over this code by doing a `goto exit_rest_of_compilation;'. | |
2019 | Later on, wrapup_global_declarations will (indirectly) call | |
2020 | rest_of_compilation again for those inline functions that need | |
2021 | to have out-of-line copies generated. During that call, we | |
2022 | *will* be routed past here. */ | |
2023 | ||
2024 | timevar_push (TV_SYMOUT); | |
2025 | (*debug_hooks->function_decl) (decl); | |
2026 | timevar_pop (TV_SYMOUT); | |
2027 | ||
2028 | exit_rest_of_compilation: | |
2029 | ||
2030 | coverage_end_function (); | |
2031 | ||
2032 | /* In case the function was not output, | |
2033 | don't leave any temporary anonymous types | |
2034 | queued up for sdb output. */ | |
2035 | #ifdef SDB_DEBUGGING_INFO | |
2036 | if (write_symbols == SDB_DEBUG) | |
2037 | sdbout_types (NULL_TREE); | |
2038 | #endif | |
2039 | ||
2040 | reload_completed = 0; | |
2041 | epilogue_completed = 0; | |
2042 | flow2_completed = 0; | |
2043 | no_new_pseudos = 0; | |
2044 | ||
2045 | timevar_push (TV_FINAL); | |
2046 | ||
2047 | /* Clear out the insn_length contents now that they are no | |
2048 | longer valid. */ | |
2049 | init_insn_lengths (); | |
2050 | ||
2051 | /* Show no temporary slots allocated. */ | |
2052 | init_temp_slots (); | |
2053 | ||
2054 | free_basic_block_vars (0); | |
2055 | free_bb_for_insn (); | |
2056 | ||
2057 | timevar_pop (TV_FINAL); | |
2058 | ||
2059 | if ((*targetm.binds_local_p) (current_function_decl)) | |
2060 | { | |
2061 | int pref = cfun->preferred_stack_boundary; | |
2062 | if (cfun->recursive_call_emit | |
2063 | && cfun->stack_alignment_needed > cfun->preferred_stack_boundary) | |
2064 | pref = cfun->stack_alignment_needed; | |
2065 | cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary | |
2066 | = pref; | |
2067 | } | |
2068 | ||
2069 | /* Make sure volatile mem refs aren't considered valid operands for | |
2070 | arithmetic insns. We must call this here if this is a nested inline | |
2071 | function, since the above code leaves us in the init_recog state | |
2072 | (from final.c), and the function context push/pop code does not | |
2073 | save/restore volatile_ok. | |
2074 | ||
2075 | ??? Maybe it isn't necessary for expand_start_function to call this | |
2076 | anymore if we do it here? */ | |
2077 | ||
2078 | init_recog_no_volatile (); | |
2079 | ||
2080 | /* We're done with this function. Free up memory if we can. */ | |
2081 | free_after_parsing (cfun); | |
2082 | if (! DECL_DEFER_OUTPUT (decl)) | |
2083 | { | |
2084 | free_after_compilation (cfun); | |
2085 | DECL_STRUCT_FUNCTION (decl) = 0; | |
2086 | } | |
2087 | cfun = 0; | |
2088 | ||
2089 | ggc_collect (); | |
2090 | ||
2091 | timevar_pop (TV_REST_OF_COMPILATION); | |
2092 | } | |
2093 | ||
2094 | void | |
2095 | init_optimization_passes (void) | |
2096 | { | |
2097 | if (flag_unit_at_a_time) | |
2098 | { | |
2099 | open_dump_file (DFI_cgraph, NULL); | |
2100 | cgraph_dump_file = dump_file; | |
2101 | dump_file = NULL; | |
2102 | } | |
2103 | } | |
2104 | ||
2105 | void | |
2106 | finish_optimization_passes (void) | |
2107 | { | |
2108 | if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities) | |
2109 | { | |
2110 | timevar_push (TV_DUMP); | |
2111 | open_dump_file (DFI_bp, NULL); | |
2112 | ||
2113 | end_branch_prob (); | |
2114 | ||
2115 | close_dump_file (DFI_bp, NULL, NULL_RTX); | |
2116 | timevar_pop (TV_DUMP); | |
2117 | } | |
2118 | ||
2119 | if (optimize > 0 && open_dump_file (DFI_combine, NULL)) | |
2120 | { | |
2121 | timevar_push (TV_DUMP); | |
2122 | dump_combine_total_stats (dump_file); | |
2123 | close_dump_file (DFI_combine, NULL, NULL_RTX); | |
2124 | timevar_pop (TV_DUMP); | |
2125 | } | |
2126 | ||
2127 | if (flag_unit_at_a_time) | |
2128 | { | |
2129 | dump_file = cgraph_dump_file; | |
2130 | cgraph_dump_file = NULL; | |
2131 | close_dump_file (DFI_cgraph, NULL, NULL_RTX); | |
2132 | } | |
2133 | ||
2134 | /* Do whatever is necessary to finish printing the graphs. */ | |
2135 | if (graph_dump_format != no_graph) | |
2136 | { | |
2137 | int i; | |
2138 | ||
2139 | for (i = 0; i < (int) DFI_MAX; ++i) | |
2140 | if (dump_file_tbl[i].initialized && dump_file_tbl[i].graph_dump_p) | |
2141 | { | |
2142 | char seq[16]; | |
2143 | char *suffix; | |
2144 | ||
2145 | sprintf (seq, DUMPFILE_FORMAT, i); | |
2146 | suffix = concat (seq, dump_file_tbl[i].extension, NULL); | |
2147 | finish_graph_dump_file (dump_base_name, suffix); | |
2148 | free (suffix); | |
2149 | } | |
2150 | } | |
2151 | ||
2152 | } | |
2153 | ||
2154 | bool | |
2155 | enable_rtl_dump_file (int letter) | |
2156 | { | |
2157 | bool matched = false; | |
2158 | int i; | |
2159 | ||
2160 | if (letter == 'a') | |
2161 | { | |
2162 | for (i = 0; i < (int) DFI_MAX; ++i) | |
2163 | dump_file_tbl[i].enabled = 1; | |
2164 | matched = true; | |
2165 | } | |
2166 | else | |
2167 | { | |
2168 | for (i = 0; i < (int) DFI_MAX; ++i) | |
2169 | if (letter == dump_file_tbl[i].debug_switch) | |
2170 | { | |
2171 | dump_file_tbl[i].enabled = 1; | |
2172 | matched = true; | |
2173 | } | |
2174 | } | |
2175 | ||
2176 | return matched; | |
2177 | } |