1 /* Top-level control of tree optimizations.
2 Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "expr.h"
33 #include "diagnostic.h"
34 #include "basic-block.h"
35 #include "flags.h"
36 #include "tree-flow.h"
37 #include "tree-dump.h"
38 #include "timevar.h"
39 #include "function.h"
40 #include "langhooks.h"
41 #include "toplev.h"
42 #include "flags.h"
43 #include "cgraph.h"
44 #include "tree-inline.h"
45 #include "tree-mudflap.h"
46 #include "tree-pass.h"
47 #include "ggc.h"
48 #include "cgraph.h"
49 #include "graph.h"
50 #include "cfgloop.h"
51 #include "except.h"
52
53
54 /* Global variables used to communicate with passes. */
55 int dump_flags;
56 bool in_gimple_form;
57
58 /* The root of the compilation pass tree, once constructed. */
59 static struct tree_opt_pass *all_passes, *all_ipa_passes, *all_lowering_passes;
60
61 /* Gate: execute, or not, all of the non-trivial optimizations. */
62
63 static bool
64 gate_all_optimizations (void)
65 {
66 return (optimize >= 1
67 /* Don't bother doing anything if the program has errors. */
68 && !(errorcount || sorrycount));
69 }
70
71 static struct tree_opt_pass pass_all_optimizations =
72 {
73 NULL, /* name */
74 gate_all_optimizations, /* gate */
75 NULL, /* execute */
76 NULL, /* sub */
77 NULL, /* next */
78 0, /* static_pass_number */
79 0, /* tv_id */
80 0, /* properties_required */
81 0, /* properties_provided */
82 0, /* properties_destroyed */
83 0, /* todo_flags_start */
84 0, /* todo_flags_finish */
85 0 /* letter */
86 };
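/* Note that the name field above is NULL: register_dump_files below only
   registers a dump file for passes that have a name, so this container
   pass gets no dump of its own.  Its real work is done by the sub-pass
   chain hooked onto pass_all_optimizations.sub in
   init_tree_optimization_passes.  */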
87
88 /* Pass: clean up the CFG just before expanding trees to RTL.
89 This is just a round of label cleanups and case node grouping,
90 because such cleanups may be necessary after the tree optimizers
91 have run. */
92
93 static void
94 execute_cleanup_cfg_post_optimizing (void)
95 {
96 cleanup_tree_cfg ();
97 cleanup_dead_labels ();
98 group_case_labels ();
99 }
100
101 static struct tree_opt_pass pass_cleanup_cfg_post_optimizing =
102 {
103 "final_cleanup", /* name */
104 NULL, /* gate */
105 execute_cleanup_cfg_post_optimizing, /* execute */
106 NULL, /* sub */
107 NULL, /* next */
108 0, /* static_pass_number */
109 0, /* tv_id */
110 PROP_cfg, /* properties_required */
111 0, /* properties_provided */
112 0, /* properties_destroyed */
113 0, /* todo_flags_start */
114 TODO_dump_func, /* todo_flags_finish */
115 0 /* letter */
116 };
117
118 /* Pass: do the actions required to finish with tree-ssa optimization
119 passes. */
120
121 static void
122 execute_free_datastructures (void)
123 {
124 tree *chain;
125
126 /* ??? This isn't the right place for this. Worse, it got computed
127 more or less at random in various passes. */
128 free_dominance_info (CDI_DOMINATORS);
129
130 /* Emit gotos for implicit jumps. */
131 disband_implicit_edges ();
132
133 /* Remove the ssa structures. Do it here since this includes statement
134 annotations that need to be intact during disband_implicit_edges. */
135 delete_tree_ssa ();
136
137 /* Re-chain the statements from the blocks. */
138 chain = &DECL_SAVED_TREE (current_function_decl);
139 *chain = alloc_stmt_list ();
140
141 /* And get rid of annotations we no longer need. */
142 delete_tree_cfg_annotations ();
143 }
144
145 static struct tree_opt_pass pass_free_datastructures =
146 {
147 NULL, /* name */
148 NULL, /* gate */
149 execute_free_datastructures, /* execute */
150 NULL, /* sub */
151 NULL, /* next */
152 0, /* static_pass_number */
153 0, /* tv_id */
154 PROP_cfg, /* properties_required */
155 0, /* properties_provided */
156 0, /* properties_destroyed */
157 0, /* todo_flags_start */
158 0, /* todo_flags_finish */
159 0 /* letter */
160 };
161
162 /* Pass: fixup_cfg - IPA passes or the compilation of earlier functions might
163 have changed some properties, such as marking functions nothrow. Remove the
164 now-redundant edges and basic blocks. */
165
166 static void
167 execute_fixup_cfg (void)
168 {
169 basic_block bb;
170 block_stmt_iterator bsi;
171
172 if (cfun->eh)
173 FOR_EACH_BB (bb)
174 {
175 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
176 {
177 tree stmt = bsi_stmt (bsi);
178 tree call = get_call_expr_in (stmt);
179
180 if (call && call_expr_flags (call) & (ECF_CONST | ECF_PURE))
181 TREE_SIDE_EFFECTS (call) = 0;
182 if (!tree_could_throw_p (stmt) && lookup_stmt_eh_region (stmt))
183 remove_stmt_from_eh_region (stmt);
184 }
185 tree_purge_dead_eh_edges (bb);
186 }
187
188 cleanup_tree_cfg ();
189 }
190
191 static struct tree_opt_pass pass_fixup_cfg =
192 {
193 NULL, /* name */
194 NULL, /* gate */
195 execute_fixup_cfg, /* execute */
196 NULL, /* sub */
197 NULL, /* next */
198 0, /* static_pass_number */
199 0, /* tv_id */
200 PROP_cfg, /* properties_required */
201 0, /* properties_provided */
202 0, /* properties_destroyed */
203 0, /* todo_flags_start */
204 0, /* todo_flags_finish */
205 0 /* letter */
206 };
207
208 /* Do the actions required to initialize internal data structures used
209 in tree-ssa optimization passes. */
210
211 static void
212 execute_init_datastructures (void)
213 {
214 /* Allocate hash tables, arrays and other structures. */
215 init_tree_ssa ();
216 }
217
218 static struct tree_opt_pass pass_init_datastructures =
219 {
220 NULL, /* name */
221 NULL, /* gate */
222 execute_init_datastructures, /* execute */
223 NULL, /* sub */
224 NULL, /* next */
225 0, /* static_pass_number */
226 0, /* tv_id */
227 PROP_cfg, /* properties_required */
228 0, /* properties_provided */
229 0, /* properties_destroyed */
230 0, /* todo_flags_start */
231 0, /* todo_flags_finish */
232 0 /* letter */
233 };
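/* The pass descriptors above all follow the same recipe.  As a rough,
   hypothetical sketch (pass_my_pass and execute_my_pass are made-up names,
   not existing GCC passes), a new GIMPLE pass would be added by defining an
   execute function and a tree_opt_pass descriptor, and then wiring it into
   one of the lists built by init_tree_optimization_passes below with
   NEXT_PASS.  The field order must match struct tree_opt_pass in
   tree-pass.h.  The block is kept under #if 0 so it is illustration only.  */
#if 0
static void
execute_my_pass (void)
{
  /* Transform the current function (cfun / current_function_decl) here.  */
}

static struct tree_opt_pass pass_my_pass =
{
  "my_pass",				/* name: also names the dump file */
  NULL,					/* gate: NULL means always execute */
  execute_my_pass,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_stmts,	/* todo_flags_finish */
  0					/* letter */
};

/* In init_tree_optimization_passes, e.g. somewhere in the
   pass_all_optimizations sub chain:  NEXT_PASS (pass_my_pass);  */
#endif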
234
235 /* Iterate over the pass tree allocating dump file numbers. We want
236 to do this depth first, and independent of whether the pass is
237 enabled or not. */
238
239 static void
240 register_one_dump_file (struct tree_opt_pass *pass, bool ipa, int n)
241 {
242 char *dot_name, *flag_name, *glob_name;
243 char num[10];
244
245 /* See below in next_pass_1. */
246 num[0] = '\0';
247 if (pass->static_pass_number != -1)
248 sprintf (num, "%d", ((int) pass->static_pass_number < 0
249 ? 1 : pass->static_pass_number));
250
251 dot_name = concat (".", pass->name, num, NULL);
252 if (ipa)
253 {
254 flag_name = concat ("ipa-", pass->name, num, NULL);
255 glob_name = concat ("ipa-", pass->name, NULL);
256 /* The first IPA dump is the cgraph dump, which is emitted via separate channels. */
257 pass->static_pass_number = dump_register (dot_name, flag_name, glob_name,
258 TDF_IPA, n + 1, 0);
259 }
260 else if (pass->properties_provided & PROP_trees)
261 {
262 flag_name = concat ("tree-", pass->name, num, NULL);
263 glob_name = concat ("tree-", pass->name, NULL);
264 pass->static_pass_number = dump_register (dot_name, flag_name, glob_name,
265 TDF_TREE, n + TDI_tree_all, 0);
266 }
267 else
268 {
269 flag_name = concat ("rtl-", pass->name, num, NULL);
270 glob_name = concat ("rtl-", pass->name, NULL);
271 pass->static_pass_number = dump_register (dot_name, flag_name, glob_name,
272 TDF_RTL, n, pass->letter);
273 }
274 }
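/* For example, given the concat calls above: the second instance of the
   tree pass named "dce" gets dot_name ".dce2", flag_name "tree-dce2" and
   glob_name "tree-dce", while an IPA pass named, say, "inline" that occurs
   only once keeps an empty num and gets ".inline", "ipa-inline" and
   "ipa-inline".  */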
275
276 static int
277 register_dump_files (struct tree_opt_pass *pass, bool ipa, int properties)
278 {
279 static int n = 0;
280 do
281 {
282 int new_properties;
283 int pass_number;
284
285 pass->properties_required = properties;
286 new_properties =
287 (properties | pass->properties_provided) & ~pass->properties_destroyed;
288
289 /* Reset the counter when we reach RTL-based passes. */
290 if ((pass->properties_provided ^ pass->properties_required) & PROP_rtl)
291 n = 0;
292
293 pass_number = n;
294 if (pass->name)
295 n++;
296
297 if (pass->sub)
298 new_properties = register_dump_files (pass->sub, ipa, new_properties);
299
300 /* If we have a gate, combine the properties that we could have with
301 and without the pass being examined. */
302 if (pass->gate)
303 properties &= new_properties;
304 else
305 properties = new_properties;
306
307 pass->properties_provided = properties;
308 if (pass->name)
309 register_one_dump_file (pass, ipa, pass_number);
310
311 pass = pass->next;
312 }
313 while (pass);
314
315 return properties;
316 }
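/* Illustration of the gate handling above: when a pass has a gate, its run
   is not guaranteed, so the properties handed to the passes after it are
   the intersection of the "pass ran" and "pass was skipped" sets; e.g.
   PROP_ssa provided only by a gated pass is not propagated.  An ungated
   pass simply passes on everything it provides and has not destroyed.  */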
317
318 /* Add a pass to the pass list. Duplicate the pass if it's already
319 in the list. */
320
321 static struct tree_opt_pass **
322 next_pass_1 (struct tree_opt_pass **list, struct tree_opt_pass *pass)
323 {
324
325 /* A nonzero static_pass_number indicates that the
326 pass is already in the list. */
327 if (pass->static_pass_number)
328 {
329 struct tree_opt_pass *new;
330
331 new = xmalloc (sizeof (*new));
332 memcpy (new, pass, sizeof (*new));
333
334 /* Indicate to register_dump_files that this pass has duplicates,
335 and so it should rename the dump file. The first instance starts at -1
336 and is decremented for each duplicate, so the number of duplicates is
337 -static_pass_number - 1. Subsequent instances are > 0 and hold just the duplicate number. */
338 if (pass->name)
339 {
340 pass->static_pass_number -= 1;
341 new->static_pass_number = -pass->static_pass_number;
342 }
343
344 *list = new;
345 }
346 else
347 {
348 pass->static_pass_number = -1;
349 *list = pass;
350 }
351
352 return &(*list)->next;
353
354 }
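/* A short worked example of the numbering scheme: the first time a given
   pass, say pass_dce, is added its static_pass_number becomes -1.  Each
   further NEXT_PASS (pass_dce) decrements the original descriptor to -2,
   -3, ... and gives the freshly copied descriptor the positive duplicate
   number 2, 3, ...; so -static_pass_number - 1 on the original is the
   total number of duplicates, as the comment above describes.  */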
355
356 /* Construct the pass tree. */
357
358 void
359 init_tree_optimization_passes (void)
360 {
361 struct tree_opt_pass **p;
362
363 #define NEXT_PASS(PASS) (p = next_pass_1 (p, &PASS))
364 /* Interprocedural optimization passes. */
365 p = &all_ipa_passes;
366 NEXT_PASS (pass_ipa_inline);
367 *p = NULL;
368
369 /* All passes needed to lower the function into a shape the optimizers can
370 operate on. Unlike the rest of the local passes (all_passes), these passes
371 are performed before the interprocedural passes. */
372 p = &all_lowering_passes;
373 NEXT_PASS (pass_remove_useless_stmts);
374 NEXT_PASS (pass_mudflap_1);
375 NEXT_PASS (pass_lower_cf);
376 NEXT_PASS (pass_lower_eh);
377 NEXT_PASS (pass_build_cfg);
378 NEXT_PASS (pass_lower_complex_O0);
379 NEXT_PASS (pass_lower_vector);
380 NEXT_PASS (pass_warn_function_return);
381 NEXT_PASS (pass_tree_profile);
382 *p = NULL;
383
384 p = &all_passes;
385 NEXT_PASS (pass_fixup_cfg);
386 NEXT_PASS (pass_init_datastructures);
387 NEXT_PASS (pass_all_optimizations);
388 NEXT_PASS (pass_warn_function_noreturn);
389 NEXT_PASS (pass_mudflap_2);
390 NEXT_PASS (pass_free_datastructures);
391 NEXT_PASS (pass_expand);
392 NEXT_PASS (pass_rest_of_compilation);
393 *p = NULL;
394
395 p = &pass_all_optimizations.sub;
396 NEXT_PASS (pass_referenced_vars);
397 NEXT_PASS (pass_create_structure_vars);
398 NEXT_PASS (pass_build_ssa);
399 NEXT_PASS (pass_build_pta);
400 NEXT_PASS (pass_may_alias);
401 NEXT_PASS (pass_del_pta);
402 NEXT_PASS (pass_rename_ssa_copies);
403 NEXT_PASS (pass_early_warn_uninitialized);
404
405 /* Initial scalar cleanups. */
406 NEXT_PASS (pass_ccp);
407 NEXT_PASS (pass_fre);
408 NEXT_PASS (pass_dce);
409 NEXT_PASS (pass_forwprop);
410 NEXT_PASS (pass_vrp);
411 NEXT_PASS (pass_copy_prop);
412 NEXT_PASS (pass_dce);
413 NEXT_PASS (pass_merge_phi);
414 NEXT_PASS (pass_dominator);
415
416 NEXT_PASS (pass_phiopt);
417 NEXT_PASS (pass_build_pta);
418 NEXT_PASS (pass_may_alias);
419 NEXT_PASS (pass_del_pta);
420 NEXT_PASS (pass_tail_recursion);
421 NEXT_PASS (pass_profile);
422 NEXT_PASS (pass_ch);
423 NEXT_PASS (pass_stdarg);
424 NEXT_PASS (pass_lower_complex);
425 NEXT_PASS (pass_sra);
426 /* FIXME: SRA may generate arbitrary gimple code, exposing new
427 aliased and call-clobbered variables. As mentioned below,
428 pass_may_alias should be a TODO item. */
429 NEXT_PASS (pass_may_alias);
430 NEXT_PASS (pass_rename_ssa_copies);
431 NEXT_PASS (pass_dominator);
432 NEXT_PASS (pass_copy_prop);
433 NEXT_PASS (pass_dce);
434 NEXT_PASS (pass_dse);
435 NEXT_PASS (pass_may_alias);
436 NEXT_PASS (pass_forwprop);
437 NEXT_PASS (pass_phiopt);
438 NEXT_PASS (pass_store_ccp);
439 NEXT_PASS (pass_store_copy_prop);
440 NEXT_PASS (pass_fold_builtins);
441 /* FIXME: may_alias should be a TODO item, but for 4.0.0
442 we add may_alias right after fold_builtins,
443 which can create arbitrary GIMPLE. */
444 NEXT_PASS (pass_may_alias);
445 NEXT_PASS (pass_cse_reciprocals);
446 NEXT_PASS (pass_split_crit_edges);
447 NEXT_PASS (pass_reassoc);
448 NEXT_PASS (pass_pre);
449 NEXT_PASS (pass_sink_code);
450 NEXT_PASS (pass_loop);
451 NEXT_PASS (pass_dominator);
452 NEXT_PASS (pass_copy_prop);
453 NEXT_PASS (pass_cd_dce);
454 /* FIXME: If DCE is not run before checking for uninitialized uses,
455 we may get false warnings (e.g., testsuite/gcc.dg/uninit-5.c).
456 However, this also causes us to misdiagnose cases that should be
457 real warnings (e.g., testsuite/gcc.dg/pr18501.c).
458
459 To fix the false positives in uninit-5.c, we would have to
460 account for the predicates protecting the set and the use of each
461 variable. Using a representation like Gated Single Assignment
462 may help. */
463 NEXT_PASS (pass_late_warn_uninitialized);
464 NEXT_PASS (pass_dse);
465 NEXT_PASS (pass_forwprop);
466 NEXT_PASS (pass_phiopt);
467 NEXT_PASS (pass_tail_calls);
468 NEXT_PASS (pass_rename_ssa_copies);
469 NEXT_PASS (pass_uncprop);
470 NEXT_PASS (pass_del_ssa);
471 NEXT_PASS (pass_nrv);
472 NEXT_PASS (pass_remove_useless_vars);
473 NEXT_PASS (pass_mark_used_blocks);
474 NEXT_PASS (pass_cleanup_cfg_post_optimizing);
475 *p = NULL;
476
477 p = &pass_loop.sub;
478 NEXT_PASS (pass_loop_init);
479 NEXT_PASS (pass_copy_prop);
480 NEXT_PASS (pass_lim);
481 NEXT_PASS (pass_unswitch);
482 NEXT_PASS (pass_scev_cprop);
483 NEXT_PASS (pass_record_bounds);
484 NEXT_PASS (pass_linear_transform);
485 NEXT_PASS (pass_iv_canon);
486 NEXT_PASS (pass_if_conversion);
487 NEXT_PASS (pass_vectorize);
488 /* NEXT_PASS (pass_may_alias) cannot be done again because the
489 vectorizer creates alias relations that are not supported by
490 pass_may_alias. */
491 NEXT_PASS (pass_lower_vector_ssa);
492 NEXT_PASS (pass_complete_unroll);
493 NEXT_PASS (pass_iv_optimize);
494 NEXT_PASS (pass_loop_done);
495 *p = NULL;
496
497 #undef NEXT_PASS
498
499 register_dump_files (all_lowering_passes, false, PROP_gimple_any);
500 register_dump_files (all_passes, false, PROP_gimple_any
501 | PROP_gimple_lcf
502 | PROP_gimple_leh
503 | PROP_cfg);
504 register_dump_files (all_ipa_passes, true, PROP_gimple_any
505 | PROP_gimple_lcf
506 | PROP_gimple_leh
507 | PROP_cfg);
508 }
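/* The property sets passed to register_dump_files above are the properties
   already guaranteed when each list starts executing: the lowering passes
   begin from plain GIMPLE (PROP_gimple_any), while all_passes and the IPA
   passes additionally start with lowered control flow, lowered EH and a CFG
   (PROP_gimple_lcf, PROP_gimple_leh, PROP_cfg).  */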
509
510 static unsigned int last_verified;
511
512 static void
513 execute_todo (struct tree_opt_pass *pass, unsigned int flags, bool use_required)
514 {
515 int properties
516 = use_required ? pass->properties_required : pass->properties_provided;
517
518 #if defined ENABLE_CHECKING
519 if (need_ssa_update_p ())
520 gcc_assert (flags & TODO_update_ssa_any);
521 #endif
522
523 if (flags & TODO_update_ssa_any)
524 {
525 unsigned update_flags = flags & TODO_update_ssa_any;
526 update_ssa (update_flags);
527 }
528
529 if (flags & TODO_cleanup_cfg)
530 {
531 if (current_loops)
532 cleanup_tree_cfg_loop ();
533 else
534 cleanup_tree_cfg ();
535 }
536
537 if ((flags & TODO_dump_func)
538 && dump_file && current_function_decl)
539 {
540 if (properties & PROP_trees)
541 dump_function_to_file (current_function_decl,
542 dump_file, dump_flags);
543 else if (properties & PROP_cfg)
544 print_rtl_with_bb (dump_file, get_insns ());
545 else
546 print_rtl (dump_file, get_insns ());
547
548 /* Flush the file. If verification fails, we won't be able to
549 close the file before dying. */
550 fflush (dump_file);
551 }
552 if ((flags & TODO_dump_cgraph)
553 && dump_file && !current_function_decl)
554 {
555 dump_cgraph (dump_file);
556 /* Flush the file. If verification fails, we won't be able to
557 close the file before aborting. */
558 fflush (dump_file);
559 }
560
561 if (flags & TODO_ggc_collect)
562 {
563 ggc_collect ();
564 }
565
566 #if defined ENABLE_CHECKING
567 if ((pass->properties_required & PROP_ssa)
568 && !(pass->properties_destroyed & PROP_ssa))
569 verify_ssa (true);
570 if (flags & TODO_verify_flow)
571 verify_flow_info ();
572 if (flags & TODO_verify_stmts)
573 verify_stmts ();
574 if (flags & TODO_verify_loops)
575 verify_loop_closed_ssa ();
576 #endif
577 }
578
579 static bool
580 execute_one_pass (struct tree_opt_pass *pass)
581 {
582 unsigned int todo;
583
584 /* See if we're supposed to run this pass. */
585 if (pass->gate && !pass->gate ())
586 return false;
587
588 /* Note that the folders should only create gimple expressions.
589 This is a hack until the new folder is ready. */
590 in_gimple_form = (pass->properties_provided & PROP_trees) != 0;
591
592 /* Run pre-pass verification. */
593 todo = pass->todo_flags_start & ~last_verified;
594 if (todo)
595 execute_todo (pass, todo, true);
596
597 /* If a dump file name is present, open it if enabled. */
598 if (pass->static_pass_number != -1)
599 {
600 bool initializing_dump = !dump_initialized_p (pass->static_pass_number);
601 dump_file_name = get_dump_file_name (pass->static_pass_number);
602 dump_file = dump_begin (pass->static_pass_number, &dump_flags);
603 if (dump_file && current_function_decl)
604 {
605 const char *dname, *aname;
606 dname = lang_hooks.decl_printable_name (current_function_decl, 2);
607 aname = (IDENTIFIER_POINTER
608 (DECL_ASSEMBLER_NAME (current_function_decl)));
609 fprintf (dump_file, "\n;; Function %s (%s)%s\n\n", dname, aname,
610 cfun->function_frequency == FUNCTION_FREQUENCY_HOT
611 ? " (hot)"
612 : cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED
613 ? " (unlikely executed)"
614 : "");
615 }
616
617 if (initializing_dump
618 && graph_dump_format != no_graph
619 && (pass->properties_provided & (PROP_cfg | PROP_rtl))
620 == (PROP_cfg | PROP_rtl))
621 clean_graph_dump_file (dump_file_name);
622 }
623
624 /* If a timevar is present, start it. */
625 if (pass->tv_id)
626 timevar_push (pass->tv_id);
627
628 /* Do it! */
629 if (pass->execute)
630 pass->execute ();
631
632 /* Stop timevar. */
633 if (pass->tv_id)
634 timevar_pop (pass->tv_id);
635
636 if (dump_file
637 && (pass->properties_provided & (PROP_cfg | PROP_rtl))
638 == (PROP_cfg | PROP_rtl))
639 print_rtl_with_bb (dump_file, get_insns ());
640
641 /* Run post-pass cleanup and verification. */
642 todo = pass->todo_flags_finish;
643 last_verified = todo & TODO_verify_all;
644 if (todo)
645 execute_todo (pass, todo, false);
646
647 /* Flush and close dump file. */
648 if (dump_file_name)
649 {
650 free ((char *) dump_file_name);
651 dump_file_name = NULL;
652 }
653 if (dump_file)
654 {
655 dump_end (pass->static_pass_number, dump_file);
656 dump_file = NULL;
657 }
658
659 return true;
660 }
661
662 static void
663 execute_pass_list (struct tree_opt_pass *pass)
664 {
665 do
666 {
667 if (execute_one_pass (pass) && pass->sub)
668 execute_pass_list (pass->sub);
669 pass = pass->next;
670 }
671 while (pass);
672 }
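/* So a call such as execute_pass_list (all_passes) runs each top-level pass
   in order and, whenever a pass both runs (its gate, if any, returns true)
   and has a sub chain -- e.g. pass_all_optimizations at -O1 and above --
   recurses into that chain before moving on to pass->next.  */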
673
674 void
675 tree_lowering_passes (tree fn)
676 {
677 tree saved_current_function_decl = current_function_decl;
678
679 current_function_decl = fn;
680 push_cfun (DECL_STRUCT_FUNCTION (fn));
681 tree_register_cfg_hooks ();
682 bitmap_obstack_initialize (NULL);
683 execute_pass_list (all_lowering_passes);
684 free_dominance_info (CDI_POST_DOMINATORS);
685 compact_blocks ();
686 current_function_decl = saved_current_function_decl;
687 bitmap_obstack_release (NULL);
688 pop_cfun ();
689 }
690
691 /* Execute all IPA passes. */
692 void
693 ipa_passes (void)
694 {
695 bitmap_obstack_initialize (NULL);
696 execute_pass_list (all_ipa_passes);
697 bitmap_obstack_release (NULL);
698 }
699 \f
700
701 /* Update recursively all inlined_to pointers of functions
702 inlined into NODE to INLINED_TO. */
703 static void
704 update_inlined_to_pointers (struct cgraph_node *node,
705 struct cgraph_node *inlined_to)
706 {
707 struct cgraph_edge *e;
708 for (e = node->callees; e; e = e->next_callee)
709 {
710 if (e->callee->global.inlined_to)
711 {
712 e->callee->global.inlined_to = inlined_to;
713 update_inlined_to_pointers (e->callee, inlined_to);
714 }
715 }
716 }
717
718 \f
719 /* For functions-as-trees languages, this performs all optimization and
720 compilation for FNDECL. */
721
722 void
723 tree_rest_of_compilation (tree fndecl)
724 {
725 location_t saved_loc;
726 struct cgraph_node *saved_node = NULL, *node;
727
728 timevar_push (TV_EXPAND);
729
730 gcc_assert (!flag_unit_at_a_time || cgraph_global_info_ready);
731
732 /* Initialize the RTL code for the function. */
733 current_function_decl = fndecl;
734 saved_loc = input_location;
735 input_location = DECL_SOURCE_LOCATION (fndecl);
736 init_function_start (fndecl);
737
738 /* Even though we're inside a function body, we still don't want to
739 call expand_expr to calculate the size of a variable-sized array.
740 We haven't necessarily assigned RTL to all variables yet, so it's
741 not safe to try to expand expressions involving them. */
742 cfun->x_dont_save_pending_sizes_p = 1;
743 cfun->after_inlining = true;
744
745 node = cgraph_node (fndecl);
746
747 /* We might need the body of this function so that we can expand
748 it inline somewhere else. This means not lowering some constructs
749 such as exception handling. */
750 if (cgraph_preserve_function_body_p (fndecl))
751 {
752 if (!flag_unit_at_a_time)
753 {
754 struct cgraph_edge *e;
755
756 saved_node = cgraph_clone_node (node, node->count, 1);
757 for (e = saved_node->callees; e; e = e->next_callee)
758 if (!e->inline_failed)
759 cgraph_clone_inlined_nodes (e, true);
760 }
761 cfun->saved_static_chain_decl = cfun->static_chain_decl;
762 save_body (fndecl, &cfun->saved_args, &cfun->saved_static_chain_decl);
763 }
764
765 if (flag_inline_trees)
766 {
767 struct cgraph_edge *e;
768 for (e = node->callees; e; e = e->next_callee)
769 if (!e->inline_failed || warn_inline)
770 break;
771 if (e)
772 {
773 timevar_push (TV_INTEGRATION);
774 optimize_inline_calls (fndecl);
775 timevar_pop (TV_INTEGRATION);
776 }
777 }
778 /* We are not going to keep the cgraph edges up to date.
779 Kill them so they won't confuse us. */
780 while (node->callees)
781 {
782 /* In non-unit-at-a-time mode we must mark all referenced
783 functions as needed. */
784 if (node->callees->callee->analyzed && !flag_unit_at_a_time)
785 cgraph_mark_needed_node (node->callees->callee);
786 cgraph_remove_edge (node->callees);
787 }
788
789 /* We are not going to keep the cgraph edges up to date.
790 Kill them so they won't confuse us. */
791 cgraph_node_remove_callees (node);
792
793
794 /* Initialize the default bitmap obstack. */
795 bitmap_obstack_initialize (NULL);
796 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation. */
797
798 tree_register_cfg_hooks ();
799 /* Perform all tree transforms and optimizations. */
800 execute_pass_list (all_passes);
801
802 bitmap_obstack_release (&reg_obstack);
803
804 /* Release the default bitmap obstack. */
805 bitmap_obstack_release (NULL);
806
807 /* Restore original body if still needed. */
808 if (cfun->saved_cfg)
809 {
810 DECL_ARGUMENTS (fndecl) = cfun->saved_args;
811 cfun->cfg = cfun->saved_cfg;
812 cfun->eh = cfun->saved_eh;
813 cfun->saved_cfg = NULL;
814 cfun->saved_eh = NULL;
815 cfun->saved_args = NULL_TREE;
816 cfun->static_chain_decl = cfun->saved_static_chain_decl;
817 cfun->saved_static_chain_decl = NULL;
818 /* When not in unit-at-a-time mode, we must preserve the out-of-line copy
819 representing the node before inlining. Restore the original outgoing
820 edges using the clone we created earlier. */
821 if (!flag_unit_at_a_time)
822 {
823 struct cgraph_edge *e;
824
825 node = cgraph_node (current_function_decl);
826 cgraph_node_remove_callees (node);
827 node->callees = saved_node->callees;
828 saved_node->callees = NULL;
829 update_inlined_to_pointers (node, node);
830 for (e = node->callees; e; e = e->next_callee)
831 e->caller = node;
832 cgraph_remove_node (saved_node);
833 }
834 }
835 else
836 DECL_SAVED_TREE (fndecl) = NULL;
837 cfun = 0;
838
839 /* If requested, warn about function definitions where the function will
840 return a value (usually of some struct or union type) which itself will
841 take up a lot of stack space. */
842 if (warn_larger_than && !DECL_EXTERNAL (fndecl) && TREE_TYPE (fndecl))
843 {
844 tree ret_type = TREE_TYPE (TREE_TYPE (fndecl));
845
846 if (ret_type && TYPE_SIZE_UNIT (ret_type)
847 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
848 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
849 larger_than_size))
850 {
851 unsigned int size_as_int
852 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
853
854 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
855 warning (0, "%Jsize of return value of %qD is %u bytes",
856 fndecl, fndecl, size_as_int);
857 else
858 warning (0, "%Jsize of return value of %qD is larger than %wd bytes",
859 fndecl, fndecl, larger_than_size);
860 }
861 }
862
863 if (!flag_inline_trees)
864 {
865 DECL_SAVED_TREE (fndecl) = NULL;
866 if (DECL_STRUCT_FUNCTION (fndecl) == 0
867 && !cgraph_node (fndecl)->origin)
868 {
869 /* Stop pointing to the local nodes about to be freed.
870 But DECL_INITIAL must remain nonzero so we know this
871 was an actual function definition.
872 For a nested function, this is done in c_pop_function_context.
873 If rest_of_compilation set this to 0, leave it 0. */
874 if (DECL_INITIAL (fndecl) != 0)
875 DECL_INITIAL (fndecl) = error_mark_node;
876 }
877 }
878
879 input_location = saved_loc;
880
881 ggc_collect ();
882 timevar_pop (TV_EXPAND);
883 }