]>
Commit | Line | Data |
---|---|---|
da5e1e7c | 1 | /* Driver of optimization process |
aed6e608 | 2 | Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, |
5c8ad8f5 | 3 | 2011, 2012 Free Software Foundation, Inc. |
ae01b312 | 4 | Contributed by Jan Hubicka |
5 | ||
6 | This file is part of GCC. | |
7 | ||
8 | GCC is free software; you can redistribute it and/or modify it under | |
9 | the terms of the GNU General Public License as published by the Free | |
8c4c00c1 | 10 | Software Foundation; either version 3, or (at your option) any later |
ae01b312 | 11 | version. |
12 | ||
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 | for more details. | |
17 | ||
18 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 19 | along with GCC; see the file COPYING3. If not see |
20 | <http://www.gnu.org/licenses/>. */ | |
ae01b312 | 21 | |
da5e1e7c | 22 | /* This module implements main driver of compilation process. |
b0cdf642 | 23 | |
24 | The main scope of this file is to act as an interface in between | |
da5e1e7c | 25 | tree based frontends and the backend. |
b0cdf642 | 26 | |
27 | The front-end is supposed to use following functionality: | |
28 | ||
29 | - cgraph_finalize_function | |
30 | ||
31 | This function is called once front-end has parsed whole body of function | |
32 | and it is certain that the function body nor the declaration will change. | |
33 | ||
b326746d | 34 | (There is one exception needed for implementing GCC extern inline |
35 | function.) | |
b0cdf642 | 36 | |
1d416bd7 | 37 | - varpool_finalize_variable |
b0cdf642 | 38 | |
7bd28bba | 39 | This function has same behavior as the above but is used for static |
b0cdf642 | 40 | variables. |
41 | ||
cf951b1a | 42 | - add_asm_node |
43 | ||
44 | Insert new toplevel ASM statement | |
45 | ||
46 | - finalize_compilation_unit | |
b0cdf642 | 47 | |
b326746d | 48 | This function is called once (source level) compilation unit is finalized |
49 | and it will no longer change. | |
b0cdf642 | 50 | |
da5e1e7c | 51 | The symbol table is constructed starting from the trivially needed |
52 | symbols finalized by the frontend. Functions are lowered into | |
53 | GIMPLE representation and callgraph/reference lists are constructed. | |
54 | Those are used to discover other necessary functions and variables. |
55 | ||
56 | At the end the bodies of unreachable functions are removed. | |
b0cdf642 | 57 | |
b326746d | 58 | The function can be called multiple times when multiple source level |
da5e1e7c | 59 | compilation units are combined. |
b0cdf642 | 60 | |
cf951b1a | 61 | - compile |
b0cdf642 | 62 | |
da5e1e7c | 63 | This passes control to the back-end. Optimizations are performed and |
64 | final assembler is generated. This is done in the following way. Note | |
65 | that with link time optimization the process is split into three | |
66 | stages (compile time, linktime analysis and parallel linktime as | |
67 | indicated below). |
68 | ||
69 | Compile time: | |
70 | ||
71 | 1) Inter-procedural optimization. | |
72 | (ipa_passes) | |
73 | ||
74 | This part is further split into: | |
75 | ||
76 | a) early optimizations. These are local passes executed in | |
77 | the topological order on the callgraph. | |
78 | ||
79 | The purpose of early optimizations is to optimize away simple |
80 | things that may otherwise confuse IP analysis. Very simple | |
81 | propagation across the callgraph is done i.e. to discover | |
82 | functions without side effects and simple inlining is performed. | |
83 | ||
84 | b) early small interprocedural passes. | |
85 | ||
86 | Those are interprocedural passes executed only at compilation | |
87 | time. These include, for example, transactional memory lowering, |
88 | unreachable code removal and other simple transformations. | |
89 | ||
90 | c) IP analysis stage. All interprocedural passes do their | |
91 | analysis. | |
92 | ||
93 | Interprocedural passes differ from small interprocedural | |
94 | passes by their ability to operate across whole program | |
95 | at linktime. Their analysis stage is performed early to | |
96 | both reduce linking times and linktime memory usage by | |
97 | not having to represent whole program in memory. | |
98 | ||
99 | d) LTO streaming. When doing LTO, everything important gets |
100 | streamed into the object file. | |
101 | ||
102 | Compile time and or linktime analysis stage (WPA): | |
103 | ||
104 | At linktime units gets streamed back and symbol table is | |
105 | merged. Function bodies are not streamed in and not | |
106 | available. | |
107 | e) IP propagation stage. All IP passes execute their | |
108 | IP propagation. This is done based on the earlier analysis | |
109 | without having function bodies at hand. | |
110 | f) Ltrans streaming. When doing WHOPR LTO, the program | |
111 | is partitioned and streamed into multiple object files. |
b0cdf642 | 112 | |
da5e1e7c | 113 | Compile time and/or parallel linktime stage (ltrans) |
b0cdf642 | 114 | |
da5e1e7c | 115 | Each of the object files is streamed back and compiled |
116 | separately. Now the function bodies becomes available | |
117 | again. | |
b0cdf642 | 118 | |
da5e1e7c | 119 | 2) Virtual clone materialization |
120 | (cgraph_materialize_clone) | |
b0cdf642 | 121 | |
da5e1e7c | 122 | IP passes can produce copies of existing functions (such |
123 | as versioned clones or inline clones) without actually | |
124 | manipulating their bodies by creating virtual clones in | |
125 | the callgraph. At this time the virtual clones are | |
126 | turned into real functions | |
127 | 3) IP transformation | |
b0cdf642 | 128 | |
da5e1e7c | 129 | All IP passes transform function bodies based on earlier |
130 | decision of the IP propagation. | |
b0cdf642 | 131 | |
da5e1e7c | 132 | 4) late small IP passes |
b0cdf642 | 133 | |
da5e1e7c | 134 | Simple IP passes working within single program partition. |
b0cdf642 | 135 | |
da5e1e7c | 136 | 5) Expansion |
cf951b1a | 137 | (expand_all_functions) |
b0cdf642 | 138 | |
da5e1e7c | 139 | At this stage functions that needs to be output into |
140 | assembler are identified and compiled in topological order | |
141 | 6) Output of variables and aliases | |
142 | Now it is known what variable references were not optimized | |
143 | out and thus all variables are output to the file. | |
b0cdf642 | 144 | |
da5e1e7c | 145 | Note that with -fno-toplevel-reorder passes 5 and 6 |
146 | are combined together in cgraph_output_in_order. | |
b0cdf642 | 147 | |
da5e1e7c | 148 | Finally there are functions to manipulate the callgraph from |
149 | backend. | |
150 | - cgraph_add_new_function is used to add backend produced | |
151 | functions introduced after the unit is finalized. | |
152 | The functions are enqueue for later processing and inserted | |
153 | into callgraph with cgraph_process_new_functions. | |
121f3051 | 154 | |
da5e1e7c | 155 | - cgraph_function_versioning |
156 | ||
157 | produces a copy of function into new one (a version) | |
158 | and apply simple transformations | |
159 | */ | |
acc70efa | 160 | |
ae01b312 | 161 | #include "config.h" |
162 | #include "system.h" | |
163 | #include "coretypes.h" | |
164 | #include "tm.h" | |
165 | #include "tree.h" | |
941366fd | 166 | #include "output.h" |
b5530559 | 167 | #include "rtl.h" |
acc70efa | 168 | #include "tree-flow.h" |
ae01b312 | 169 | #include "tree-inline.h" |
170 | #include "langhooks.h" | |
c6224531 | 171 | #include "pointer-set.h" |
ae01b312 | 172 | #include "toplev.h" |
173 | #include "flags.h" | |
174 | #include "ggc.h" | |
175 | #include "debug.h" | |
176 | #include "target.h" | |
177 | #include "cgraph.h" | |
80a85d8a | 178 | #include "diagnostic.h" |
f79b6507 | 179 | #include "timevar.h" |
d7c6d889 | 180 | #include "params.h" |
181 | #include "fibheap.h" | |
611e5405 | 182 | #include "intl.h" |
b69eb0ff | 183 | #include "function.h" |
b5d36404 | 184 | #include "ipa-prop.h" |
75a70cf9 | 185 | #include "gimple.h" |
186 | #include "tree-iterator.h" | |
f1e2a033 | 187 | #include "tree-pass.h" |
bfec3452 | 188 | #include "tree-dump.h" |
da5e1e7c | 189 | #include "gimple-pretty-print.h" |
c1dcd13c | 190 | #include "output.h" |
9ed5b1f5 | 191 | #include "coverage.h" |
c9036234 | 192 | #include "plugin.h" |
a41f2a28 | 193 | #include "ipa-inline.h" |
7771d558 | 194 | #include "ipa-utils.h" |
a0605d65 | 195 | #include "lto-streamer.h" |
3db65b62 | 196 | #include "except.h" |
941366fd | 197 | #include "regset.h" /* FIXME: For reg_obstack. */ |
d7c6d889 | 198 | |
ff2a5ada | 199 | /* Queue of cgraph nodes scheduled to be added into cgraph. This is a |
200 | secondary queue used during optimization to accommodate passes that | |
201 | may generate new functions that need to be optimized and expanded. */ | |
202 | cgraph_node_set cgraph_new_nodes; | |
203 | ||
cf951b1a | 204 | static void expand_all_functions (void); |
205 | static void mark_functions_to_output (void); | |
206 | static void expand_function (struct cgraph_node *); | |
da5e1e7c | 207 | static void cgraph_analyze_function (struct cgraph_node *); |
25bb88de | 208 | |
ecb08119 | 209 | FILE *cgraph_dump_file; |
121f3051 | 210 | |
cf951b1a | 211 | /* Linked list of cgraph asm nodes. */ |
212 | struct asm_node *asm_nodes; | |
213 | ||
214 | /* Last node in the asm_nodes list. */ | |
215 | static GTY(()) struct asm_node *asm_last_node; | |
216 | ||
28454517 | 217 | /* Used for vtable lookup in thunk adjusting. */ |
218 | static GTY (()) tree vtable_entry_type; | |
219 | ||
8efa224a | 220 | /* Determine if function DECL is trivially needed and should stay in the |
221 | compilation unit. This is used at the symbol table construction time | |
222 | and differs from later logic removing unnecesary functions that can | |
223 | take into account results of analysis, whole program info etc. */ | |
2c0b522d | 224 | |
da5e1e7c | 225 | static bool |
7bfefa9d | 226 | cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl) |
2c0b522d | 227 | { |
3f82b628 | 228 | /* If the user told us it is used, then it must be so. */ |
8efa224a | 229 | if (node->symbol.force_output) |
05806473 | 230 | return true; |
231 | ||
8efa224a | 232 | /* Double check that no one output the function into assembly file |
233 | early. */ | |
234 | gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl) | |
235 | || (node->thunk.thunk_p || node->same_body_alias) | |
236 | || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))); | |
3f82b628 | 237 | |
55680bef | 238 | |
8efa224a | 239 | /* Keep constructors, destructors and virtual functions. */ |
240 | if (DECL_STATIC_CONSTRUCTOR (decl) | |
241 | || DECL_STATIC_DESTRUCTOR (decl) | |
242 | || (DECL_VIRTUAL_P (decl) | |
243 | && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl)))) | |
244 | return true; | |
2c0b522d | 245 | |
246 | /* Externally visible functions must be output. The exception is | |
8efa224a | 247 | COMDAT functions that must be output only when they are needed. */ |
8baa9d15 | 248 | |
8efa224a | 249 | if (TREE_PUBLIC (decl) |
62eec3b4 | 250 | && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)) |
2c0b522d | 251 | return true; |
252 | ||
2c0b522d | 253 | return false; |
254 | } | |
255 | ||
ff2a5ada | 256 | /* Head of the queue of nodes to be processed while building callgraph */ |
257 | ||
258 | static symtab_node first = (symtab_node)(void *)1; | |
259 | ||
260 | /* Add NODE to queue starting at FIRST. | |
261 | The queue is linked via AUX pointers and terminated by pointer to 1. */ | |
262 | ||
263 | static void | |
264 | enqueue_node (symtab_node node) | |
265 | { | |
266 | if (node->symbol.aux) | |
267 | return; | |
268 | gcc_checking_assert (first); | |
269 | node->symbol.aux = first; | |
270 | first = node; | |
271 | } | |
272 | ||
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.

   How much work is done per function depends on how far compilation has
   progressed (cgraph_state): at construction time the function is merely
   finalized and enqueued; during IPA it is analyzed and brought up to the
   same point as the rest of the unit; during expansion it is compiled
   immediately.

   Returns true if any function was finalized here (only possible in the
   CONSTRUCTION state).  The new-nodes set is consumed and freed.  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;
  cgraph_node_set_iterator csi;

  if (!cgraph_new_nodes)
    return false;
  /* Note that this queue may grow as its being processed, as the new
     functions may generate new ones.  */
  for (csi = csi_start (cgraph_new_nodes); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      fndecl = node->symbol.decl;
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  cgraph_finalize_function (fndecl, false);
	  output = true;
	  cgraph_call_function_insertion_hooks (node);
	  enqueue_node ((symtab_node) node);
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    cgraph_analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  current_function_decl = fndecl;
	  if ((cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      /* When not optimizing, be sure we run early local passes anyway
		 to expand OMP.  */
	      || !optimize)
	    execute_pass_list (pass_early_local_passes.pass.sub);
	  else
	    compute_inline_parameters (node, true);
	  /* Dominance info computed by the passes above is not needed any
	     further; release it before leaving the function's cfun.  */
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  current_function_decl = NULL;
	  cgraph_call_function_insertion_hooks (node);
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  cgraph_call_function_insertion_hooks (node);
	  expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }
  free_cgraph_node_set (cgraph_new_nodes);
  cgraph_new_nodes = NULL;
  return output;
}
348 | ||
9b8fb23a | 349 | /* As an GCC extension we allow redefinition of the function. The |
350 | semantics when both copies of bodies differ is not well defined. | |
351 | We replace the old body with new body so in unit at a time mode | |
352 | we always use new body, while in normal mode we may end up with | |
353 | old body inlined into some functions and new body expanded and | |
354 | inlined in others. | |
355 | ||
356 | ??? It may make more sense to use one body for inlining and other | |
357 | body for expanding the function but this is difficult to do. */ | |
358 | ||
359 | static void | |
360 | cgraph_reset_node (struct cgraph_node *node) | |
361 | { | |
09fc9532 | 362 | /* If node->process is set, then we have already begun whole-unit analysis. |
6329636b | 363 | This is *not* testing for whether we've already emitted the function. |
364 | That case can be sort-of legitimately seen with real function redefinition | |
365 | errors. I would argue that the front end should never present us with | |
366 | such a case, but don't enforce that for now. */ | |
09fc9532 | 367 | gcc_assert (!node->process); |
9b8fb23a | 368 | |
369 | /* Reset our data structures so we can analyze the function again. */ | |
370 | memset (&node->local, 0, sizeof (node->local)); | |
371 | memset (&node->global, 0, sizeof (node->global)); | |
372 | memset (&node->rtl, 0, sizeof (node->rtl)); | |
373 | node->analyzed = false; | |
9b8fb23a | 374 | node->local.finalized = false; |
375 | ||
9b8fb23a | 376 | cgraph_node_remove_callees (node); |
9b8fb23a | 377 | } |
c08871a9 | 378 | |
9a2639fc | 379 | /* Return true when there are references to NODE. */ |
380 | ||
381 | static bool | |
382 | referred_to_p (symtab_node node) | |
383 | { | |
9a2639fc | 384 | struct ipa_ref *ref; |
385 | ||
cf951b1a | 386 | /* See if there are any refrences at all. */ |
387 | if (ipa_ref_list_referring_iterate (&node->symbol.ref_list, 0, ref)) | |
9a2639fc | 388 | return true; |
cf951b1a | 389 | /* For functions check also calls. */ |
9a2639fc | 390 | if (symtab_function_p (node) && cgraph (node)->callers) |
391 | return true; | |
392 | return false; | |
393 | } | |
394 | ||
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.

   This is the front end's entry point for registering a finished function
   body; a repeated call for the same DECL is treated as an (extern inline)
   redefinition and resets the node first.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_get_create_node (decl);

  /* Redefinition: throw away everything derived from the old body.  */
  if (node->local.finalized)
    {
      cgraph_reset_node (node);
      node->local.redefined_extern_inline = true;
    }

  notice_global_symbol (decl);
  node->local.finalized = true;
  /* A CFG already built by the front end means lowering already happened.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->symbol.force_output = 1;

  /* When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if ((!optimize
       && !node->same_body_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->symbol.force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!nested)
    ggc_collect ();

  /* While the symbol table is still being built, enqueue the node for
     analysis if it is either trivially needed or already referenced.  */
  if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
      && (cgraph_decide_is_function_needed (node, decl)
	  || referred_to_p ((symtab_node)node)))
    enqueue_node ((symtab_node)node);
}
453 | ||
/* Add the function FNDECL to the call graph.
   Unlike cgraph_finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.

   LOWERED says whether FNDECL has already been through the lowering
   passes; how much catch-up work is run here depends on cgraph_state.  */

void
cgraph_add_new_function (tree fndecl, bool lowered)
{
  struct cgraph_node *node;
  switch (cgraph_state)
    {
    case CGRAPH_STATE_PARSING:
      /* Still parsing: the normal finalization path handles everything.  */
      cgraph_finalize_function (fndecl, false);
      break;
    case CGRAPH_STATE_CONSTRUCTION:
      /* Just enqueue function to be processed at nearest occurrence.  */
      node = cgraph_create_node (fndecl);
      if (lowered)
	node->lowered = true;
      if (!cgraph_new_nodes)
	cgraph_new_nodes = cgraph_node_set_new ();
      cgraph_node_set_add (cgraph_new_nodes, node);
      break;

    case CGRAPH_STATE_IPA:
    case CGRAPH_STATE_IPA_SSA:
    case CGRAPH_STATE_EXPANSION:
      /* Bring the function into finalized state and enqueue for later
	 analyzing and compilation.  */
      node = cgraph_get_create_node (fndecl);
      node->local.local = false;
      node->local.finalized = true;
      node->symbol.force_output = true;
      /* During expansion an unlowered body must be lowered right away,
	 since the lowering passes will not run for it again.  */
      if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
	{
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  current_function_decl = fndecl;
	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (all_lowering_passes);
	  execute_pass_list (pass_early_local_passes.pass.sub);
	  bitmap_obstack_release (NULL);
	  pop_cfun ();
	  current_function_decl = NULL;

	  lowered = true;
	}
      if (lowered)
	node->lowered = true;
      if (!cgraph_new_nodes)
	cgraph_new_nodes = cgraph_node_set_new ();
      cgraph_node_set_add (cgraph_new_nodes, node);
      break;

    case CGRAPH_STATE_FINISHED:
      /* At the very end of compilation we have to do all the work up
	 to expansion.  */
      node = cgraph_create_node (fndecl);
      if (lowered)
	node->lowered = true;
      cgraph_analyze_function (node);
      push_cfun (DECL_STRUCT_FUNCTION (fndecl));
      current_function_decl = fndecl;
      gimple_register_cfg_hooks ();
      bitmap_obstack_initialize (NULL);
      if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	execute_pass_list (pass_early_local_passes.pass.sub);
      bitmap_obstack_release (NULL);
      pop_cfun ();
      expand_function (node);
      current_function_decl = NULL;
      break;

    default:
      gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
544 | ||
cf951b1a | 545 | /* Add a top-level asm statement to the list. */ |
546 | ||
547 | struct asm_node * | |
548 | add_asm_node (tree asm_str) | |
549 | { | |
550 | struct asm_node *node; | |
551 | ||
552 | node = ggc_alloc_cleared_asm_node (); | |
553 | node->asm_str = asm_str; | |
554 | node->order = symtab_order++; | |
555 | node->next = NULL; | |
556 | if (asm_nodes == NULL) | |
557 | asm_nodes = node; | |
558 | else | |
559 | asm_last_node->next = node; | |
560 | asm_last_node = node; | |
561 | return node; | |
562 | } | |
563 | ||
56af936e | 564 | /* Output all asm statements we have stored up to be output. */ |
565 | ||
566 | static void | |
cf951b1a | 567 | output_asm_statements (void) |
56af936e | 568 | { |
cf951b1a | 569 | struct asm_node *can; |
56af936e | 570 | |
852f689e | 571 | if (seen_error ()) |
56af936e | 572 | return; |
573 | ||
cf951b1a | 574 | for (can = asm_nodes; can; can = can->next) |
56af936e | 575 | assemble_asm (can->asm_str); |
cf951b1a | 576 | asm_nodes = NULL; |
577 | } | |
578 | ||
579 | /* C++ FE sometimes change linkage flags after producing same body aliases. */ | |
580 | void | |
581 | fixup_same_cpp_alias_visibility (symtab_node node, symtab_node target, tree alias) | |
582 | { | |
583 | DECL_VIRTUAL_P (node->symbol.decl) = DECL_VIRTUAL_P (alias); | |
584 | if (TREE_PUBLIC (node->symbol.decl)) | |
585 | { | |
586 | DECL_EXTERNAL (node->symbol.decl) = DECL_EXTERNAL (alias); | |
587 | DECL_COMDAT (node->symbol.decl) = DECL_COMDAT (alias); | |
588 | DECL_COMDAT_GROUP (node->symbol.decl) = DECL_COMDAT_GROUP (alias); | |
589 | if (DECL_ONE_ONLY (alias) | |
590 | && !node->symbol.same_comdat_group) | |
591 | symtab_add_to_same_comdat_group ((symtab_node)node, (symtab_node)target); | |
592 | } | |
56af936e | 593 | } |
594 | ||
/* Analyze the function scheduled to be output.

   Three cases are handled: an alias (link it to its target and detect
   alias cycles), a thunk (create the call edge to the real function),
   or an ordinary body (gimplify and run the lowering passes once).
   On return NODE is marked analyzed; current_function_decl and
   input_location are restored.  */
static void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree save = current_function_decl;
  tree decl = node->symbol.decl;
  location_t saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  if (node->alias && node->thunk.alias)
    {
      struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
      struct cgraph_node *n;

      /* Walk the alias chain; finding NODE again means a cycle.  */
      for (n = tgt; n && n->alias;
	   n = n->analyzed ? cgraph_alias_aliased_node (n) : NULL)
	if (n == node)
	  {
	    error ("function %q+D part of alias cycle", node->symbol.decl);
	    node->alias = false;
	    input_location = saved_loc;
	    return;
	  }
      /* Record the IPA_REF_ALIAS reference once.  */
      if (!VEC_length (ipa_ref_t, node->symbol.ref_list.references))
	ipa_record_reference ((symtab_node)node, (symtab_node)tgt,
			      IPA_REF_ALIAS, NULL);
      if (node->same_body_alias)
	{
	  /* Same-body aliases mirror the inline-related flags and
	     visibility of their target.  */
	  DECL_DECLARED_INLINE_P (node->symbol.decl)
	    = DECL_DECLARED_INLINE_P (node->thunk.alias);
	  DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl)
	    = DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
	  fixup_same_cpp_alias_visibility ((symtab_node) node, (symtab_node) tgt, node->thunk.alias);
	}

      /* Taking the address of the alias takes the address of its target.  */
      if (node->symbol.address_taken)
	cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
    }
  else if (node->thunk.thunk_p)
    {
      /* A thunk simply calls the function it wraps.  */
      cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
			  NULL, 0, CGRAPH_FREQ_BASE);
    }
  else
    {
      current_function_decl = decl;
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_neeeded (node->symbol.decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_body (decl))
	gimplify_function_tree (decl);
      dump_function (TDI_generic, decl);

      /* Lower the function.  */
      if (!node->lowered)
	{
	  if (node->nested)
	    lower_nested_functions (node->symbol.decl);
	  gcc_assert (!node->nested);

	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (all_lowering_passes);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  compact_blocks ();
	  bitmap_obstack_release (NULL);
	  node->lowered = true;
	}

      pop_cfun ();
    }
  node->analyzed = true;

  current_function_decl = save;
  input_location = saved_loc;
}
677 | ||
c70f46b0 | 678 | /* C++ frontend produce same body aliases all over the place, even before PCH |
679 | gets streamed out. It relies on us linking the aliases with their function | |
680 | in order to do the fixups, but ipa-ref is not PCH safe. Consequentely we | |
681 | first produce aliases without links, but once C++ FE is sure he won't sream | |
682 | PCH we build the links via this function. */ | |
683 | ||
684 | void | |
685 | cgraph_process_same_body_aliases (void) | |
686 | { | |
687 | struct cgraph_node *node; | |
7c455d87 | 688 | FOR_EACH_FUNCTION (node) |
c70f46b0 | 689 | if (node->same_body_alias |
7d0d0ce1 | 690 | && !VEC_length (ipa_ref_t, node->symbol.ref_list.references)) |
c70f46b0 | 691 | { |
692 | struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias); | |
04ec15fa | 693 | ipa_record_reference ((symtab_node)node, (symtab_node)tgt, |
694 | IPA_REF_ALIAS, NULL); | |
c70f46b0 | 695 | } |
696 | same_body_aliases_done = true; | |
697 | } | |
698 | ||
d05db70d | 699 | /* Process attributes common for vars and functions. */ |
700 | ||
701 | static void | |
702 | process_common_attributes (tree decl) | |
703 | { | |
704 | tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)); | |
705 | ||
706 | if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl))) | |
707 | { | |
708 | warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes, | |
709 | "%<weakref%> attribute should be accompanied with" | |
710 | " an %<alias%> attribute"); | |
711 | DECL_WEAK (decl) = 0; | |
40b32d93 | 712 | DECL_ATTRIBUTES (decl) = remove_attribute ("weakref", |
713 | DECL_ATTRIBUTES (decl)); | |
d05db70d | 714 | } |
715 | } | |
716 | ||
05806473 | 717 | /* Look for externally_visible and used attributes and mark cgraph nodes |
718 | accordingly. | |
719 | ||
720 | We cannot mark the nodes at the point the attributes are processed (in | |
721 | handle_*_attribute) because the copy of the declarations available at that | |
722 | point may not be canonical. For example, in: | |
723 | ||
724 | void f(); | |
725 | void f() __attribute__((used)); | |
726 | ||
727 | the declaration we see in handle_used_attribute will be the second | |
728 | declaration -- but the front end will subsequently merge that declaration | |
729 | with the original declaration and discard the second declaration. | |
730 | ||
731 | Furthermore, we can't mark these nodes in cgraph_finalize_function because: | |
732 | ||
733 | void f() {} | |
734 | void f() __attribute__((externally_visible)); | |
735 | ||
736 | is valid. | |
737 | ||
738 | So, we walk the nodes at the end of the translation unit, applying the | |
739 | attributes at that point. */ | |
740 | ||
741 | static void | |
742 | process_function_and_variable_attributes (struct cgraph_node *first, | |
1d416bd7 | 743 | struct varpool_node *first_var) |
05806473 | 744 | { |
745 | struct cgraph_node *node; | |
1d416bd7 | 746 | struct varpool_node *vnode; |
05806473 | 747 | |
0704fb2e | 748 | for (node = cgraph_first_function (); node != first; |
749 | node = cgraph_next_function (node)) | |
05806473 | 750 | { |
7d0d0ce1 | 751 | tree decl = node->symbol.decl; |
83a23b05 | 752 | if (DECL_PRESERVE_P (decl)) |
8efa224a | 753 | cgraph_mark_force_output_node (node); |
62433d51 | 754 | else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl))) |
05806473 | 755 | { |
7d0d0ce1 | 756 | if (! TREE_PUBLIC (node->symbol.decl)) |
757 | warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes, | |
712d2297 | 758 | "%<externally_visible%>" |
759 | " attribute have effect only on public objects"); | |
05806473 | 760 | } |
40b32d93 | 761 | if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)) |
c70f46b0 | 762 | && (node->local.finalized && !node->alias)) |
40b32d93 | 763 | { |
7d0d0ce1 | 764 | warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes, |
40b32d93 | 765 | "%<weakref%> attribute ignored" |
766 | " because function is defined"); | |
767 | DECL_WEAK (decl) = 0; | |
768 | DECL_ATTRIBUTES (decl) = remove_attribute ("weakref", | |
769 | DECL_ATTRIBUTES (decl)); | |
770 | } | |
a522e9eb | 771 | |
772 | if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)) | |
773 | && !DECL_DECLARED_INLINE_P (decl) | |
774 | /* redefining extern inline function makes it DECL_UNINLINABLE. */ | |
775 | && !DECL_UNINLINABLE (decl)) | |
776 | warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes, | |
777 | "always_inline function might not be inlinable"); | |
778 | ||
d05db70d | 779 | process_common_attributes (decl); |
05806473 | 780 | } |
0704fb2e | 781 | for (vnode = varpool_first_variable (); vnode != first_var; |
782 | vnode = varpool_next_variable (vnode)) | |
05806473 | 783 | { |
7d0d0ce1 | 784 | tree decl = vnode->symbol.decl; |
83a23b05 | 785 | if (DECL_PRESERVE_P (decl)) |
ff2a5ada | 786 | vnode->symbol.force_output = true; |
62433d51 | 787 | else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl))) |
05806473 | 788 | { |
7d0d0ce1 | 789 | if (! TREE_PUBLIC (vnode->symbol.decl)) |
790 | warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes, | |
712d2297 | 791 | "%<externally_visible%>" |
792 | " attribute have effect only on public objects"); | |
05806473 | 793 | } |
40b32d93 | 794 | if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)) |
795 | && vnode->finalized | |
796 | && DECL_INITIAL (decl)) | |
797 | { | |
7d0d0ce1 | 798 | warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes, |
40b32d93 | 799 | "%<weakref%> attribute ignored" |
800 | " because variable is initialized"); | |
801 | DECL_WEAK (decl) = 0; | |
802 | DECL_ATTRIBUTES (decl) = remove_attribute ("weakref", | |
803 | DECL_ATTRIBUTES (decl)); | |
804 | } | |
d05db70d | 805 | process_common_attributes (decl); |
05806473 | 806 | } |
807 | } | |
808 | ||
/* Mark DECL as finalized.  By finalizing the declaration, the front end
   instructs the middle end to output the variable to the asm file if it
   is needed or externally visible.  */
813 | void | |
814 | varpool_finalize_decl (tree decl) | |
815 | { | |
816 | struct varpool_node *node = varpool_node (decl); | |
817 | ||
818 | gcc_assert (TREE_STATIC (decl)); | |
819 | ||
820 | if (node->finalized) | |
821 | return; | |
822 | notice_global_symbol (decl); | |
823 | node->finalized = true; | |
824 | if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl) | |
825 | /* Traditionally we do not eliminate static variables when not | |
826 | optimizing and when not doing toplevel reoder. */ | |
827 | || (!flag_toplevel_reorder && !DECL_COMDAT (node->symbol.decl) | |
828 | && !DECL_ARTIFICIAL (node->symbol.decl))) | |
829 | node->symbol.force_output = true; | |
830 | ||
831 | if (cgraph_state == CGRAPH_STATE_CONSTRUCTION | |
832 | && (decide_is_variable_needed (node, decl) | |
833 | || referred_to_p ((symtab_node)node))) | |
834 | enqueue_node ((symtab_node)node); | |
835 | if (cgraph_state >= CGRAPH_STATE_IPA_SSA) | |
836 | varpool_analyze_node (node); | |
837 | } | |
838 | ||
/* Discover all functions and variables that are trivially needed, analyze
   them as well as all functions and variables referred to by them.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_handled = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct varpool_node *first_handled_var = first_analyzed_var;

  symtab_node node, next;
  int i;
  struct ipa_ref *ref;
  bool changed = true;

  bitmap_obstack_initialize (NULL);
  cgraph_state = CGRAPH_STATE_CONSTRUCTION;

  /* Analysis adds static variables that in turn add references to new
     functions, so we must iterate the process until it stabilizes.  */
  while (changed)
    {
      changed = false;
      process_function_and_variable_attributes (first_analyzed,
						first_analyzed_var);

      /* First identify the trivially needed symbols.  Only symbols added
	 since the last call (i.e. before FIRST_ANALYZED/FIRST_ANALYZED_VAR
	 in the symtab chain) are scanned.  */
      for (node = symtab_nodes;
	   node != (symtab_node)first_analyzed
	   && node != (symtab_node)first_analyzed_var; node = node->symbol.next)
	{
	  if ((symtab_function_p (node)
	       && cgraph (node)->local.finalized
	       && cgraph_decide_is_function_needed (cgraph (node), node->symbol.decl))
	      || (symtab_variable_p (node)
		  && varpool (node)->finalized
		  && !DECL_EXTERNAL (node->symbol.decl)
		  && decide_is_variable_needed (varpool (node), node->symbol.decl)))
	    {
	      enqueue_node (node);
	      /* Emit the dump header only once, on the first hit.  */
	      if (!changed && cgraph_dump_file)
		fprintf (cgraph_dump_file, "Trivially needed symbols:");
	      changed = true;
	      if (cgraph_dump_file)
		fprintf (cgraph_dump_file, " %s", symtab_node_asm_name (node));
	    }
	  if (node == (symtab_node)first_analyzed
	      || node == (symtab_node)first_analyzed_var)
	    break;
	}
      cgraph_process_new_functions ();
      first_analyzed_var = varpool_first_variable ();
      first_analyzed = cgraph_first_function ();

      if (changed && dump_file)
	fprintf (cgraph_dump_file, "\n");

      /* Lower representation, build callgraph edges and references for all
	 trivially needed symbols and all symbols referred to by them.
	 NOTE(review): FIRST is the head of the work list maintained by
	 enqueue_node and is declared elsewhere in this file; the sentinel
	 appears to be (symtab_node)(void *)1 — confirm against the
	 enqueue_node definition.  */
      while (first != (symtab_node)(void *)1)
	{
	  changed = true;
	  node = first;
	  first = (symtab_node)first->symbol.aux;
	  if (symtab_function_p (node) && cgraph (node)->local.finalized)
	    {
	      struct cgraph_edge *edge;
	      struct cgraph_node *cnode;
	      tree decl;

	      cnode = cgraph (node);
	      decl = cnode->symbol.decl;

	      /* ??? It is possible to create extern inline function and later using
		 weak alias attribute to kill its body.  See
		 gcc.c-torture/compile/20011119-1.c  */
	      if (!DECL_STRUCT_FUNCTION (decl)
		  && (!cnode->alias || !cnode->thunk.alias)
		  && !cnode->thunk.thunk_p)
		{
		  cgraph_reset_node (cnode);
		  cnode->local.redefined_extern_inline = true;
		  continue;
		}

	      if (!cnode->analyzed)
		cgraph_analyze_function (cnode);

	      /* Pull every finalized callee into the work list.  */
	      for (edge = cnode->callees; edge; edge = edge->next_callee)
		if (edge->callee->local.finalized)
		  enqueue_node ((symtab_node)edge->callee);

	      /* If decl is a clone of an abstract function, mark that abstract
		 function so that we don't release its body.  The DECL_INITIAL() of that
		 abstract function declaration will be later needed to output debug
		 info.  */
	      if (DECL_ABSTRACT_ORIGIN (decl))
		{
		  struct cgraph_node *origin_node;
		  origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
		  origin_node->abstract_and_needed = true;
		}

	    }
	  else if (symtab_variable_p (node)
		   && varpool (node)->finalized)
	    varpool_analyze_node (varpool (node));

	  /* Members of a comdat group are emitted together: reaching one
	     reaches them all.  */
	  if (node->symbol.same_comdat_group)
	    {
	      symtab_node next;
	      for (next = node->symbol.same_comdat_group;
		   next != node;
		   next = next->symbol.same_comdat_group)
		enqueue_node (next);
	    }
	  /* Likewise enqueue every finalized symbol this node refers to.  */
	  for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list, i, ref); i++)
	    if ((symtab_function_p (ref->referred) && cgraph (ref->referred)->local.finalized)
		|| (symtab_variable_p (ref->referred) && varpool (ref->referred)->finalized))
	      enqueue_node (ref->referred);
	  cgraph_process_new_functions ();
	}
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_symtab (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nRemoving unused symbols:");

  /* Sweep: any newly added symbol that was never enqueued (aux unset) and
     is not referenced can be removed from the symbol table entirely.  */
  for (node = symtab_nodes;
       node != (symtab_node)first_handled
       && node != (symtab_node)first_handled_var; node = next)
    {
      next = node->symbol.next;
      if (!node->symbol.aux && !referred_to_p (node))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", symtab_node_name (node));
	  symtab_remove_node (node);
	  continue;
	}
      if (symtab_function_p (node))
	{
	  tree decl = node->symbol.decl;
	  struct cgraph_node *cnode = cgraph (node);

	  /* Release bodies of functions that were finalized but whose body
	     never materialized (e.g. killed extern inlines).  */
	  if (cnode->local.finalized && !gimple_has_body_p (decl)
	      && (!cnode->alias || !cnode->thunk.alias)
	      && !cnode->thunk.thunk_p)
	    cgraph_reset_node (cnode);

	  gcc_assert (!cnode->local.finalized || cnode->thunk.thunk_p
		      || cnode->alias
		      || gimple_has_body_p (decl));
	  gcc_assert (cnode->analyzed == cnode->local.finalized);
	}
      node->symbol.aux = NULL;
    }
  first_analyzed = cgraph_first_function ();
  first_analyzed_var = varpool_first_variable ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_symtab (cgraph_dump_file);
    }
  bitmap_obstack_release (NULL);
  ggc_collect ();
}
1015 | ||
/* Translate the ugly representation of aliases as alias pairs into nice
   representation in callgraph.  We don't handle all cases yet,
   unfortunately.  */

static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;
  struct cgraph_node *target_node;
  struct cgraph_node *src_node;
  struct varpool_node *target_vnode;

  /* Iterate without auto-increment: handled pairs are removed via
     VEC_unordered_remove (which moves the last element into slot I), so I
     is only advanced when the pair is left in place.  */
  for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p);)
    {
      /* Function alias whose target is a function known in this unit.  */
      if (TREE_CODE (p->decl) == FUNCTION_DECL
	  && (target_node = cgraph_node_for_asm (p->target)) != NULL)
	{
	  src_node = cgraph_get_node (p->decl);
	  if (src_node && src_node->local.finalized)
	    cgraph_reset_node (src_node);
	  /* Normally EXTERNAL flag is used to mark external inlines,
	     however for aliases it seems to be allowed to use it w/o
	     any meaning. See gcc.dg/attr-alias-3.c
	     However for weakref we insist on EXTERNAL flag being set.
	     See gcc.dg/attr-alias-5.c */
	  if (DECL_EXTERNAL (p->decl))
	    DECL_EXTERNAL (p->decl)
	      = lookup_attribute ("weakref",
				  DECL_ATTRIBUTES (p->decl)) != NULL;
	  cgraph_create_function_alias (p->decl, target_node->symbol.decl);
	  VEC_unordered_remove (alias_pair, alias_pairs, i);
	}
      /* Variable alias whose target is a variable known in this unit.  */
      else if (TREE_CODE (p->decl) == VAR_DECL
	       && (target_vnode = varpool_node_for_asm (p->target)) != NULL)
	{
	  /* Normally EXTERNAL flag is used to mark external inlines,
	     however for aliases it seems to be allowed to use it w/o
	     any meaning. See gcc.dg/attr-alias-3.c
	     However for weakref we insist on EXTERNAL flag being set.
	     See gcc.dg/attr-alias-5.c */
	  if (DECL_EXTERNAL (p->decl))
	    DECL_EXTERNAL (p->decl)
	      = lookup_attribute ("weakref",
				  DECL_ATTRIBUTES (p->decl)) != NULL;
	  varpool_create_variable_alias (p->decl, target_vnode->symbol.decl);
	  VEC_unordered_remove (alias_pair, alias_pairs, i);
	}
      /* Weakrefs with target not defined in current unit are easy to handle; they
	 behave just as external variables except we need to note the alias flag
	 to later output the weakref pseudo op into asm file.
	 NOTE(review): the lookup is deliberately cross-kind — at this point
	 the same-kind lookup above already failed, so this checks the target
	 is not defined as the *other* symbol kind either.  Verify against
	 later upstream refactorings of this condition.  */
      else if (lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL
	       && (TREE_CODE (p->decl) == FUNCTION_DECL
		   ? (varpool_node_for_asm (p->target) == NULL)
		   : (cgraph_node_for_asm (p->target) == NULL)))
	{
	  if (TREE_CODE (p->decl) == FUNCTION_DECL)
	    cgraph_get_create_node (p->decl)->alias = true;
	  else
	    varpool_get_node (p->decl)->alias = true;
	  DECL_EXTERNAL (p->decl) = 1;
	  VEC_unordered_remove (alias_pair, alias_pairs, i);
	}
      /* Anything else stays in the vector for later processing.  */
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "Unhandled alias %s->%s\n",
		     IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
		     IDENTIFIER_POINTER (p->target));

	  i++;
	}
    }
}
1090 | ||
8f69fd82 | 1091 | |
/* Figure out what functions we want to assemble.  Sets NODE->process on
   every function whose body must be emitted; comdat group members of an
   emitted function are marked along with it.  */

static void
mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* No function may be pre-marked on entry.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->process);
#endif

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->symbol.decl;

      /* Only a comdat-group sweep (below) may have marked us already.  */
      gcc_assert (!node->process || node->symbol.same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* Emitting one member of a comdat group drags in the rest
	     (except thunks and aliases, emitted separately).  */
	  if (node->symbol.same_comdat_group)
	    {
	      struct cgraph_node *next;
	      for (next = cgraph (node->symbol.same_comdat_group);
		   next != node;
		   next = cgraph (next->symbol.same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias)
		  next->process = 1;
	    }
	}
      else if (node->symbol.same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Re-checked after the main loop: a group member must not stay
	     unmarked while carrying a body.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->symbol.in_other_partition
	      && !node->alias
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->symbol.in_other_partition
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  if (check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->symbol.same_comdat_group && !node->process)
	{
	  tree decl = node->symbol.decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->symbol.in_other_partition
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
#endif
}
1187 | ||
/* DECL is FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form.

   Set current_function_decl and cfun to newly constructed empty function
   body.  Return basic block in the function body.  */

static basic_block
init_lowered_empty_function (tree decl)
{
  basic_block bb;

  /* NOTE: the call order below matters — cfun must exist before the CFG
     and SSA machinery is initialized on it.  */
  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);
  init_ssa_operands ();
  cfun->gimple_df->in_ssa_p = true;
  DECL_INITIAL (decl) = make_node (BLOCK);

  /* Mark the body as already generated so nothing tries to gimplify it.  */
  DECL_SAVED_TREE (decl) = error_mark_node;
  /* Claim the properties normally produced by the lowering passes, since
     we build the function directly in lowered SSA form.  */
  cfun->curr_properties |=
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
     PROP_ssa | PROP_gimple_any);

  /* Create BB for body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
  make_edge (bb, EXIT_BLOCK_PTR, 0);

  return bb;
}
1220 | ||
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null.  THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.  Statements are emitted after the
   iterator BSI; the adjusted value is returned as a fresh temporary.  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  /* For a this-adjusting thunk the fixed offset is applied BEFORE the
     virtual lookup; for a result-adjusting thunk, after (see below).  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
	       (ptr, fold_build_pointer_plus_hwi_loc (input_location,
						      ptr,
						      fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      /* Lazily build the global type used to access vtable entries.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_var (build_pointer_type
			(build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
	ptrtmp = ptr;
      else
	{
	  ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
	  stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	  mark_symbols_for_renaming (stmt);
	  find_referenced_vars_in (stmt);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  mark_symbols_for_renaming (stmt);
  find_referenced_vars_in (stmt);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1334 | ||
/* Produce assembler for thunk NODE.  Uses the target's output_mi_thunk
   hook when it can handle this thunk directly; otherwise synthesizes a
   GIMPLE body that adjusts `this' (and possibly the return value) and
   tail-calls the target, then hands it to the normal compilation path.  */

static void
assemble_thunk (struct cgraph_node *node)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = node->thunk.alias;
  tree thunk_fndecl = node->symbol.decl;
  tree a = DECL_ARGUMENTS (thunk_fndecl);

  current_function_decl = thunk_fndecl;

  /* Ensure thunks are emitted in their correct sections.  */
  resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

  /* Fast path: let the backend emit the thunk as raw assembly.  */
  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;
      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, restype);
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      /* The node no longer represents a pending thunk.  */
      node->thunk.thunk_p = false;
      node->analyzed = false;
    }
  else
    {
      /* Slow path: build a real GIMPLE body for the thunk.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      VEC(tree, heap) *vargs;

      gimple call;
      gimple ret;

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
	virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (!is_gimple_reg_type (restype))
	    {
	      /* Aggregate return: reuse the RESULT_DECL directly.  */
	      restmp = resdecl;
	      add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
	    restmp = create_tmp_var_raw (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
	nargs++;
      vargs = VEC_alloc (tree, heap, nargs);
      /* The first argument is `this'; adjust it if this is a
	 this-adjusting thunk.  */
      if (this_adjusting)
	VEC_quick_push (tree, vargs,
			thunk_adjust (&bsi,
				      a, 1, fixed_offset,
				      virtual_offset));
      else
	VEC_quick_push (tree, vargs, a);
      for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
	VEC_quick_push (tree, vargs, arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      VEC_free (tree, heap, vargs);
      gimple_call_set_from_thunk (call, true);
      if (restmp)
	gimple_call_set_lhs (call, restmp);
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      mark_symbols_for_renaming (call);
      find_referenced_vars_in (call);
      update_stmt (call);

      if (restmp && !this_adjusting)
	{
	  tree true_label = NULL_TREE;

	  if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
	    {
	      gimple stmt;
	      /* If the return type is a pointer, we need to
		 protect against NULL.  We know there will be an
		 adjustment, because that's why we're emitting a
		 thunk.  */
	      then_bb = create_basic_block (NULL, (void *) 0, bb);
	      return_bb = create_basic_block (NULL, (void *) 0, then_bb);
	      else_bb = create_basic_block (NULL, (void *) 0, else_bb);
	      remove_edge (single_succ_edge (bb));
	      true_label = gimple_block_label (then_bb);
	      stmt = gimple_build_cond (NE_EXPR, restmp,
					build_zero_cst (TREE_TYPE (restmp)),
					NULL_TREE, NULL_TREE);
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      make_edge (bb, then_bb, EDGE_TRUE_VALUE);
	      make_edge (bb, else_bb, EDGE_FALSE_VALUE);
	      make_edge (return_bb, EXIT_BLOCK_PTR, 0);
	      make_edge (then_bb, return_bb, EDGE_FALLTHRU);
	      make_edge (else_bb, return_bb, EDGE_FALLTHRU);
	      bsi = gsi_last_bb (then_bb);
	    }

	  restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				 fixed_offset, virtual_offset);
	  if (true_label)
	    {
	      gimple stmt;
	      /* On the NULL path, return a NULL of the right type.  */
	      bsi = gsi_last_bb (else_bb);
	      stmt = gimple_build_assign (restmp,
					  build_zero_cst (TREE_TYPE (restmp)));
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      bsi = gsi_last_bb (return_bb);
	    }
	}
      else
	gimple_call_set_tail (call, true);

      /* Build return value.  */
      ret = gimple_build_return (restmp);
      gsi_insert_after (&bsi, ret, GSI_NEW_STMT);

      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);

      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      node->thunk.thunk_p = false;
      cgraph_node_remove_callees (node);
      cgraph_add_new_function (thunk_fndecl, true);
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
}
1517 | ||
91bf9d9a | 1518 | |
c70f46b0 | 1519 | |
1520 | /* Assemble thunks and aliases asociated to NODE. */ | |
91bf9d9a | 1521 | |
1522 | static void | |
c70f46b0 | 1523 | assemble_thunks_and_aliases (struct cgraph_node *node) |
91bf9d9a | 1524 | { |
1525 | struct cgraph_edge *e; | |
c70f46b0 | 1526 | int i; |
1527 | struct ipa_ref *ref; | |
1528 | ||
91bf9d9a | 1529 | for (e = node->callers; e;) |
1530 | if (e->caller->thunk.thunk_p) | |
1531 | { | |
1532 | struct cgraph_node *thunk = e->caller; | |
1533 | ||
1534 | e = e->next_caller; | |
c70f46b0 | 1535 | assemble_thunks_and_aliases (thunk); |
91bf9d9a | 1536 | assemble_thunk (thunk); |
1537 | } | |
1538 | else | |
1539 | e = e->next_caller; | |
04ec15fa | 1540 | for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list, |
7d0d0ce1 | 1541 | i, ref); i++) |
c70f46b0 | 1542 | if (ref->use == IPA_REF_ALIAS) |
1543 | { | |
04ec15fa | 1544 | struct cgraph_node *alias = ipa_ref_referring_node (ref); |
968b8c52 | 1545 | bool saved_written = TREE_ASM_WRITTEN (alias->thunk.alias); |
1546 | ||
1547 | /* Force assemble_alias to really output the alias this time instead | |
1548 | of buffering it in same alias pairs. */ | |
1549 | TREE_ASM_WRITTEN (alias->thunk.alias) = 1; | |
7d0d0ce1 | 1550 | assemble_alias (alias->symbol.decl, |
c70f46b0 | 1551 | DECL_ASSEMBLER_NAME (alias->thunk.alias)); |
1552 | assemble_thunks_and_aliases (alias); | |
968b8c52 | 1553 | TREE_ASM_WRITTEN (alias->thunk.alias) = saved_written; |
c70f46b0 | 1554 | } |
91bf9d9a | 1555 | } |
1556 | ||
/* Expand function specified by NODE: run the remaining (non-IPA) tree
   passes and RTL generation on its body, emit the assembly, then emit
   any associated thunks/aliases and release the body.  NODE must be
   lowered and must not be an inline clone.  */

static void
expand_function (struct cgraph_node *node)
{
  tree decl = node->symbol.decl;
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;
  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (cgraph_global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = decl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply queued IPA transform stages to this function before running
     the local pass pipeline.  */
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  set_cfun (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* If the size fits in an unsigned int, print it exactly;
	     otherwise fall back to reporting the threshold.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
                     decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
                     decl, larger_than_size);
	}
    }

  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_get_node (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
	DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;

  /* It would make a lot more sense to output thunks before function body
     to get more forward and fewer backward jumps.  This however would need
     solving problem with comdats.  See PR48668.  Also aliases must come after
     the function itself to make one-pass assemblers, like the one on AIX,
     happy.  See PR 50689.
     FIXME: Perhaps thunks should be moved before function IFF they are not in
     comdat groups.  */
  assemble_thunks_and_aliases (node);
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);
}
1668 | ||
acc70efa | 1669 | |
d9d9733a | 1670 | /* Expand all functions that must be output. |
1671 | ||
d7c6d889 | 1672 | Attempt to topologically sort the nodes so function is output when |
1673 | all called functions are already assembled to allow data to be | |
91c82c20 | 1674 | propagated across the callgraph. Use a stack to get smaller distance |
3927afe0 | 1675 | between a function and its callees (later we may choose to use a more |
d7c6d889 | 1676 | sophisticated algorithm for function reordering; we will likely want |
1677 | to use subsections to make the output functions appear in top-down | |
1678 | order). */ | |
1679 | ||
1680 | static void | |
cf951b1a | 1681 | expand_all_functions (void) |
d7c6d889 | 1682 | { |
1683 | struct cgraph_node *node; | |
4c36ffe6 | 1684 | struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes); |
c04e3894 | 1685 | int order_pos, new_order_pos = 0; |
d7c6d889 | 1686 | int i; |
1687 | ||
7771d558 | 1688 | order_pos = ipa_reverse_postorder (order); |
cc636d56 | 1689 | gcc_assert (order_pos == cgraph_n_nodes); |
d7c6d889 | 1690 | |
7bd28bba | 1691 | /* Garbage collector may remove inline clones we eliminate during |
b0cdf642 | 1692 | optimization. So we must be sure to not reference them. */ |
1693 | for (i = 0; i < order_pos; i++) | |
09fc9532 | 1694 | if (order[i]->process) |
b0cdf642 | 1695 | order[new_order_pos++] = order[i]; |
1696 | ||
1697 | for (i = new_order_pos - 1; i >= 0; i--) | |
d7c6d889 | 1698 | { |
1699 | node = order[i]; | |
09fc9532 | 1700 | if (node->process) |
d7c6d889 | 1701 | { |
09fc9532 | 1702 | node->process = 0; |
cf951b1a | 1703 | expand_function (node); |
d7c6d889 | 1704 | } |
1705 | } | |
523c1122 | 1706 | cgraph_process_new_functions (); |
773c5ba7 | 1707 | |
d7c6d889 | 1708 | free (order); |
773c5ba7 | 1709 | |
d7c6d889 | 1710 | } |
1711 | ||
/* This is used to sort the node types by the cgraph order number.  */

/* Kind of toplevel entity stored in a cgraph_order_sort slot.  */
enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot unused; skipped at output time.  */
  ORDER_FUNCTION,	/* Slot holds a function (cgraph node).  */
  ORDER_VAR,		/* Slot holds a variable (varpool node).  */
  ORDER_ASM		/* Slot holds a toplevel asm statement.  */
};

/* One entity to be output, tagged with its kind; the union member
   that is valid is selected by KIND.  */
struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    struct varpool_node *v;	/* Valid when kind == ORDER_VAR.  */
    struct asm_node *a;		/* Valid when kind == ORDER_ASM.  */
  } u;
};
1732 | ||
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
output_in_order (void)
{
  int max;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct asm_node *pa;

  /* symtab_order is one past the largest order number handed out, so
     a slot array of that size can be indexed directly by order.  */
  max = symtab_order;
  nodes = XCNEWVEC (struct cgraph_order_sort, max);

  /* Bucket each defined function by its order number, skipping thunks
     and aliases (those are emitted with their target functions).  */
  FOR_EACH_DEFINED_FUNCTION (pf)
    {
      if (pf->process && !pf->thunk.thunk_p && !pf->alias)
	{
	  i = pf->symbol.order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  FOR_EACH_DEFINED_VARIABLE (pv)
    {
      i = pv->symbol.order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  for (pa = asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* Finalize named-section flags of all variables before emitting
     anything, so section decisions are stable.  NOTE(review): the
     original comment here ("mark them as needed") appears stale
     relative to this loop — confirm against varpool history.  */

  for (i = 0; i < max; ++i)
    if (nodes[i].kind == ORDER_VAR)
      varpool_finalize_named_section_flags (nodes[i].u.v);

  /* Emit everything in original declaration order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  asm_nodes = NULL;
  free (nodes);
}
1813 | ||
/* Run the interprocedural (IPA) pass pipeline: small IPA passes, summary
   generation, LTO streaming, and the regular IPA passes.  Parts are
   skipped when reading LTO input (in_lto_p) or when only streaming LTO
   output.  */

static void
ipa_passes (void)
{
  /* IPA passes work on the whole program, not any single function.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    {
      execute_ipa_pass_list (all_small_ipa_passes);
      /* NOTE(review): this early return leaves the bitmap obstack
	 initialized; presumably harmless on the error path — confirm.  */
      if (seen_error ())
	return;
    }

  /* We never run removal of unreachable nodes after early passes.  This is
     because TODO is run before the subpasses.  It is important to remove
     the unreachable functions to save work at IPA level and to get LTO
     symbol tables right.  */
  cgraph_remove_unreachable_nodes (true, cgraph_dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      execute_ipa_summary_passes
	((struct ipa_opt_pass_d *) all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  /* When compiling from source (not reading LTO input), stream the
     summaries out for later LTO consumption.  */
  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* Run the regular IPA passes unless we are only producing LTO
     bytecode (slim LTO compile) or doing an LTRANS stage.  */
  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
1874 | ||
badeded8 | 1875 | |
1876 | /* Return string alias is alias of. */ | |
1877 | ||
1878 | static tree | |
1879 | get_alias_symbol (tree decl) | |
1880 | { | |
1881 | tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl)); | |
1882 | return get_identifier (TREE_STRING_POINTER | |
1883 | (TREE_VALUE (TREE_VALUE (alias)))); | |
1884 | } | |
1885 | ||
1886 | ||
5e712541 | 1887 | /* Weakrefs may be associated to external decls and thus not output |
1888 | at expansion time. Emit all neccesary aliases. */ | |
1889 | ||
5139ff04 | 1890 | static void |
5e712541 | 1891 | output_weakrefs (void) |
1892 | { | |
1893 | struct cgraph_node *node; | |
1894 | struct varpool_node *vnode; | |
7c455d87 | 1895 | FOR_EACH_FUNCTION (node) |
7d0d0ce1 | 1896 | if (node->alias && DECL_EXTERNAL (node->symbol.decl) |
1897 | && !TREE_ASM_WRITTEN (node->symbol.decl) | |
1898 | && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl))) | |
1899 | assemble_alias (node->symbol.decl, | |
badeded8 | 1900 | node->thunk.alias ? DECL_ASSEMBLER_NAME (node->thunk.alias) |
7d0d0ce1 | 1901 | : get_alias_symbol (node->symbol.decl)); |
7c455d87 | 1902 | FOR_EACH_VARIABLE (vnode) |
7d0d0ce1 | 1903 | if (vnode->alias && DECL_EXTERNAL (vnode->symbol.decl) |
1904 | && !TREE_ASM_WRITTEN (vnode->symbol.decl) | |
1905 | && lookup_attribute ("weakref", DECL_ATTRIBUTES (vnode->symbol.decl))) | |
1906 | assemble_alias (vnode->symbol.decl, | |
badeded8 | 1907 | vnode->alias_of ? DECL_ASSEMBLER_NAME (vnode->alias_of) |
7d0d0ce1 | 1908 | : get_alias_symbol (vnode->symbol.decl)); |
5e712541 | 1909 | } |
1910 | ||
da5e1e7c | 1911 | /* Initialize callgraph dump file. */ |
34e5cced | 1912 | |
121f3051 | 1913 | void |
1914 | init_cgraph (void) | |
1915 | { | |
01ec0a6c | 1916 | if (!cgraph_dump_file) |
1917 | cgraph_dump_file = dump_begin (TDI_cgraph, NULL); | |
121f3051 | 1918 | } |
b5d36404 | 1919 | |
a0c938f0 | 1920 | /* The edges representing the callers of the NEW_VERSION node were |
b5d36404 | 1921 | fixed by cgraph_function_versioning (), now the call_expr in their |
1922 | respective tree code should be updated to call the NEW_VERSION. */ | |
1923 | ||
1924 | static void | |
1925 | update_call_expr (struct cgraph_node *new_version) | |
1926 | { | |
1927 | struct cgraph_edge *e; | |
1928 | ||
1929 | gcc_assert (new_version); | |
75a70cf9 | 1930 | |
1931 | /* Update the call expr on the edges to call the new version. */ | |
b5d36404 | 1932 | for (e = new_version->callers; e; e = e->next_caller) |
e03a95e7 | 1933 | { |
7d0d0ce1 | 1934 | struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->symbol.decl); |
1935 | gimple_call_set_fndecl (e->call_stmt, new_version->symbol.decl); | |
e38def9c | 1936 | maybe_clean_eh_stmt_fn (inner_function, e->call_stmt); |
e03a95e7 | 1937 | } |
b5d36404 | 1938 | } |
1939 | ||
1940 | ||
/* Create a new cgraph node which is the new version of
   OLD_VERSION node.  REDIRECT_CALLERS holds the caller
   edges which should be redirected to point to
   NEW_VERSION.  ALL the callee edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.

   If non-NULL, BBS_TO_COPY determines which basic blocks
   were copied, to prevent duplication of calls that are dead
   in the clone.  */

struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
				 tree new_decl,
				 VEC(cgraph_edge_p,heap) *redirect_callers,
				 bitmap bbs_to_copy)
{
  struct cgraph_node *new_version;
  struct cgraph_edge *e;
  unsigned i;

  gcc_assert (old_version);

  new_version = cgraph_create_node (new_decl);

  /* Copy over the flags of the original; the clone is never externally
     visible, and is local exactly when the original was analyzed.
     Note local.local is set AFTER the whole `local' struct is copied.  */
  new_version->analyzed = old_version->analyzed;
  new_version->local = old_version->local;
  new_version->symbol.externally_visible = false;
  new_version->local.local = old_version->analyzed;
  new_version->global = old_version->global;
  new_version->rtl = old_version->rtl;
  new_version->count = old_version->count;

  /* Clone direct callee edges, skipping calls in blocks that were not
     copied into the clone.  */
  for (e = old_version->callees; e; e=e->next_callee)
    if (!bbs_to_copy
	|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
			 e->lto_stmt_uid, REG_BR_PROB_BASE,
			 CGRAPH_FREQ_BASE,
			 true);
  /* Likewise for indirect call edges.  */
  for (e = old_version->indirect_calls; e; e=e->next_callee)
    if (!bbs_to_copy
	|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
			 e->lto_stmt_uid, REG_BR_PROB_BASE,
			 CGRAPH_FREQ_BASE,
			 true);
  FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
	 version.  */
      cgraph_redirect_edge_callee (e, new_version);
    }

  cgraph_call_node_duplication_hooks (old_version, new_version);

  return new_version;
}
1999 | ||
/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.

   If non-NULL ARGS_TO_SKIP determine function parameters to remove
   from new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
   If non_NULL NEW_ENTRY determine new entry BB of the clone.

   Return the new version's cgraph node, or NULL if the function
   cannot be versioned at all.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
			    VEC(cgraph_edge_p,heap) *redirect_callers,
			    VEC (ipa_replace_map_p,gc)* tree_map,
			    bitmap args_to_skip,
			    bool skip_return,
			    bitmap bbs_to_copy,
			    basic_block new_entry_block,
			    const char *clone_name)
{
  tree old_decl = old_version_node->symbol.decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Dropping arguments is only valid when the signature may change.  */
  gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node for the new version.  */
  if (!args_to_skip && !skip_return)
    new_decl = copy_node (old_decl);
  else
    new_decl
      = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* When the old decl was a con-/destructor make sure the clone isn't.  */
  DECL_STATIC_CONSTRUCTOR(new_decl) = 0;
  DECL_STATIC_DESTRUCTOR(new_decl) = 0;

  /* Create the new version's call-graph node.
     and update the edges of the new node. */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
				     redirect_callers, bbs_to_copy);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
			    skip_return, bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make The new version visible only within this translation unit.  Make sure
     that is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  symtab_make_decl_local (new_version_node->symbol.decl);
  DECL_VIRTUAL_P (new_version_node->symbol.decl) = 0;
  new_version_node->symbol.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node. */
  update_call_expr (new_version_node);

  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
469679ab | 2082 | |
/* Given virtual clone, turn it into actual clone: copy the body of the
   clone-of node, applying the recorded tree replacements and argument
   skips, then unlink NODE from the clone tree.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  /* Remember what we were cloned from; prefer the ultimate origin if
     the clone-of node was itself a materialized clone.  */
  node->former_clone_of = node->clone_of->symbol.decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->symbol.decl, node->symbol.decl,
			    node->clone.tree_map, true,
			    node->clone.args_to_skip, false,
			    NULL, NULL);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->symbol.decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->symbol.decl, cgraph_dump_file, dump_flags);
    }

  /* Function is no longer clone.  Unlink NODE from the doubly linked
     sibling-clone list of its clone-of node.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  /* If the origin is no longer needed (not analyzed, no remaining
     clones), release its body and references now.  */
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      cgraph_release_function_body (node->clone_of);
      cgraph_node_remove_callees (node->clone_of);
      ipa_remove_all_references (&node->clone_of->symbol.ref_list);
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
2120 | ||
/* If necessary, change the function declaration in the call statement
   associated with E so that it corresponds to the edge callee.  Returns
   the (possibly new) call statement; when the callee skips arguments,
   the original statement is replaced by a copy with those arguments
   dropped.  */

gimple
cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
{
  tree decl = gimple_call_fndecl (e->call_stmt);
  gimple new_stmt;
  gimple_stmt_iterator gsi;
#ifdef ENABLE_CHECKING
  struct cgraph_node *node;
#endif

  /* Nothing to do for indirect calls or when the statement already
     calls the edge's callee.  */
  if (e->indirect_unknown_callee
      || decl == e->callee->symbol.decl)
    return e->call_stmt;

#ifdef ENABLE_CHECKING
  /* The current fndecl must not itself be an args-skipping clone;
     otherwise the combined skip bitmap below would be wrong.  */
  if (decl)
    {
      node = cgraph_get_node (decl);
      gcc_assert (!node || !node->clone.combined_args_to_skip);
    }
#endif

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
	       xstrdup (cgraph_node_name (e->caller)), e->caller->uid,
	       xstrdup (cgraph_node_name (e->callee)), e->callee->uid);
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
      if (e->callee->clone.combined_args_to_skip)
	{
	  fprintf (cgraph_dump_file, " combined args to skip: ");
	  dump_bitmap (cgraph_dump_file,
		       e->callee->clone.combined_args_to_skip);
	}
    }

  if (e->callee->clone.combined_args_to_skip)
    {
      int lp_nr;

      /* Build a replacement call with the skipped arguments removed.  */
      new_stmt
	= gimple_call_copy_skip_args (e->call_stmt,
				      e->callee->clone.combined_args_to_skip);
      gimple_call_set_fndecl (new_stmt, e->callee->symbol.decl);

      /* Keep virtual SSA form consistent for the copied statement.  */
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;

      gsi = gsi_for_stmt (e->call_stmt);
      gsi_replace (&gsi, new_stmt, false);
      /* We need to defer cleaning EH info on the new statement to
	 fixup-cfg.  We may not have dominator information at this point
	 and thus would end up with unreachable blocks and have no way
	 to communicate that we need to run CFG cleanup then.  */
      lp_nr = lookup_stmt_eh_lp (e->call_stmt);
      if (lp_nr != 0)
	{
	  remove_stmt_from_eh_lp (e->call_stmt);
	  add_stmt_to_eh_lp (new_stmt, lp_nr);
	}
    }
  else
    {
      /* No arguments to drop: just retarget the existing statement.  */
      new_stmt = e->call_stmt;
      gimple_call_set_fndecl (new_stmt, e->callee->symbol.decl);
      update_stmt (new_stmt);
    }

  /* Propagate the statement change to all clones of the caller.  */
  cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "  updated to:");
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }
  return new_stmt;
}
2202 | ||
/* Once all functions from compilation unit are in memory, produce all clones
   and update all calls.  We might also do this on demand if we don't want to
   bring all functions to memory prior compilation, but current WHOPR
   implementation does that and it is a bit easier to keep everything right in
   this order.  */
static void
cgraph_materialize_all_clones (void)
{
  struct cgraph_node *node;
  bool stabilized = false;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Materializing clones\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* We can also do topological order, but number of iterations should be
     bounded by number of IPA passes since single IPA pass is probably not
     going to create clones of clones it created itself.  Iterate to a
     fixed point: materializing a clone may enable materializing clones
     of that clone on the next sweep.  */
  while (!stabilized)
    {
      stabilized = true;
      FOR_EACH_FUNCTION (node)
	{
	  /* A virtual clone still shares decl/body state with its origin
	     and has no body of its own yet.  */
	  if (node->clone_of && node->symbol.decl != node->clone_of->symbol.decl
	      && !gimple_has_body_p (node->symbol.decl))
	    {
	      /* Can only materialize once the origin's body exists
		 (i.e. the origin itself has been materialized).  */
	      if (gimple_has_body_p (node->clone_of->symbol.decl))
		{
		  if (cgraph_dump_file)
		    {
		      fprintf (cgraph_dump_file, "cloning %s to %s\n",
			       xstrdup (cgraph_node_name (node->clone_of)),
			       xstrdup (cgraph_node_name (node)));
		      if (node->clone.tree_map)
			{
			  unsigned int i;
			  fprintf (cgraph_dump_file, " replace map: ");
			  for (i = 0; i < VEC_length (ipa_replace_map_p,
						      node->clone.tree_map);
			       i++)
			    {
			      struct ipa_replace_map *replace_info;
			      replace_info = VEC_index (ipa_replace_map_p,
							node->clone.tree_map,
							i);
			      print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
			      fprintf (cgraph_dump_file, " -> ");
			      print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
			      fprintf (cgraph_dump_file, "%s%s;",
				       replace_info->replace_p ? "(replace)":"",
				       replace_info->ref_p ? "(ref)":"");
			    }
			  fprintf (cgraph_dump_file, "\n");
			}
		      if (node->clone.args_to_skip)
			{
			  fprintf (cgraph_dump_file, " args_to_skip: ");
			  dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
			}
		      /* NOTE(review): this tests args_to_skip again but
			 dumps combined_args_to_skip — possibly meant to
			 test combined_args_to_skip; confirm upstream.  */
		      if (node->clone.args_to_skip)
			{
			  fprintf (cgraph_dump_file, " combined_args_to_skip:");
			  dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
			}
		    }
		  cgraph_materialize_clone (node);
		  stabilized = false;
		}
	    }
	}
    }
  /* Drop callee edges of functions that lost their bodies above.  */
  FOR_EACH_FUNCTION (node)
    if (!node->analyzed && node->callees)
      cgraph_node_remove_callees (node);
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
}
2286 | ||
d2bb3f9d | 2287 | |
2288 | /* Perform simple optimizations based on callgraph. */ | |
2289 | ||
void
compile (void)
{
  /* Bail out immediately if the front end already reported errors;
     there is nothing meaningful to optimize or output.  */
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  /* Enter the IPA phase; passes and helpers key their behavior off
     this global state.  */
  cgraph_state = CGRAPH_STATE_IPA;

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_lto)
    lto_streamer_hooks_init ();

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors or if we are just
     streaming LTO: in slim-LTO mode (-flto without -ffat-lto-objects)
     the IPA summaries have been written out and final code generation
     happens at link time instead.  */
  if (seen_error ()
      || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass removes bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  /* From here on global information (e.g. inlining decisions) is final.  */
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_symtab (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif

  /* Give virtual clones real bodies before the late IPA passes and
     expansion see them.  */
  cgraph_materialize_all_clones ();
  /* Late IPA passes may use bitmaps; give them a default obstack and
     drop unreachable nodes they may have created or exposed.  */
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (all_late_ipa_passes);
  cgraph_remove_unreachable_nodes (true, dump_file);
#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif
  bitmap_obstack_release (NULL);
  mark_functions_to_output ();
  output_weakrefs ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  /* With -fno-toplevel-reorder, emit functions, variables and asm
     statements in their original declaration order; otherwise emit
     each category in whatever order is most convenient.  */
  if (!flag_toplevel_reorder)
    output_in_order ();
  else
    {
      output_asm_statements ();

      expand_all_functions ();
      varpool_output_variables ();
    }

  /* Expansion may have created new functions (e.g. via clones or
     instrumentation); process them before declaring the unit done.  */
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_symtab (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_symtab ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      struct cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->global.inlined_to
	    || gimple_has_body_p (node->symbol.decl))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
2401 | ||
2402 | ||
2403 | /* Analyze the whole compilation unit once it is parsed completely. */ | |
2404 | ||
void
finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  This is run a
     second time deliberately: analysis above may have discovered new
     alias pairs.  */
  finish_aliases_1 ();
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  The second analysis pass picks up
     nodes made reachable by the alias handling just above.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  compile ();

  timevar_pop (TV_CGRAPH);
}
2450 | ||
2451 | ||
a861fe52 | 2452 | #include "gt-cgraphunit.h" |