/* Callgraph based interprocedural optimizations.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011, 2012 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module implements the main driver of the compilation process as
   well as a few basic interprocedural optimizers.

   The main scope of this file is to act as an interface between the
   tree based frontends and the backend (and middle end).

   The front-end is supposed to use the following functionality:

    - cgraph_finalize_function

      This function is called once the front-end has parsed the whole body
      of a function and it is certain that neither the function body nor
      the declaration will change.

      (There is one exception needed for implementing GCC extern inline
      functions.)

    - varpool_finalize_variable

      This function has the same behavior as the above but is used for
      static variables.

    - cgraph_finalize_compilation_unit

      This function is called once a (source level) compilation unit is
      finalized and it will no longer change.

      The call-graph construction and local function analysis take place
      here.  Bodies of unreachable functions are released to conserve
      memory usage.

      The function can be called multiple times when multiple source level
      compilation units are combined (such as in the C frontend).

    - cgraph_optimize

      In this unit-at-a-time compilation the intraprocedural analysis takes
      place here.  In particular the static functions whose address is never
      taken are marked as local.  The backend can then use this information
      to modify calling conventions, do better inlining or similar
      optimizations.

    - cgraph_mark_needed_node
    - varpool_mark_needed_node

      When a function or variable is referenced in some hidden way, the
      call-graph data structure must be updated accordingly by this
      function.  There should be little need to call this function and all
      the references should be made explicit to the cgraph code.  At
      present these functions are used by the C++ frontend to explicitly
      mark the keyed methods.

    - analyze_expr callback

      This function is responsible for lowering tree nodes not understood
      by generic code into understandable ones or alternatively marking
      callgraph and varpool nodes referenced by them as needed.

      ??? On the tree-ssa genericizing should take place here and we will
      avoid the need for these hooks (replacing them by a genericizing
      hook).

   Analysis of all functions is deferred to cgraph_finalize_compilation_unit
   and expansion to cgraph_optimize.

   In cgraph_finalize_compilation_unit the reachable functions are
   analyzed.  During analysis the call-graph edges from reachable
   functions are constructed and their destinations are marked as
   reachable.  References to functions and variables are discovered too
   and variables found to be needed are output to the assembly file.  Via
   the mark_referenced call in assemble_variable, functions referenced by
   static variables are noticed too.

   The intraprocedural information is produced and its existence is
   indicated by global_info_ready.  Once this flag is set it is impossible
   to change a function from !reachable to reachable and thus
   assemble_variable no longer calls mark_referenced.

   Finally the call-graph is topologically sorted and all reachable
   functions that have not been completely inlined or are not external
   are output.

   ??? It is possible that a reference to a function or variable is
   optimized out.  We cannot deal with this nicely because topological
   order is not suitable for it.  For tree-ssa we may consider another
   pass doing optimization and re-discovering reachable functions.

   ??? Reorganize code so variables are output very last and only if they
   really have been referenced by produced code, so we catch more cases
   where a reference has been optimized out.  */

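/* As a hedged illustration (not part of the original file), a front end
   typically drives the interface described above along these lines; the
   names more_toplevel_decls and parse_next_function are hypothetical
   placeholders for front-end specific parsing code:

     static void
     example_frontend_parse_file (void)
     {
       while (more_toplevel_decls ())
         {
           tree fndecl = parse_next_function ();
           cgraph_finalize_function (fndecl, false);
         }
       cgraph_finalize_compilation_unit ();
     }

   cgraph_finalize_compilation_unit then performs the analysis and, through
   cgraph_optimize, the expansion sketched in the comment above.  */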

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "toplev.h"
#include "flags.h"
#include "ggc.h"
#include "debug.h"
#include "target.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "params.h"
#include "fibheap.h"
#include "intl.h"
#include "function.h"
#include "ipa-prop.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "output.h"
#include "coverage.h"
#include "plugin.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "lto-streamer.h"

static void cgraph_expand_all_functions (void);
static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
static void cgraph_output_pending_asms (void);

FILE *cgraph_dump_file;

/* Used for vtable lookup in thunk adjusting.  */
static GTY (()) tree vtable_entry_type;

/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit or something magic in the system
   configury.  */

bool
cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && (!node->thunk.thunk_p && !node->same_body_alias)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((TREE_PUBLIC (decl)
        || (!optimize
            && !node->same_body_alias
            && !DECL_DISREGARD_INLINE_LIMITS (decl)
            && !DECL_DECLARED_INLINE_P (decl)
            && !(DECL_CONTEXT (decl)
                 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
       && !flag_whole_program
       && !flag_lto)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  return false;
}

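/* For illustration only (not part of the original file): given the tests
   above, an ordinary exported definition such as

     int api_entry (int x) { return x + 1; }

   is needed because it is TREE_PUBLIC, not COMDAT and not DECL_EXTERNAL,
   whereas a plain static helper only becomes needed through one of the other
   paths (an explicit mark, -fkeep-inline-functions, or the !optimize rule
   for static functions).  */
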
/* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add
   these functions to the callgraph so that they look like ordinary
   reachable functions inserted into the callgraph already at construction
   time.  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  varpool_analyze_pending_decls ();
  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
        {
        case CGRAPH_STATE_CONSTRUCTION:
          /* At construction time we just need to finalize the function and
             move it into the reachable functions list.  */

          node->next_needed = NULL;
          cgraph_finalize_function (fndecl, false);
          cgraph_mark_reachable_node (node);
          output = true;
          cgraph_call_function_insertion_hooks (node);
          break;

        case CGRAPH_STATE_IPA:
        case CGRAPH_STATE_IPA_SSA:
          /* When IPA optimization has already started, do all essential
             transformations that have already been performed on the whole
             cgraph but not on this function.  */

          gimple_register_cfg_hooks ();
          if (!node->analyzed)
            cgraph_analyze_function (node);
          push_cfun (DECL_STRUCT_FUNCTION (fndecl));
          current_function_decl = fndecl;
          if ((cgraph_state == CGRAPH_STATE_IPA_SSA
               && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
              /* When not optimizing, be sure we run early local passes anyway
                 to expand OMP.  */
              || !optimize)
            execute_pass_list (pass_early_local_passes.pass.sub);
          else
            compute_inline_parameters (node, true);
          free_dominance_info (CDI_POST_DOMINATORS);
          free_dominance_info (CDI_DOMINATORS);
          pop_cfun ();
          current_function_decl = NULL;
          cgraph_call_function_insertion_hooks (node);
          break;

        case CGRAPH_STATE_EXPANSION:
          /* Functions created during expansion shall be compiled
             directly.  */
          node->process = 0;
          cgraph_call_function_insertion_hooks (node);
          cgraph_expand_function (node);
          break;

        default:
          gcc_unreachable ();
          break;
        }
      varpool_analyze_pending_decls ();
    }
  return output;
}

/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of the body differ are not well defined.
   We replace the old body with the new body so in unit-at-a-time mode
   we always use the new body, while in normal mode we may end up with
   the old body inlined into some functions and the new body expanded
   and inlined in others.

   ??? It may make more sense to use one body for inlining and the other
   body for expanding the function, but this is difficult to do.  */

static void
cgraph_reset_node (struct cgraph_node *node)
{
  /* If node->process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function redefinition
     errors.  I would argue that the front end should never present us with
     such a case, but don't enforce that for now.  */
  gcc_assert (!node->process);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&node->local, 0, sizeof (node->local));
  memset (&node->global, 0, sizeof (node->global));
  memset (&node->rtl, 0, sizeof (node->rtl));
  node->analyzed = false;
  node->local.finalized = false;

  cgraph_node_remove_callees (node);
}

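/* A hedged illustration of the redefinition described above (not part of the
   original file): under the GNU89 "extern inline" extension a body may be
   superseded, e.g.

     extern inline int twice (int x) { return x + x; }
     int twice (int x) { return 2 * x; }

   When the second definition is finalized, cgraph_reset_node discards the
   analysis of the first body so the new one is used from then on.  */
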
static void
cgraph_lower_function (struct cgraph_node *node)
{
  if (node->lowered)
    return;

  if (node->nested)
    lower_nested_functions (node->decl);
  gcc_assert (!node->nested);

  tree_lowering_passes (node->decl);
  node->lowered = true;
}

/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_get_create_node (decl);

  if (node->local.finalized)
    {
      cgraph_reset_node (node);
      node->local.redefined_extern_inline = true;
    }

  notice_global_symbol (decl);
  node->local.finalized = true;
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  if (cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
      || DECL_STATIC_CONSTRUCTOR (decl)
      || DECL_STATIC_DESTRUCTOR (decl)
      /* COMDAT virtual functions may be referenced by vtables from
         other compilation units.  Still we want to devirtualize calls
         to those so we need to analyze them.
         FIXME: We should introduce may edges for this purpose and update
         their handling in unreachable function removal and inliner too.  */
      || (DECL_VIRTUAL_P (decl)
          && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!nested)
    ggc_collect ();
}

/* The C99 extern inline keyword allows changing the declaration after the
   function has been finalized.  We need to re-decide whether we want to mark
   the function as needed then.  */

void
cgraph_mark_if_needed (tree decl)
{
  struct cgraph_node *node = cgraph_get_node (decl);
  if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);
}

/* Return TRUE if NODE2 is equivalent to NODE or its clone.  */
static bool
clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
{
  node = cgraph_function_or_thunk_node (node, NULL);
  node2 = cgraph_function_or_thunk_node (node2, NULL);
  while (node != node2 && node2)
    node2 = node2->clone_of;
  return node2 != NULL;
}

/* Verify edge E count and frequency.  */

static bool
verify_edge_count_and_frequency (struct cgraph_edge *e)
{
  bool error_found = false;
  if (e->count < 0)
    {
      error ("caller edge count is negative");
      error_found = true;
    }
  if (e->frequency < 0)
    {
      error ("caller edge frequency is negative");
      error_found = true;
    }
  if (e->frequency > CGRAPH_FREQ_MAX)
    {
      error ("caller edge frequency is too large");
      error_found = true;
    }
  if (gimple_has_body_p (e->caller->decl)
      && !e->caller->global.inlined_to
      /* FIXME: Inline-analysis sets frequency to 0 when edge is optimized out.
         Remove this once edges are actually removed from the function at that
         time.  */
      && (e->frequency
          || (inline_edge_summary_vec
              && ((VEC_length(inline_edge_summary_t, inline_edge_summary_vec)
                   <= (unsigned) e->uid)
                  || !inline_edge_summary (e)->predicate)))
      && (e->frequency
          != compute_call_stmt_bb_frequency (e->caller->decl,
                                             gimple_bb (e->call_stmt))))
    {
      error ("caller edge frequency %i does not match BB frequency %i",
             e->frequency,
             compute_call_stmt_bb_frequency (e->caller->decl,
                                             gimple_bb (e->call_stmt)));
      error_found = true;
    }
  return error_found;
}

/* Switch to THIS_CFUN if needed and print STMT to stderr.  */
static void
cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
{
  /* debug_gimple_stmt needs correct cfun */
  if (cfun != this_cfun)
    set_cfun (this_cfun);
  debug_gimple_stmt (stmt);
}

/* Verify that call graph edge E corresponds to DECL from the associated
   statement.  Return true if the verification should fail.  */

static bool
verify_edge_corresponds_to_fndecl (struct cgraph_edge *e, tree decl)
{
  struct cgraph_node *node;

  if (!decl || e->callee->global.inlined_to)
    return false;
  node = cgraph_get_node (decl);

  /* We do not know if a node from a different partition is an alias or what
     it aliases and therefore cannot do the former_clone_of check reliably.  */
  if (!node || node->in_other_partition)
    return false;
  node = cgraph_function_or_thunk_node (node, NULL);

  if ((e->callee->former_clone_of != node->decl
       && (!node->same_body_alias
           || e->callee->former_clone_of != node->thunk.alias))
      /* IPA-CP sometimes redirects an edge to a clone and then back to the
         former function.  This ping-pong has to go, eventually.  */
      && (node != cgraph_function_or_thunk_node (e->callee, NULL))
      && !clone_of_p (node, e->callee)
      /* If decl is a same body alias of some other decl, allow e->callee to be
         a clone of a clone of that other decl too.  */
      && (!node->same_body_alias
          || !clone_of_p (cgraph_get_node (node->thunk.alias), e->callee)))
    return true;
  else
    return false;
}

/* Verify consistency of the given cgraph node and its edges.  */
DEBUG_FUNCTION void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  basic_block this_block;
  gimple_stmt_iterator gsi;
  bool error_found = false;

  if (seen_error ())
    return;

  timevar_push (TV_CGRAPH_VERIFY);
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
        error ("aux field set for edge %s->%s",
               identifier_to_locale (cgraph_node_name (e->caller)),
               identifier_to_locale (cgraph_node_name (e->callee)));
        error_found = true;
      }
  if (node->count < 0)
    {
      error ("execution count is negative");
      error_found = true;
    }
  if (node->global.inlined_to && node->local.externally_visible)
    {
      error ("externally visible inline clone");
      error_found = true;
    }
  if (node->global.inlined_to && node->address_taken)
    {
      error ("inline clone with address taken");
      error_found = true;
    }
  if (node->global.inlined_to && node->needed)
    {
      error ("inline clone is needed");
      error_found = true;
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      if (e->aux)
        {
          error ("aux field set for indirect edge from %s",
                 identifier_to_locale (cgraph_node_name (e->caller)));
          error_found = true;
        }
      if (!e->indirect_unknown_callee
          || !e->indirect_info)
        {
          error ("An indirect edge from %s is not marked as indirect or has "
                 "associated indirect_info, the corresponding statement is: ",
                 identifier_to_locale (cgraph_node_name (e->caller)));
          cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
          error_found = true;
        }
    }
  for (e = node->callers; e; e = e->next_caller)
    {
      if (verify_edge_count_and_frequency (e))
        error_found = true;
      if (!e->inline_failed)
        {
          if (node->global.inlined_to
              != (e->caller->global.inlined_to
                  ? e->caller->global.inlined_to : e->caller))
            {
              error ("inlined_to pointer is wrong");
              error_found = true;
            }
          if (node->callers->next_caller)
            {
              error ("multiple inline callers");
              error_found = true;
            }
        }
      else
        if (node->global.inlined_to)
          {
            error ("inlined_to pointer set for noninline callers");
            error_found = true;
          }
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    if (verify_edge_count_and_frequency (e))
      error_found = true;
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecessors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  if (!cgraph_get_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  if (node->clone_of)
    {
      struct cgraph_node *n;
      for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
        if (n == node)
          break;
      if (!n)
        {
          error ("node has wrong clone_of");
          error_found = true;
        }
    }
  if (node->clones)
    {
      struct cgraph_node *n;
      for (n = node->clones; n; n = n->next_sibling_clone)
        if (n->clone_of != node)
          break;
      if (n)
        {
          error ("node has wrong clone list");
          error_found = true;
        }
    }
  if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
    {
      error ("node is in clone list but it is not clone");
      error_found = true;
    }
  if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
    {
      error ("node has wrong prev_clone pointer");
      error_found = true;
    }
  if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
    {
      error ("double linked list of clones corrupted");
      error_found = true;
    }
  if (node->same_comdat_group)
    {
      struct cgraph_node *n = node->same_comdat_group;

      if (!DECL_ONE_ONLY (node->decl))
        {
          error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
          error_found = true;
        }
      if (n == node)
        {
          error ("node is alone in a comdat group");
          error_found = true;
        }
      do
        {
          if (!n->same_comdat_group)
            {
              error ("same_comdat_group is not a circular list");
              error_found = true;
              break;
            }
          n = n->same_comdat_group;
        }
      while (n != node);
    }

  if (node->analyzed && node->alias)
    {
      bool ref_found = false;
      int i;
      struct ipa_ref *ref;

      if (node->callees)
        {
          error ("Alias has call edges");
          error_found = true;
        }
      for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
        if (ref->use != IPA_REF_ALIAS)
          {
            error ("Alias has non-alias reference");
            error_found = true;
          }
        else if (ref_found)
          {
            error ("Alias has more than one alias reference");
            error_found = true;
          }
        else
          ref_found = true;
      if (!ref_found)
        {
          error ("Analyzed alias has no reference");
          error_found = true;
        }
    }
  if (node->analyzed && node->thunk.thunk_p)
    {
      if (!node->callees)
        {
          error ("No edge out of thunk node");
          error_found = true;
        }
      else if (node->callees->next_callee)
        {
          error ("More than one edge out of thunk node");
          error_found = true;
        }
      if (gimple_has_body_p (node->decl))
        {
          error ("Thunk is not supposed to have body");
          error_found = true;
        }
    }
  else if (node->analyzed && gimple_has_body_p (node->decl)
           && !TREE_ASM_WRITTEN (node->decl)
           && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
           && !flag_wpa)
    {
      if (this_cfun->cfg)
        {
          /* The nodes we're interested in are never shared, so walk
             the tree ignoring duplicates.  */
          struct pointer_set_t *visited_nodes = pointer_set_create ();
          /* Reach the trees by walking over the CFG, and note the
             enclosing basic-blocks in the call edges.  */
          FOR_EACH_BB_FN (this_block, this_cfun)
            for (gsi = gsi_start_bb (this_block);
                 !gsi_end_p (gsi);
                 gsi_next (&gsi))
              {
                gimple stmt = gsi_stmt (gsi);
                if (is_gimple_call (stmt))
                  {
                    struct cgraph_edge *e = cgraph_edge (node, stmt);
                    tree decl = gimple_call_fndecl (stmt);
                    if (e)
                      {
                        if (e->aux)
                          {
                            error ("shared call_stmt:");
                            cgraph_debug_gimple_stmt (this_cfun, stmt);
                            error_found = true;
                          }
                        if (!e->indirect_unknown_callee)
                          {
                            if (verify_edge_corresponds_to_fndecl (e, decl))
                              {
                                error ("edge points to wrong declaration:");
                                debug_tree (e->callee->decl);
                                fprintf (stderr," Instead of:");
                                debug_tree (decl);
                                error_found = true;
                              }
                          }
                        else if (decl)
                          {
                            error ("an indirect edge with unknown callee "
                                   "corresponding to a call_stmt with "
                                   "a known declaration:");
                            error_found = true;
                            cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
                          }
                        e->aux = (void *)1;
                      }
                    else if (decl)
                      {
                        error ("missing callgraph edge for call stmt:");
                        cgraph_debug_gimple_stmt (this_cfun, stmt);
                        error_found = true;
                      }
                  }
              }
          pointer_set_destroy (visited_nodes);
        }
      else
        /* No CFG available?!  */
        gcc_unreachable ();

      for (e = node->callees; e; e = e->next_callee)
        {
          if (!e->aux)
            {
              error ("edge %s->%s has no corresponding call_stmt",
                     identifier_to_locale (cgraph_node_name (e->caller)),
                     identifier_to_locale (cgraph_node_name (e->callee)));
              cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
              error_found = true;
            }
          e->aux = 0;
        }
      for (e = node->indirect_calls; e; e = e->next_callee)
        {
          if (!e->aux)
            {
              error ("an indirect edge from %s has no corresponding call_stmt",
                     identifier_to_locale (cgraph_node_name (e->caller)));
              cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
              error_found = true;
            }
          e->aux = 0;
        }
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  timevar_pop (TV_CGRAPH_VERIFY);
}

/* Verify whole cgraph structure.  */
DEBUG_FUNCTION void
verify_cgraph (void)
{
  struct cgraph_node *node;

  if (seen_error ())
    return;

  for (node = cgraph_nodes; node; node = node->next)
    verify_cgraph_node (node);
}

/* Output all asm statements we have stored up to be output.  */

static void
cgraph_output_pending_asms (void)
{
  struct cgraph_asm_node *can;

  if (seen_error ())
    return;

  for (can = cgraph_asm_nodes; can; can = can->next)
    assemble_asm (can->asm_str);
  cgraph_asm_nodes = NULL;
}

/* Analyze the function scheduled to be output.  */
void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree save = current_function_decl;
  tree decl = node->decl;

  if (node->alias && node->thunk.alias)
    {
      struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
      struct cgraph_node *n;

      for (n = tgt; n && n->alias;
           n = n->analyzed ? cgraph_alias_aliased_node (n) : NULL)
        if (n == node)
          {
            error ("function %q+D part of alias cycle", node->decl);
            node->alias = false;
            return;
          }
      if (!VEC_length (ipa_ref_t, node->ref_list.references))
        ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
      if (node->same_body_alias)
        {
          DECL_VIRTUAL_P (node->decl) = DECL_VIRTUAL_P (node->thunk.alias);
          DECL_DECLARED_INLINE_P (node->decl)
            = DECL_DECLARED_INLINE_P (node->thunk.alias);
          DECL_DISREGARD_INLINE_LIMITS (node->decl)
            = DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
        }

      /* Fix up the visibility nonsense the C++ frontend produces on same
         body aliases.  */
      if (TREE_PUBLIC (node->decl) && node->same_body_alias)
        {
          DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (node->thunk.alias);
          if (DECL_ONE_ONLY (node->thunk.alias))
            {
              DECL_COMDAT (node->decl) = DECL_COMDAT (node->thunk.alias);
              DECL_COMDAT_GROUP (node->decl) = DECL_COMDAT_GROUP (node->thunk.alias);
              if (DECL_ONE_ONLY (node->thunk.alias) && !node->same_comdat_group)
                {
                  struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
                  node->same_comdat_group = tgt;
                  if (!tgt->same_comdat_group)
                    tgt->same_comdat_group = node;
                  else
                    {
                      struct cgraph_node *n;
                      for (n = tgt->same_comdat_group;
                           n->same_comdat_group != tgt;
                           n = n->same_comdat_group)
                        ;
                      n->same_comdat_group = node;
                    }
                }
            }
        }
      cgraph_mark_reachable_node (cgraph_alias_aliased_node (node));
      if (node->address_taken)
        cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
      if (cgraph_decide_is_function_needed (node, node->decl))
        cgraph_mark_needed_node (node);
    }
  else if (node->thunk.thunk_p)
    {
      cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
                          NULL, 0, CGRAPH_FREQ_BASE);
    }
  else
    {
      current_function_decl = decl;
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_neeeded (node->decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
         function we lower it, which will require gimplified nested
         functions, so we can end up here with an already gimplified
         body.  */
      if (!gimple_body (decl))
        gimplify_function_tree (decl);
      dump_function (TDI_generic, decl);

      cgraph_lower_function (node);
      pop_cfun ();
    }
  node->analyzed = true;

  current_function_decl = save;
}

/* The C++ frontend produces same body aliases all over the place, even before
   PCH gets streamed out.  It relies on us linking the aliases with their
   function in order to do the fixups, but ipa-ref is not PCH safe.
   Consequently we first produce aliases without links, and once the C++ FE is
   sure it won't stream PCH we build the links via this function.  */

void
cgraph_process_same_body_aliases (void)
{
  struct cgraph_node *node;
  for (node = cgraph_nodes; node; node = node->next)
    if (node->same_body_alias
        && !VEC_length (ipa_ref_t, node->ref_list.references))
      {
        struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
        ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
      }
  same_body_aliases_done = true;
}

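/* For illustration (not part of the original file): a typical producer of
   same body aliases is the C++ front end, which can emit the complete-object
   and base-object constructors of a class such as

     struct S { S () {} };

   as one body, with the second symbol recorded as a same body alias of the
   first.  The loop above installs the IPA_REF_ALIAS links for exactly these
   nodes once PCH streaming is no longer a concern.  */
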
/* Process attributes common for vars and functions.  */

static void
process_common_attributes (tree decl)
{
  tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));

  if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
    {
      warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
                  "%<weakref%> attribute should be accompanied with"
                  " an %<alias%> attribute");
      DECL_WEAK (decl) = 0;
      DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
                                                 DECL_ATTRIBUTES (decl));
    }
}

/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
                                          struct varpool_node *first_var)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  for (node = cgraph_nodes; node != first; node = node->next)
    {
      tree decl = node->decl;
      if (DECL_PRESERVE_P (decl))
        cgraph_mark_needed_node (node);
      if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
          && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
          && TREE_PUBLIC (node->decl))
        {
          if (node->local.finalized)
            cgraph_mark_needed_node (node);
        }
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
        {
          if (! TREE_PUBLIC (node->decl))
            warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
                        "%<externally_visible%>"
                        " attribute have effect only on public objects");
          else if (node->local.finalized)
            cgraph_mark_needed_node (node);
        }
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
          && (node->local.finalized && !node->alias))
        {
          warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
                      "%<weakref%> attribute ignored"
                      " because function is defined");
          DECL_WEAK (decl) = 0;
          DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
                                                     DECL_ATTRIBUTES (decl));
        }

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
          && !DECL_DECLARED_INLINE_P (decl)
          /* redefining extern inline function makes it DECL_UNINLINABLE.  */
          && !DECL_UNINLINABLE (decl))
        warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
                    "always_inline function might not be inlinable");

      process_common_attributes (decl);
    }
  for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
    {
      tree decl = vnode->decl;
      if (DECL_PRESERVE_P (decl))
        {
          vnode->force_output = true;
          if (vnode->finalized)
            varpool_mark_needed_node (vnode);
        }
      if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
          && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
          && TREE_PUBLIC (vnode->decl))
        {
          if (vnode->finalized)
            varpool_mark_needed_node (vnode);
        }
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
        {
          if (! TREE_PUBLIC (vnode->decl))
            warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
                        "%<externally_visible%>"
                        " attribute have effect only on public objects");
          else if (vnode->finalized)
            varpool_mark_needed_node (vnode);
        }
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
          && vnode->finalized
          && DECL_INITIAL (decl))
        {
          warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
                      "%<weakref%> attribute ignored"
                      " because variable is initialized");
          DECL_WEAK (decl) = 0;
          DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
                                                     DECL_ATTRIBUTES (decl));
        }
      process_common_attributes (decl);
    }
}

/* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and
   transitively each reachable function) and build the cgraph.
   The function can be called multiple times after inserting new nodes
   into the beginning of the queue.  Only the new part of the queue is
   re-scanned then.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  bitmap_obstack_initialize (NULL);
  process_function_and_variable_attributes (first_processed,
                                            first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
        if (node->needed)
          fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate the reachability flag and lower the representation of all
     reachable functions.  In the future, lowering will introduce new
     functions and new entry points on the way (by template instantiation
     and virtual method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create an extern inline function and later
         use the weak alias attribute to kill its body.  See
         gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl)
          && (!node->alias || !node->thunk.alias)
          && !node->thunk.thunk_p)
        {
          cgraph_reset_node (node);
          node->local.redefined_extern_inline = true;
          continue;
        }

      if (!node->analyzed)
        cgraph_analyze_function (node);

      for (edge = node->callees; edge; edge = edge->next_callee)
        if (!edge->callee->reachable)
          cgraph_mark_reachable_node (edge->callee);
      for (edge = node->callers; edge; edge = edge->next_caller)
        if (!edge->caller->reachable && edge->caller->thunk.thunk_p)
          cgraph_mark_reachable_node (edge->caller);

      if (node->same_comdat_group)
        {
          for (next = node->same_comdat_group;
               next != node;
               next = next->same_comdat_group)
            cgraph_mark_reachable_node (next);
        }

      /* If decl is a clone of an abstract function, mark that abstract
         function so that we don't release its body.  The DECL_INITIAL() of
         that abstract function declaration will be later needed to output
         debug info.  */
      if (DECL_ABSTRACT_ORIGIN (decl))
        {
          struct cgraph_node *origin_node;
          origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
          origin_node->abstract_and_needed = true;
        }

      /* We finalize local static variables during constructing callgraph
         edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
                                                first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
        if (node->needed)
          fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !gimple_has_body_p (decl)
          && (!node->alias || !node->thunk.alias)
          && !node->thunk.thunk_p)
        cgraph_reset_node (node);

      if (!node->reachable
          && (gimple_has_body_p (decl) || node->thunk.thunk_p
              || (node->alias && node->thunk.alias)))
        {
          if (cgraph_dump_file)
            fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
          cgraph_remove_node (node);
          continue;
        }
      else
        node->next_needed = NULL;
      gcc_assert (!node->local.finalized || node->thunk.thunk_p
                  || node->alias
                  || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  bitmap_obstack_release (NULL);
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}

/* Translate the ugly representation of aliases as alias pairs into a nice
   representation in the callgraph.  We don't handle all cases yet,
   unfortunately.  */

static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;
  struct cgraph_node *target_node;
  struct cgraph_node *src_node;
  struct varpool_node *target_vnode;

  for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p);)
    {
      if (TREE_CODE (p->decl) == FUNCTION_DECL
          && (target_node = cgraph_node_for_asm (p->target)) != NULL)
        {
          src_node = cgraph_get_node (p->decl);
          if (src_node && src_node->local.finalized)
            cgraph_reset_node (src_node);
          /* Normally the EXTERNAL flag is used to mark external inlines,
             however for aliases it seems to be allowed to use it w/o
             any meaning.  See gcc.dg/attr-alias-3.c
             However for weakref we insist on the EXTERNAL flag being set.
             See gcc.dg/attr-alias-5.c  */
          if (DECL_EXTERNAL (p->decl))
            DECL_EXTERNAL (p->decl) = lookup_attribute ("weakref",
                                                        DECL_ATTRIBUTES (p->decl)) != NULL;
          cgraph_create_function_alias (p->decl, target_node->decl);
          VEC_unordered_remove (alias_pair, alias_pairs, i);
        }
      else if (TREE_CODE (p->decl) == VAR_DECL
               && (target_vnode = varpool_node_for_asm (p->target)) != NULL)
        {
          /* Normally the EXTERNAL flag is used to mark external inlines,
             however for aliases it seems to be allowed to use it w/o
             any meaning.  See gcc.dg/attr-alias-3.c
             However for weakref we insist on the EXTERNAL flag being set.
             See gcc.dg/attr-alias-5.c  */
          if (DECL_EXTERNAL (p->decl))
            DECL_EXTERNAL (p->decl) = lookup_attribute ("weakref",
                                                        DECL_ATTRIBUTES (p->decl)) != NULL;
          varpool_create_variable_alias (p->decl, target_vnode->decl);
          VEC_unordered_remove (alias_pair, alias_pairs, i);
        }
      /* Weakrefs with a target not defined in the current unit are easy to
         handle; they behave just like external variables except we need to
         note the alias flag to later output the weakref pseudo op into the
         asm file.  */
      else if (lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL
               && (TREE_CODE (p->decl) == FUNCTION_DECL
                   ? (varpool_node_for_asm (p->target) == NULL)
                   : (cgraph_node_for_asm (p->target) == NULL)))
        {
          if (TREE_CODE (p->decl) == FUNCTION_DECL)
            cgraph_get_create_node (p->decl)->alias = true;
          else
            varpool_get_node (p->decl)->alias = true;
          DECL_EXTERNAL (p->decl) = 1;
          VEC_unordered_remove (alias_pair, alias_pairs, i);
        }
      else
        {
          if (dump_file)
            fprintf (dump_file, "Unhandled alias %s->%s\n",
                     IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
                     IDENTIFIER_POINTER (p->target));

          i++;
        }
    }
}

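/* A hedged illustration (not part of the original file) of the source level
   constructs that end up in the alias_pairs vector handled above:

     void impl (void) { }
     void alias_to_impl (void) __attribute__ ((alias ("impl")));
     static void weak_ref (void) __attribute__ ((weakref ("extern_target")));

   The first pair names a target defined in this unit and becomes a callgraph
   alias; the weakref whose target is not defined here only has its alias flag
   noted so the weakref pseudo op can be emitted into the asm file later.  */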

/* Analyze the whole compilation unit once it is parsed completely.  */

void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_lto)
    lto_streamer_hooks_init ();

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}


/* Figure out what functions we want to assemble.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->process);
#endif

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
        continue;

      for (e = node->callers; e; e = e->next_caller)
        if (e->inline_failed)
          break;

      /* We need to output all local functions that are used and not
         always inlined, as well as those that are reachable from
         outside the current compilation unit.  */
      if (node->analyzed
          && !node->thunk.thunk_p
          && !node->alias
          && !node->global.inlined_to
          && (!cgraph_only_called_directly_p (node)
              || ((e || ipa_ref_has_aliases_p (&node->ref_list))
                  && node->reachable))
          && !TREE_ASM_WRITTEN (decl)
          && !DECL_EXTERNAL (decl))
        {
          node->process = 1;
          if (node->same_comdat_group)
            {
              struct cgraph_node *next;
              for (next = node->same_comdat_group;
                   next != node;
                   next = next->same_comdat_group)
                if (!next->thunk.thunk_p && !next->alias)
                  next->process = 1;
            }
        }
      else if (node->same_comdat_group)
        {
#ifdef ENABLE_CHECKING
          check_same_comdat_groups = true;
#endif
        }
      else
        {
          /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
          if (!node->global.inlined_to
              && gimple_has_body_p (decl)
              /* FIXME: in an ltrans unit when the offline copy is outside a
                 partition but inline copies are inside a partition, we can
                 end up not removing the body since we no longer have an
                 analyzed node pointing to it.  */
              && !node->in_other_partition
              && !node->alias
              && !DECL_EXTERNAL (decl))
            {
              dump_cgraph_node (stderr, node);
              internal_error ("failed to reclaim unneeded function");
            }
#endif
          gcc_assert (node->global.inlined_to
                      || !gimple_has_body_p (decl)
                      || node->in_other_partition
                      || DECL_EXTERNAL (decl));

        }

    }
#ifdef ENABLE_CHECKING
  if (check_same_comdat_groups)
    for (node = cgraph_nodes; node; node = node->next)
      if (node->same_comdat_group && !node->process)
        {
          tree decl = node->decl;
          if (!node->global.inlined_to
              && gimple_has_body_p (decl)
              /* FIXME: in an ltrans unit when the offline copy is outside a
                 partition but inline copies are inside a partition, we can
                 end up not removing the body since we no longer have an
                 analyzed node pointing to it.  */
              && !node->in_other_partition
              && !DECL_EXTERNAL (decl))
            {
              dump_cgraph_node (stderr, node);
              internal_error ("failed to reclaim unneeded function in same "
                              "comdat group");
            }
        }
#endif
}

/* DECL is a FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form.

   Set current_function_decl and cfun to the newly constructed empty function
   body.  Return the basic block in the function body.  */

static basic_block
init_lowered_empty_function (tree decl)
{
  basic_block bb;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);
  init_ssa_operands ();
  cfun->gimple_df->in_ssa_p = true;
  DECL_INITIAL (decl) = make_node (BLOCK);

  DECL_SAVED_TREE (decl) = error_mark_node;
  cfun->curr_properties |=
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
     PROP_ssa | PROP_gimple_any);

  /* Create a BB for the body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
  make_edge (bb, EXIT_BLOCK_PTR, 0);

  return bb;
}

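/* A hedged illustration (not part of the original file): the classic consumer
   of thunk_adjust below is multiple inheritance in C++, e.g.

     struct A { virtual void f (); int a; };
     struct B { virtual void g (); int b; };
     struct C : A, B { virtual void g (); };

   Calling g through a B* that points into a C object requires shifting the
   `this' pointer back by the offset of the B subobject within C before the
   real C::g body runs; thunk_adjust emits that constant adjustment and, when
   the offset is only known through the vtable, the additional lookup.  */
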
1487 | /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable | |
1488 | offset indicated by VIRTUAL_OFFSET, if that is | |
1489 | non-null.  THIS_ADJUSTING is nonzero for a `this'-adjusting thunk and | |
1490 | zero for a result-adjusting thunk. */ | |
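/* A rough sketch of the computed adjustment (illustrative only; the
   authoritative definition is the GIMPLE built below).  For a
   `this'-adjusting thunk:

       adjusted = ptr + fixed_offset;
       if (virtual_offset)
         adjusted += *(ptrdiff_t *) (*(char **) adjusted + virtual_offset);

   A result-adjusting thunk applies the vcall offset first and the fixed
   offset last, mirroring the statement order below.  */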
1491 | ||
1492 | static tree | |
1493 | thunk_adjust (gimple_stmt_iterator * bsi, | |
1494 | tree ptr, bool this_adjusting, | |
1495 | HOST_WIDE_INT fixed_offset, tree virtual_offset) | |
1496 | { | |
1497 | gimple stmt; | |
1498 | tree ret; | |
1499 | ||
1500 | if (this_adjusting | |
1501 | && fixed_offset != 0) | |
1502 | { | |
1503 | stmt = gimple_build_assign | |
1504 | (ptr, fold_build_pointer_plus_hwi_loc (input_location, | |
1505 | ptr, | |
1506 | fixed_offset)); | |
1507 | gsi_insert_after (bsi, stmt, GSI_NEW_STMT); | |
1508 | } | |
1509 | ||
1510 | /* If there's a virtual offset, look up that value in the vtable and | |
1511 | adjust the pointer again. */ | |
1512 | if (virtual_offset) | |
1513 | { | |
1514 | tree vtabletmp; | |
1515 | tree vtabletmp2; | |
1516 | tree vtabletmp3; | |
1517 | ||
1518 | if (!vtable_entry_type) | |
1519 | { | |
1520 | tree vfunc_type = make_node (FUNCTION_TYPE); | |
1521 | TREE_TYPE (vfunc_type) = integer_type_node; | |
1522 | TYPE_ARG_TYPES (vfunc_type) = NULL_TREE; | |
1523 | layout_type (vfunc_type); | |
1524 | ||
1525 | vtable_entry_type = build_pointer_type (vfunc_type); | |
1526 | } | |
1527 | ||
1528 | vtabletmp = | |
1529 | create_tmp_var (build_pointer_type | |
1530 | (build_pointer_type (vtable_entry_type)), "vptr"); | |
1531 | ||
1532 | /* The vptr is always at offset zero in the object. */ | |
1533 | stmt = gimple_build_assign (vtabletmp, | |
1534 | build1 (NOP_EXPR, TREE_TYPE (vtabletmp), | |
1535 | ptr)); | |
1536 | gsi_insert_after (bsi, stmt, GSI_NEW_STMT); | |
1537 | mark_symbols_for_renaming (stmt); | |
1538 | find_referenced_vars_in (stmt); | |
1539 | ||
1540 | /* Form the vtable address. */ | |
1541 | vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)), | |
1542 | "vtableaddr"); | |
1543 | stmt = gimple_build_assign (vtabletmp2, | |
1544 | build_simple_mem_ref (vtabletmp)); | |
1545 | gsi_insert_after (bsi, stmt, GSI_NEW_STMT); | |
1546 | mark_symbols_for_renaming (stmt); | |
1547 | find_referenced_vars_in (stmt); | |
1548 | ||
1549 | /* Find the entry with the vcall offset. */ | |
1550 | stmt = gimple_build_assign (vtabletmp2, | |
1551 | fold_build_pointer_plus_loc (input_location, | |
1552 | vtabletmp2, | |
1553 | virtual_offset)); | |
1554 | gsi_insert_after (bsi, stmt, GSI_NEW_STMT); | |
1555 | ||
1556 | /* Get the offset itself. */ | |
1557 | vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)), | |
1558 | "vcalloffset"); | |
1559 | stmt = gimple_build_assign (vtabletmp3, | |
1560 | build_simple_mem_ref (vtabletmp2)); | |
1561 | gsi_insert_after (bsi, stmt, GSI_NEW_STMT); | |
1562 | mark_symbols_for_renaming (stmt); | |
1563 | find_referenced_vars_in (stmt); | |
1564 | ||
1565 | /* Adjust the `this' pointer. */ | |
1566 | ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3); | |
1567 | ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false, | |
1568 | GSI_CONTINUE_LINKING); | |
1569 | } | |
1570 | ||
1571 | if (!this_adjusting | |
1572 | && fixed_offset != 0) | |
1573 | /* Adjust the pointer by the constant. */ | |
1574 | { | |
1575 | tree ptrtmp; | |
1576 | ||
1577 | if (TREE_CODE (ptr) == VAR_DECL) | |
1578 | ptrtmp = ptr; | |
1579 | else | |
1580 | { | |
1581 | ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr"); | |
1582 | stmt = gimple_build_assign (ptrtmp, ptr); | |
1583 | gsi_insert_after (bsi, stmt, GSI_NEW_STMT); | |
1584 | mark_symbols_for_renaming (stmt); | |
1585 | find_referenced_vars_in (stmt); | |
1586 | } | |
1587 | ptr = fold_build_pointer_plus_hwi_loc (input_location, | |
1588 | ptrtmp, fixed_offset); | |
1589 | } | |
1590 | ||
1591 | /* Emit the statement and gimplify the adjustment expression. */ | |
1592 | ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this"); | |
1593 | stmt = gimple_build_assign (ret, ptr); | |
1594 | mark_symbols_for_renaming (stmt); | |
1595 | find_referenced_vars_in (stmt); | |
1596 | gsi_insert_after (bsi, stmt, GSI_NEW_STMT); | |
1597 | ||
1598 | return ret; | |
1599 | } | |
1600 | ||
1601 | /* Produce assembler for thunk NODE. */ | |
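/* As a hedged illustration (not taken from these sources), a typical
   `this'-adjusting thunk arises from C++ multiple inheritance:

       struct A { virtual void f (); };
       struct B { virtual void g (); };
       struct C : A, B { void g (); };

   Calling C::g through a B* needs an entry point that first subtracts the
   offset of the B subobject within C from `this' and then transfers
   control to the real body of C::g.  */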
1602 | ||
1603 | static void | |
1604 | assemble_thunk (struct cgraph_node *node) | |
1605 | { | |
1606 | bool this_adjusting = node->thunk.this_adjusting; | |
1607 | HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset; | |
1608 | HOST_WIDE_INT virtual_value = node->thunk.virtual_value; | |
1609 | tree virtual_offset = NULL; | |
1610 | tree alias = node->thunk.alias; | |
1611 | tree thunk_fndecl = node->decl; | |
1612 | tree a = DECL_ARGUMENTS (thunk_fndecl); | |
1613 | ||
1614 | current_function_decl = thunk_fndecl; | |
1615 | ||
1616 | /* Ensure thunks are emitted in their correct sections. */ | |
1617 | resolve_unique_section (thunk_fndecl, 0, flag_function_sections); | |
1618 | ||
1619 | if (this_adjusting | |
1620 | && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset, | |
1621 | virtual_value, alias)) | |
1622 | { | |
1623 | const char *fnname; | |
1624 | tree fn_block; | |
1625 | tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl)); | |
1626 | ||
1627 | DECL_RESULT (thunk_fndecl) | |
1628 | = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl), | |
1629 | RESULT_DECL, 0, restype); | |
1630 | fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl)); | |
1631 | ||
1632 | /* The back end expects DECL_INITIAL to contain a BLOCK, so we | |
1633 | create one. */ | |
1634 | fn_block = make_node (BLOCK); | |
1635 | BLOCK_VARS (fn_block) = a; | |
1636 | DECL_INITIAL (thunk_fndecl) = fn_block; | |
1637 | init_function_start (thunk_fndecl); | |
1638 | cfun->is_thunk = 1; | |
1639 | assemble_start_function (thunk_fndecl, fnname); | |
1640 | ||
1641 | targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl, | |
1642 | fixed_offset, virtual_value, alias); | |
1643 | ||
1644 | assemble_end_function (thunk_fndecl, fnname); | |
1645 | init_insn_lengths (); | |
1646 | free_after_compilation (cfun); | |
1647 | set_cfun (NULL); | |
1648 | TREE_ASM_WRITTEN (thunk_fndecl) = 1; | |
1649 | node->thunk.thunk_p = false; | |
1650 | node->analyzed = false; | |
1651 | } | |
1652 | else | |
1653 | { | |
1654 | tree restype; | |
1655 | basic_block bb, then_bb, else_bb, return_bb; | |
1656 | gimple_stmt_iterator bsi; | |
1657 | int nargs = 0; | |
1658 | tree arg; | |
1659 | int i; | |
1660 | tree resdecl; | |
1661 | tree restmp = NULL; | |
1662 | VEC(tree, heap) *vargs; | |
1663 | ||
1664 | gimple call; | |
1665 | gimple ret; | |
1666 | ||
1667 | DECL_IGNORED_P (thunk_fndecl) = 1; | |
1668 | bitmap_obstack_initialize (NULL); | |
1669 | ||
1670 | if (node->thunk.virtual_offset_p) | |
1671 | virtual_offset = size_int (virtual_value); | |
1672 | ||
1673 | /* Build the return declaration for the function. */ | |
1674 | restype = TREE_TYPE (TREE_TYPE (thunk_fndecl)); | |
1675 | if (DECL_RESULT (thunk_fndecl) == NULL_TREE) | |
1676 | { | |
1677 | resdecl = build_decl (input_location, RESULT_DECL, 0, restype); | |
1678 | DECL_ARTIFICIAL (resdecl) = 1; | |
1679 | DECL_IGNORED_P (resdecl) = 1; | |
1680 | DECL_RESULT (thunk_fndecl) = resdecl; | |
1681 | } | |
1682 | else | |
1683 | resdecl = DECL_RESULT (thunk_fndecl); | |
1684 | ||
1685 | bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl); | |
1686 | ||
1687 | bsi = gsi_start_bb (bb); | |
1688 | ||
1689 | /* Build call to the function being thunked. */ | |
1690 | if (!VOID_TYPE_P (restype)) | |
1691 | { | |
1692 | if (!is_gimple_reg_type (restype)) | |
1693 | { | |
1694 | restmp = resdecl; | |
1695 | add_local_decl (cfun, restmp); | |
1696 | BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp; | |
1697 | } | |
1698 | else | |
1699 | restmp = create_tmp_var_raw (restype, "retval"); | |
1700 | } | |
1701 | ||
1702 | for (arg = a; arg; arg = DECL_CHAIN (arg)) | |
1703 | nargs++; | |
1704 | vargs = VEC_alloc (tree, heap, nargs); | |
1705 | if (this_adjusting) | |
1706 | VEC_quick_push (tree, vargs, | |
1707 | thunk_adjust (&bsi, | |
1708 | a, 1, fixed_offset, | |
1709 | virtual_offset)); | |
1710 | else | |
1711 | VEC_quick_push (tree, vargs, a); | |
1712 | for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg)) | |
1713 | VEC_quick_push (tree, vargs, arg); | |
1714 | call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs); | |
1715 | VEC_free (tree, heap, vargs); | |
1716 | gimple_call_set_from_thunk (call, true); | |
1717 | if (restmp) | |
1718 | gimple_call_set_lhs (call, restmp); | |
1719 | gsi_insert_after (&bsi, call, GSI_NEW_STMT); | |
1720 | mark_symbols_for_renaming (call); | |
1721 | find_referenced_vars_in (call); | |
1722 | update_stmt (call); | |
1723 | ||
1724 | if (restmp && !this_adjusting) | |
1725 | { | |
1726 | tree true_label = NULL_TREE; | |
1727 | ||
1728 | if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE) | |
1729 | { | |
1730 | gimple stmt; | |
1731 | /* If the return type is a pointer, we need to | |
1732 | protect against NULL. We know there will be an | |
1733 | adjustment, because that's why we're emitting a | |
1734 | thunk. */ | |
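/* Illustrative only: the blocks built here correspond to

       retval = call (...);
       if (retval != 0)
         retval = retval + <adjustment>;
       else
         retval = 0;
       return retval;  */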
1735 | then_bb = create_basic_block (NULL, (void *) 0, bb); | |
1736 | return_bb = create_basic_block (NULL, (void *) 0, then_bb); | |
1737 | else_bb = create_basic_block (NULL, (void *) 0, else_bb); | |
1738 | remove_edge (single_succ_edge (bb)); | |
1739 | true_label = gimple_block_label (then_bb); | |
1740 | stmt = gimple_build_cond (NE_EXPR, restmp, | |
1741 | build_zero_cst (TREE_TYPE (restmp)), | |
1742 | NULL_TREE, NULL_TREE); | |
1743 | gsi_insert_after (&bsi, stmt, GSI_NEW_STMT); | |
1744 | make_edge (bb, then_bb, EDGE_TRUE_VALUE); | |
1745 | make_edge (bb, else_bb, EDGE_FALSE_VALUE); | |
1746 | make_edge (return_bb, EXIT_BLOCK_PTR, 0); | |
1747 | make_edge (then_bb, return_bb, EDGE_FALLTHRU); | |
1748 | make_edge (else_bb, return_bb, EDGE_FALLTHRU); | |
1749 | bsi = gsi_last_bb (then_bb); | |
1750 | } | |
1751 | ||
1752 | restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0, | |
1753 | fixed_offset, virtual_offset); | |
1754 | if (true_label) | |
1755 | { | |
1756 | gimple stmt; | |
1757 | bsi = gsi_last_bb (else_bb); | |
1758 | stmt = gimple_build_assign (restmp, | |
1759 | build_zero_cst (TREE_TYPE (restmp))); | |
1760 | gsi_insert_after (&bsi, stmt, GSI_NEW_STMT); | |
1761 | bsi = gsi_last_bb (return_bb); | |
1762 | } | |
1763 | } | |
1764 | else | |
1765 | gimple_call_set_tail (call, true); | |
1766 | ||
1767 | /* Build return value. */ | |
1768 | ret = gimple_build_return (restmp); | |
1769 | gsi_insert_after (&bsi, ret, GSI_NEW_STMT); | |
1770 | ||
1771 | delete_unreachable_blocks (); | |
1772 | update_ssa (TODO_update_ssa); | |
1773 | ||
1774 | /* Since we want to emit the thunk, we explicitly mark its name as | |
1775 | referenced. */ | |
1776 | node->thunk.thunk_p = false; | |
1777 | cgraph_node_remove_callees (node); | |
1778 | cgraph_add_new_function (thunk_fndecl, true); | |
1779 | bitmap_obstack_release (NULL); | |
1780 | } | |
1781 | current_function_decl = NULL; | |
1782 | } | |
1783 | ||
1784 | ||
1785 | ||
1786 | /* Assemble thunks and aliases associated with NODE. */ | |
1787 | ||
1788 | static void | |
1789 | assemble_thunks_and_aliases (struct cgraph_node *node) | |
1790 | { | |
1791 | struct cgraph_edge *e; | |
1792 | int i; | |
1793 | struct ipa_ref *ref; | |
1794 | ||
1795 | for (e = node->callers; e;) | |
1796 | if (e->caller->thunk.thunk_p) | |
1797 | { | |
1798 | struct cgraph_node *thunk = e->caller; | |
1799 | ||
1800 | e = e->next_caller; | |
1801 | assemble_thunks_and_aliases (thunk); | |
1802 | assemble_thunk (thunk); | |
1803 | } | |
1804 | else | |
1805 | e = e->next_caller; | |
1806 | for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++) | |
1807 | if (ref->use == IPA_REF_ALIAS) | |
1808 | { | |
1809 | struct cgraph_node *alias = ipa_ref_refering_node (ref); | |
1810 | bool saved_written = TREE_ASM_WRITTEN (alias->thunk.alias); | |
1811 | ||
1812 | /* Force assemble_alias to really output the alias this time instead | |
1813 | of buffering it in the alias pairs list. */ | |
1814 | TREE_ASM_WRITTEN (alias->thunk.alias) = 1; | |
1815 | assemble_alias (alias->decl, | |
1816 | DECL_ASSEMBLER_NAME (alias->thunk.alias)); | |
1817 | assemble_thunks_and_aliases (alias); | |
1818 | TREE_ASM_WRITTEN (alias->thunk.alias) = saved_written; | |
1819 | } | |
1820 | } | |
1821 | ||
1822 | /* Expand function specified by NODE. */ | |
1823 | ||
1824 | static void | |
1825 | cgraph_expand_function (struct cgraph_node *node) | |
1826 | { | |
1827 | tree decl = node->decl; | |
1828 | ||
1829 | /* We ought not to compile any inline clones. */ | |
1830 | gcc_assert (!node->global.inlined_to); | |
1831 | ||
1832 | announce_function (decl); | |
1833 | node->process = 0; | |
1834 | gcc_assert (node->lowered); | |
1835 | ||
1836 | /* Generate RTL for the body of DECL. */ | |
1837 | tree_rest_of_compilation (decl); | |
1838 | ||
1839 | /* Make sure that the back end did not give up on compiling. */ | |
1840 | gcc_assert (TREE_ASM_WRITTEN (decl)); | |
1841 | current_function_decl = NULL; | |
1842 | gcc_assert (!cgraph_preserve_function_body_p (node)); | |
1843 | ||
1844 | /* It would make a lot more sense to output thunks before the function body to get | |
1845 | more forward and fewer backward jumps.  However, this would require solving the | |
1846 | problem with comdats.  See PR48668.  Also, aliases must come after the function | |
1847 | itself to keep one-pass assemblers, like the one on AIX, happy.  See PR 50689. | |
1848 | FIXME: Perhaps thunks should be moved before the function IFF they are not in | |
1849 | comdat groups. */ | |
1850 | assemble_thunks_and_aliases (node); | |
1851 | cgraph_release_function_body (node); | |
1852 | /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer | |
1853 | points to the dead function body. */ | |
1854 | cgraph_node_remove_callees (node); | |
1855 | ||
1856 | cgraph_function_flags_ready = true; | |
1857 | } | |
1858 | ||
1859 | /* Set *REASON to edge E's inline-failed code and return true when the call is to be inlined. */ | |
1860 | ||
1861 | bool | |
1862 | cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason) | |
1863 | { | |
1864 | *reason = e->inline_failed; | |
1865 | return !e->inline_failed; | |
1866 | } | |
1867 | ||
1868 | ||
1869 | ||
1870 | /* Expand all functions that must be output. | |
1871 | ||
1872 | Attempt to topologically sort the nodes so that a function is output when | |
1873 | all the functions it calls have already been assembled, allowing data to be | |
1874 | propagated across the callgraph.  Use a stack to get smaller distance | |
1875 | between a function and its callees (later we may choose to use a more | |
1876 | sophisticated algorithm for function reordering; we will likely want | |
1877 | to use subsections to make the output functions appear in top-down | |
1878 | order). */ | |
1879 | ||
1880 | static void | |
1881 | cgraph_expand_all_functions (void) | |
1882 | { | |
1883 | struct cgraph_node *node; | |
1884 | struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes); | |
1885 | int order_pos, new_order_pos = 0; | |
1886 | int i; | |
1887 | ||
1888 | order_pos = ipa_reverse_postorder (order); | |
1889 | gcc_assert (order_pos == cgraph_n_nodes); | |
1890 | ||
1891 | /* The garbage collector may remove inline clones we eliminate during | |
1892 | optimization, so we must be sure not to reference them. */ | |
1893 | for (i = 0; i < order_pos; i++) | |
1894 | if (order[i]->process) | |
1895 | order[new_order_pos++] = order[i]; | |
1896 | ||
1897 | for (i = new_order_pos - 1; i >= 0; i--) | |
1898 | { | |
1899 | node = order[i]; | |
1900 | if (node->process) | |
1901 | { | |
1902 | gcc_assert (node->reachable); | |
1903 | node->process = 0; | |
1904 | cgraph_expand_function (node); | |
1905 | } | |
1906 | } | |
1907 | cgraph_process_new_functions (); | |
1908 | ||
1909 | free (order); | |
1910 | ||
1911 | } | |
1912 | ||
1913 | /* This is used to sort the node types by the cgraph order number. */ | |
1914 | ||
1915 | enum cgraph_order_sort_kind | |
1916 | { | |
1917 | ORDER_UNDEFINED = 0, | |
1918 | ORDER_FUNCTION, | |
1919 | ORDER_VAR, | |
1920 | ORDER_ASM | |
1921 | }; | |
1922 | ||
1923 | struct cgraph_order_sort | |
1924 | { | |
1925 | enum cgraph_order_sort_kind kind; | |
1926 | union | |
1927 | { | |
1928 | struct cgraph_node *f; | |
1929 | struct varpool_node *v; | |
1930 | struct cgraph_asm_node *a; | |
1931 | } u; | |
1932 | }; | |
1933 | ||
1934 | /* Output all functions, variables, and asm statements in the order | |
1935 | given by their order fields, which is the order in which they | |
1936 | appeared in the file. This implements -fno-toplevel-reorder. In | |
1937 | this mode we may output functions and variables which don't really | |
1938 | need to be output. */ | |
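/* For example (illustrative only), under -fno-toplevel-reorder the unit

       int counter = 0;
       asm ("# configuration marker");
       int bump (void) { return ++counter; }

   is emitted with the variable, the toplevel asm and the function in
   exactly this source order, as recorded in their `order' fields.  */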
1939 | ||
1940 | static void | |
1941 | cgraph_output_in_order (void) | |
1942 | { | |
1943 | int max; | |
1944 | struct cgraph_order_sort *nodes; | |
1945 | int i; | |
1946 | struct cgraph_node *pf; | |
1947 | struct varpool_node *pv; | |
1948 | struct cgraph_asm_node *pa; | |
1949 | ||
1950 | max = cgraph_order; | |
1951 | nodes = XCNEWVEC (struct cgraph_order_sort, max); | |
1952 | ||
1953 | varpool_analyze_pending_decls (); | |
1954 | ||
1955 | for (pf = cgraph_nodes; pf; pf = pf->next) | |
1956 | { | |
1957 | if (pf->process && !pf->thunk.thunk_p && !pf->alias) | |
1958 | { | |
1959 | i = pf->order; | |
1960 | gcc_assert (nodes[i].kind == ORDER_UNDEFINED); | |
1961 | nodes[i].kind = ORDER_FUNCTION; | |
1962 | nodes[i].u.f = pf; | |
1963 | } | |
1964 | } | |
1965 | ||
1966 | for (pv = varpool_nodes_queue; pv; pv = pv->next_needed) | |
1967 | { | |
1968 | i = pv->order; | |
1969 | gcc_assert (nodes[i].kind == ORDER_UNDEFINED); | |
1970 | nodes[i].kind = ORDER_VAR; | |
1971 | nodes[i].u.v = pv; | |
1972 | } | |
1973 | ||
1974 | for (pa = cgraph_asm_nodes; pa; pa = pa->next) | |
1975 | { | |
1976 | i = pa->order; | |
1977 | gcc_assert (nodes[i].kind == ORDER_UNDEFINED); | |
1978 | nodes[i].kind = ORDER_ASM; | |
1979 | nodes[i].u.a = pa; | |
1980 | } | |
1981 | ||
1982 | /* In toplevel reorder mode we output all statics; mark them as needed. */ | |
1983 | for (i = 0; i < max; ++i) | |
1984 | { | |
1985 | if (nodes[i].kind == ORDER_VAR) | |
1986 | { | |
1987 | varpool_mark_needed_node (nodes[i].u.v); | |
1988 | } | |
1989 | } | |
1990 | varpool_empty_needed_queue (); | |
1991 | ||
1992 | for (i = 0; i < max; ++i) | |
1993 | if (nodes[i].kind == ORDER_VAR) | |
1994 | varpool_finalize_named_section_flags (nodes[i].u.v); | |
1995 | ||
1996 | for (i = 0; i < max; ++i) | |
1997 | { | |
1998 | switch (nodes[i].kind) | |
1999 | { | |
2000 | case ORDER_FUNCTION: | |
2001 | nodes[i].u.f->process = 0; | |
2002 | cgraph_expand_function (nodes[i].u.f); | |
2003 | break; | |
2004 | ||
2005 | case ORDER_VAR: | |
2006 | varpool_assemble_decl (nodes[i].u.v); | |
2007 | break; | |
2008 | ||
2009 | case ORDER_ASM: | |
2010 | assemble_asm (nodes[i].u.a->asm_str); | |
2011 | break; | |
2012 | ||
2013 | case ORDER_UNDEFINED: | |
2014 | break; | |
2015 | ||
2016 | default: | |
2017 | gcc_unreachable (); | |
2018 | } | |
2019 | } | |
2020 | ||
2021 | cgraph_asm_nodes = NULL; | |
2022 | free (nodes); | |
2023 | } | |
2024 | ||
2025 | /* Return true when the function body of NODE still needs to be kept around | |
2026 | for later re-use. */ | |
2027 | bool | |
2028 | cgraph_preserve_function_body_p (struct cgraph_node *node) | |
2029 | { | |
2030 | gcc_assert (cgraph_global_info_ready); | |
2031 | gcc_assert (!node->alias && !node->thunk.thunk_p); | |
2032 | ||
2033 | /* Look if there is any clone around. */ | |
2034 | if (node->clones) | |
2035 | return true; | |
2036 | return false; | |
2037 | } | |
2038 | ||
2039 | static void | |
2040 | ipa_passes (void) | |
2041 | { | |
2042 | set_cfun (NULL); | |
2043 | current_function_decl = NULL; | |
2044 | gimple_register_cfg_hooks (); | |
2045 | bitmap_obstack_initialize (NULL); | |
2046 | ||
2047 | invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL); | |
2048 | ||
2049 | if (!in_lto_p) | |
2050 | { | |
2051 | execute_ipa_pass_list (all_small_ipa_passes); | |
2052 | if (seen_error ()) | |
2053 | return; | |
2054 | } | |
2055 | ||
2056 | /* We never run removal of unreachable nodes after early passes. This is | |
2057 | because TODO is run before the subpasses. It is important to remove | |
2058 | the unreachable functions to save work at the IPA level and to get LTO | |
2059 | symbol tables right. */ | |
2060 | cgraph_remove_unreachable_nodes (true, cgraph_dump_file); | |
2061 | ||
2062 | /* If pass_all_early_optimizations was not scheduled, the state of | |
2063 | the cgraph will not be properly updated. Update it now. */ | |
2064 | if (cgraph_state < CGRAPH_STATE_IPA_SSA) | |
2065 | cgraph_state = CGRAPH_STATE_IPA_SSA; | |
2066 | ||
2067 | if (!in_lto_p) | |
2068 | { | |
2069 | /* Generate coverage variables and constructors. */ | |
2070 | coverage_finish (); | |
2071 | ||
2072 | /* Process new functions added. */ | |
2073 | set_cfun (NULL); | |
2074 | current_function_decl = NULL; | |
2075 | cgraph_process_new_functions (); | |
2076 | ||
2077 | execute_ipa_summary_passes | |
2078 | ((struct ipa_opt_pass_d *) all_regular_ipa_passes); | |
2079 | } | |
2080 | ||
2081 | /* Some targets need to handle LTO assembler output specially. */ | |
2082 | if (flag_generate_lto) | |
2083 | targetm.asm_out.lto_start (); | |
2084 | ||
2085 | execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes); | |
2086 | ||
2087 | if (!in_lto_p) | |
2088 | ipa_write_summaries (); | |
2089 | ||
2090 | if (flag_generate_lto) | |
2091 | targetm.asm_out.lto_end (); | |
2092 | ||
2093 | if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects)) | |
2094 | execute_ipa_pass_list (all_regular_ipa_passes); | |
2095 | invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL); | |
2096 | ||
2097 | bitmap_obstack_release (NULL); | |
2098 | } | |
2099 | ||
2100 | ||
2101 | /* Return the identifier of the symbol that DECL's "alias" attribute names. */ | |
2102 | ||
2103 | static tree | |
2104 | get_alias_symbol (tree decl) | |
2105 | { | |
2106 | tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl)); | |
2107 | return get_identifier (TREE_STRING_POINTER | |
2108 | (TREE_VALUE (TREE_VALUE (alias)))); | |
2109 | } | |
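/* Illustrative example (assumed here, not part of the original sources):
   given

       int impl (void) { return 0; }
       int api (void) __attribute__ ((alias ("impl")));

   applying get_alias_symbol to the decl of `api' yields the identifier
   "impl".  */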
2110 | ||
2111 | ||
2112 | /* Weakrefs may be associated with external decls and thus not output | |
2113 | at expansion time.  Emit all necessary aliases. */ | |
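/* For instance (a hedged illustration only), a declaration such as

       static void helper (void) __attribute__ ((weakref ("real_helper")));

   may never be expanded at all, so its alias still has to be emitted
   here.  */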
2114 | ||
2115 | static void | |
2116 | output_weakrefs (void) | |
2117 | { | |
2118 | struct cgraph_node *node; | |
2119 | struct varpool_node *vnode; | |
2120 | for (node = cgraph_nodes; node; node = node->next) | |
2121 | if (node->alias && DECL_EXTERNAL (node->decl) | |
2122 | && !TREE_ASM_WRITTEN (node->decl) | |
2123 | && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl))) | |
2124 | assemble_alias (node->decl, | |
2125 | node->thunk.alias ? DECL_ASSEMBLER_NAME (node->thunk.alias) | |
2126 | : get_alias_symbol (node->decl)); | |
2127 | for (vnode = varpool_nodes; vnode; vnode = vnode->next) | |
2128 | if (vnode->alias && DECL_EXTERNAL (vnode->decl) | |
2129 | && !TREE_ASM_WRITTEN (vnode->decl) | |
2130 | && lookup_attribute ("weakref", DECL_ATTRIBUTES (vnode->decl))) | |
2131 | assemble_alias (vnode->decl, | |
2132 | vnode->alias_of ? DECL_ASSEMBLER_NAME (vnode->alias_of) | |
2133 | : get_alias_symbol (vnode->decl)); | |
2134 | } | |
2135 | ||
2136 | ||
2137 | /* Perform simple optimizations based on callgraph. */ | |
2138 | ||
2139 | void | |
2140 | cgraph_optimize (void) | |
2141 | { | |
2142 | if (seen_error ()) | |
2143 | return; | |
2144 | ||
2145 | #ifdef ENABLE_CHECKING | |
2146 | verify_cgraph (); | |
2147 | #endif | |
2148 | ||
2149 | /* The frontend may output common variables after the unit has been finalized. | |
2150 | It is safe to deal with them here as they are always zero-initialized. */ | |
2151 | varpool_analyze_pending_decls (); | |
2152 | ||
2153 | timevar_push (TV_CGRAPHOPT); | |
2154 | if (pre_ipa_mem_report) | |
2155 | { | |
2156 | fprintf (stderr, "Memory consumption before IPA\n"); | |
2157 | dump_memory_report (false); | |
2158 | } | |
2159 | if (!quiet_flag) | |
2160 | fprintf (stderr, "Performing interprocedural optimizations\n"); | |
2161 | cgraph_state = CGRAPH_STATE_IPA; | |
2162 | ||
2163 | /* Don't run the IPA passes if there was any error or sorry messages. */ | |
2164 | if (!seen_error ()) | |
2165 | ipa_passes (); | |
2166 | ||
2167 | /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */ | |
2168 | if (seen_error () | |
2169 | || (!in_lto_p && flag_lto && !flag_fat_lto_objects)) | |
2170 | { | |
2171 | timevar_pop (TV_CGRAPHOPT); | |
2172 | return; | |
2173 | } | |
2174 | ||
2175 | /* This pass removes bodies of extern inline functions we never inlined. | |
2176 | Do this later so other IPA passes see what is really going on. */ | |
2177 | cgraph_remove_unreachable_nodes (false, dump_file); | |
2178 | cgraph_global_info_ready = true; | |
2179 | if (cgraph_dump_file) | |
2180 | { | |
2181 | fprintf (cgraph_dump_file, "Optimized "); | |
2182 | dump_cgraph (cgraph_dump_file); | |
2183 | dump_varpool (cgraph_dump_file); | |
2184 | } | |
2185 | if (post_ipa_mem_report) | |
2186 | { | |
2187 | fprintf (stderr, "Memory consumption after IPA\n"); | |
2188 | dump_memory_report (false); | |
2189 | } | |
2190 | timevar_pop (TV_CGRAPHOPT); | |
2191 | ||
2192 | /* Output everything. */ | |
2193 | (*debug_hooks->assembly_start) (); | |
2194 | if (!quiet_flag) | |
2195 | fprintf (stderr, "Assembling functions:\n"); | |
2196 | #ifdef ENABLE_CHECKING | |
2197 | verify_cgraph (); | |
2198 | #endif | |
2199 | ||
2200 | cgraph_materialize_all_clones (); | |
2201 | bitmap_obstack_initialize (NULL); | |
2202 | execute_ipa_pass_list (all_late_ipa_passes); | |
2203 | cgraph_remove_unreachable_nodes (true, dump_file); | |
2204 | #ifdef ENABLE_CHECKING | |
2205 | verify_cgraph (); | |
2206 | #endif | |
2207 | bitmap_obstack_release (NULL); | |
2208 | cgraph_mark_functions_to_output (); | |
2209 | output_weakrefs (); | |
2210 | ||
2211 | cgraph_state = CGRAPH_STATE_EXPANSION; | |
2212 | if (!flag_toplevel_reorder) | |
2213 | cgraph_output_in_order (); | |
2214 | else | |
2215 | { | |
2216 | cgraph_output_pending_asms (); | |
2217 | ||
2218 | cgraph_expand_all_functions (); | |
2219 | varpool_remove_unreferenced_decls (); | |
2220 | ||
2221 | varpool_assemble_pending_decls (); | |
2222 | } | |
2223 | ||
2224 | cgraph_process_new_functions (); | |
2225 | cgraph_state = CGRAPH_STATE_FINISHED; | |
2226 | ||
2227 | if (cgraph_dump_file) | |
2228 | { | |
2229 | fprintf (cgraph_dump_file, "\nFinal "); | |
2230 | dump_cgraph (cgraph_dump_file); | |
2231 | dump_varpool (cgraph_dump_file); | |
2232 | } | |
2233 | #ifdef ENABLE_CHECKING | |
2234 | verify_cgraph (); | |
2235 | /* Double check that all inline clones are gone and that all | |
2236 | function bodies have been released from memory. */ | |
2237 | if (!seen_error ()) | |
2238 | { | |
2239 | struct cgraph_node *node; | |
2240 | bool error_found = false; | |
2241 | ||
2242 | for (node = cgraph_nodes; node; node = node->next) | |
2243 | if (node->analyzed | |
2244 | && (node->global.inlined_to | |
2245 | || gimple_has_body_p (node->decl))) | |
2246 | { | |
2247 | error_found = true; | |
2248 | dump_cgraph_node (stderr, node); | |
2249 | } | |
2250 | if (error_found) | |
2251 | internal_error ("nodes with unreleased memory found"); | |
2252 | } | |
2253 | #endif | |
2254 | } | |
2255 | ||
2256 | void | |
2257 | init_cgraph (void) | |
2258 | { | |
2259 | if (!cgraph_dump_file) | |
2260 | cgraph_dump_file = dump_begin (TDI_cgraph, NULL); | |
2261 | } | |
2262 | ||
2263 | /* The edges representing the callers of the NEW_VERSION node were | |
2264 | fixed by cgraph_function_versioning (); now the call statements in | |
2265 | their respective bodies should be updated to call NEW_VERSION. */ | |
2266 | ||
2267 | static void | |
2268 | update_call_expr (struct cgraph_node *new_version) | |
2269 | { | |
2270 | struct cgraph_edge *e; | |
2271 | ||
2272 | gcc_assert (new_version); | |
2273 | ||
2274 | /* Update the call expr on the edges to call the new version. */ | |
2275 | for (e = new_version->callers; e; e = e->next_caller) | |
2276 | { | |
2277 | struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl); | |
2278 | gimple_call_set_fndecl (e->call_stmt, new_version->decl); | |
2279 | maybe_clean_eh_stmt_fn (inner_function, e->call_stmt); | |
2280 | } | |
2281 | } | |
2282 | ||
2283 | ||
2284 | /* Create a new cgraph node which is the new version of | |
2285 | the OLD_VERSION node. REDIRECT_CALLERS holds the caller | |
2286 | edges which should be redirected to point to | |
2287 | NEW_VERSION. All the callee edges of OLD_VERSION | |
2288 | are cloned to the new version node. Return the new | |
2289 | version node. | |
2290 | ||
2291 | If non-NULL, BBS_TO_COPY determines which basic blocks | |
2292 | are copied, to prevent duplication of calls that are dead | |
2293 | in the clone. */ | |
2294 | ||
2295 | struct cgraph_node * | |
2296 | cgraph_copy_node_for_versioning (struct cgraph_node *old_version, | |
2297 | tree new_decl, | |
2298 | VEC(cgraph_edge_p,heap) *redirect_callers, | |
2299 | bitmap bbs_to_copy) | |
2300 | { | |
2301 | struct cgraph_node *new_version; | |
2302 | struct cgraph_edge *e; | |
2303 | unsigned i; | |
2304 | ||
2305 | gcc_assert (old_version); | |
2306 | ||
2307 | new_version = cgraph_create_node (new_decl); | |
2308 | ||
2309 | new_version->analyzed = old_version->analyzed; | |
2310 | new_version->local = old_version->local; | |
2311 | new_version->local.externally_visible = false; | |
2312 | new_version->local.local = true; | |
2313 | new_version->global = old_version->global; | |
2314 | new_version->rtl = old_version->rtl; | |
2315 | new_version->reachable = true; | |
2316 | new_version->count = old_version->count; | |
2317 | ||
2318 | for (e = old_version->callees; e; e=e->next_callee) | |
2319 | if (!bbs_to_copy | |
2320 | || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index)) | |
2321 | cgraph_clone_edge (e, new_version, e->call_stmt, | |
2322 | e->lto_stmt_uid, REG_BR_PROB_BASE, | |
2323 | CGRAPH_FREQ_BASE, | |
2324 | true); | |
2325 | for (e = old_version->indirect_calls; e; e=e->next_callee) | |
2326 | if (!bbs_to_copy | |
2327 | || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index)) | |
2328 | cgraph_clone_edge (e, new_version, e->call_stmt, | |
2329 | e->lto_stmt_uid, REG_BR_PROB_BASE, | |
2330 | CGRAPH_FREQ_BASE, | |
2331 | true); | |
2332 | FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e) | |
2333 | { | |
2334 | /* Redirect calls to the old version node to point to its new | |
2335 | version. */ | |
2336 | cgraph_redirect_edge_callee (e, new_version); | |
2337 | } | |
2338 | ||
2339 | cgraph_call_node_duplication_hooks (old_version, new_version); | |
2340 | ||
2341 | return new_version; | |
2342 | } | |
2343 | ||
2344 | /* Perform function versioning. | |
2345 | Function versioning includes copying of the tree and | |
2346 | a callgraph update (creating a new cgraph node and updating | |
2347 | its callees and callers). | |
2348 | ||
2349 | REDIRECT_CALLERS varray includes the edges to be redirected | |
2350 | to the new version. | |
2351 | ||
2352 | TREE_MAP is a mapping of tree nodes we want to replace with | |
2353 | new ones (according to results of prior analysis). | |
2354 | OLD_VERSION_NODE is the node that is versioned. | |
2355 | ||
2356 | If non-NULL, ARGS_TO_SKIP determines which function parameters to remove | |
2357 | from the new version. | |
2358 | If SKIP_RETURN is true, the new version will return void. | |
2359 | If non-NULL, BBS_TO_COPY determines which basic blocks to copy. | |
2360 | If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone. | |
2361 | ||
2362 | Return the new version's cgraph node. */ | |
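/* A minimal illustrative call (assumed for exposition, not taken verbatim
   from any GCC caller), cloning a versionable NODE unchanged apart from
   its name:

       struct cgraph_node *clone
         = cgraph_function_versioning (node, NULL, NULL, NULL,
                                       false, NULL, NULL, "constprop");

   This copies the body, creates the new callgraph node and makes the new
   version local to the translation unit.  */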
2363 | ||
2364 | struct cgraph_node * | |
2365 | cgraph_function_versioning (struct cgraph_node *old_version_node, | |
2366 | VEC(cgraph_edge_p,heap) *redirect_callers, | |
2367 | VEC (ipa_replace_map_p,gc)* tree_map, | |
2368 | bitmap args_to_skip, | |
2369 | bool skip_return, | |
2370 | bitmap bbs_to_copy, | |
2371 | basic_block new_entry_block, | |
2372 | const char *clone_name) | |
2373 | { | |
2374 | tree old_decl = old_version_node->decl; | |
2375 | struct cgraph_node *new_version_node = NULL; | |
2376 | tree new_decl; | |
2377 | ||
2378 | if (!tree_versionable_function_p (old_decl)) | |
2379 | return NULL; | |
2380 | ||
2381 | gcc_assert (old_version_node->local.can_change_signature || !args_to_skip); | |
2382 | ||
2383 | /* Make a new FUNCTION_DECL tree node for the new version. */ | |
2384 | if (!args_to_skip && !skip_return) | |
2385 | new_decl = copy_node (old_decl); | |
2386 | else | |
2387 | new_decl | |
2388 | = build_function_decl_skip_args (old_decl, args_to_skip, skip_return); | |
2389 | ||
2390 | /* Generate a new name for the new version. */ | |
2391 | DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name); | |
2392 | SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl)); | |
2393 | SET_DECL_RTL (new_decl, NULL); | |
2394 | ||
2395 | /* When the old decl was a con-/destructor make sure the clone isn't. */ | |
2396 | DECL_STATIC_CONSTRUCTOR(new_decl) = 0; | |
2397 | DECL_STATIC_DESTRUCTOR(new_decl) = 0; | |
2398 | ||
2399 | /* Create the new version's call-graph node. | |
2400 | and update the edges of the new node. */ | |
2401 | new_version_node = | |
2402 | cgraph_copy_node_for_versioning (old_version_node, new_decl, | |
2403 | redirect_callers, bbs_to_copy); | |
2404 | ||
2405 | /* Copy the OLD_VERSION_NODE function tree to the new version. */ | |
2406 | tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip, | |
2407 | skip_return, bbs_to_copy, new_entry_block); | |
2408 | ||
2409 | /* Update the new version's properties. | |
2410 | Make the new version visible only within this translation unit.  Make sure | |
2411 | that it is not weak either. | |
2412 | ??? We cannot use COMDAT linkage because there is no | |
2413 | ABI support for this. */ | |
2414 | cgraph_make_decl_local (new_version_node->decl); | |
2415 | DECL_VIRTUAL_P (new_version_node->decl) = 0; | |
2416 | new_version_node->local.externally_visible = 0; | |
2417 | new_version_node->local.local = 1; | |
2418 | new_version_node->lowered = true; | |
2419 | ||
2420 | /* Update the call_expr on the edges to call the new version node. */ | |
2421 | update_call_expr (new_version_node); | |
2422 | ||
2423 | cgraph_call_function_insertion_hooks (new_version_node); | |
2424 | return new_version_node; | |
2425 | } | |
2426 | ||
2427 | /* Given virtual clone, turn it into actual clone. */ | |
2428 | static void | |
2429 | cgraph_materialize_clone (struct cgraph_node *node) | |
2430 | { | |
2431 | bitmap_obstack_initialize (NULL); | |
2432 | node->former_clone_of = node->clone_of->decl; | |
2433 | if (node->clone_of->former_clone_of) | |
2434 | node->former_clone_of = node->clone_of->former_clone_of; | |
2435 | /* Copy the OLD_VERSION_NODE function tree to the new version. */ | |
2436 | tree_function_versioning (node->clone_of->decl, node->decl, | |
2437 | node->clone.tree_map, true, | |
2438 | node->clone.args_to_skip, false, | |
2439 | NULL, NULL); | |
2440 | if (cgraph_dump_file) | |
2441 | { | |
2442 | dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags); | |
2443 | dump_function_to_file (node->decl, cgraph_dump_file, dump_flags); | |
2444 | } | |
2445 | ||
2446 | /* The function is no longer a clone. */ | |
2447 | if (node->next_sibling_clone) | |
2448 | node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone; | |
2449 | if (node->prev_sibling_clone) | |
2450 | node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone; | |
2451 | else | |
2452 | node->clone_of->clones = node->next_sibling_clone; | |
2453 | node->next_sibling_clone = NULL; | |
2454 | node->prev_sibling_clone = NULL; | |
2455 | if (!node->clone_of->analyzed && !node->clone_of->clones) | |
2456 | { | |
2457 | cgraph_release_function_body (node->clone_of); | |
2458 | cgraph_node_remove_callees (node->clone_of); | |
2459 | ipa_remove_all_references (&node->clone_of->ref_list); | |
2460 | } | |
2461 | node->clone_of = NULL; | |
2462 | bitmap_obstack_release (NULL); | |
2463 | } | |
2464 | ||
2465 | /* If necessary, change the function declaration in the call statement | |
2466 | associated with E so that it corresponds to the edge callee. */ | |
2467 | ||
2468 | gimple | |
2469 | cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e) | |
2470 | { | |
2471 | tree decl = gimple_call_fndecl (e->call_stmt); | |
2472 | gimple new_stmt; | |
2473 | gimple_stmt_iterator gsi; | |
2474 | #ifdef ENABLE_CHECKING | |
2475 | struct cgraph_node *node; | |
2476 | #endif | |
2477 | ||
2478 | if (e->indirect_unknown_callee | |
2479 | || decl == e->callee->decl) | |
2480 | return e->call_stmt; | |
2481 | ||
2482 | #ifdef ENABLE_CHECKING | |
2483 | if (decl) | |
2484 | { | |
2485 | node = cgraph_get_node (decl); | |
2486 | gcc_assert (!node || !node->clone.combined_args_to_skip); | |
2487 | } | |
2488 | #endif | |
2489 | ||
2490 | if (cgraph_dump_file) | |
2491 | { | |
2492 | fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ", | |
2493 | cgraph_node_name (e->caller), e->caller->uid, | |
2494 | cgraph_node_name (e->callee), e->callee->uid); | |
2495 | print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags); | |
2496 | if (e->callee->clone.combined_args_to_skip) | |
2497 | { | |
2498 | fprintf (cgraph_dump_file, " combined args to skip: "); | |
2499 | dump_bitmap (cgraph_dump_file, | |
2500 | e->callee->clone.combined_args_to_skip); | |
2501 | } | |
2502 | } | |
2503 | ||
2504 | if (e->callee->clone.combined_args_to_skip) | |
2505 | { | |
2506 | int lp_nr; | |
2507 | ||
2508 | new_stmt | |
2509 | = gimple_call_copy_skip_args (e->call_stmt, | |
2510 | e->callee->clone.combined_args_to_skip); | |
2511 | gimple_call_set_fndecl (new_stmt, e->callee->decl); | |
2512 | ||
2513 | if (gimple_vdef (new_stmt) | |
2514 | && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME) | |
2515 | SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt; | |
2516 | ||
2517 | gsi = gsi_for_stmt (e->call_stmt); | |
2518 | gsi_replace (&gsi, new_stmt, false); | |
2519 | /* We need to defer cleaning EH info on the new statement to | |
2520 | fixup-cfg. We may not have dominator information at this point | |
2521 | and thus would end up with unreachable blocks and have no way | |
2522 | to communicate that we need to run CFG cleanup then. */ | |
2523 | lp_nr = lookup_stmt_eh_lp (e->call_stmt); | |
2524 | if (lp_nr != 0) | |
2525 | { | |
2526 | remove_stmt_from_eh_lp (e->call_stmt); | |
2527 | add_stmt_to_eh_lp (new_stmt, lp_nr); | |
2528 | } | |
2529 | } | |
2530 | else | |
2531 | { | |
2532 | new_stmt = e->call_stmt; | |
2533 | gimple_call_set_fndecl (new_stmt, e->callee->decl); | |
2534 | update_stmt (new_stmt); | |
2535 | } | |
2536 | ||
2537 | cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt); | |
2538 | ||
2539 | if (cgraph_dump_file) | |
2540 | { | |
2541 | fprintf (cgraph_dump_file, " updated to:"); | |
2542 | print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags); | |
2543 | } | |
2544 | return new_stmt; | |
2545 | } | |
2546 | ||
2547 | /* Once all functions from the compilation unit are in memory, produce all clones | |
2548 | and update all calls. We might also do this on demand if we don't want to | |
2549 | bring all functions to memory prior to compilation, but the current WHOPR | |
2550 | implementation does that and it is a bit easier to keep everything right in | |
2551 | this order. */ | |
2552 | void | |
2553 | cgraph_materialize_all_clones (void) | |
2554 | { | |
2555 | struct cgraph_node *node; | |
2556 | bool stabilized = false; | |
2557 | ||
2558 | if (cgraph_dump_file) | |
2559 | fprintf (cgraph_dump_file, "Materializing clones\n"); | |
2560 | #ifdef ENABLE_CHECKING | |
2561 | verify_cgraph (); | |
2562 | #endif | |
2563 | ||
2564 | /* We could also use topological order, but the number of iterations should be | |
2565 | bounded by the number of IPA passes since a single IPA pass is probably not | |
2566 | going to create clones of clones it created itself. */ | |
2567 | while (!stabilized) | |
2568 | { | |
2569 | stabilized = true; | |
2570 | for (node = cgraph_nodes; node; node = node->next) | |
2571 | { | |
2572 | if (node->clone_of && node->decl != node->clone_of->decl | |
2573 | && !gimple_has_body_p (node->decl)) | |
2574 | { | |
2575 | if (gimple_has_body_p (node->clone_of->decl)) | |
2576 | { | |
2577 | if (cgraph_dump_file) | |
2578 | { | |
2579 | fprintf (cgraph_dump_file, "cloning %s to %s\n", | |
2580 | cgraph_node_name (node->clone_of), | |
2581 | cgraph_node_name (node)); | |
2582 | if (node->clone.tree_map) | |
2583 | { | |
2584 | unsigned int i; | |
2585 | fprintf (cgraph_dump_file, " replace map: "); | |
2586 | for (i = 0; i < VEC_length (ipa_replace_map_p, | |
2587 | node->clone.tree_map); | |
2588 | i++) | |
2589 | { | |
2590 | struct ipa_replace_map *replace_info; | |
2591 | replace_info = VEC_index (ipa_replace_map_p, | |
2592 | node->clone.tree_map, | |
2593 | i); | |
2594 | print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0); | |
2595 | fprintf (cgraph_dump_file, " -> "); | |
2596 | print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0); | |
2597 | fprintf (cgraph_dump_file, "%s%s;", | |
2598 | replace_info->replace_p ? "(replace)":"", | |
2599 | replace_info->ref_p ? "(ref)":""); | |
2600 | } | |
2601 | fprintf (cgraph_dump_file, "\n"); | |
2602 | } | |
2603 | if (node->clone.args_to_skip) | |
2604 | { | |
2605 | fprintf (cgraph_dump_file, " args_to_skip: "); | |
2606 | dump_bitmap (cgraph_dump_file, node->clone.args_to_skip); | |
2607 | } | |
2608 | if (node->clone.combined_args_to_skip) | |
2609 | { | |
2610 | fprintf (cgraph_dump_file, " combined_args_to_skip:"); | |
2611 | dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip); | |
2612 | } | |
2613 | } | |
2614 | cgraph_materialize_clone (node); | |
2615 | stabilized = false; | |
2616 | } | |
2617 | } | |
2618 | } | |
2619 | } | |
2620 | for (node = cgraph_nodes; node; node = node->next) | |
2621 | if (!node->analyzed && node->callees) | |
2622 | cgraph_node_remove_callees (node); | |
2623 | if (cgraph_dump_file) | |
2624 | fprintf (cgraph_dump_file, "Materialization Call site updates done.\n"); | |
2625 | #ifdef ENABLE_CHECKING | |
2626 | verify_cgraph (); | |
2627 | #endif | |
2628 | cgraph_remove_unreachable_nodes (false, cgraph_dump_file); | |
2629 | } | |
2630 | ||
2631 | #include "gt-cgraphunit.h" |