]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/cgraph.c
cgraph.c (dump_cgraph_node): Do not dump inline summaries.
[thirdparty/gcc.git] / gcc / cgraph.c
1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file contains basic routines manipulating call graph
23
24 The callgraph:
25
26 The call-graph is data structure designed for intra-procedural optimization
27 but it is also used in non-unit-at-a-time compilation to allow easier code
28 sharing.
29
30 The call-graph consist of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to the unique node.
32
33 The mapping from declarations to call-graph nodes is done using hash table
34 based on DECL_UID. The call-graph nodes are created lazily using
35 cgraph_node function when called for unknown declaration.
36
37 The callgraph at the moment does not represent all indirect calls or calls
38 from other compilation units. Flag NEEDED is set for each node that may be
39 accessed in such an invisible way and it shall be considered an entry point
40 to the callgraph.
41
42 On the other hand, the callgraph currently does contain some edges for
43 indirect calls with unknown callees which can be accessed through
44 indirect_calls field of a node. It should be noted however that at the
45 moment only calls which are potential candidates for indirect inlining are
46 added there.
47
48 Interprocedural information:
49
50 Callgraph is place to store data needed for interprocedural optimization.
51 All data structures are divided into three components: local_info that
52 is produced while analyzing the function, global_info that is result
53 of global walking of the callgraph on the end of compilation and
54 rtl_info used by RTL backend to propagate data from already compiled
55 functions to their callers.
56
57 Moreover, each node has a uid which can be used to keep information in
58 on-the-side arrays. UIDs are reused and therefore reasonably dense.
59
60 Inlining plans:
61
62 The function inlining information is decided in advance and maintained
63 in the callgraph as so called inline plan.
64 For each inlined call, the callee's node is cloned to represent the
65 new function copy produced by inliner.
66 Each inlined call gets a unique corresponding clone node of the callee
67 and the data structure is updated while inlining is performed, so
68 the clones are eliminated and their callee edges redirected to the
69 caller.
70
71 Each edge has "inline_failed" field. When the field is set to NULL,
72 the call will be inlined. When it is non-NULL it contains a reason
73 why inlining wasn't performed. */
74
75 #include "config.h"
76 #include "system.h"
77 #include "coretypes.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "tree-inline.h"
81 #include "langhooks.h"
82 #include "hashtab.h"
83 #include "toplev.h"
84 #include "flags.h"
85 #include "ggc.h"
86 #include "debug.h"
87 #include "target.h"
88 #include "basic-block.h"
89 #include "cgraph.h"
90 #include "output.h"
91 #include "intl.h"
92 #include "gimple.h"
93 #include "tree-dump.h"
94 #include "tree-flow.h"
95 #include "value-prof.h"
96 #include "except.h"
97 #include "diagnostic-core.h"
98 #include "rtl.h"
99 #include "ipa-utils.h"
100 #include "lto-streamer.h"
101
/* Human-readable counterparts of enum ld_plugin_symbol_resolution,
   indexed by the enumeration value; used when dumping resolution info.  */
const char * const ld_plugin_symbol_resolution_names[]=
{
  "",
  "undef",
  "prevailing_def",
  "prevailing_def_ironly",
  "preempted_reg",
  "preempted_ir",
  "resolved_ir",
  "resolved_exec",
  "resolved_dyn"
};

/* Forward declarations of edge/caller removal helpers defined later
   in this file.  */
static void cgraph_node_remove_callers (struct cgraph_node *node);
static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);

/* Hash table used to convert declarations into nodes; keyed by DECL_UID
   (see hash_node/eq_node).  Created lazily by cgraph_create_node.  */
static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
/* Hash table used to convert assembler names into nodes; built lazily
   on the first cgraph_node_for_asm query.  */
static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;

/* The linked list of cgraph nodes.  New nodes are pushed on the head.  */
struct cgraph_node *cgraph_nodes;

/* Queue of cgraph nodes scheduled to be lowered.  */
struct cgraph_node *cgraph_nodes_queue;

/* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
   secondary queue used during optimization to accommodate passes that
   may generate new functions that need to be optimized and expanded.  */
struct cgraph_node *cgraph_new_nodes;

/* Number of nodes in existence.  */
int cgraph_n_nodes;

/* Maximal uid used in cgraph nodes.  UIDs of freed nodes are recycled
   via the free_nodes list below.  */
int cgraph_max_uid;

/* Maximal uid used in cgraph edges.  */
int cgraph_edge_max_uid;

/* Maximal pid used for profiling.  */
int cgraph_max_pid;

/* Set when whole unit has been analyzed so we can access global info.  */
bool cgraph_global_info_ready = false;

/* What state callgraph is in right now.  */
enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;

/* Set when the cgraph is fully build and the basic flags are computed.  */
bool cgraph_function_flags_ready = false;

/* Linked list of cgraph asm nodes.  */
struct cgraph_asm_node *cgraph_asm_nodes;

/* Last node in cgraph_asm_nodes.  */
static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;

/* The order index of the next cgraph node to be created.  This is
   used so that we can sort the cgraph nodes in order by when we saw
   them, to support -fno-toplevel-reorder.  */
int cgraph_order;

/* List of hooks triggered on cgraph_edge events.  Hooks run in
   registration order; see the cgraph_add_*_hook functions below.  */
struct cgraph_edge_hook_list {
  cgraph_edge_hook hook;
  void *data;
  struct cgraph_edge_hook_list *next;
};

/* List of hooks triggered on cgraph_node events.  */
struct cgraph_node_hook_list {
  cgraph_node_hook hook;
  void *data;
  struct cgraph_node_hook_list *next;
};

/* List of hooks triggered on events involving two cgraph_edges.  */
struct cgraph_2edge_hook_list {
  cgraph_2edge_hook hook;
  void *data;
  struct cgraph_2edge_hook_list *next;
};

/* List of hooks triggered on events involving two cgraph_nodes.  */
struct cgraph_2node_hook_list {
  cgraph_2node_hook hook;
  void *data;
  struct cgraph_2node_hook_list *next;
};

/* List of hooks triggered when an edge is removed.  */
struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
/* List of hooks triggered when a node is removed.  */
struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
/* List of hooks triggered when an edge is duplicated.  */
struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
/* List of hooks triggered when a node is duplicated.  */
struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
/* List of hooks triggered when an function is inserted.  */
struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;

/* Head of a linked list of unused (freed) call graph nodes.
   Do not GTY((delete)) this list so UIDs gets reliably recycled.  */
static GTY(()) struct cgraph_node *free_nodes;
/* Head of a linked list of unused (freed) call graph edges.
   Do not GTY((delete)) this list so UIDs gets reliably recycled.  */
static GTY(()) struct cgraph_edge *free_edges;

/* Macros to access the next item in the list of free cgraph nodes and
   edges.  The "next" pointers of dead objects are reused as free-list
   links.  */
#define NEXT_FREE_NODE(NODE) (NODE)->next
#define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
217
218 /* Register HOOK to be called with DATA on each removed edge. */
219 struct cgraph_edge_hook_list *
220 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
221 {
222 struct cgraph_edge_hook_list *entry;
223 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
224
225 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
226 entry->hook = hook;
227 entry->data = data;
228 entry->next = NULL;
229 while (*ptr)
230 ptr = &(*ptr)->next;
231 *ptr = entry;
232 return entry;
233 }
234
235 /* Remove ENTRY from the list of hooks called on removing edges. */
236 void
237 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
238 {
239 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
240
241 while (*ptr != entry)
242 ptr = &(*ptr)->next;
243 *ptr = entry->next;
244 free (entry);
245 }
246
247 /* Call all edge removal hooks. */
248 static void
249 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
250 {
251 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
252 while (entry)
253 {
254 entry->hook (e, entry->data);
255 entry = entry->next;
256 }
257 }
258
259 /* Register HOOK to be called with DATA on each removed node. */
260 struct cgraph_node_hook_list *
261 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
262 {
263 struct cgraph_node_hook_list *entry;
264 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
265
266 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
267 entry->hook = hook;
268 entry->data = data;
269 entry->next = NULL;
270 while (*ptr)
271 ptr = &(*ptr)->next;
272 *ptr = entry;
273 return entry;
274 }
275
276 /* Remove ENTRY from the list of hooks called on removing nodes. */
277 void
278 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
279 {
280 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
281
282 while (*ptr != entry)
283 ptr = &(*ptr)->next;
284 *ptr = entry->next;
285 free (entry);
286 }
287
288 /* Call all node removal hooks. */
289 static void
290 cgraph_call_node_removal_hooks (struct cgraph_node *node)
291 {
292 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
293 while (entry)
294 {
295 entry->hook (node, entry->data);
296 entry = entry->next;
297 }
298 }
299
300 /* Register HOOK to be called with DATA on each inserted node. */
301 struct cgraph_node_hook_list *
302 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
303 {
304 struct cgraph_node_hook_list *entry;
305 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
306
307 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
308 entry->hook = hook;
309 entry->data = data;
310 entry->next = NULL;
311 while (*ptr)
312 ptr = &(*ptr)->next;
313 *ptr = entry;
314 return entry;
315 }
316
317 /* Remove ENTRY from the list of hooks called on inserted nodes. */
318 void
319 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
320 {
321 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
322
323 while (*ptr != entry)
324 ptr = &(*ptr)->next;
325 *ptr = entry->next;
326 free (entry);
327 }
328
329 /* Call all node insertion hooks. */
330 void
331 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
332 {
333 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
334 while (entry)
335 {
336 entry->hook (node, entry->data);
337 entry = entry->next;
338 }
339 }
340
341 /* Register HOOK to be called with DATA on each duplicated edge. */
342 struct cgraph_2edge_hook_list *
343 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
344 {
345 struct cgraph_2edge_hook_list *entry;
346 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
347
348 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
349 entry->hook = hook;
350 entry->data = data;
351 entry->next = NULL;
352 while (*ptr)
353 ptr = &(*ptr)->next;
354 *ptr = entry;
355 return entry;
356 }
357
358 /* Remove ENTRY from the list of hooks called on duplicating edges. */
359 void
360 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
361 {
362 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
363
364 while (*ptr != entry)
365 ptr = &(*ptr)->next;
366 *ptr = entry->next;
367 free (entry);
368 }
369
370 /* Call all edge duplication hooks. */
371 static void
372 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
373 struct cgraph_edge *cs2)
374 {
375 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
376 while (entry)
377 {
378 entry->hook (cs1, cs2, entry->data);
379 entry = entry->next;
380 }
381 }
382
383 /* Register HOOK to be called with DATA on each duplicated node. */
384 struct cgraph_2node_hook_list *
385 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
386 {
387 struct cgraph_2node_hook_list *entry;
388 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
389
390 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
391 entry->hook = hook;
392 entry->data = data;
393 entry->next = NULL;
394 while (*ptr)
395 ptr = &(*ptr)->next;
396 *ptr = entry;
397 return entry;
398 }
399
400 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
401 void
402 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
403 {
404 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
405
406 while (*ptr != entry)
407 ptr = &(*ptr)->next;
408 *ptr = entry->next;
409 free (entry);
410 }
411
412 /* Call all node duplication hooks. */
413 static void
414 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
415 struct cgraph_node *node2)
416 {
417 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
418 while (entry)
419 {
420 entry->hook (node1, node2, entry->data);
421 entry = entry->next;
422 }
423 }
424
425 /* Returns a hash code for P. */
426
427 static hashval_t
428 hash_node (const void *p)
429 {
430 const struct cgraph_node *n = (const struct cgraph_node *) p;
431 return (hashval_t) DECL_UID (n->decl);
432 }
433
434
435 /* Returns nonzero if P1 and P2 are equal. */
436
437 static int
438 eq_node (const void *p1, const void *p2)
439 {
440 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
441 const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
442 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
443 }
444
445 /* Allocate new callgraph node. */
446
447 static inline struct cgraph_node *
448 cgraph_allocate_node (void)
449 {
450 struct cgraph_node *node;
451
452 if (free_nodes)
453 {
454 node = free_nodes;
455 free_nodes = NEXT_FREE_NODE (node);
456 }
457 else
458 {
459 node = ggc_alloc_cleared_cgraph_node ();
460 node->uid = cgraph_max_uid++;
461 }
462
463 return node;
464 }
465
466 /* Allocate new callgraph node and insert it into basic data structures. */
467
468 static struct cgraph_node *
469 cgraph_create_node_1 (void)
470 {
471 struct cgraph_node *node = cgraph_allocate_node ();
472
473 node->next = cgraph_nodes;
474 node->pid = -1;
475 node->order = cgraph_order++;
476 if (cgraph_nodes)
477 cgraph_nodes->previous = node;
478 node->previous = NULL;
479 node->global.estimated_growth = INT_MIN;
480 node->frequency = NODE_FREQUENCY_NORMAL;
481 node->count_materialization_scale = REG_BR_PROB_BASE;
482 ipa_empty_ref_list (&node->ref_list);
483 cgraph_nodes = node;
484 cgraph_n_nodes++;
485 return node;
486 }
487
/* Return cgraph node assigned to DECL.  Create new one when needed.
   DECL must be a FUNCTION_DECL that does not yet have a node assigned;
   the new node is registered in CGRAPH_HASH and, if the assembler name
   hash has already been built, in ASSEMBLER_NAME_HASH too.  */

struct cgraph_node *
cgraph_create_node (tree decl)
{
  struct cgraph_node key, *node, **slot;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);

  /* The decl -> node mapping table is created lazily.  */
  if (!cgraph_hash)
    cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);

  key.decl = decl;
  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
  /* Creating the same node twice is a caller bug.  */
  gcc_assert (!*slot);

  node = cgraph_create_node_1 ();
  node->decl = decl;
  *slot = node;
  /* A nested function is chained into the nested list of its containing
     function's node (creating that node if necessary).  */
  if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
    {
      node->origin = cgraph_get_create_node (DECL_CONTEXT (decl));
      node->next_nested = node->origin->nested;
      node->origin->nested = node;
    }
  if (assembler_name_hash)
    {
      void **aslot;
      tree name = DECL_ASSEMBLER_NAME (decl);

      aslot = htab_find_slot_with_hash (assembler_name_hash, name,
					decl_assembler_name_hash (name),
					INSERT);
      /* We can have multiple declarations with same assembler name. For C++
	 it is __builtin_strlen and strlen, for instance.  Do we need to
	 record them all?  Original implementation marked just first one
	 so lets hope for the best.  */
      if (*aslot == NULL)
	*aslot = node;
    }
  return node;
}
530
531 /* Try to find a call graph node for declaration DECL and if it does not exist,
532 create it. */
533
534 struct cgraph_node *
535 cgraph_get_create_node (tree decl)
536 {
537 struct cgraph_node *node;
538
539 node = cgraph_get_node (decl);
540 if (node)
541 return node;
542
543 return cgraph_create_node (decl);
544 }
545
/* Mark ALIAS as an alias to DECL.  DECL_NODE is cgraph node representing
   the function body is associated with (not neccesarily cgraph_node (DECL).
   Returns the new alias node, or NULL if a node for ALIAS already
   exists.  */

static struct cgraph_node *
cgraph_same_body_alias_1 (struct cgraph_node *decl_node, tree alias, tree decl)
{
  struct cgraph_node key, *alias_node, **slot;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);

  key.decl = alias;

  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);

  /* If the cgraph_node has been already created, fail.  */
  if (*slot)
    return NULL;

  alias_node = cgraph_allocate_node ();
  alias_node->decl = alias;
  alias_node->same_body_alias = 1;
  alias_node->same_body = decl_node;
  /* Push the alias on the head of DECL_NODE's same_body list; the
     next/previous pointers are reused as the same-body chain here.  */
  alias_node->previous = NULL;
  if (decl_node->same_body)
    decl_node->same_body->previous = alias_node;
  alias_node->next = decl_node->same_body;
  alias_node->thunk.alias = decl;
  decl_node->same_body = alias_node;
  *slot = alias_node;
  return alias_node;
}
578
/* Attempt to mark ALIAS as an alias to DECL.  Return alias node if successful
   and NULL otherwise.
   Same body aliases are output whenever the body of DECL is output,
   and cgraph_get_node (ALIAS) transparently returns cgraph_get_node (DECL).  */

struct cgraph_node *
cgraph_same_body_alias (struct cgraph_node *decl_node, tree alias, tree decl)
{
#ifndef ASM_OUTPUT_DEF
  /* If aliases aren't supported by the assembler, fail.  Note that the
     rest of the function is compiled out in that configuration.  */
  return NULL;
#endif

  /*gcc_assert (!assembler_name_hash);*/

  return cgraph_same_body_alias_1 (decl_node, alias, decl);
}
596
/* Add thunk alias into callgraph.  The alias declaration is ALIAS and it
   aliases DECL with an adjustments made into the first parameter.
   See comments in thunk_adjust for detail on the parameters.  */

struct cgraph_node *
cgraph_add_thunk (struct cgraph_node *decl_node, tree alias, tree decl,
		  bool this_adjusting,
		  HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
		  tree virtual_offset,
		  tree real_alias)
{
  struct cgraph_node *node = cgraph_get_node (alias);

  if (node)
    {
      /* Replace a pre-existing finalized node for ALIAS by the thunk.  */
      gcc_assert (node->local.finalized);
      gcc_assert (!node->same_body);
      cgraph_remove_node (node);
    }

  node = cgraph_same_body_alias_1 (decl_node, alias, decl);
  gcc_assert (node);
  /* VIRTUAL_OFFSET, when given, must encode the same value as
     VIRTUAL_VALUE.  */
  gcc_checking_assert (!virtual_offset
		       || tree_int_cst_equal (virtual_offset,
					      size_int (virtual_value)));
  /* Record the thunk adjustment parameters; see thunk_adjust.  */
  node->thunk.fixed_offset = fixed_offset;
  node->thunk.this_adjusting = this_adjusting;
  node->thunk.virtual_value = virtual_value;
  node->thunk.virtual_offset_p = virtual_offset != NULL;
  node->thunk.alias = real_alias;
  node->thunk.thunk_p = true;
  return node;
}
630
631 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
632 is assigned. */
633
634 struct cgraph_node *
635 cgraph_get_node_or_alias (const_tree decl)
636 {
637 struct cgraph_node key, *node = NULL, **slot;
638
639 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
640
641 if (!cgraph_hash)
642 return NULL;
643
644 key.decl = CONST_CAST2 (tree, const_tree, decl);
645
646 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
647 NO_INSERT);
648
649 if (slot && *slot)
650 node = *slot;
651 return node;
652 }
653
654 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
655 is assigned. */
656
657 struct cgraph_node *
658 cgraph_get_node (const_tree decl)
659 {
660 struct cgraph_node key, *node = NULL, **slot;
661
662 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
663
664 if (!cgraph_hash)
665 return NULL;
666
667 key.decl = CONST_CAST2 (tree, const_tree, decl);
668
669 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
670 NO_INSERT);
671
672 if (slot && *slot)
673 {
674 node = *slot;
675 if (node->same_body_alias)
676 node = node->same_body;
677 }
678 return node;
679 }
680
681 /* Insert already constructed node into hashtable. */
682
683 void
684 cgraph_insert_node_to_hashtable (struct cgraph_node *node)
685 {
686 struct cgraph_node **slot;
687
688 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
689
690 gcc_assert (!*slot);
691 *slot = node;
692 }
693
694 /* Returns a hash code for P. */
695
696 static hashval_t
697 hash_node_by_assembler_name (const void *p)
698 {
699 const struct cgraph_node *n = (const struct cgraph_node *) p;
700 return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
701 }
702
703 /* Returns nonzero if P1 and P2 are equal. */
704
705 static int
706 eq_assembler_name (const void *p1, const void *p2)
707 {
708 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
709 const_tree name = (const_tree)p2;
710 return (decl_assembler_name_equal (n1->decl, name));
711 }
712
/* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
   Return NULL if there's no such node.  The assembler-name hash table
   is built lazily on the first query, covering all non-inlined nodes
   and their same-body aliases.  */

struct cgraph_node *
cgraph_node_for_asm (tree asmname)
{
  struct cgraph_node *node;
  void **slot;

  if (!assembler_name_hash)
    {
      assembler_name_hash =
	htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
			 NULL);
      /* Populate the table from every node that represents an actual
	 function (inline clones are skipped).  */
      for (node = cgraph_nodes; node; node = node->next)
        if (!node->global.inlined_to)
	  {
	    tree name = DECL_ASSEMBLER_NAME (node->decl);
	    slot = htab_find_slot_with_hash (assembler_name_hash, name,
					     decl_assembler_name_hash (name),
					     INSERT);
	    /* We can have multiple declarations with same assembler name. For C++
	       it is __builtin_strlen and strlen, for instance.  Do we need to
	       record them all?  Original implementation marked just first one
	       so lets hope for the best.  */
	    if (!*slot)
	      *slot = node;
	    /* Same-body aliases are entered under their own assembler
	       names as well.  */
	    if (node->same_body)
	      {
		struct cgraph_node *alias;

		for (alias = node->same_body; alias; alias = alias->next)
		  {
		    hashval_t hash;
		    name = DECL_ASSEMBLER_NAME (alias->decl);
		    hash = decl_assembler_name_hash (name);
		    slot = htab_find_slot_with_hash (assembler_name_hash, name,
						     hash, INSERT);
		    if (!*slot)
		      *slot = alias;
		  }
	      }
	  }
    }

  slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
				   decl_assembler_name_hash (asmname),
				   NO_INSERT);

  if (slot)
    {
      node = (struct cgraph_node *) *slot;
      /* Resolve a same-body alias to the function it aliases.  */
      if (node->same_body_alias)
	node = node->same_body;
      return node;
    }
  return NULL;
}
771
/* Returns a hash value for X, a cgraph_edge, hashed by the pointer
   identity of its call statement.  */

static hashval_t
edge_hash (const void *x)
{
  return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
}
779
/* Return nonzero if the cgraph_edge X represents the call statement Y.  */

static int
edge_eq (const void *x, const void *y)
{
  return ((const struct cgraph_edge *) x)->call_stmt == y;
}
787
788 /* Add call graph edge E to call site hash of its caller. */
789
790 static inline void
791 cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
792 {
793 void **slot;
794 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
795 e->call_stmt,
796 htab_hash_pointer (e->call_stmt),
797 INSERT);
798 gcc_assert (!*slot);
799 *slot = e;
800 }
801
/* Return the callgraph edge representing the GIMPLE_CALL statement
   CALL_STMT, or NULL if NODE has no such edge.  Nodes with many edges
   get a call-site hash built on demand to speed up later lookups.  */

struct cgraph_edge *
cgraph_edge (struct cgraph_node *node, gimple call_stmt)
{
  struct cgraph_edge *e, *e2;
  int n = 0;

  /* Fast path: use the hash when it exists.  */
  if (node->call_site_hash)
    return (struct cgraph_edge *)
      htab_find_with_hash (node->call_site_hash, call_stmt,
	  		   htab_hash_pointer (call_stmt));

  /* This loop may turn out to be performance problem.  In such case adding
     hashtables into call nodes with very many edges is probably best
     solution.  It is not good idea to add pointer into CALL_EXPR itself
     because we want to make possible having multiple cgraph nodes representing
     different clones of the same body before the body is actually cloned.  */
  for (e = node->callees; e; e = e->next_callee)
    {
      if (e->call_stmt == call_stmt)
	break;
      n++;
    }

  /* Indirect calls are kept on a separate list; search it too.  */
  if (!e)
    for (e = node->indirect_calls; e; e = e->next_callee)
      {
	if (e->call_stmt == call_stmt)
	  break;
	n++;
      }

  /* Once the linear scan got long, build the call-site hash so future
     lookups are O(1).  */
  if (n > 100)
    {
      node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
      for (e2 = node->callees; e2; e2 = e2->next_callee)
	cgraph_add_edge_to_call_site_hash (e2);
      for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
	cgraph_add_edge_to_call_site_hash (e2);
    }

  return e;
}
847
848
/* Change field call_stmt of edge E to NEW_STMT.  Keeps the caller's
   call-site hash in sync, recomputes can_throw_external, and turns an
   indirect edge into a direct one when NEW_STMT has a known callee.  */

void
cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
{
  tree decl;

  /* Drop the old statement from the call-site hash before rewriting.  */
  if (e->caller->call_site_hash)
    {
      htab_remove_elt_with_hash (e->caller->call_site_hash,
				 e->call_stmt,
				 htab_hash_pointer (e->call_stmt));
    }

  e->call_stmt = new_stmt;
  if (e->indirect_unknown_callee
      && (decl = gimple_call_fndecl (new_stmt)))
    {
      /* Constant propagation (and possibly also inlining?) can turn an
	 indirect call into a direct one.  */
      struct cgraph_node *new_callee = cgraph_get_node (decl);

      gcc_checking_assert (new_callee);
      cgraph_make_edge_direct (e, new_callee, 0);
    }

  /* stmt_can_throw_external needs the caller's function context.  */
  push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
  e->can_throw_external = stmt_can_throw_external (new_stmt);
  pop_cfun ();
  /* Re-enter the edge under the new statement.  */
  if (e->caller->call_site_hash)
    cgraph_add_edge_to_call_site_hash (e);
}
881
/* Like cgraph_set_call_stmt but walk the clone tree and update all
   clones sharing the same function body.  The walk visits ORIG's
   clones depth-first without recursion, using next_sibling_clone and
   clone_of links to traverse and backtrack.  */

void
cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
				       gimple old_stmt, gimple new_stmt)
{
  struct cgraph_node *node;
  struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);

  if (edge)
    cgraph_set_call_stmt (edge, new_stmt);

  node = orig->clones;
  if (node)
    while (node != orig)
      {
	struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
	if (edge)
	  cgraph_set_call_stmt (edge, new_stmt);
	/* Descend into nested clones first, then move to siblings,
	   backtracking through clone_of until we return to ORIG.  */
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
915
/* Like cgraph_create_edge walk the clone tree and update all clones sharing
   same function body.  If clones already have edge for OLD_STMT; only
   update the edge same way as cgraph_set_call_stmt_including_clones does.

   TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
   frequencies of the clones.  */

void
cgraph_create_edge_including_clones (struct cgraph_node *orig,
				     struct cgraph_node *callee,
				     gimple old_stmt,
				     gimple stmt, gcov_type count,
				     int freq, int loop_depth,
				     cgraph_inline_failed_t reason)
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;

  /* Create the edge on the master copy unless it is already there.  */
  if (!cgraph_edge (orig, stmt))
    {
      edge = cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth);
      edge->inline_failed = reason;
    }

  node = orig->clones;
  if (node)
    while (node != orig)
      {
	struct cgraph_edge *edge = cgraph_edge (node, old_stmt);

	/* It is possible that clones already contain the edge while
	   master didn't.  Either we promoted indirect call into direct
	   call in the clone or we are processing clones of unreachable
	   master where edges has been removed.  */
	if (edge)
	  cgraph_set_call_stmt (edge, stmt);
	else if (!cgraph_edge (node, stmt))
	  {
	    edge = cgraph_create_edge (node, callee, stmt, count,
				       freq, loop_depth);
	    edge->inline_failed = reason;
	  }

	/* Depth-first clone-tree walk: children, then siblings, then
	   backtrack via clone_of.  */
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
972
/* Give initial reasons why inlining would fail on EDGE.  This gets either
   nullified or usually overwritten by more precise reasons later.
   The tests are ordered from hard failures to the generic default;
   the first matching reason wins.  */

static void
initialize_inline_failed (struct cgraph_edge *e)
{
  struct cgraph_node *callee = e->callee;

  if (e->indirect_unknown_callee)
    e->inline_failed = CIF_INDIRECT_UNKNOWN_CALL;
  else if (!callee->analyzed)
    e->inline_failed = CIF_BODY_NOT_AVAILABLE;
  else if (callee->local.redefined_extern_inline)
    e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
  else if (!callee->local.inlinable)
    e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
  else if (e->call_stmt && gimple_call_cannot_inline_p (e->call_stmt))
    e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
  else
    /* Default: inlinable but not yet considered by the inliner.  */
    e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
}
994
/* Allocate a cgraph_edge structure and fill it with data according to the
   parameters of which only CALLEE can be NULL (when creating an indirect call
   edge).  Freed edges are reused from the free list so that edge UIDs
   get recycled.  The edge is NOT linked into the caller/callee lists
   here; callers do that.  */

static struct cgraph_edge *
cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
		       gimple call_stmt, gcov_type count, int freq, int nest)
{
  struct cgraph_edge *edge;

  /* LTO does not actually have access to the call_stmt since these
     have not been loaded yet.  */
  if (call_stmt)
    {
      /* This is a rather expensive check possibly triggering
	 construction of call stmt hashtable.  */
      gcc_checking_assert (!cgraph_edge (caller, call_stmt));

      gcc_assert (is_gimple_call (call_stmt));
    }

  if (free_edges)
    {
      edge = free_edges;
      free_edges = NEXT_FREE_EDGE (edge);
    }
  else
    {
      edge = ggc_alloc_cgraph_edge ();
      edge->uid = cgraph_edge_max_uid++;
    }

  /* Reused edges are not cleared, so reset every field explicitly.  */
  edge->aux = NULL;
  edge->caller = caller;
  edge->callee = callee;
  edge->prev_caller = NULL;
  edge->next_caller = NULL;
  edge->prev_callee = NULL;
  edge->next_callee = NULL;

  edge->count = count;
  gcc_assert (count >= 0);
  edge->frequency = freq;
  gcc_assert (freq >= 0);
  gcc_assert (freq <= CGRAPH_FREQ_MAX);
  edge->loop_nest = nest;

  edge->call_stmt = call_stmt;
  edge->call_stmt_size = 0;
  edge->call_stmt_time = 0;
  /* stmt_can_throw_external needs the caller's function context.  */
  push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
  edge->can_throw_external
    = call_stmt ? stmt_can_throw_external (call_stmt) : false;
  pop_cfun ();
  edge->call_stmt_cannot_inline_p =
    (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
  if (call_stmt && caller->call_site_hash)
    cgraph_add_edge_to_call_site_hash (edge);

  edge->indirect_info = NULL;
  edge->indirect_inlining_edge = 0;

  return edge;
}
1059
1060 /* Create edge from CALLER to CALLEE in the cgraph. */
1061
1062 struct cgraph_edge *
1063 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
1064 gimple call_stmt, gcov_type count, int freq, int nest)
1065 {
1066 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
1067 count, freq, nest);
1068
1069 edge->indirect_unknown_callee = 0;
1070 initialize_inline_failed (edge);
1071
1072 edge->next_caller = callee->callers;
1073 if (callee->callers)
1074 callee->callers->prev_caller = edge;
1075 edge->next_callee = caller->callees;
1076 if (caller->callees)
1077 caller->callees->prev_callee = edge;
1078 caller->callees = edge;
1079 callee->callers = edge;
1080
1081 return edge;
1082 }
1083
1084 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
1085
1086 struct cgraph_indirect_call_info *
1087 cgraph_allocate_init_indirect_info (void)
1088 {
1089 struct cgraph_indirect_call_info *ii;
1090
1091 ii = ggc_alloc_cleared_cgraph_indirect_call_info ();
1092 ii->param_index = -1;
1093 return ii;
1094 }
1095
1096 /* Create an indirect edge with a yet-undetermined callee where the call
1097 statement destination is a formal parameter of the caller with index
1098 PARAM_INDEX. */
1099
1100 struct cgraph_edge *
1101 cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
1102 int ecf_flags,
1103 gcov_type count, int freq, int nest)
1104 {
1105 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
1106 count, freq, nest);
1107
1108 edge->indirect_unknown_callee = 1;
1109 initialize_inline_failed (edge);
1110
1111 edge->indirect_info = cgraph_allocate_init_indirect_info ();
1112 edge->indirect_info->ecf_flags = ecf_flags;
1113
1114 edge->next_callee = caller->indirect_calls;
1115 if (caller->indirect_calls)
1116 caller->indirect_calls->prev_callee = edge;
1117 caller->indirect_calls = edge;
1118
1119 return edge;
1120 }
1121
1122 /* Remove the edge E from the list of the callers of the callee. */
1123
1124 static inline void
1125 cgraph_edge_remove_callee (struct cgraph_edge *e)
1126 {
1127 gcc_assert (!e->indirect_unknown_callee);
1128 if (e->prev_caller)
1129 e->prev_caller->next_caller = e->next_caller;
1130 if (e->next_caller)
1131 e->next_caller->prev_caller = e->prev_caller;
1132 if (!e->prev_caller)
1133 e->callee->callers = e->next_caller;
1134 }
1135
1136 /* Remove the edge E from the list of the callees of the caller. */
1137
1138 static inline void
1139 cgraph_edge_remove_caller (struct cgraph_edge *e)
1140 {
1141 if (e->prev_callee)
1142 e->prev_callee->next_callee = e->next_callee;
1143 if (e->next_callee)
1144 e->next_callee->prev_callee = e->prev_callee;
1145 if (!e->prev_callee)
1146 {
1147 if (e->indirect_unknown_callee)
1148 e->caller->indirect_calls = e->next_callee;
1149 else
1150 e->caller->callees = e->next_callee;
1151 }
1152 if (e->caller->call_site_hash)
1153 htab_remove_elt_with_hash (e->caller->call_site_hash,
1154 e->call_stmt,
1155 htab_hash_pointer (e->call_stmt));
1156 }
1157
1158 /* Put the edge onto the free list. */
1159
1160 static void
1161 cgraph_free_edge (struct cgraph_edge *e)
1162 {
1163 int uid = e->uid;
1164
1165 /* Clear out the edge so we do not dangle pointers. */
1166 memset (e, 0, sizeof (*e));
1167 e->uid = uid;
1168 NEXT_FREE_EDGE (e) = free_edges;
1169 free_edges = e;
1170 }
1171
1172 /* Remove the edge E in the cgraph. */
1173
1174 void
1175 cgraph_remove_edge (struct cgraph_edge *e)
1176 {
1177 /* Call all edge removal hooks. */
1178 cgraph_call_edge_removal_hooks (e);
1179
1180 if (!e->indirect_unknown_callee)
1181 /* Remove from callers list of the callee. */
1182 cgraph_edge_remove_callee (e);
1183
1184 /* Remove from callees list of the callers. */
1185 cgraph_edge_remove_caller (e);
1186
1187 /* Put the edge onto the free list. */
1188 cgraph_free_edge (e);
1189 }
1190
1191 /* Set callee of call graph edge E and add it to the corresponding set of
1192 callers. */
1193
1194 static void
1195 cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1196 {
1197 e->prev_caller = NULL;
1198 if (n->callers)
1199 n->callers->prev_caller = e;
1200 e->next_caller = n->callers;
1201 n->callers = e;
1202 e->callee = n;
1203 }
1204
/* Redirect the callee of E to N.  Only the callgraph is updated; the
   underlying call statement is left untouched.  */

void
cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
{
  /* Detach E from its current callee's caller list, then attach it to N.  */
  cgraph_edge_remove_callee (e);
  cgraph_set_edge_callee (e, n);
}
1217
/* Make an indirect EDGE with an unknown callee an ordinary edge leading to
   CALLEE.  DELTA is an integer constant that is to be added to the this
   pointer (first parameter) to compensate for skipping a thunk adjustment.  */

void
cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee,
			 HOST_WIDE_INT delta)
{
  edge->indirect_unknown_callee = 0;
  /* Keep the indirect_info around; record the thunk adjustment there.  */
  edge->indirect_info->thunk_delta = delta;

  /* Get the edge out of the indirect edge list.  */
  if (edge->prev_callee)
    edge->prev_callee->next_callee = edge->next_callee;
  if (edge->next_callee)
    edge->next_callee->prev_callee = edge->prev_callee;
  if (!edge->prev_callee)
    /* EDGE was the head of the caller's indirect-call list.  */
    edge->caller->indirect_calls = edge->next_callee;

  /* Put it into the normal callee list.  */
  edge->prev_callee = NULL;
  edge->next_callee = edge->caller->callees;
  if (edge->caller->callees)
    edge->caller->callees->prev_callee = edge;
  edge->caller->callees = edge;

  /* Insert to callers list of the new callee.  */
  cgraph_set_edge_callee (edge, callee);

  /* We need to re-determine the inlining status of the edge.  */
  initialize_inline_failed (edge);
}
1250
1251
/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
   OLD_STMT changed into NEW_STMT.  OLD_CALL is gimple_call_fndecl
   of OLD_STMT if it was previously call statement.  Works on a single
   NODE; the caller iterates over the node and all its clones.  */

static void
cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
					gimple old_stmt, tree old_call, gimple new_stmt)
{
  /* NEW_STMT may no longer be a call at all (e.g. folded to an
     assignment), in which case NEW_CALL is 0.  */
  tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fndecl (new_stmt) : 0;

  /* We are seeing indirect calls, then there is nothing to update.  */
  if (!new_call && !old_call)
    return;
  /* See if we turned indirect call into direct call or folded call to one builtin
     into different builtin.  */
  if (old_call != new_call)
    {
      struct cgraph_edge *e = cgraph_edge (node, old_stmt);
      struct cgraph_edge *ne = NULL;
      gcov_type count;
      int frequency;
      int loop_nest;

      if (e)
	{
	  /* See if the edge is already there and has the correct callee.  It
	     might be so because of indirect inlining has already updated
	     it.  We also might've cloned and redirected the edge.  */
	  if (new_call && e->callee)
	    {
	      /* Walk up the clone_of chain: the edge may point at a clone
		 whose (former) original is NEW_CALL.  */
	      struct cgraph_node *callee = e->callee;
	      while (callee)
		{
		  if (callee->decl == new_call
		      || callee->former_clone_of == new_call)
		    return;
		  callee = callee->clone_of;
		}
	    }

	  /* Otherwise remove edge and create new one; we can't simply redirect
	     since function has changed, so inline plan and other information
	     attached to edge is invalid.  */
	  count = e->count;
	  frequency = e->frequency;
	  loop_nest = e->loop_nest;
	  cgraph_remove_edge (e);
	}
      else
	{
	  /* We are seeing new direct call; compute profile info based on BB.  */
	  basic_block bb = gimple_bb (new_stmt);
	  count = bb->count;
	  frequency = compute_call_stmt_bb_frequency (current_function_decl,
						      bb);
	  loop_nest = bb->loop_depth;
	}

      if (new_call)
	{
	  ne = cgraph_create_edge (node, cgraph_get_create_node (new_call),
				   new_stmt, count, frequency,
				   loop_nest);
	  gcc_assert (ne->inline_failed);
	}
    }
  /* We only updated the call stmt; update pointer in cgraph edge..  */
  else if (old_stmt != new_stmt)
    cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
}
1322
/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
   OLD_STMT changed into NEW_STMT.  OLD_DECL is gimple_call_fndecl
   of OLD_STMT before it was updated (updating can happen inplace).
   The update is applied to the node of the current function and to
   every clone in its clone tree.  */

void
cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
{
  struct cgraph_node *orig = cgraph_get_node (cfun->decl);
  struct cgraph_node *node;

  gcc_checking_assert (orig);
  cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
  if (orig->clones)
    /* Pre-order walk of the clone tree rooted at ORIG, without recursion:
       descend into clones first, then move to siblings, and climb back up
       through clone_of when a subtree is exhausted.  */
    for (node = orig->clones; node != orig;)
      {
	cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
1352
1353
/* Remove all callees from the node.  Both direct callees and indirect
   calls are removed; the call-site hash, if any, is discarded since it
   only maps statements to outgoing edges.  */

void
cgraph_node_remove_callees (struct cgraph_node *node)
{
  struct cgraph_edge *e, *f;

  /* It is sufficient to remove the edges from the lists of callers of
     the callees.  The callee list of the node can be zapped with one
     assignment.  */
  for (e = node->callees; e; e = f)
    {
      /* Save the successor before freeing E.  */
      f = e->next_callee;
      cgraph_call_edge_removal_hooks (e);
      if (!e->indirect_unknown_callee)
	cgraph_edge_remove_callee (e);
      cgraph_free_edge (e);
    }
  for (e = node->indirect_calls; e; e = f)
    {
      f = e->next_callee;
      cgraph_call_edge_removal_hooks (e);
      if (!e->indirect_unknown_callee)
	cgraph_edge_remove_callee (e);
      cgraph_free_edge (e);
    }
  node->indirect_calls = NULL;
  node->callees = NULL;
  if (node->call_site_hash)
    {
      htab_delete (node->call_site_hash);
      node->call_site_hash = NULL;
    }
}
1388
1389 /* Remove all callers from the node. */
1390
1391 static void
1392 cgraph_node_remove_callers (struct cgraph_node *node)
1393 {
1394 struct cgraph_edge *e, *f;
1395
1396 /* It is sufficient to remove the edges from the lists of callees of
1397 the callers. The caller list of the node can be zapped with one
1398 assignment. */
1399 for (e = node->callers; e; e = f)
1400 {
1401 f = e->next_caller;
1402 cgraph_call_edge_removal_hooks (e);
1403 cgraph_edge_remove_caller (e);
1404 cgraph_free_edge (e);
1405 }
1406 node->callers = NULL;
1407 }
1408
/* Release memory used to represent body of function NODE: the SSA/CFG
   data, value histograms, the gimple body and the struct function itself.
   DECL_INITIAL is replaced by error_mark_node unless the node is an
   abstract origin still needed for debug info.  */

void
cgraph_release_function_body (struct cgraph_node *node)
{
  if (DECL_STRUCT_FUNCTION (node->decl))
    {
      tree old_decl = current_function_decl;
      /* The tear-down routines below operate on cfun, so temporarily make
	 NODE's function the current one.  */
      push_cfun (DECL_STRUCT_FUNCTION (node->decl));
      if (cfun->gimple_df)
	{
	  current_function_decl = node->decl;
	  delete_tree_ssa ();
	  delete_tree_cfg_annotations ();
	  cfun->eh = NULL;
	  current_function_decl = old_decl;
	}
      if (cfun->cfg)
	{
	  /* Dominator info must already have been freed.  */
	  gcc_assert (dom_computed[0] == DOM_NONE);
	  gcc_assert (dom_computed[1] == DOM_NONE);
	  clear_edges ();
	}
      if (cfun->value_histograms)
	free_histograms ();
      gcc_assert (!current_loops);
      pop_cfun();
      gimple_set_body (node->decl, NULL);
      VEC_free (ipa_opt_pass, heap,
		node->ipa_transforms_to_apply);
      /* Struct function hangs a lot of data that would leak if we didn't
	 removed all pointers to it.  */
      ggc_free (DECL_STRUCT_FUNCTION (node->decl));
      DECL_STRUCT_FUNCTION (node->decl) = NULL;
    }
  DECL_SAVED_TREE (node->decl) = NULL;
  /* If the node is abstract and needed, then do not clear DECL_INITIAL
     of its associated function declaration because it's
     needed to emit debug info later.  */
  if (!node->abstract_and_needed)
    DECL_INITIAL (node->decl) = error_mark_node;
}
1451
/* Remove same body alias node NODE: unlink it from the same_body chain of
   its parent, drop it from the cgraph and assembler-name hash tables, and
   recycle the node structure (preserving its uid).  */

void
cgraph_remove_same_body_alias (struct cgraph_node *node)
{
  void **slot;
  int uid = node->uid;

  gcc_assert (node->same_body_alias);
  /* Unlink from the doubly linked same_body list; the head lives in the
     parent node's same_body field.  */
  if (node->previous)
    node->previous->next = node->next;
  else
    node->same_body->same_body = node->next;
  if (node->next)
    node->next->previous = node->previous;
  node->next = NULL;
  node->previous = NULL;
  /* The hash slot may hold a different node for the same decl; only clear
     it when it really points at NODE.  */
  slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
  if (*slot == node)
    htab_clear_slot (cgraph_hash, slot);
  if (assembler_name_hash)
    {
      tree name = DECL_ASSEMBLER_NAME (node->decl);
      slot = htab_find_slot_with_hash (assembler_name_hash, name,
				       decl_assembler_name_hash (name),
				       NO_INSERT);
      if (slot && *slot == node)
	htab_clear_slot (assembler_name_hash, slot);
    }

  /* Clear out the node to NULL all pointers and add the node to the free
     list.  */
  memset (node, 0, sizeof(*node));
  node->uid = uid;
  NEXT_FREE_NODE (node) = free_nodes;
  free_nodes = node;
}
1489
/* Remove the node from cgraph.  This unlinks NODE from every data
   structure it participates in: edges, references, the nesting tree,
   the global node list, the decl hash, the clone tree, same-body aliases
   and comdat groups; finally the function body may be released and the
   node structure recycled.  */

void
cgraph_remove_node (struct cgraph_node *node)
{
  void **slot;
  bool kill_body = false;
  struct cgraph_node *n;
  int uid = node->uid;

  cgraph_call_node_removal_hooks (node);
  cgraph_node_remove_callers (node);
  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);
  ipa_remove_all_refering (&node->ref_list);
  VEC_free (ipa_opt_pass, heap,
	    node->ipa_transforms_to_apply);

  /* Incremental inlining access removed nodes stored in the postorder list.
     */
  node->needed = node->reachable = false;
  /* Detach NODE from the nesting tree: orphan its nested functions and
     remove it from its origin's nested list.  */
  for (n = node->nested; n; n = n->next_nested)
    n->origin = NULL;
  node->nested = NULL;
  if (node->origin)
    {
      struct cgraph_node **node2 = &node->origin->nested;

      while (*node2 != node)
	node2 = &(*node2)->next_nested;
      *node2 = node->next_nested;
    }
  /* Unlink from the global doubly linked list of cgraph nodes.  */
  if (node->previous)
    node->previous->next = node->next;
  else
    cgraph_nodes = node->next;
  if (node->next)
    node->next->previous = node->previous;
  node->next = NULL;
  node->previous = NULL;
  slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
  if (*slot == node)
    {
      struct cgraph_node *next_inline_clone;

      /* Look for an inline clone sharing NODE's decl; it can take over
	 NODE's hash slot.  */
      for (next_inline_clone = node->clones;
	   next_inline_clone && next_inline_clone->decl != node->decl;
	   next_inline_clone = next_inline_clone->next_sibling_clone)
	;

      /* If there is inline clone of the node being removed, we need
	 to put it into the position of removed node and reorganize all
	 other clones to be based on it.  */
      if (next_inline_clone)
	{
	  struct cgraph_node *n;
	  struct cgraph_node *new_clones;

	  *slot = next_inline_clone;

	  /* Unlink inline clone from the list of clones of removed node.  */
	  if (next_inline_clone->next_sibling_clone)
	    next_inline_clone->next_sibling_clone->prev_sibling_clone
	      = next_inline_clone->prev_sibling_clone;
	  if (next_inline_clone->prev_sibling_clone)
	    {
	      gcc_assert (node->clones != next_inline_clone);
	      next_inline_clone->prev_sibling_clone->next_sibling_clone
		= next_inline_clone->next_sibling_clone;
	    }
	  else
	    {
	      gcc_assert (node->clones == next_inline_clone);
	      node->clones = next_inline_clone->next_sibling_clone;
	    }

	  new_clones = node->clones;
	  node->clones = NULL;

	  /* Copy clone info.  */
	  next_inline_clone->clone = node->clone;

	  /* Now place it into clone tree at same level at NODE.  */
	  next_inline_clone->clone_of = node->clone_of;
	  next_inline_clone->prev_sibling_clone = NULL;
	  next_inline_clone->next_sibling_clone = NULL;
	  if (node->clone_of)
	    {
	      if (node->clone_of->clones)
		node->clone_of->clones->prev_sibling_clone = next_inline_clone;
	      next_inline_clone->next_sibling_clone = node->clone_of->clones;
	      node->clone_of->clones = next_inline_clone;
	    }

	  /* Merge the clone list.  */
	  if (new_clones)
	    {
	      if (!next_inline_clone->clones)
		next_inline_clone->clones = new_clones;
	      else
		{
		  /* Append NEW_CLONES after the existing clones.  */
		  n = next_inline_clone->clones;
		  while (n->next_sibling_clone)
		    n = n->next_sibling_clone;
		  n->next_sibling_clone = new_clones;
		  new_clones->prev_sibling_clone = n;
		}
	    }

	  /* Update clone_of pointers.  */
	  n = new_clones;
	  while (n)
	    {
	      n->clone_of = next_inline_clone;
	      n = n->next_sibling_clone;
	    }
	}
      else
	{
	  /* No replacement clone exists; the decl's body becomes dead.  */
	  htab_clear_slot (cgraph_hash, slot);
	  kill_body = true;
	}

    }
  /* Remove NODE from its own position in the clone tree.  */
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else if (node->clone_of)
    node->clone_of->clones = node->next_sibling_clone;
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->clones)
    {
      struct cgraph_node *n, *next;

      if (node->clone_of)
	{
	  /* Reparent NODE's clones to NODE's own clone_of and splice them
	     into its sibling list.  */
	  for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
	    n->clone_of = node->clone_of;
	  n->clone_of = node->clone_of;
	  n->next_sibling_clone = node->clone_of->clones;
	  if (node->clone_of->clones)
	    node->clone_of->clones->prev_sibling_clone = n;
	  node->clone_of->clones = node->clones;
	}
      else
	{
	  /* We are removing node with clones. this makes clones inconsistent,
	     but assume they will be removed subsequently and just keep clone
	     tree intact.  This can happen in unreachable function removal since
	     we remove unreachable functions in random order, not by bottom-up
	     walk of clone trees.  */
	  for (n = node->clones; n; n = next)
	    {
	      next = n->next_sibling_clone;
	      n->next_sibling_clone = NULL;
	      n->prev_sibling_clone = NULL;
	      n->clone_of = NULL;
	    }
	}
    }

  while (node->same_body)
    cgraph_remove_same_body_alias (node->same_body);

  if (node->same_comdat_group)
    {
      struct cgraph_node *prev;
      /* Find the predecessor of NODE on the circular comdat list.  */
      for (prev = node->same_comdat_group;
	   prev->same_comdat_group != node;
	   prev = prev->same_comdat_group)
	;
      if (node->same_comdat_group == prev)
	/* NODE was the only other member; dissolve the group link.  */
	prev->same_comdat_group = NULL;
      else
	prev->same_comdat_group = node->same_comdat_group;
      node->same_comdat_group = NULL;
    }

  /* While all the clones are removed after being proceeded, the function
     itself is kept in the cgraph even after it is compiled.  Check whether
     we are done with this body and reclaim it proactively if this is the case.
     */
  if (!kill_body && *slot)
    {
      struct cgraph_node *n = (struct cgraph_node *) *slot;
      if (!n->clones && !n->clone_of && !n->global.inlined_to
	  && (cgraph_global_info_ready
	      && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl)
		  || n->in_other_partition)))
	kill_body = true;
    }
  if (assembler_name_hash)
    {
      tree name = DECL_ASSEMBLER_NAME (node->decl);
      slot = htab_find_slot_with_hash (assembler_name_hash, name,
				       decl_assembler_name_hash (name),
				       NO_INSERT);
      /* Inline clones are not hashed.  */
      if (slot && *slot == node)
	htab_clear_slot (assembler_name_hash, slot);
    }

  if (kill_body)
    cgraph_release_function_body (node);
  node->decl = NULL;
  if (node->call_site_hash)
    {
      htab_delete (node->call_site_hash);
      node->call_site_hash = NULL;
    }
  cgraph_n_nodes--;

  /* Clear out the node to NULL all pointers and add the node to the free
     list.  */
  memset (node, 0, sizeof(*node));
  node->uid = uid;
  NEXT_FREE_NODE (node) = free_nodes;
  free_nodes = node;
}
1709
/* Remove NODE from the cgraph together with every callee that was inlined
   into it (recursively), i.e. the whole tree of inline copies rooted here.  */

void
cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
{
  struct cgraph_edge *e, *next;
  for (e = node->callees; e; e = next)
    {
      /* Save the successor first: removing the inlined callee also
	 removes (and frees) the edge E itself.  */
      next = e->next_callee;
      /* !inline_failed marks an edge whose callee body was inlined.  */
      if (!e->inline_failed)
	cgraph_remove_node_and_inline_clones (e->callee);
    }
  cgraph_remove_node (node);
}
1724
/* Notify finalize_compilation_unit that given node is reachable.
   Finalized nodes not yet marked reachable are flagged and queued on
   cgraph_nodes_queue for processing.  */

void
cgraph_mark_reachable_node (struct cgraph_node *node)
{
  if (!node->reachable && node->local.finalized)
    {
      if (cgraph_global_info_ready)
	{
	  /* Verify that function does not appear to be needed out of blue
	     during the optimization process.  This can happen for extern
	     inlines when bodies was removed after inlining.  */
	  gcc_assert ((node->analyzed || node->in_other_partition
		       || DECL_EXTERNAL (node->decl)));
	}
      else
	notice_global_symbol (node->decl);
      node->reachable = 1;

      /* Queue the node for processing by the analyzer.  */
      node->next_needed = cgraph_nodes_queue;
      cgraph_nodes_queue = node;
    }
}
1748
1749 /* Likewise indicate that a node is needed, i.e. reachable via some
1750 external means. */
1751
1752 void
1753 cgraph_mark_needed_node (struct cgraph_node *node)
1754 {
1755 node->needed = 1;
1756 gcc_assert (!node->global.inlined_to);
1757 cgraph_mark_reachable_node (node);
1758 }
1759
1760 /* Likewise indicate that a node is having address taken. */
1761
1762 void
1763 cgraph_mark_address_taken_node (struct cgraph_node *node)
1764 {
1765 gcc_assert (!node->global.inlined_to);
1766 cgraph_mark_reachable_node (node);
1767 node->address_taken = 1;
1768 }
1769
1770 /* Return local info for the compiled function. */
1771
1772 struct cgraph_local_info *
1773 cgraph_local_info (tree decl)
1774 {
1775 struct cgraph_node *node;
1776
1777 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1778 node = cgraph_get_node (decl);
1779 if (!node)
1780 return NULL;
1781 return &node->local;
1782 }
1783
/* Return global info for the compiled function DECL, or NULL when DECL has
   no cgraph node.  Only valid once global info has been computed.  */

struct cgraph_global_info *
cgraph_global_info (tree decl)
{
  struct cgraph_node *node;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
  node = cgraph_get_node (decl);
  if (!node)
    return NULL;
  return &node->global;
}
1797
/* Return RTL info for the compiled function DECL, or NULL when DECL has no
   cgraph node or its RTL has not been generated yet (it is neither the
   current function nor already written out).  */

struct cgraph_rtl_info *
cgraph_rtl_info (tree decl)
{
  struct cgraph_node *node;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  node = cgraph_get_node (decl);
  if (!node
      || (decl != current_function_decl
	  && !TREE_ASM_WRITTEN (node->decl)))
    return NULL;
  return &node->rtl;
}
1813
/* Return a string describing the failure REASON.  The table of strings is
   generated from cif-code.def by expanding DEFCIFCODE to its message.  */

const char*
cgraph_inline_failed_string (cgraph_inline_failed_t reason)
{
#undef DEFCIFCODE
#define DEFCIFCODE(code, string)	string,

  static const char *cif_string_table[CIF_N_REASONS] = {
#include "cif-code.def"
  };

  /* Signedness of an enum type is implementation defined, so cast it
     to unsigned before testing. */
  gcc_assert ((unsigned) reason < CIF_N_REASONS);
  return cif_string_table[reason];
}
1831
1832 /* Return name of the node used in debug output. */
1833 const char *
1834 cgraph_node_name (struct cgraph_node *node)
1835 {
1836 return lang_hooks.decl_printable_name (node->decl, 2);
1837 }
1838
/* Names used to print out the availability enum; indexed by
   cgraph_function_body_availability results.  */
const char * const cgraph_availability_names[] =
  {"unset", "not_available", "overwritable", "available", "local"};
1842
1843
/* Dump call graph node NODE to file F: identity, flags, incoming and
   outgoing edges, IPA references and same-body aliases/thunks.  */

void
dump_cgraph_node (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *edge;
  int indirect_calls_count = 0;

  /* Identity: name/uid(pid) plus the node's address for ICE debugging.  */
  fprintf (f, "%s/%i(%i)", cgraph_node_name (node), node->uid,
	   node->pid);
  dump_addr (f, " @", (void *)node);
  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
    fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
  if (node->global.inlined_to)
    fprintf (f, " (inline copy in %s/%i)",
	     cgraph_node_name (node->global.inlined_to),
	     node->global.inlined_to->uid);
  if (node->same_comdat_group)
    fprintf (f, " (same comdat group as %s/%i)",
	     cgraph_node_name (node->same_comdat_group),
	     node->same_comdat_group->uid);
  if (node->clone_of)
    fprintf (f, " (clone of %s/%i)",
	     cgraph_node_name (node->clone_of),
	     node->clone_of->uid);
  if (cgraph_function_flags_ready)
    fprintf (f, " availability:%s",
	     cgraph_availability_names [cgraph_function_body_availability (node)]);
  /* One flag word each, in the order the flags tend to be set.  */
  if (node->analyzed)
    fprintf (f, " analyzed");
  if (node->in_other_partition)
    fprintf (f, " in_other_partition");
  if (node->count)
    fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
	     (HOST_WIDEST_INT)node->count);
  if (node->origin)
    fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
  if (node->needed)
    fprintf (f, " needed");
  if (node->address_taken)
    fprintf (f, " address_taken");
  else if (node->reachable)
    fprintf (f, " reachable");
  else if (node->reachable_from_other_partition)
    fprintf (f, " reachable_from_other_partition");
  if (gimple_has_body_p (node->decl))
    fprintf (f, " body");
  if (node->process)
    fprintf (f, " process");
  if (node->local.local)
    fprintf (f, " local");
  if (node->local.externally_visible)
    fprintf (f, " externally_visible");
  if (node->resolution != LDPR_UNKNOWN)
    fprintf (f, " %s",
 	     ld_plugin_symbol_resolution_names[(int)node->resolution]);
  if (node->local.finalized)
    fprintf (f, " finalized");
  if (node->local.disregard_inline_limits)
    fprintf (f, " always_inline");
  else if (node->local.inlinable)
    fprintf (f, " inlinable");
  else if (node->local.versionable)
    fprintf (f, " versionable");
  if (node->local.redefined_extern_inline)
    fprintf (f, " redefined_extern_inline");
  if (TREE_ASM_WRITTEN (node->decl))
    fprintf (f, " asm_written");
  if (node->only_called_at_startup)
    fprintf (f, " only_called_at_startup");
  if (node->only_called_at_exit)
    fprintf (f, " only_called_at_exit");

  /* Incoming edges.  */
  fprintf (f, "\n  called by: ");
  for (edge = node->callers; edge; edge = edge->next_caller)
    {
      fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
	       edge->caller->uid);
      if (edge->count)
	fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
		 (HOST_WIDEST_INT)edge->count);
      if (edge->frequency)
	fprintf (f, "(%.2f per call) ",
		 edge->frequency / (double)CGRAPH_FREQ_BASE);
      if (!edge->inline_failed)
	fprintf(f, "(inlined) ");
      if (edge->indirect_inlining_edge)
	fprintf(f, "(indirect_inlining) ");
      if (edge->can_throw_external)
	fprintf(f, "(can throw external) ");
    }

  /* Outgoing direct edges.  */
  fprintf (f, "\n  calls: ");
  for (edge = node->callees; edge; edge = edge->next_callee)
    {
      fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
	       edge->callee->uid);
      if (!edge->inline_failed)
	fprintf(f, "(inlined) ");
      if (edge->indirect_inlining_edge)
	fprintf(f, "(indirect_inlining) ");
      if (edge->count)
	fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
		 (HOST_WIDEST_INT)edge->count);
      if (edge->frequency)
	fprintf (f, "(%.2f per call) ",
		 edge->frequency / (double)CGRAPH_FREQ_BASE);
      if (edge->loop_nest)
	fprintf (f, "(nested in %i loops) ", edge->loop_nest);
      if (edge->can_throw_external)
	fprintf(f, "(can throw external) ");
    }
  fprintf (f, "\n");
  fprintf (f, "  References: ");
  ipa_dump_references (f, &node->ref_list);
  fprintf (f, "  Refering this function: ");
  ipa_dump_refering (f, &node->ref_list);

  /* Indirect edges are only counted, not listed individually.  */
  for (edge = node->indirect_calls; edge; edge = edge->next_callee)
    indirect_calls_count++;
  if (indirect_calls_count)
    fprintf (f, "  has %i outgoing edges for indirect calls.\n",
	     indirect_calls_count);

  if (node->same_body)
    {
      struct cgraph_node *n;
      fprintf (f, "  aliases & thunks:");
      for (n = node->same_body; n; n = n->next)
        {
          fprintf (f, " %s/%i", cgraph_node_name (n), n->uid);
	  if (n->thunk.thunk_p)
	    {
	      fprintf (f, " (thunk of %s fixed offset %i virtual value %i has "
		       "virtual offset %i",
	      	       lang_hooks.decl_printable_name (n->thunk.alias, 2),
		       (int)n->thunk.fixed_offset,
		       (int)n->thunk.virtual_value,
		       (int)n->thunk.virtual_offset_p);
	      fprintf (f, ")");
	    }
	  if (DECL_ASSEMBLER_NAME_SET_P (n->decl))
	    fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (n->decl)));
	}
      fprintf (f, "\n");
    }
}
1991
1992
1993 /* Dump call graph node NODE to stderr. */
1994
1995 DEBUG_FUNCTION void
1996 debug_cgraph_node (struct cgraph_node *node)
1997 {
1998 dump_cgraph_node (stderr, node);
1999 }
2000
2001
2002 /* Dump the callgraph to file F. */
2003
2004 void
2005 dump_cgraph (FILE *f)
2006 {
2007 struct cgraph_node *node;
2008
2009 fprintf (f, "callgraph:\n\n");
2010 for (node = cgraph_nodes; node; node = node->next)
2011 dump_cgraph_node (f, node);
2012 }
2013
2014
2015 /* Dump the call graph to stderr. */
2016
2017 DEBUG_FUNCTION void
2018 debug_cgraph (void)
2019 {
2020 dump_cgraph (stderr);
2021 }
2022
2023
/* Set the DECL_ASSEMBLER_NAME of DECL to NAME and update cgraph hashtables:
   the node (if any) is removed from the assembler-name hash under its old
   name and re-inserted under NAME.  Warns when renaming a decl that was
   already referenced from emitted assembly.  */

void
change_decl_assembler_name (tree decl, tree name)
{
  struct cgraph_node *node;
  void **slot;
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    SET_DECL_ASSEMBLER_NAME (decl, name);
  else
    {
      /* Renaming to the name already set is a no-op.  */
      if (name == DECL_ASSEMBLER_NAME (decl))
	return;

      /* Drop the node from the hash under its old assembler name before
	 the name changes.  */
      if (assembler_name_hash
	  && TREE_CODE (decl) == FUNCTION_DECL
	  && (node = cgraph_get_node_or_alias (decl)) != NULL)
	{
	  tree old_name = DECL_ASSEMBLER_NAME (decl);
	  slot = htab_find_slot_with_hash (assembler_name_hash, old_name,
					   decl_assembler_name_hash (old_name),
					   NO_INSERT);
	  /* Inline clones are not hashed.  */
	  if (slot && *slot == node)
	    htab_clear_slot (assembler_name_hash, slot);
	}
      if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
	  && DECL_RTL_SET_P (decl))
	warning (0, "%D renamed after being referenced in assembly", decl);

      SET_DECL_ASSEMBLER_NAME (decl, name);
    }
  /* Re-insert the node under the new name.  */
  if (assembler_name_hash
      && TREE_CODE (decl) == FUNCTION_DECL
      && (node = cgraph_get_node_or_alias (decl)) != NULL)
    {
      slot = htab_find_slot_with_hash (assembler_name_hash, name,
				       decl_assembler_name_hash (name),
				       INSERT);
      gcc_assert (!*slot);
      *slot = node;
    }
}
2067
2068 /* Add a top-level asm statement to the list. */
2069
2070 struct cgraph_asm_node *
2071 cgraph_add_asm_node (tree asm_str)
2072 {
2073 struct cgraph_asm_node *node;
2074
2075 node = ggc_alloc_cleared_cgraph_asm_node ();
2076 node->asm_str = asm_str;
2077 node->order = cgraph_order++;
2078 node->next = NULL;
2079 if (cgraph_asm_nodes == NULL)
2080 cgraph_asm_nodes = node;
2081 else
2082 cgraph_asm_last_node->next = node;
2083 cgraph_asm_last_node = node;
2084 return node;
2085 }
2086
2087 /* Return true when the DECL can possibly be inlined. */
2088 bool
2089 cgraph_function_possibly_inlined_p (tree decl)
2090 {
2091 if (!cgraph_global_info_ready)
2092 return !DECL_UNINLINABLE (decl);
2093 return DECL_POSSIBLY_INLINED (decl);
2094 }
2095
/* Create a clone of edge E attached to node N and (optional) statement
   CALL_STMT.  STMT_UID identifies the statement for LTO streaming when
   CALL_STMT is not available.  The clone's profile count is E's count
   scaled by COUNT_SCALE / REG_BR_PROB_BASE, its frequency is scaled by
   FREQ_SCALE / CGRAPH_FREQ_BASE (capped at CGRAPH_FREQ_MAX), and
   LOOP_NEST is added to E's loop depth.  When UPDATE_ORIGINAL is true,
   the cloned count is subtracted from E to keep the profile conserved.
   Returns the new edge.  */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
		   gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
		   int freq_scale, int loop_nest, bool update_original)
{
  struct cgraph_edge *new_edge;
  gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
  gcov_type freq;

  /* We do not want to ignore loop nest after frequency drops to 0.  */
  if (!freq_scale)
    freq_scale = 1;
  freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;

  if (e->indirect_unknown_callee)
    {
      tree decl;

      /* If CALL_STMT meanwhile acquired a known target, materialize the
	 clone as a direct edge to that target.  */
      if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
	{
	  struct cgraph_node *callee = cgraph_get_node (decl);
	  gcc_checking_assert (callee);
	  new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq,
					 e->loop_nest + loop_nest);
	}
      else
	{
	  new_edge = cgraph_create_indirect_edge (n, call_stmt,
						  e->indirect_info->ecf_flags,
						  count, freq,
						  e->loop_nest + loop_nest);
	  /* Copy the gathered indirect-call info wholesale.  */
	  *new_edge->indirect_info = *e->indirect_info;
	}
    }
  else
    {
      new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
				     e->loop_nest + loop_nest);
      /* A direct edge may still carry indirect info if it was produced
	 by devirtualizing an indirect call earlier.  */
      if (e->indirect_info)
	{
	  new_edge->indirect_info
	    = ggc_alloc_cleared_cgraph_indirect_call_info ();
	  *new_edge->indirect_info = *e->indirect_info;
	}
    }

  new_edge->call_stmt_size = e->call_stmt_size;
  new_edge->call_stmt_time = e->call_stmt_time;
  new_edge->inline_failed = e->inline_failed;
  new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
  new_edge->lto_stmt_uid = stmt_uid;
  /* Clone flags that depend on call_stmt availability manually.  */
  new_edge->can_throw_external = e->can_throw_external;
  new_edge->call_stmt_cannot_inline_p = e->call_stmt_cannot_inline_p;
  if (update_original)
    {
      /* Keep the total count over E and its clone conserved; clamp at
	 zero so rounding can never drive it negative.  */
      e->count -= new_edge->count;
      if (e->count < 0)
	e->count = 0;
    }
  cgraph_call_edge_duplication_hooks (e, new_edge);
  return new_edge;
}
2162
/* Create node representing clone of N executed COUNT times.  Decrease
   the execution counts from original node too.
   The new clone will have decl set to DECL that may or may not be the same
   as decl of N.  The clone's edges are created by cloning N's direct and
   indirect call edges with the count scale derived from COUNT, and the
   edges listed in REDIRECT_CALLERS are retargeted at the clone.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
   function's profile to reflect the fact that part of execution is handled
   by node.  */
struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
		   int loop_nest, bool update_original,
		   VEC(cgraph_edge_p,heap) *redirect_callers)
{
  struct cgraph_node *new_node = cgraph_create_node_1 ();
  struct cgraph_edge *e;
  gcov_type count_scale;
  unsigned i;

  new_node->decl = decl;
  /* Keep the clone in the same nesting chain as the original.  */
  new_node->origin = n->origin;
  if (new_node->origin)
    {
      new_node->next_nested = new_node->origin->nested;
      new_node->origin->nested = new_node;
    }
  new_node->analyzed = n->analyzed;
  new_node->local = n->local;
  /* Clones are local to this unit regardless of the original's
     visibility.  */
  new_node->local.externally_visible = false;
  new_node->local.local = true;
  new_node->local.vtable_method = false;
  new_node->global = n->global;
  new_node->rtl = n->rtl;
  new_node->count = count;
  new_node->frequency = n->frequency;
  new_node->clone = n->clone;
  new_node->clone.tree_map = 0;
  /* Derive the edge-count scale from the ratio of COUNT to N's count,
     capped at 1.0 (REG_BR_PROB_BASE).  */
  if (n->count)
    {
      if (new_node->count > n->count)
        count_scale = REG_BR_PROB_BASE;
      else
        count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
    }
  else
    count_scale = 0;
  if (update_original)
    {
      n->count -= count;
      if (n->count < 0)
        n->count = 0;
    }

  FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
	 version.  */
      cgraph_redirect_edge_callee (e, new_node);
    }


  for (e = n->callees;e; e=e->next_callee)
    cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
		       count_scale, freq, loop_nest, update_original);

  for (e = n->indirect_calls; e; e = e->next_callee)
    cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
		       count_scale, freq, loop_nest, update_original);
  ipa_clone_references (new_node, NULL, &n->ref_list);

  /* Link the clone at the head of N's clone list.  */
  new_node->next_sibling_clone = n->clones;
  if (n->clones)
    n->clones->prev_sibling_clone = new_node;
  n->clones = new_node;
  new_node->clone_of = n;

  cgraph_call_node_duplication_hooks (n, new_node);
  /* A clone with its own decl gets its own entries in the decl and
     assembler-name hashes; clones sharing N's decl stay unhashed.  */
  if (n->decl != decl)
    {
      struct cgraph_node **slot;
      slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, new_node, INSERT);
      gcc_assert (!*slot);
      *slot = new_node;
      if (assembler_name_hash)
	{
	  void **aslot;
	  tree name = DECL_ASSEMBLER_NAME (decl);

	  aslot = htab_find_slot_with_hash (assembler_name_hash, name,
					    decl_assembler_name_hash (name),
					    INSERT);
	  gcc_assert (!*aslot);
	  *aslot = new_node;
	}
    }
  return new_node;
}
2259
2260 /* Create a new name for clone of DECL, add SUFFIX. Returns an identifier. */
2261
2262 static GTY(()) unsigned int clone_fn_id_num;
2263
2264 tree
2265 clone_function_name (tree decl, const char *suffix)
2266 {
2267 tree name = DECL_ASSEMBLER_NAME (decl);
2268 size_t len = IDENTIFIER_LENGTH (name);
2269 char *tmp_name, *prefix;
2270
2271 prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
2272 memcpy (prefix, IDENTIFIER_POINTER (name), len);
2273 strcpy (prefix + len + 1, suffix);
2274 #ifndef NO_DOT_IN_LABEL
2275 prefix[len] = '.';
2276 #elif !defined NO_DOLLAR_IN_LABEL
2277 prefix[len] = '$';
2278 #else
2279 prefix[len] = '_';
2280 #endif
2281 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
2282 return get_identifier (tmp_name);
2283 }
2284
/* Create callgraph node clone of OLD_NODE with new declaration; the
   actual body will be copied later at compilation stage.  Callers listed
   in REDIRECT_CALLERS are retargeted at the clone; TREE_MAP records
   replacement trees for the clone's parameters; ARGS_TO_SKIP (may be
   NULL) marks parameters dropped from the new signature; SUFFIX is
   appended to the clone's assembler name.

   TODO: after merging in ipa-sra use function call notes instead of args_to_skip
   bitmap interface.
*/
struct cgraph_node *
cgraph_create_virtual_clone (struct cgraph_node *old_node,
			     VEC(cgraph_edge_p,heap) *redirect_callers,
			     VEC(ipa_replace_map_p,gc) *tree_map,
			     bitmap args_to_skip,
			     const char * suffix)
{
  tree old_decl = old_node->decl;
  struct cgraph_node *new_node = NULL;
  tree new_decl;
  size_t i;
  struct ipa_replace_map *map;

  /* During WPA the body is not around to check, so skip the assert.  */
  if (!flag_wpa)
    gcc_checking_assert (tree_versionable_function_p (old_decl));

  gcc_assert (old_node->local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
  DECL_STRUCT_FUNCTION (new_decl) = NULL;

  /* Generate a new name for the new version. */
  DECL_NAME (new_decl) = clone_function_name (old_decl, suffix);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
				CGRAPH_FREQ_BASE, 0, false,
				redirect_callers);
  /* Update the properties.
     Make clone visible only within this translation unit.  Make sure
     that is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  DECL_EXTERNAL (new_node->decl) = 0;
  if (DECL_ONE_ONLY (old_decl))
    DECL_SECTION_NAME (new_node->decl) = NULL;
  DECL_COMDAT_GROUP (new_node->decl) = 0;
  TREE_PUBLIC (new_node->decl) = 0;
  DECL_COMDAT (new_node->decl) = 0;
  DECL_WEAK (new_node->decl) = 0;
  new_node->clone.tree_map = tree_map;
  new_node->clone.args_to_skip = args_to_skip;
  FOR_EACH_VEC_ELT (ipa_replace_map_p, tree_map, i, map)
    {
      tree var = map->new_tree;

      STRIP_NOPS (var);
      if (TREE_CODE (var) != ADDR_EXPR)
	continue;
      var = get_base_var (var);
      if (!var)
	continue;

      /* Record references of the future statement initializing the constant
	 argument.  */
      if (TREE_CODE (var) == FUNCTION_DECL)
	{
	  struct cgraph_node *ref_node = cgraph_get_node (var);
	  gcc_checking_assert (ref_node);
	  ipa_record_reference (new_node, NULL, ref_node, NULL, IPA_REF_ADDR,
				NULL);
	}
      else if (TREE_CODE (var) == VAR_DECL)
	ipa_record_reference (new_node, NULL, NULL, varpool_node (var),
			      IPA_REF_ADDR, NULL);
    }
  /* Compute combined_args_to_skip relative to the original (uncloned)
     function so several rounds of cloning compose correctly: the new
     bitmap is indexed by the original parameter positions.  */
  if (!args_to_skip)
    new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
  else if (old_node->clone.combined_args_to_skip)
    {
      int newi = 0, oldi = 0;
      tree arg;
      bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
      struct cgraph_node *orig_node;
      for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
	;
      for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = DECL_CHAIN (arg), oldi++)
	{
	  /* Parameters already skipped by an earlier clone keep their
	     bit; ARGS_TO_SKIP indexes only the surviving parameters.  */
	  if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
	    {
	      bitmap_set_bit (new_args_to_skip, oldi);
	      continue;
	    }
	  if (bitmap_bit_p (args_to_skip, newi))
	    bitmap_set_bit (new_args_to_skip, oldi);
	  newi++;
	}
      new_node->clone.combined_args_to_skip = new_args_to_skip;
    }
  else
    new_node->clone.combined_args_to_skip = args_to_skip;
  new_node->local.externally_visible = 0;
  new_node->local.local = 1;
  new_node->lowered = true;
  new_node->reachable = true;


  return new_node;
}
2395
2396 /* NODE is no longer nested function; update cgraph accordingly. */
2397 void
2398 cgraph_unnest_node (struct cgraph_node *node)
2399 {
2400 struct cgraph_node **node2 = &node->origin->nested;
2401 gcc_assert (node->origin);
2402
2403 while (*node2 != node)
2404 node2 = &(*node2)->next_nested;
2405 *node2 = node->next_nested;
2406 node->origin = NULL;
2407 }
2408
/* Return function availability.  See cgraph.h for description of individual
   return values.  The checks form a lattice from "no body at all" up to
   "body is fully under our control".  */
enum availability
cgraph_function_body_availability (struct cgraph_node *node)
{
  enum availability avail;
  gcc_assert (cgraph_function_flags_ready);
  if (!node->analyzed)
    avail = AVAIL_NOT_AVAILABLE;
  else if (node->local.local)
    avail = AVAIL_LOCAL;
  else if (!node->local.externally_visible)
    avail = AVAIL_AVAILABLE;
  /* Inline functions are safe to be analyzed even if their symbol can
     be overwritten at runtime.  It is not meaningful to enforce any sane
     behaviour on replacing inline function by different body.  */
  else if (DECL_DECLARED_INLINE_P (node->decl))
    avail = AVAIL_AVAILABLE;

  /* If the function can be overwritten, return OVERWRITABLE.  Take
     care at least of two notable extensions - the COMDAT functions
     used to share template instantiations in C++ (this is symmetric
     to code cp_cannot_inline_tree_fn and probably shall be shared and
     the inlinability hooks completely eliminated).

     ??? Does the C++ one definition rule allow us to always return
     AVAIL_AVAILABLE here?  That would be good reason to preserve this
     bit.  */

  else if (decl_replaceable_p (node->decl) && !DECL_EXTERNAL (node->decl))
    avail = AVAIL_OVERWRITABLE;
  else avail = AVAIL_AVAILABLE;

  return avail;
}
2444
/* Add the function FNDECL to the call graph.
   Unlike cgraph_finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  LOWERED tells
   whether FNDECL has already been through the lowering passes.  */

void
cgraph_add_new_function (tree fndecl, bool lowered)
{
  struct cgraph_node *node;
  /* How much work is needed depends on how far compilation has
     progressed globally.  */
  switch (cgraph_state)
    {
      case CGRAPH_STATE_CONSTRUCTION:
	/* Just enqueue function to be processed at nearest occurrence.  */
	node = cgraph_create_node (fndecl);
	node->next_needed = cgraph_new_nodes;
	if (lowered)
	  node->lowered = true;
	cgraph_new_nodes = node;
        break;

      case CGRAPH_STATE_IPA:
      case CGRAPH_STATE_IPA_SSA:
      case CGRAPH_STATE_EXPANSION:
	/* Bring the function into finalized state and enqueue for later
	   analyzing and compilation.  */
	node = cgraph_get_create_node (fndecl);
	node->local.local = false;
	node->local.finalized = true;
	node->reachable = node->needed = true;
	/* During expansion the early passes will not run again, so lower
	   the function right now.  */
	if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
	  {
	    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	    current_function_decl = fndecl;
	    gimple_register_cfg_hooks ();
	    tree_lowering_passes (fndecl);
	    bitmap_obstack_initialize (NULL);
	    if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      execute_pass_list (pass_early_local_passes.pass.sub);
	    bitmap_obstack_release (NULL);
	    pop_cfun ();
	    current_function_decl = NULL;

	    lowered = true;
	  }
	if (lowered)
	  node->lowered = true;
	node->next_needed = cgraph_new_nodes;
	cgraph_new_nodes = node;
        break;

      case CGRAPH_STATE_FINISHED:
	/* At the very end of compilation we have to do all the work up
	   to expansion.  */
	node = cgraph_create_node (fndecl);
	if (lowered)
	  node->lowered = true;
	cgraph_analyze_function (node);
	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	current_function_decl = fndecl;
	gimple_register_cfg_hooks ();
	bitmap_obstack_initialize (NULL);
	if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	  execute_pass_list (pass_early_local_passes.pass.sub);
	bitmap_obstack_release (NULL);
	tree_rest_of_compilation (fndecl);
	pop_cfun ();
	current_function_decl = NULL;
	break;
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
2528
2529 /* Return true if NODE can be made local for API change.
2530 Extern inline functions and C++ COMDAT functions can be made local
2531 at the expense of possible code size growth if function is used in multiple
2532 compilation units. */
2533 bool
2534 cgraph_node_can_be_local_p (struct cgraph_node *node)
2535 {
2536 return (!node->needed && !node->address_taken
2537 && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
2538 || !node->local.externally_visible));
2539 }
2540
/* Make DECL local.  FIXME: We shouldn't need to mess with rtl this early,
   but other code such as notice_global_symbol generates rtl.  Strips
   COMDAT/weak/public linkage from DECL and, when its RTL already
   exists, updates the symbol flags to match.  */
void
cgraph_make_decl_local (tree decl)
{
  rtx rtl, symbol;

  if (TREE_CODE (decl) == VAR_DECL)
    DECL_COMMON (decl) = 0;
  else gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);

  if (DECL_COMDAT (decl))
    {
      /* It is possible that we are linking against library defining same COMDAT
	 function.  To avoid conflict we need to rename our local name of the
	 function just in the case WHOPR partitioning decide to make it hidden
	 to avoid cross partition references.  */
      if (flag_wpa)
	{
	  const char *old_name;

	  old_name  = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
	  if (TREE_CODE (decl) == FUNCTION_DECL)
	    {
	      struct cgraph_node *node = cgraph_get_node_or_alias (decl);
	      change_decl_assembler_name (decl,
					  clone_function_name (decl, "local"));
	      /* Record the rename so the LTO streamer can map references
		 from the old name to the new one.  */
	      if (node->local.lto_file_data)
		lto_record_renamed_decl (node->local.lto_file_data,
					 old_name,
					 IDENTIFIER_POINTER
					 (DECL_ASSEMBLER_NAME (decl)));
	    }
	  else if (TREE_CODE (decl) == VAR_DECL)
	    {
	      struct varpool_node *vnode = varpool_get_node (decl);
	      /* change_decl_assembler_name will warn here on vtables because
		 C++ frontend still sets TREE_SYMBOL_REFERENCED on them.  */
	      SET_DECL_ASSEMBLER_NAME (decl,
				       clone_function_name (decl, "local"));
	      if (vnode->lto_file_data)
		lto_record_renamed_decl (vnode->lto_file_data,
					 old_name,
					 IDENTIFIER_POINTER
					 (DECL_ASSEMBLER_NAME (decl)));
	    }
	}
      DECL_SECTION_NAME (decl) = 0;
      DECL_COMDAT (decl) = 0;
    }
  DECL_COMDAT_GROUP (decl) = 0;
  DECL_WEAK (decl) = 0;
  DECL_EXTERNAL (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  /* If no RTL was generated yet, the flags above will be picked up when
     it is.  */
  if (!DECL_RTL_SET_P (decl))
    return;

  /* Update rtl flags.  */
  make_decl_rtl (decl);

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl))
    return;

  symbol = XEXP (rtl, 0);
  if (GET_CODE (symbol) != SYMBOL_REF)
    return;

  SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
}
2611
/* Bring NODE local: strip external/COMDAT linkage from its decl and all
   same-body aliases, mark it locally bound, and verify the resulting
   availability is AVAIL_LOCAL.  Caller must ensure
   cgraph_node_can_be_local_p.  */
void
cgraph_make_node_local (struct cgraph_node *node)
{
  gcc_assert (cgraph_node_can_be_local_p (node));
  if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
    {
      struct cgraph_node *alias;
      cgraph_make_decl_local (node->decl);

      /* Same-body aliases must follow the main decl's linkage.  */
      for (alias = node->same_body; alias; alias = alias->next)
	cgraph_make_decl_local (alias->decl);

      node->local.externally_visible = false;
      node->local.local = true;
      node->resolution = LDPR_PREVAILING_DEF_IRONLY;
      gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
    }
}
2631
2632 /* Set TREE_NOTHROW on NODE's decl and on same_body aliases of NODE
2633 if any to NOTHROW. */
2634
2635 void
2636 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2637 {
2638 struct cgraph_node *alias;
2639 TREE_NOTHROW (node->decl) = nothrow;
2640 for (alias = node->same_body; alias; alias = alias->next)
2641 TREE_NOTHROW (alias->decl) = nothrow;
2642 }
2643
2644 /* Set TREE_READONLY on NODE's decl and on same_body aliases of NODE
2645 if any to READONLY. */
2646
2647 void
2648 cgraph_set_const_flag (struct cgraph_node *node, bool readonly, bool looping)
2649 {
2650 struct cgraph_node *alias;
2651 /* Static constructors and destructors without a side effect can be
2652 optimized out. */
2653 if (!looping && readonly)
2654 {
2655 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2656 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2657 if (DECL_STATIC_DESTRUCTOR (node->decl))
2658 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2659 }
2660 TREE_READONLY (node->decl) = readonly;
2661 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2662 for (alias = node->same_body; alias; alias = alias->next)
2663 {
2664 TREE_READONLY (alias->decl) = readonly;
2665 DECL_LOOPING_CONST_OR_PURE_P (alias->decl) = looping;
2666 }
2667 }
2668
2669 /* Set DECL_PURE_P on NODE's decl and on same_body aliases of NODE
2670 if any to PURE. */
2671
2672 void
2673 cgraph_set_pure_flag (struct cgraph_node *node, bool pure, bool looping)
2674 {
2675 struct cgraph_node *alias;
2676 /* Static constructors and destructors without a side effect can be
2677 optimized out. */
2678 if (!looping && pure)
2679 {
2680 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2681 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2682 if (DECL_STATIC_DESTRUCTOR (node->decl))
2683 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2684 }
2685 DECL_PURE_P (node->decl) = pure;
2686 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2687 for (alias = node->same_body; alias; alias = alias->next)
2688 {
2689 DECL_PURE_P (alias->decl) = pure;
2690 DECL_LOOPING_CONST_OR_PURE_P (alias->decl) = looping;
2691 }
2692 }
2693
/* See if the frequency of NODE can be updated based on frequencies of its
   callers.  Also promotes the only_called_at_startup/exit bits when all
   callers agree.  Only local nodes are considered, since for them all
   callers are visible.  Returns true if anything changed.  */
bool
cgraph_propagate_frequency (struct cgraph_node *node)
{
  bool maybe_unlikely_executed = true, maybe_executed_once = true;
  bool only_called_at_startup = true;
  bool only_called_at_exit = true;
  bool changed = false;
  struct cgraph_edge *edge;

  if (!node->local.local)
    return false;
  gcc_assert (node->analyzed);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing frequency %s\n", cgraph_node_name (node));

  /* Stop scanning callers as soon as every hypothesis has been
     refuted.  */
  for (edge = node->callers;
       edge && (maybe_unlikely_executed || maybe_executed_once
	        || only_called_at_startup || only_called_at_exit);
       edge = edge->next_caller)
    {
      /* Self-recursive edges carry no new information about when the
	 function runs.  */
      if (edge->caller != node)
	{
          only_called_at_startup &= edge->caller->only_called_at_startup;
	  /* It makes sense to put main() together with the static constructors.
	     It will be executed for sure, but rest of functions called from
	     main are definitely not at startup only.  */
	  if (MAIN_NAME_P (DECL_NAME (edge->caller->decl)))
	    only_called_at_startup = 0;
          only_called_at_exit &= edge->caller->only_called_at_exit;
	}
      if (!edge->frequency)
	continue;
      switch (edge->caller->frequency)
        {
	case NODE_FREQUENCY_UNLIKELY_EXECUTED:
	  break;
	case NODE_FREQUENCY_EXECUTED_ONCE:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Called by %s that is executed once\n",
		     cgraph_node_name (edge->caller));
	  maybe_unlikely_executed = false;
	  /* A call inside a loop of a once-executed caller may still run
	     many times.  */
	  if (edge->loop_nest)
	    {
	      maybe_executed_once = false;
	      if (dump_file && (dump_flags & TDF_DETAILS))
	        fprintf (dump_file, "  Called in loop\n");
	    }
	  break;
	case NODE_FREQUENCY_HOT:
	case NODE_FREQUENCY_NORMAL:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Called by %s that is normal or hot\n",
		     cgraph_node_name (edge->caller));
	  maybe_unlikely_executed = false;
	  maybe_executed_once = false;
	  break;
        }
    }
  /* Startup and exit are mutually exclusive promotions; only promote
     when exactly one of them held for all callers.  */
  if ((only_called_at_startup && !only_called_at_exit)
      && !node->only_called_at_startup)
    {
       node->only_called_at_startup = true;
       if (dump_file)
         fprintf (dump_file, "Node %s promoted to only called at startup.\n",
		  cgraph_node_name (node));
       changed = true;
    }
  if ((only_called_at_exit && !only_called_at_startup)
      && !node->only_called_at_exit)
    {
       node->only_called_at_exit = true;
       if (dump_file)
         fprintf (dump_file, "Node %s promoted to only called at exit.\n",
		  cgraph_node_name (node));
       changed = true;
    }
  /* These come either from profile or user hints; never update them.  */
  if (node->frequency == NODE_FREQUENCY_HOT
      || node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
    return changed;
  if (maybe_unlikely_executed)
    {
      node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
      if (dump_file)
	fprintf (dump_file, "Node %s promoted to unlikely executed.\n",
		 cgraph_node_name (node));
      changed = true;
    }
  else if (maybe_executed_once && node->frequency != NODE_FREQUENCY_EXECUTED_ONCE)
    {
      node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      if (dump_file)
	fprintf (dump_file, "Node %s promoted to executed once.\n",
		 cgraph_node_name (node));
      changed = true;
    }
  return changed;
}
2794
2795 /* Return true when NODE can not return or throw and thus
2796 it is safe to ignore its side effects for IPA analysis. */
2797
2798 bool
2799 cgraph_node_cannot_return (struct cgraph_node *node)
2800 {
2801 int flags = flags_from_decl_or_type (node->decl);
2802 if (!flag_exceptions)
2803 return (flags & ECF_NORETURN) != 0;
2804 else
2805 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2806 == (ECF_NORETURN | ECF_NOTHROW));
2807 }
2808
2809 /* Return true when call of E can not lead to return from caller
2810 and thus it is safe to ignore its side effects for IPA analysis
2811 when computing side effects of the caller.
2812 FIXME: We could actually mark all edges that have no reaching
2813 patch to EXIT_BLOCK_PTR or throw to get better results. */
2814 bool
2815 cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
2816 {
2817 if (cgraph_node_cannot_return (e->caller))
2818 return true;
2819 if (e->indirect_unknown_callee)
2820 {
2821 int flags = e->indirect_info->ecf_flags;
2822 if (!flag_exceptions)
2823 return (flags & ECF_NORETURN) != 0;
2824 else
2825 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2826 == (ECF_NORETURN | ECF_NOTHROW));
2827 }
2828 else
2829 return cgraph_node_cannot_return (e->callee);
2830 }
2831
/* Return true when function NODE can be removed from callgraph
   if all direct calls (and references) are eliminated.  */

bool
cgraph_can_remove_if_no_direct_calls_and_refs_p (struct cgraph_node *node)
{
  gcc_assert (!node->global.inlined_to);
  /* Extern inlines can always go, we will use the external definition.  */
  if (DECL_EXTERNAL (node->decl))
    return true;
  /* When function is needed, we can not remove it.  */
  if (node->needed || node->reachable_from_other_partition)
    return false;
  /* Static ctors/dtors are invoked implicitly by the runtime, so they
     are never removable merely for lack of direct calls.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl)
      || DECL_STATIC_DESTRUCTOR (node->decl))
    return false;
  /* Only COMDAT functions can be removed if externally visible.  */
  if (node->local.externally_visible
      && (!DECL_COMDAT (node->decl)
	  || cgraph_used_from_object_file_p (node)))
    return false;
  return true;
}
2855
2856 /* Return true when function NODE can be expected to be removed
2857 from program when direct calls in this compilation unit are removed.
2858
2859 As a special case COMDAT functions are
2860 cgraph_can_remove_if_no_direct_calls_p while the are not
2861 cgraph_only_called_directly_p (it is possible they are called from other
2862 unit)
2863
2864 This function behaves as cgraph_only_called_directly_p because eliminating
2865 all uses of COMDAT function does not make it necessarily disappear from
2866 the program unless we are compiling whole program or we do LTO. In this
2867 case we know we win since dynamic linking will not really discard the
2868 linkonce section. */
2869
2870 bool
2871 cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
2872 {
2873 gcc_assert (!node->global.inlined_to);
2874 if (cgraph_used_from_object_file_p (node))
2875 return false;
2876 if (!in_lto_p && !flag_whole_program)
2877 return cgraph_only_called_directly_p (node);
2878 else
2879 {
2880 if (DECL_EXTERNAL (node->decl))
2881 return true;
2882 return cgraph_can_remove_if_no_direct_calls_p (node);
2883 }
2884 }
2885
2886 /* Return true when RESOLUTION indicate that linker will use
2887 the symbol from non-LTO object files. */
2888
2889 bool
2890 resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
2891 {
2892 return (resolution == LDPR_PREVAILING_DEF
2893 || resolution == LDPR_PREEMPTED_REG
2894 || resolution == LDPR_RESOLVED_EXEC
2895 || resolution == LDPR_RESOLVED_DYN);
2896 }
2897
2898 /* Return true when NODE is known to be used from other (non-LTO) object file.
2899 Known only when doing LTO via linker plugin. */
2900
2901 bool
2902 cgraph_used_from_object_file_p (struct cgraph_node *node)
2903 {
2904 struct cgraph_node *alias;
2905
2906 gcc_assert (!node->global.inlined_to);
2907 if (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))
2908 return false;
2909 if (resolution_used_from_other_file_p (node->resolution))
2910 return true;
2911 for (alias = node->same_body; alias; alias = alias->next)
2912 if (TREE_PUBLIC (alias->decl)
2913 && resolution_used_from_other_file_p (alias->resolution))
2914 return true;
2915 return false;
2916 }
2917
2918 #include "gt-cgraph.h"