1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file contains basic routines for manipulating the call graph.
23
24 The callgraph:
25
26 The call-graph is a data structure designed for inter-procedural optimization,
27 but it is also used in non-unit-at-a-time compilation to allow easier code
28 sharing.
29
30 The call-graph consists of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to a unique node.
32
33 The mapping from declarations to call-graph nodes is done using a hash table
34 keyed on DECL_UID. The call-graph nodes are created lazily by the
35 cgraph_node function when it is called for an unknown declaration.
36
37 The callgraph at the moment does not represent all indirect calls or calls
38 from other compilation units. The NEEDED flag is set for each node that may be
39 accessed in such an invisible way, and such a node shall be considered an
40 entry point to the callgraph.
41
42 On the other hand, the callgraph currently does contain some edges for
43 indirect calls with unknown callees, which can be accessed through the
44 indirect_calls field of a node. It should be noted, however, that at the
45 moment only calls that are potential candidates for indirect inlining are
46 added there.
47
48 Interprocedural information:
49
50 The callgraph is the place to store data needed for interprocedural optimization.
51 All data structures are divided into three components: local_info, which
52 is produced while analyzing the function; global_info, which is the result
53 of walking the whole callgraph at the end of compilation; and
54 rtl_info, used by the RTL backend to propagate data from already compiled
55 functions to their callers.
56
57 Moreover, each node has a uid which can be used to keep information in
58 on-the-side arrays. UIDs are reused and therefore reasonably dense.
59
60 Inlining plans:
61
62 The function inlining information is decided in advance and maintained
63 in the callgraph as a so-called inline plan.
64 For each inlined call, the callee's node is cloned to represent the
65 new function copy produced by the inliner.
66 Each inlined call gets a unique corresponding clone node of the callee,
67 and the data structure is updated while inlining is performed, so
68 the clones are eliminated and their callee edges redirected to the
69 caller.
70
71 Each edge has an "inline_failed" field. When the field is set to NULL,
72 the call will be inlined. When it is non-NULL, it contains the reason
73 why inlining wasn't performed. */
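/* A minimal usage sketch (hypothetical helper name; assumes this file's
   declarations are in scope): a typical IPA pass walks the graph by iterating
   the cgraph_nodes list and each node's callee edges, roughly like this.
   cgraph_nodes, the callees list and cgraph_node_name are the real
   structures and accessors defined in this file.

     static void
     example_walk_callgraph (FILE *f)
     {
       struct cgraph_node *node;
       struct cgraph_edge *e;

       for (node = cgraph_nodes; node; node = node->next)
         for (e = node->callees; e; e = e->next_callee)
           fprintf (f, "%s -> %s\n",
                    cgraph_node_name (e->caller),
                    cgraph_node_name (e->callee));
     }  */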
74
75 #include "config.h"
76 #include "system.h"
77 #include "coretypes.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "tree-inline.h"
81 #include "langhooks.h"
82 #include "hashtab.h"
83 #include "toplev.h"
84 #include "flags.h"
85 #include "ggc.h"
86 #include "debug.h"
87 #include "target.h"
88 #include "basic-block.h"
89 #include "cgraph.h"
90 #include "output.h"
91 #include "intl.h"
92 #include "gimple.h"
93 #include "tree-dump.h"
94 #include "tree-flow.h"
95 #include "value-prof.h"
96 #include "except.h"
97 #include "diagnostic-core.h"
98 #include "rtl.h"
99 #include "ipa-utils.h"
100 #include "lto-streamer.h"
101 #include "ipa-inline.h"
102
103 const char * const ld_plugin_symbol_resolution_names[]=
104 {
105 "",
106 "undef",
107 "prevailing_def",
108 "prevailing_def_ironly",
109 "preempted_reg",
110 "preempted_ir",
111 "resolved_ir",
112 "resolved_exec",
113 "resolved_dyn"
114 };
115
116 static void cgraph_node_remove_callers (struct cgraph_node *node);
117 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
118 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
119
120 /* Hash table used to convert declarations into nodes. */
121 static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
122 /* Hash table used to convert assembler names into nodes. */
123 static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;
124
125 /* The linked list of cgraph nodes. */
126 struct cgraph_node *cgraph_nodes;
127
128 /* Queue of cgraph nodes scheduled to be lowered. */
129 struct cgraph_node *cgraph_nodes_queue;
130
131 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
132 secondary queue used during optimization to accommodate passes that
133 may generate new functions that need to be optimized and expanded. */
134 struct cgraph_node *cgraph_new_nodes;
135
136 /* Number of nodes in existence. */
137 int cgraph_n_nodes;
138
139 /* Maximal uid used in cgraph nodes. */
140 int cgraph_max_uid;
141
142 /* Maximal uid used in cgraph edges. */
143 int cgraph_edge_max_uid;
144
145 /* Set when the whole unit has been analyzed so we can access global info. */
146 bool cgraph_global_info_ready = false;
147
148 /* What state callgraph is in right now. */
149 enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
150
151 /* Set when the cgraph is fully built and the basic flags are computed. */
152 bool cgraph_function_flags_ready = false;
153
154 /* Linked list of cgraph asm nodes. */
155 struct cgraph_asm_node *cgraph_asm_nodes;
156
157 /* Last node in cgraph_asm_nodes. */
158 static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
159
160 /* The order index of the next cgraph node to be created. This is
161 used so that we can sort the cgraph nodes in order by when we saw
162 them, to support -fno-toplevel-reorder. */
163 int cgraph_order;
164
165 /* List of hooks triggered on cgraph_edge events. */
166 struct cgraph_edge_hook_list {
167 cgraph_edge_hook hook;
168 void *data;
169 struct cgraph_edge_hook_list *next;
170 };
171
172 /* List of hooks triggered on cgraph_node events. */
173 struct cgraph_node_hook_list {
174 cgraph_node_hook hook;
175 void *data;
176 struct cgraph_node_hook_list *next;
177 };
178
179 /* List of hooks triggered on events involving two cgraph_edges. */
180 struct cgraph_2edge_hook_list {
181 cgraph_2edge_hook hook;
182 void *data;
183 struct cgraph_2edge_hook_list *next;
184 };
185
186 /* List of hooks triggered on events involving two cgraph_nodes. */
187 struct cgraph_2node_hook_list {
188 cgraph_2node_hook hook;
189 void *data;
190 struct cgraph_2node_hook_list *next;
191 };
192
193 /* List of hooks triggered when an edge is removed. */
194 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
195 /* List of hooks triggered when a node is removed. */
196 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
197 /* List of hooks triggered when an edge is duplicated. */
198 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
199 /* List of hooks triggered when a node is duplicated. */
200 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
201 /* List of hooks triggered when a function is inserted. */
202 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
203
204 /* Head of a linked list of unused (freed) call graph nodes.
205 Do not GTY((delete)) this list so that UIDs get reliably recycled. */
206 static GTY(()) struct cgraph_node *free_nodes;
207 /* Head of a linked list of unused (freed) call graph edges.
208 Do not GTY((delete)) this list so that UIDs get reliably recycled. */
209 static GTY(()) struct cgraph_edge *free_edges;
210
211 /* Did process_same_body_aliases run? */
212 bool same_body_aliases_done;
213
214 /* Macros to access the next item in the list of free cgraph nodes and
215 edges. */
216 #define NEXT_FREE_NODE(NODE) (NODE)->next
217 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
218
219 /* Register HOOK to be called with DATA on each removed edge. */
220 struct cgraph_edge_hook_list *
221 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
222 {
223 struct cgraph_edge_hook_list *entry;
224 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
225
226 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
227 entry->hook = hook;
228 entry->data = data;
229 entry->next = NULL;
230 while (*ptr)
231 ptr = &(*ptr)->next;
232 *ptr = entry;
233 return entry;
234 }
235
236 /* Remove ENTRY from the list of hooks called on removing edges. */
237 void
238 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
239 {
240 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
241
242 while (*ptr != entry)
243 ptr = &(*ptr)->next;
244 *ptr = entry->next;
245 free (entry);
246 }
247
248 /* Call all edge removal hooks. */
249 static void
250 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
251 {
252 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
253 while (entry)
254 {
255 entry->hook (e, entry->data);
256 entry = entry->next;
257 }
258 }
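/* A minimal usage sketch of the removal-hook API (the hook and holder names
   are hypothetical; the pattern mirrors how IPA passes such as ipa-prop
   register their hooks):

     static void
     example_edge_removal_hook (struct cgraph_edge *e, void *data)
     {
       ... invalidate any on-the-side data keyed by e->uid here ...
     }

     static struct cgraph_edge_hook_list *example_holder;

     example_holder
       = cgraph_add_edge_removal_hook (example_edge_removal_hook, NULL);
     ...
     cgraph_remove_edge_removal_hook (example_holder);  */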
259
260 /* Register HOOK to be called with DATA on each removed node. */
261 struct cgraph_node_hook_list *
262 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
263 {
264 struct cgraph_node_hook_list *entry;
265 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
266
267 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
268 entry->hook = hook;
269 entry->data = data;
270 entry->next = NULL;
271 while (*ptr)
272 ptr = &(*ptr)->next;
273 *ptr = entry;
274 return entry;
275 }
276
277 /* Remove ENTRY from the list of hooks called on removing nodes. */
278 void
279 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
280 {
281 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
282
283 while (*ptr != entry)
284 ptr = &(*ptr)->next;
285 *ptr = entry->next;
286 free (entry);
287 }
288
289 /* Call all node removal hooks. */
290 static void
291 cgraph_call_node_removal_hooks (struct cgraph_node *node)
292 {
293 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
294 while (entry)
295 {
296 entry->hook (node, entry->data);
297 entry = entry->next;
298 }
299 }
300
301 /* Register HOOK to be called with DATA on each inserted node. */
302 struct cgraph_node_hook_list *
303 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
304 {
305 struct cgraph_node_hook_list *entry;
306 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
307
308 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
309 entry->hook = hook;
310 entry->data = data;
311 entry->next = NULL;
312 while (*ptr)
313 ptr = &(*ptr)->next;
314 *ptr = entry;
315 return entry;
316 }
317
318 /* Remove ENTRY from the list of hooks called on inserted nodes. */
319 void
320 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
321 {
322 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
323
324 while (*ptr != entry)
325 ptr = &(*ptr)->next;
326 *ptr = entry->next;
327 free (entry);
328 }
329
330 /* Call all node insertion hooks. */
331 void
332 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
333 {
334 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
335 while (entry)
336 {
337 entry->hook (node, entry->data);
338 entry = entry->next;
339 }
340 }
341
342 /* Register HOOK to be called with DATA on each duplicated edge. */
343 struct cgraph_2edge_hook_list *
344 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
345 {
346 struct cgraph_2edge_hook_list *entry;
347 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
348
349 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
350 entry->hook = hook;
351 entry->data = data;
352 entry->next = NULL;
353 while (*ptr)
354 ptr = &(*ptr)->next;
355 *ptr = entry;
356 return entry;
357 }
358
359 /* Remove ENTRY from the list of hooks called on duplicating edges. */
360 void
361 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
362 {
363 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
364
365 while (*ptr != entry)
366 ptr = &(*ptr)->next;
367 *ptr = entry->next;
368 free (entry);
369 }
370
371 /* Call all edge duplication hooks. */
372 static void
373 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
374 struct cgraph_edge *cs2)
375 {
376 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
377 while (entry)
378 {
379 entry->hook (cs1, cs2, entry->data);
380 entry = entry->next;
381 }
382 }
383
384 /* Register HOOK to be called with DATA on each duplicated node. */
385 struct cgraph_2node_hook_list *
386 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
387 {
388 struct cgraph_2node_hook_list *entry;
389 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
390
391 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
392 entry->hook = hook;
393 entry->data = data;
394 entry->next = NULL;
395 while (*ptr)
396 ptr = &(*ptr)->next;
397 *ptr = entry;
398 return entry;
399 }
400
401 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
402 void
403 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
404 {
405 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
406
407 while (*ptr != entry)
408 ptr = &(*ptr)->next;
409 *ptr = entry->next;
410 free (entry);
411 }
412
413 /* Call all node duplication hooks. */
414 static void
415 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
416 struct cgraph_node *node2)
417 {
418 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
419 while (entry)
420 {
421 entry->hook (node1, node2, entry->data);
422 entry = entry->next;
423 }
424 }
425
426 /* Returns a hash code for P. */
427
428 static hashval_t
429 hash_node (const void *p)
430 {
431 const struct cgraph_node *n = (const struct cgraph_node *) p;
432 return (hashval_t) DECL_UID (n->decl);
433 }
434
435
436 /* Returns nonzero if P1 and P2 are equal. */
437
438 static int
439 eq_node (const void *p1, const void *p2)
440 {
441 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
442 const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
443 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
444 }
445
446 /* Allocate new callgraph node. */
447
448 static inline struct cgraph_node *
449 cgraph_allocate_node (void)
450 {
451 struct cgraph_node *node;
452
453 if (free_nodes)
454 {
455 node = free_nodes;
456 free_nodes = NEXT_FREE_NODE (node);
457 }
458 else
459 {
460 node = ggc_alloc_cleared_cgraph_node ();
461 node->uid = cgraph_max_uid++;
462 }
463
464 return node;
465 }
466
467 /* Allocate new callgraph node and insert it into basic data structures. */
468
469 static struct cgraph_node *
470 cgraph_create_node_1 (void)
471 {
472 struct cgraph_node *node = cgraph_allocate_node ();
473
474 node->next = cgraph_nodes;
475 node->order = cgraph_order++;
476 if (cgraph_nodes)
477 cgraph_nodes->previous = node;
478 node->previous = NULL;
479 node->frequency = NODE_FREQUENCY_NORMAL;
480 node->count_materialization_scale = REG_BR_PROB_BASE;
481 ipa_empty_ref_list (&node->ref_list);
482 cgraph_nodes = node;
483 cgraph_n_nodes++;
484 return node;
485 }
486
487 /* Return cgraph node assigned to DECL. Create new one when needed. */
488
489 struct cgraph_node *
490 cgraph_create_node (tree decl)
491 {
492 struct cgraph_node key, *node, **slot;
493
494 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
495
496 if (!cgraph_hash)
497 cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
498
499 key.decl = decl;
500 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
501 gcc_assert (!*slot);
502
503 node = cgraph_create_node_1 ();
504 node->decl = decl;
505 *slot = node;
506 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
507 {
508 node->origin = cgraph_get_create_node (DECL_CONTEXT (decl));
509 node->next_nested = node->origin->nested;
510 node->origin->nested = node;
511 }
512 if (assembler_name_hash)
513 {
514 void **aslot;
515 tree name = DECL_ASSEMBLER_NAME (decl);
516
517 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
518 decl_assembler_name_hash (name),
519 INSERT);
520 /* We can have multiple declarations with the same assembler name. For C++
521 it is __builtin_strlen and strlen, for instance. Do we need to
522 record them all? The original implementation marked just the first one,
523 so let's hope for the best. */
524 if (*aslot == NULL)
525 *aslot = node;
526 }
527 return node;
528 }
529
530 /* Try to find a call graph node for declaration DECL and if it does not exist,
531 create it. */
532
533 struct cgraph_node *
534 cgraph_get_create_node (tree decl)
535 {
536 struct cgraph_node *node;
537
538 node = cgraph_get_node (decl);
539 if (node)
540 return node;
541
542 return cgraph_create_node (decl);
543 }
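/* A small usage sketch (FNDECL is a hypothetical FUNCTION_DECL): front ends
   and passes obtain nodes lazily with

     struct cgraph_node *n = cgraph_get_create_node (fndecl);

   while cgraph_get_node is used instead when the caller wants NULL rather
   than a freshly created node for a declaration it has not seen.  */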
544
545 /* Mark ALIAS as an alias to DECL. DECL_NODE is the cgraph node representing
546 the function the body is associated with (not necessarily cgraph_node (DECL)). */
547
548 struct cgraph_node *
549 cgraph_create_function_alias (tree alias, tree decl)
550 {
551 struct cgraph_node *alias_node;
552
553 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
554 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
555 alias_node = cgraph_get_create_node (alias);
556 gcc_assert (!alias_node->local.finalized);
557 alias_node->thunk.alias = decl;
558 alias_node->local.finalized = true;
559 alias_node->alias = 1;
560
561 if ((TREE_PUBLIC (alias) && !DECL_COMDAT (alias) && !DECL_EXTERNAL (alias))
562 || (DECL_VIRTUAL_P (alias)
563 && (DECL_COMDAT (alias) || DECL_EXTERNAL (alias))))
564 cgraph_mark_reachable_node (alias_node);
565 return alias_node;
566 }
567
568 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
569 and NULL otherwise.
570 Same body aliases are output whenever the body of DECL is output,
571 and cgraph_get_node (ALIAS) transparently returns cgraph_get_node (DECL). */
572
573 struct cgraph_node *
574 cgraph_same_body_alias (struct cgraph_node *decl_node ATTRIBUTE_UNUSED, tree alias, tree decl)
575 {
576 struct cgraph_node *n;
577 #ifndef ASM_OUTPUT_DEF
578 /* If aliases aren't supported by the assembler, fail. */
579 return NULL;
580 #endif
581 /* Langhooks can create same body aliases of symbols not defined.
582 Those are useless. Drop them on the floor. */
583 if (cgraph_global_info_ready)
584 return NULL;
585
586 n = cgraph_create_function_alias (alias, decl);
587 n->same_body_alias = true;
588 if (same_body_aliases_done)
589 ipa_record_reference (n, NULL, cgraph_get_node (decl), NULL, IPA_REF_ALIAS,
590 NULL);
591 return n;
592 }
593
594 /* Add a thunk alias into the callgraph. The alias declaration is ALIAS and it
595 aliases DECL with adjustments made to the first parameter.
596 See the comments in thunk_adjust for details on the parameters. */
597
598 struct cgraph_node *
599 cgraph_add_thunk (struct cgraph_node *decl_node ATTRIBUTE_UNUSED,
600 tree alias, tree decl,
601 bool this_adjusting,
602 HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
603 tree virtual_offset,
604 tree real_alias)
605 {
606 struct cgraph_node *node;
607
608 node = cgraph_get_node (alias);
609 if (node)
610 {
611 gcc_assert (node->local.finalized);
612 gcc_assert (!node->alias);
613 gcc_assert (!node->thunk.thunk_p);
614 cgraph_remove_node (node);
615 }
616
617 node = cgraph_create_node (alias);
618 gcc_checking_assert (!virtual_offset
619 || double_int_equal_p
620 (tree_to_double_int (virtual_offset),
621 shwi_to_double_int (virtual_value)));
622 node->thunk.fixed_offset = fixed_offset;
623 node->thunk.this_adjusting = this_adjusting;
624 node->thunk.virtual_value = virtual_value;
625 node->thunk.virtual_offset_p = virtual_offset != NULL;
626 node->thunk.alias = real_alias;
627 node->thunk.thunk_p = true;
628 node->local.finalized = true;
629
630 if (cgraph_decide_is_function_needed (node, decl))
631 cgraph_mark_needed_node (node);
632
633 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
634 || (DECL_VIRTUAL_P (decl)
635 && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
636 cgraph_mark_reachable_node (node);
637
638 return node;
639 }
640
641 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
642 is assigned. */
643
644 struct cgraph_node *
645 cgraph_get_node_or_alias (const_tree decl)
646 {
647 struct cgraph_node key, *node = NULL, **slot;
648
649 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
650
651 if (!cgraph_hash)
652 return NULL;
653
654 key.decl = CONST_CAST2 (tree, const_tree, decl);
655
656 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
657 NO_INSERT);
658
659 if (slot && *slot)
660 node = *slot;
661 return node;
662 }
663
664 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
665 is assigned. */
666
667 struct cgraph_node *
668 cgraph_get_node (const_tree decl)
669 {
670 struct cgraph_node key, *node = NULL, **slot;
671
672 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
673
674 if (!cgraph_hash)
675 return NULL;
676
677 key.decl = CONST_CAST2 (tree, const_tree, decl);
678
679 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
680 NO_INSERT);
681
682 if (slot && *slot)
683 node = *slot;
684 return node;
685 }
686
687 /* Insert already constructed node into hashtable. */
688
689 void
690 cgraph_insert_node_to_hashtable (struct cgraph_node *node)
691 {
692 struct cgraph_node **slot;
693
694 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
695
696 gcc_assert (!*slot);
697 *slot = node;
698 }
699
700 /* Returns a hash code for P. */
701
702 static hashval_t
703 hash_node_by_assembler_name (const void *p)
704 {
705 const struct cgraph_node *n = (const struct cgraph_node *) p;
706 return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
707 }
708
709 /* Returns nonzero if P1 and P2 are equal. */
710
711 static int
712 eq_assembler_name (const void *p1, const void *p2)
713 {
714 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
715 const_tree name = (const_tree)p2;
716 return (decl_assembler_name_equal (n1->decl, name));
717 }
718
719 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
720 Return NULL if there's no such node. */
721
722 struct cgraph_node *
723 cgraph_node_for_asm (tree asmname)
724 {
725 struct cgraph_node *node;
726 void **slot;
727
728 if (!assembler_name_hash)
729 {
730 assembler_name_hash =
731 htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
732 NULL);
733 for (node = cgraph_nodes; node; node = node->next)
734 if (!node->global.inlined_to)
735 {
736 tree name = DECL_ASSEMBLER_NAME (node->decl);
737 slot = htab_find_slot_with_hash (assembler_name_hash, name,
738 decl_assembler_name_hash (name),
739 INSERT);
740 /* We can have multiple declarations with the same assembler name. For C++
741 it is __builtin_strlen and strlen, for instance. Do we need to
742 record them all? The original implementation marked just the first one,
743 so let's hope for the best. */
744 if (!*slot)
745 *slot = node;
746 }
747 }
748
749 slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
750 decl_assembler_name_hash (asmname),
751 NO_INSERT);
752
753 if (slot)
754 {
755 node = (struct cgraph_node *) *slot;
756 return node;
757 }
758 return NULL;
759 }
760
761 /* Returns a hash value for X (which really is a cgraph_edge). */
762
763 static hashval_t
764 edge_hash (const void *x)
765 {
766 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
767 }
768
769 /* Return nonzero if the call_stmt of cgraph_edge X is the statement Y. */
770
771 static int
772 edge_eq (const void *x, const void *y)
773 {
774 return ((const struct cgraph_edge *) x)->call_stmt == y;
775 }
776
777 /* Add call graph edge E to call site hash of its caller. */
778
779 static inline void
780 cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
781 {
782 void **slot;
783 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
784 e->call_stmt,
785 htab_hash_pointer (e->call_stmt),
786 INSERT);
787 gcc_assert (!*slot);
788 *slot = e;
789 }
790
791 /* Return the callgraph edge representing the GIMPLE_CALL statement
792 CALL_STMT. */
793
794 struct cgraph_edge *
795 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
796 {
797 struct cgraph_edge *e, *e2;
798 int n = 0;
799
800 if (node->call_site_hash)
801 return (struct cgraph_edge *)
802 htab_find_with_hash (node->call_site_hash, call_stmt,
803 htab_hash_pointer (call_stmt));
804
805 /* This loop may turn out to be a performance problem. In such a case,
806 adding hashtables into call nodes with very many edges is probably the best
807 solution. It is not a good idea to add a pointer into the CALL_EXPR itself,
808 because we want to make it possible to have multiple cgraph nodes representing
809 different clones of the same body before the body is actually cloned. */
810 for (e = node->callees; e; e = e->next_callee)
811 {
812 if (e->call_stmt == call_stmt)
813 break;
814 n++;
815 }
816
817 if (!e)
818 for (e = node->indirect_calls; e; e = e->next_callee)
819 {
820 if (e->call_stmt == call_stmt)
821 break;
822 n++;
823 }
824
825 if (n > 100)
826 {
827 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
828 for (e2 = node->callees; e2; e2 = e2->next_callee)
829 cgraph_add_edge_to_call_site_hash (e2);
830 for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
831 cgraph_add_edge_to_call_site_hash (e2);
832 }
833
834 return e;
835 }
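/* A small usage sketch (CALLER and STMT are hypothetical): the reverse
   mapping from a GIMPLE_CALL statement back to its edge is simply

     struct cgraph_edge *e = cgraph_edge (caller, stmt);

   which transparently switches to the call_site_hash built above once a
   caller accumulates more than 100 outgoing edges.  */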
836
837
838 /* Change field call_stmt of edge E to NEW_STMT. */
839
840 void
841 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
842 {
843 tree decl;
844
845 if (e->caller->call_site_hash)
846 {
847 htab_remove_elt_with_hash (e->caller->call_site_hash,
848 e->call_stmt,
849 htab_hash_pointer (e->call_stmt));
850 }
851
852 e->call_stmt = new_stmt;
853 if (e->indirect_unknown_callee
854 && (decl = gimple_call_fndecl (new_stmt)))
855 {
856 /* Constant propagation (and possibly also inlining?) can turn an
857 indirect call into a direct one. */
858 struct cgraph_node *new_callee = cgraph_get_node (decl);
859
860 gcc_checking_assert (new_callee);
861 cgraph_make_edge_direct (e, new_callee, 0);
862 }
863
864 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
865 e->can_throw_external = stmt_can_throw_external (new_stmt);
866 pop_cfun ();
867 if (e->caller->call_site_hash)
868 cgraph_add_edge_to_call_site_hash (e);
869 }
870
871 /* Like cgraph_set_call_stmt but walk the clone tree and update all
872 clones sharing the same function body. */
873
874 void
875 cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
876 gimple old_stmt, gimple new_stmt)
877 {
878 struct cgraph_node *node;
879 struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
880
881 if (edge)
882 cgraph_set_call_stmt (edge, new_stmt);
883
884 node = orig->clones;
885 if (node)
886 while (node != orig)
887 {
888 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
889 if (edge)
890 cgraph_set_call_stmt (edge, new_stmt);
891 if (node->clones)
892 node = node->clones;
893 else if (node->next_sibling_clone)
894 node = node->next_sibling_clone;
895 else
896 {
897 while (node != orig && !node->next_sibling_clone)
898 node = node->clone_of;
899 if (node != orig)
900 node = node->next_sibling_clone;
901 }
902 }
903 }
904
905 /* Like cgraph_create_edge, but walk the clone tree and update all clones
906 sharing the same function body. If clones already have an edge for OLD_STMT,
907 only update the edge the same way cgraph_set_call_stmt_including_clones does.
908
909 TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
910 frequencies of the clones. */
911
912 void
913 cgraph_create_edge_including_clones (struct cgraph_node *orig,
914 struct cgraph_node *callee,
915 gimple old_stmt,
916 gimple stmt, gcov_type count,
917 int freq,
918 cgraph_inline_failed_t reason)
919 {
920 struct cgraph_node *node;
921 struct cgraph_edge *edge;
922
923 if (!cgraph_edge (orig, stmt))
924 {
925 edge = cgraph_create_edge (orig, callee, stmt, count, freq);
926 edge->inline_failed = reason;
927 }
928
929 node = orig->clones;
930 if (node)
931 while (node != orig)
932 {
933 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
934
935 /* It is possible that clones already contain the edge while the
936 master didn't. Either we promoted an indirect call into a direct
937 call in the clone, or we are processing clones of an unreachable
938 master where the edges have been removed. */
939 if (edge)
940 cgraph_set_call_stmt (edge, stmt);
941 else if (!cgraph_edge (node, stmt))
942 {
943 edge = cgraph_create_edge (node, callee, stmt, count,
944 freq);
945 edge->inline_failed = reason;
946 }
947
948 if (node->clones)
949 node = node->clones;
950 else if (node->next_sibling_clone)
951 node = node->next_sibling_clone;
952 else
953 {
954 while (node != orig && !node->next_sibling_clone)
955 node = node->clone_of;
956 if (node != orig)
957 node = node->next_sibling_clone;
958 }
959 }
960 }
961
962 /* Allocate a cgraph_edge structure and fill it with data according to the
963 parameters of which only CALLEE can be NULL (when creating an indirect call
964 edge). */
965
966 static struct cgraph_edge *
967 cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
968 gimple call_stmt, gcov_type count, int freq)
969 {
970 struct cgraph_edge *edge;
971
972 /* LTO does not actually have access to the call_stmt since these
973 have not been loaded yet. */
974 if (call_stmt)
975 {
976 /* This is a rather expensive check possibly triggering
977 construction of call stmt hashtable. */
978 gcc_checking_assert (!cgraph_edge (caller, call_stmt));
979
980 gcc_assert (is_gimple_call (call_stmt));
981 }
982
983 if (free_edges)
984 {
985 edge = free_edges;
986 free_edges = NEXT_FREE_EDGE (edge);
987 }
988 else
989 {
990 edge = ggc_alloc_cgraph_edge ();
991 edge->uid = cgraph_edge_max_uid++;
992 }
993
994 edge->aux = NULL;
995 edge->caller = caller;
996 edge->callee = callee;
997 edge->prev_caller = NULL;
998 edge->next_caller = NULL;
999 edge->prev_callee = NULL;
1000 edge->next_callee = NULL;
1001
1002 edge->count = count;
1003 gcc_assert (count >= 0);
1004 edge->frequency = freq;
1005 gcc_assert (freq >= 0);
1006 gcc_assert (freq <= CGRAPH_FREQ_MAX);
1007
1008 edge->call_stmt = call_stmt;
1009 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
1010 edge->can_throw_external
1011 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
1012 pop_cfun ();
1013 edge->call_stmt_cannot_inline_p =
1014 (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
1015 if (call_stmt && caller->call_site_hash)
1016 cgraph_add_edge_to_call_site_hash (edge);
1017
1018 edge->indirect_info = NULL;
1019 edge->indirect_inlining_edge = 0;
1020
1021 return edge;
1022 }
1023
1024 /* Create edge from CALLER to CALLEE in the cgraph. */
1025
1026 struct cgraph_edge *
1027 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
1028 gimple call_stmt, gcov_type count, int freq)
1029 {
1030 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
1031 count, freq);
1032
1033 edge->indirect_unknown_callee = 0;
1034 initialize_inline_failed (edge);
1035
1036 edge->next_caller = callee->callers;
1037 if (callee->callers)
1038 callee->callers->prev_caller = edge;
1039 edge->next_callee = caller->callees;
1040 if (caller->callees)
1041 caller->callees->prev_callee = edge;
1042 caller->callees = edge;
1043 callee->callers = edge;
1044
1045 return edge;
1046 }
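/* A hedged sketch of typical edge creation for a direct GIMPLE_CALL
   statement STMT in basic block BB of CALLER (all three names are
   hypothetical; the profile bookkeeping mirrors
   cgraph_update_edges_for_call_stmt_node below):

     struct cgraph_edge *e
       = cgraph_create_edge (caller,
                             cgraph_get_create_node (gimple_call_fndecl (stmt)),
                             stmt, bb->count,
                             compute_call_stmt_bb_frequency (caller->decl, bb));  */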
1047
1048 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
1049
1050 struct cgraph_indirect_call_info *
1051 cgraph_allocate_init_indirect_info (void)
1052 {
1053 struct cgraph_indirect_call_info *ii;
1054
1055 ii = ggc_alloc_cleared_cgraph_indirect_call_info ();
1056 ii->param_index = -1;
1057 return ii;
1058 }
1059
1060 /* Create an indirect edge with a yet-undetermined callee where the call
1061 statement destination is a formal parameter of the caller with index
1062 PARAM_INDEX. */
1063
1064 struct cgraph_edge *
1065 cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
1066 int ecf_flags,
1067 gcov_type count, int freq)
1068 {
1069 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
1070 count, freq);
1071
1072 edge->indirect_unknown_callee = 1;
1073 initialize_inline_failed (edge);
1074
1075 edge->indirect_info = cgraph_allocate_init_indirect_info ();
1076 edge->indirect_info->ecf_flags = ecf_flags;
1077
1078 edge->next_callee = caller->indirect_calls;
1079 if (caller->indirect_calls)
1080 caller->indirect_calls->prev_callee = edge;
1081 caller->indirect_calls = edge;
1082
1083 return edge;
1084 }
1085
1086 /* Remove the edge E from the list of the callers of the callee. */
1087
1088 static inline void
1089 cgraph_edge_remove_callee (struct cgraph_edge *e)
1090 {
1091 gcc_assert (!e->indirect_unknown_callee);
1092 if (e->prev_caller)
1093 e->prev_caller->next_caller = e->next_caller;
1094 if (e->next_caller)
1095 e->next_caller->prev_caller = e->prev_caller;
1096 if (!e->prev_caller)
1097 e->callee->callers = e->next_caller;
1098 }
1099
1100 /* Remove the edge E from the list of the callees of the caller. */
1101
1102 static inline void
1103 cgraph_edge_remove_caller (struct cgraph_edge *e)
1104 {
1105 if (e->prev_callee)
1106 e->prev_callee->next_callee = e->next_callee;
1107 if (e->next_callee)
1108 e->next_callee->prev_callee = e->prev_callee;
1109 if (!e->prev_callee)
1110 {
1111 if (e->indirect_unknown_callee)
1112 e->caller->indirect_calls = e->next_callee;
1113 else
1114 e->caller->callees = e->next_callee;
1115 }
1116 if (e->caller->call_site_hash)
1117 htab_remove_elt_with_hash (e->caller->call_site_hash,
1118 e->call_stmt,
1119 htab_hash_pointer (e->call_stmt));
1120 }
1121
1122 /* Put the edge onto the free list. */
1123
1124 static void
1125 cgraph_free_edge (struct cgraph_edge *e)
1126 {
1127 int uid = e->uid;
1128
1129 /* Clear out the edge so we do not dangle pointers. */
1130 memset (e, 0, sizeof (*e));
1131 e->uid = uid;
1132 NEXT_FREE_EDGE (e) = free_edges;
1133 free_edges = e;
1134 }
1135
1136 /* Remove the edge E in the cgraph. */
1137
1138 void
1139 cgraph_remove_edge (struct cgraph_edge *e)
1140 {
1141 /* Call all edge removal hooks. */
1142 cgraph_call_edge_removal_hooks (e);
1143
1144 if (!e->indirect_unknown_callee)
1145 /* Remove from callers list of the callee. */
1146 cgraph_edge_remove_callee (e);
1147
1148 /* Remove from callees list of the callers. */
1149 cgraph_edge_remove_caller (e);
1150
1151 /* Put the edge onto the free list. */
1152 cgraph_free_edge (e);
1153 }
1154
1155 /* Set callee of call graph edge E and add it to the corresponding set of
1156 callers. */
1157
1158 static void
1159 cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1160 {
1161 e->prev_caller = NULL;
1162 if (n->callers)
1163 n->callers->prev_caller = e;
1164 e->next_caller = n->callers;
1165 n->callers = e;
1166 e->callee = n;
1167 }
1168
1169 /* Redirect callee of E to N. The function does not update underlying
1170 call expression. */
1171
1172 void
1173 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1174 {
1175 /* Remove from callers list of the current callee. */
1176 cgraph_edge_remove_callee (e);
1177
1178 /* Insert to callers list of the new callee. */
1179 cgraph_set_edge_callee (e, n);
1180 }
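/* A small usage sketch (E and NEW_CALLEE are hypothetical): specialization
   and inlining passes retarget an existing call with

     cgraph_redirect_edge_callee (e, new_callee);

   and must rewrite the underlying GIMPLE call statement separately, since
   only the callgraph bookkeeping is updated here.  */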
1181
1182 /* Make an indirect EDGE with an unknown callee an ordinary edge leading to
1183 CALLEE. DELTA is an integer constant that is to be added to the this
1184 pointer (first parameter) to compensate for skipping a thunk adjustment. */
1185
1186 void
1187 cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee,
1188 HOST_WIDE_INT delta)
1189 {
1190 edge->indirect_unknown_callee = 0;
1191 edge->indirect_info->thunk_delta = delta;
1192
1193 /* Get the edge out of the indirect edge list. */
1194 if (edge->prev_callee)
1195 edge->prev_callee->next_callee = edge->next_callee;
1196 if (edge->next_callee)
1197 edge->next_callee->prev_callee = edge->prev_callee;
1198 if (!edge->prev_callee)
1199 edge->caller->indirect_calls = edge->next_callee;
1200
1201 /* Put it into the normal callee list */
1202 edge->prev_callee = NULL;
1203 edge->next_callee = edge->caller->callees;
1204 if (edge->caller->callees)
1205 edge->caller->callees->prev_callee = edge;
1206 edge->caller->callees = edge;
1207
1208 /* Insert to callers list of the new callee. */
1209 cgraph_set_edge_callee (edge, callee);
1210
1211 /* We need to re-determine the inlining status of the edge. */
1212 initialize_inline_failed (edge);
1213 }
1214
1215
1216 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1217 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1218 of OLD_STMT if it was previously call statement.
1219 If NEW_STMT is NULL, the call has been dropped without any
1220 replacement. */
1221
1222 static void
1223 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
1224 gimple old_stmt, tree old_call,
1225 gimple new_stmt)
1226 {
1227 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1228 ? gimple_call_fndecl (new_stmt) : 0;
1229
1230 /* If we are only seeing indirect calls, then there is nothing to update. */
1231 if (!new_call && !old_call)
1232 return;
1233 /* See if we turned an indirect call into a direct call, or folded a call to
1234 one builtin into a call to a different builtin. */
1235 if (old_call != new_call)
1236 {
1237 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
1238 struct cgraph_edge *ne = NULL;
1239 gcov_type count;
1240 int frequency;
1241
1242 if (e)
1243 {
1244 /* See if the edge is already there and has the correct callee. It
1245 might be so because indirect inlining has already updated
1246 it. We also might've cloned and redirected the edge. */
1247 if (new_call && e->callee)
1248 {
1249 struct cgraph_node *callee = e->callee;
1250 while (callee)
1251 {
1252 if (callee->decl == new_call
1253 || callee->former_clone_of == new_call)
1254 return;
1255 callee = callee->clone_of;
1256 }
1257 }
1258
1259 /* Otherwise remove edge and create new one; we can't simply redirect
1260 since function has changed, so inline plan and other information
1261 attached to edge is invalid. */
1262 count = e->count;
1263 frequency = e->frequency;
1264 cgraph_remove_edge (e);
1265 }
1266 else if (new_call)
1267 {
1268 /* We are seeing new direct call; compute profile info based on BB. */
1269 basic_block bb = gimple_bb (new_stmt);
1270 count = bb->count;
1271 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1272 bb);
1273 }
1274
1275 if (new_call)
1276 {
1277 ne = cgraph_create_edge (node, cgraph_get_create_node (new_call),
1278 new_stmt, count, frequency);
1279 gcc_assert (ne->inline_failed);
1280 }
1281 }
1282 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1283 else if (old_stmt != new_stmt)
1284 cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
1285 }
1286
1287 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1288 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1289 of OLD_STMT before it was updated (updating can happen in place). */
1290
1291 void
1292 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
1293 {
1294 struct cgraph_node *orig = cgraph_get_node (cfun->decl);
1295 struct cgraph_node *node;
1296
1297 gcc_checking_assert (orig);
1298 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1299 if (orig->clones)
1300 for (node = orig->clones; node != orig;)
1301 {
1302 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1303 if (node->clones)
1304 node = node->clones;
1305 else if (node->next_sibling_clone)
1306 node = node->next_sibling_clone;
1307 else
1308 {
1309 while (node != orig && !node->next_sibling_clone)
1310 node = node->clone_of;
1311 if (node != orig)
1312 node = node->next_sibling_clone;
1313 }
1314 }
1315 }
1316
1317
1318 /* Remove all callees from the node. */
1319
1320 void
1321 cgraph_node_remove_callees (struct cgraph_node *node)
1322 {
1323 struct cgraph_edge *e, *f;
1324
1325 /* It is sufficient to remove the edges from the lists of callers of
1326 the callees. The callee list of the node can be zapped with one
1327 assignment. */
1328 for (e = node->callees; e; e = f)
1329 {
1330 f = e->next_callee;
1331 cgraph_call_edge_removal_hooks (e);
1332 if (!e->indirect_unknown_callee)
1333 cgraph_edge_remove_callee (e);
1334 cgraph_free_edge (e);
1335 }
1336 for (e = node->indirect_calls; e; e = f)
1337 {
1338 f = e->next_callee;
1339 cgraph_call_edge_removal_hooks (e);
1340 if (!e->indirect_unknown_callee)
1341 cgraph_edge_remove_callee (e);
1342 cgraph_free_edge (e);
1343 }
1344 node->indirect_calls = NULL;
1345 node->callees = NULL;
1346 if (node->call_site_hash)
1347 {
1348 htab_delete (node->call_site_hash);
1349 node->call_site_hash = NULL;
1350 }
1351 }
1352
1353 /* Remove all callers from the node. */
1354
1355 static void
1356 cgraph_node_remove_callers (struct cgraph_node *node)
1357 {
1358 struct cgraph_edge *e, *f;
1359
1360 /* It is sufficient to remove the edges from the lists of callees of
1361 the callers. The caller list of the node can be zapped with one
1362 assignment. */
1363 for (e = node->callers; e; e = f)
1364 {
1365 f = e->next_caller;
1366 cgraph_call_edge_removal_hooks (e);
1367 cgraph_edge_remove_caller (e);
1368 cgraph_free_edge (e);
1369 }
1370 node->callers = NULL;
1371 }
1372
1373 /* Release memory used to represent body of function NODE. */
1374
1375 void
1376 cgraph_release_function_body (struct cgraph_node *node)
1377 {
1378 if (DECL_STRUCT_FUNCTION (node->decl))
1379 {
1380 tree old_decl = current_function_decl;
1381 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1382 if (cfun->gimple_df)
1383 {
1384 current_function_decl = node->decl;
1385 delete_tree_ssa ();
1386 delete_tree_cfg_annotations ();
1387 cfun->eh = NULL;
1388 current_function_decl = old_decl;
1389 }
1390 if (cfun->cfg)
1391 {
1392 gcc_assert (dom_computed[0] == DOM_NONE);
1393 gcc_assert (dom_computed[1] == DOM_NONE);
1394 clear_edges ();
1395 }
1396 if (cfun->value_histograms)
1397 free_histograms ();
1398 gcc_assert (!current_loops);
1399 pop_cfun();
1400 gimple_set_body (node->decl, NULL);
1401 VEC_free (ipa_opt_pass, heap,
1402 node->ipa_transforms_to_apply);
1403 /* The struct function hangs onto a lot of data that would leak if we
1404 didn't remove all pointers to it. */
1405 ggc_free (DECL_STRUCT_FUNCTION (node->decl));
1406 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1407 }
1408 DECL_SAVED_TREE (node->decl) = NULL;
1409 /* If the node is abstract and needed, then do not clear DECL_INITIAL
1410 of its associated function declaration because it's
1411 needed to emit debug info later. */
1412 if (!node->abstract_and_needed)
1413 DECL_INITIAL (node->decl) = error_mark_node;
1414 }
1415
1416 /* Remove the node from cgraph. */
1417
1418 void
1419 cgraph_remove_node (struct cgraph_node *node)
1420 {
1421 void **slot;
1422 bool kill_body = false;
1423 struct cgraph_node *n;
1424 int uid = node->uid;
1425
1426 cgraph_call_node_removal_hooks (node);
1427 cgraph_node_remove_callers (node);
1428 cgraph_node_remove_callees (node);
1429 ipa_remove_all_references (&node->ref_list);
1430 ipa_remove_all_refering (&node->ref_list);
1431 VEC_free (ipa_opt_pass, heap,
1432 node->ipa_transforms_to_apply);
1433
1434 /* Incremental inlining accesses removed nodes stored in the postorder
1435 list. */
1436 node->needed = node->reachable = false;
1437 for (n = node->nested; n; n = n->next_nested)
1438 n->origin = NULL;
1439 node->nested = NULL;
1440 if (node->origin)
1441 {
1442 struct cgraph_node **node2 = &node->origin->nested;
1443
1444 while (*node2 != node)
1445 node2 = &(*node2)->next_nested;
1446 *node2 = node->next_nested;
1447 }
1448 if (node->previous)
1449 node->previous->next = node->next;
1450 else
1451 cgraph_nodes = node->next;
1452 if (node->next)
1453 node->next->previous = node->previous;
1454 node->next = NULL;
1455 node->previous = NULL;
1456 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1457 if (*slot == node)
1458 {
1459 struct cgraph_node *next_inline_clone;
1460
1461 for (next_inline_clone = node->clones;
1462 next_inline_clone && next_inline_clone->decl != node->decl;
1463 next_inline_clone = next_inline_clone->next_sibling_clone)
1464 ;
1465
1466 /* If there is an inline clone of the node being removed, we need
1467 to put it into the position of the removed node and reorganize all
1468 other clones to be based on it. */
1469 if (next_inline_clone)
1470 {
1471 struct cgraph_node *n;
1472 struct cgraph_node *new_clones;
1473
1474 *slot = next_inline_clone;
1475
1476 /* Unlink inline clone from the list of clones of removed node. */
1477 if (next_inline_clone->next_sibling_clone)
1478 next_inline_clone->next_sibling_clone->prev_sibling_clone
1479 = next_inline_clone->prev_sibling_clone;
1480 if (next_inline_clone->prev_sibling_clone)
1481 {
1482 gcc_assert (node->clones != next_inline_clone);
1483 next_inline_clone->prev_sibling_clone->next_sibling_clone
1484 = next_inline_clone->next_sibling_clone;
1485 }
1486 else
1487 {
1488 gcc_assert (node->clones == next_inline_clone);
1489 node->clones = next_inline_clone->next_sibling_clone;
1490 }
1491
1492 new_clones = node->clones;
1493 node->clones = NULL;
1494
1495 /* Copy clone info. */
1496 next_inline_clone->clone = node->clone;
1497
1498 /* Now place it into the clone tree at the same level as NODE. */
1499 next_inline_clone->clone_of = node->clone_of;
1500 next_inline_clone->prev_sibling_clone = NULL;
1501 next_inline_clone->next_sibling_clone = NULL;
1502 if (node->clone_of)
1503 {
1504 if (node->clone_of->clones)
1505 node->clone_of->clones->prev_sibling_clone = next_inline_clone;
1506 next_inline_clone->next_sibling_clone = node->clone_of->clones;
1507 node->clone_of->clones = next_inline_clone;
1508 }
1509
1510 /* Merge the clone list. */
1511 if (new_clones)
1512 {
1513 if (!next_inline_clone->clones)
1514 next_inline_clone->clones = new_clones;
1515 else
1516 {
1517 n = next_inline_clone->clones;
1518 while (n->next_sibling_clone)
1519 n = n->next_sibling_clone;
1520 n->next_sibling_clone = new_clones;
1521 new_clones->prev_sibling_clone = n;
1522 }
1523 }
1524
1525 /* Update clone_of pointers. */
1526 n = new_clones;
1527 while (n)
1528 {
1529 n->clone_of = next_inline_clone;
1530 n = n->next_sibling_clone;
1531 }
1532 }
1533 else
1534 {
1535 htab_clear_slot (cgraph_hash, slot);
1536 kill_body = true;
1537 }
1538
1539 }
1540 if (node->prev_sibling_clone)
1541 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1542 else if (node->clone_of)
1543 node->clone_of->clones = node->next_sibling_clone;
1544 if (node->next_sibling_clone)
1545 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1546 if (node->clones)
1547 {
1548 struct cgraph_node *n, *next;
1549
1550 if (node->clone_of)
1551 {
1552 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1553 n->clone_of = node->clone_of;
1554 n->clone_of = node->clone_of;
1555 n->next_sibling_clone = node->clone_of->clones;
1556 if (node->clone_of->clones)
1557 node->clone_of->clones->prev_sibling_clone = n;
1558 node->clone_of->clones = node->clones;
1559 }
1560 else
1561 {
1562 /* We are removing a node with clones. This makes the clones inconsistent,
1563 but assume they will be removed subsequently and just keep the clone
1564 tree intact. This can happen in unreachable function removal, since
1565 we remove unreachable functions in random order, not by a bottom-up
1566 walk of clone trees. */
1567 for (n = node->clones; n; n = next)
1568 {
1569 next = n->next_sibling_clone;
1570 n->next_sibling_clone = NULL;
1571 n->prev_sibling_clone = NULL;
1572 n->clone_of = NULL;
1573 }
1574 }
1575 }
1576
1577 if (node->same_comdat_group)
1578 {
1579 struct cgraph_node *prev;
1580 for (prev = node->same_comdat_group;
1581 prev->same_comdat_group != node;
1582 prev = prev->same_comdat_group)
1583 ;
1584 if (node->same_comdat_group == prev)
1585 prev->same_comdat_group = NULL;
1586 else
1587 prev->same_comdat_group = node->same_comdat_group;
1588 node->same_comdat_group = NULL;
1589 }
1590
1591 /* While all the clones are removed after being processed, the function
1592 itself is kept in the cgraph even after it is compiled. Check whether
1593 we are done with this body and reclaim it proactively if this is
1594 the case. */
1595 if (!kill_body && *slot)
1596 {
1597 struct cgraph_node *n = (struct cgraph_node *) *slot;
1598 if (!n->clones && !n->clone_of && !n->global.inlined_to
1599 && (cgraph_global_info_ready
1600 && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl)
1601 || n->in_other_partition)))
1602 kill_body = true;
1603 }
1604 if (assembler_name_hash)
1605 {
1606 tree name = DECL_ASSEMBLER_NAME (node->decl);
1607 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1608 decl_assembler_name_hash (name),
1609 NO_INSERT);
1610 /* Inline clones are not hashed. */
1611 if (slot && *slot == node)
1612 htab_clear_slot (assembler_name_hash, slot);
1613 }
1614
1615 if (kill_body)
1616 cgraph_release_function_body (node);
1617 node->decl = NULL;
1618 if (node->call_site_hash)
1619 {
1620 htab_delete (node->call_site_hash);
1621 node->call_site_hash = NULL;
1622 }
1623 cgraph_n_nodes--;
1624
1625 /* Clear out the node to NULL all pointers and add the node to the free
1626 list. */
1627 memset (node, 0, sizeof(*node));
1628 node->uid = uid;
1629 NEXT_FREE_NODE (node) = free_nodes;
1630 free_nodes = node;
1631 }
1632
1633 /* Remove the node from the cgraph, together with all clones inlined into it. */
1634
1635 void
1636 cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
1637 {
1638 struct cgraph_edge *e, *next;
1639 for (e = node->callees; e; e = next)
1640 {
1641 next = e->next_callee;
1642 if (!e->inline_failed)
1643 cgraph_remove_node_and_inline_clones (e->callee);
1644 }
1645 cgraph_remove_node (node);
1646 }
1647
1648 /* Notify finalize_compilation_unit that the given node is reachable. */
1649
1650 void
1651 cgraph_mark_reachable_node (struct cgraph_node *node)
1652 {
1653 if (!node->reachable && node->local.finalized)
1654 {
1655 if (cgraph_global_info_ready)
1656 {
1657 /* Verify that the function does not appear to be needed out of the blue
1658 during the optimization process. This can happen for extern
1659 inlines when bodies were removed after inlining. */
1660 gcc_assert ((node->analyzed || node->in_other_partition
1661 || DECL_EXTERNAL (node->decl)));
1662 }
1663 else
1664 notice_global_symbol (node->decl);
1665 node->reachable = 1;
1666
1667 node->next_needed = cgraph_nodes_queue;
1668 cgraph_nodes_queue = node;
1669 }
1670 }
1671
1672 /* Likewise indicate that a node is needed, i.e. reachable via some
1673 external means. */
1674
1675 void
1676 cgraph_mark_needed_node (struct cgraph_node *node)
1677 {
1678 node->needed = 1;
1679 gcc_assert (!node->global.inlined_to);
1680 cgraph_mark_reachable_node (node);
1681 }
1682
1683 /* Likewise indicate that a node has its address taken. */
1684
1685 void
1686 cgraph_mark_address_taken_node (struct cgraph_node *node)
1687 {
1688 gcc_assert (!node->global.inlined_to);
1689 cgraph_mark_reachable_node (node);
1690 /* FIXME: the address_taken flag is used both as a shortcut for testing whether
1691 an IPA_REF_ADDR reference exists (and thus it should be set on the node
1692 representing the alias we take the address of) and as a test whether the
1693 address of the object was taken (and thus it should be set on the node the
1694 alias is referring to). We should remove the first use and then remove the
1695 following set. */
1696 node->address_taken = 1;
1697 node = cgraph_function_or_thunk_node (node, NULL);
1698 node->address_taken = 1;
1699 }
1700
1701 /* Return local info for the compiled function. */
1702
1703 struct cgraph_local_info *
1704 cgraph_local_info (tree decl)
1705 {
1706 struct cgraph_node *node;
1707
1708 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1709 node = cgraph_get_node (decl);
1710 if (!node)
1711 return NULL;
1712 return &node->local;
1713 }
1714
1715 /* Return global info for the compiled function. */
1716
1717 struct cgraph_global_info *
1718 cgraph_global_info (tree decl)
1719 {
1720 struct cgraph_node *node;
1721
1722 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1723 node = cgraph_get_node (decl);
1724 if (!node)
1725 return NULL;
1726 return &node->global;
1727 }
1728
1729 /* Return RTL info for the compiled function. */
1730
1731 struct cgraph_rtl_info *
1732 cgraph_rtl_info (tree decl)
1733 {
1734 struct cgraph_node *node;
1735
1736 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1737 node = cgraph_get_node (decl);
1738 if (!node
1739 || (decl != current_function_decl
1740 && !TREE_ASM_WRITTEN (node->decl)))
1741 return NULL;
1742 return &node->rtl;
1743 }
1744
1745 /* Return a string describing the failure REASON. */
1746
1747 const char*
1748 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1749 {
1750 #undef DEFCIFCODE
1751 #define DEFCIFCODE(code, string) string,
1752
1753 static const char *cif_string_table[CIF_N_REASONS] = {
1754 #include "cif-code.def"
1755 };
1756
1757 /* Signedness of an enum type is implementation defined, so cast it
1758 to unsigned before testing. */
1759 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1760 return cif_string_table[reason];
1761 }
1762
1763 /* Return name of the node used in debug output. */
1764 const char *
1765 cgraph_node_name (struct cgraph_node *node)
1766 {
1767 return lang_hooks.decl_printable_name (node->decl, 2);
1768 }
1769
1770 /* Names used to print out the availability enum. */
1771 const char * const cgraph_availability_names[] =
1772 {"unset", "not_available", "overwritable", "available", "local"};
1773
1774
1775 /* Dump call graph node NODE to file F. */
1776
1777 void
1778 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1779 {
1780 struct cgraph_edge *edge;
1781 int indirect_calls_count = 0;
1782
1783 fprintf (f, "%s/%i", cgraph_node_name (node), node->uid);
1784 dump_addr (f, " @", (void *)node);
1785 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
1786 fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
1787 if (node->global.inlined_to)
1788 fprintf (f, " (inline copy in %s/%i)",
1789 cgraph_node_name (node->global.inlined_to),
1790 node->global.inlined_to->uid);
1791 if (node->same_comdat_group)
1792 fprintf (f, " (same comdat group as %s/%i)",
1793 cgraph_node_name (node->same_comdat_group),
1794 node->same_comdat_group->uid);
1795 if (node->clone_of)
1796 fprintf (f, " (clone of %s/%i)",
1797 cgraph_node_name (node->clone_of),
1798 node->clone_of->uid);
1799 if (cgraph_function_flags_ready)
1800 fprintf (f, " availability:%s",
1801 cgraph_availability_names [cgraph_function_body_availability (node)]);
1802 if (node->analyzed)
1803 fprintf (f, " analyzed");
1804 if (node->in_other_partition)
1805 fprintf (f, " in_other_partition");
1806 if (node->count)
1807 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1808 (HOST_WIDEST_INT)node->count);
1809 if (node->origin)
1810 fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
1811 if (node->needed)
1812 fprintf (f, " needed");
1813 if (node->address_taken)
1814 fprintf (f, " address_taken");
1815 else if (node->reachable)
1816 fprintf (f, " reachable");
1817 else if (node->reachable_from_other_partition)
1818 fprintf (f, " reachable_from_other_partition");
1819 if (gimple_has_body_p (node->decl))
1820 fprintf (f, " body");
1821 if (node->process)
1822 fprintf (f, " process");
1823 if (node->local.local)
1824 fprintf (f, " local");
1825 if (node->local.externally_visible)
1826 fprintf (f, " externally_visible");
1827 if (node->resolution != LDPR_UNKNOWN)
1828 fprintf (f, " %s",
1829 ld_plugin_symbol_resolution_names[(int)node->resolution]);
1830 if (node->local.finalized)
1831 fprintf (f, " finalized");
1832 if (node->local.redefined_extern_inline)
1833 fprintf (f, " redefined_extern_inline");
1834 if (TREE_ASM_WRITTEN (node->decl))
1835 fprintf (f, " asm_written");
1836 if (node->only_called_at_startup)
1837 fprintf (f, " only_called_at_startup");
1838 if (node->only_called_at_exit)
1839 fprintf (f, " only_called_at_exit");
1840
1841 fprintf (f, "\n");
1842
1843 if (node->thunk.thunk_p)
1844 {
1845       fprintf (f, "  thunk of %s (asm: %s) fixed offset %i virtual value %i has "
1846               "virtual offset %i\n",
1847 lang_hooks.decl_printable_name (node->thunk.alias, 2),
1848 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)),
1849 (int)node->thunk.fixed_offset,
1850 (int)node->thunk.virtual_value,
1851 (int)node->thunk.virtual_offset_p);
1852 }
1853 if (node->alias && node->thunk.alias)
1854 {
1855 fprintf (f, " alias of %s",
1856 lang_hooks.decl_printable_name (node->thunk.alias, 2));
1857 if (DECL_ASSEMBLER_NAME_SET_P (node->thunk.alias))
1858 fprintf (f, " (asm: %s)",
1859 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
1860 fprintf (f, "\n");
1861 }
1862
1863 fprintf (f, " called by: ");
1864
1865 for (edge = node->callers; edge; edge = edge->next_caller)
1866 {
1867 fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
1868 edge->caller->uid);
1869 if (edge->count)
1870 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1871 (HOST_WIDEST_INT)edge->count);
1872 if (edge->frequency)
1873 fprintf (f, "(%.2f per call) ",
1874 edge->frequency / (double)CGRAPH_FREQ_BASE);
1875 if (!edge->inline_failed)
1876 fprintf(f, "(inlined) ");
1877 if (edge->indirect_inlining_edge)
1878 fprintf(f, "(indirect_inlining) ");
1879 if (edge->can_throw_external)
1880 fprintf(f, "(can throw external) ");
1881 }
1882
1883 fprintf (f, "\n calls: ");
1884 for (edge = node->callees; edge; edge = edge->next_callee)
1885 {
1886 fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
1887 edge->callee->uid);
1888 if (!edge->inline_failed)
1889 fprintf(f, "(inlined) ");
1890 if (edge->indirect_inlining_edge)
1891 fprintf(f, "(indirect_inlining) ");
1892 if (edge->count)
1893 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1894 (HOST_WIDEST_INT)edge->count);
1895 if (edge->frequency)
1896 fprintf (f, "(%.2f per call) ",
1897 edge->frequency / (double)CGRAPH_FREQ_BASE);
1898 if (edge->can_throw_external)
1899 fprintf(f, "(can throw external) ");
1900 }
1901 fprintf (f, "\n");
1902 fprintf (f, " References: ");
1903 ipa_dump_references (f, &node->ref_list);
1904   fprintf (f, "  Referring this function: ");
1905 ipa_dump_refering (f, &node->ref_list);
1906
1907 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1908 indirect_calls_count++;
1909 if (indirect_calls_count)
1910 fprintf (f, " has %i outgoing edges for indirect calls.\n",
1911 indirect_calls_count);
1912 }
1913
1914
1915 /* Dump call graph node NODE to stderr. */
1916
1917 DEBUG_FUNCTION void
1918 debug_cgraph_node (struct cgraph_node *node)
1919 {
1920 dump_cgraph_node (stderr, node);
1921 }
1922
1923
1924 /* Dump the callgraph to file F. */
1925
1926 void
1927 dump_cgraph (FILE *f)
1928 {
1929 struct cgraph_node *node;
1930
1931 fprintf (f, "callgraph:\n\n");
1932 for (node = cgraph_nodes; node; node = node->next)
1933 dump_cgraph_node (f, node);
1934 }
1935
1936
1937 /* Dump the call graph to stderr. */
1938
1939 DEBUG_FUNCTION void
1940 debug_cgraph (void)
1941 {
1942 dump_cgraph (stderr);
1943 }
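
/* An illustrative usage sketch (hypothetical): a pass with dumping enabled
   can print a single node or the whole graph into its dump file, e.g.

     if (dump_file)
       {
         dump_cgraph_node (dump_file, node);
         dump_cgraph (dump_file);
       }

   The DEBUG_FUNCTION variants above take no FILE argument so the same
   output can be requested interactively from a debugger.  */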
1944
1945
1946 /* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables. */
1947
1948 void
1949 change_decl_assembler_name (tree decl, tree name)
1950 {
1951 struct cgraph_node *node;
1952 void **slot;
1953 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
1954 SET_DECL_ASSEMBLER_NAME (decl, name);
1955 else
1956 {
1957 if (name == DECL_ASSEMBLER_NAME (decl))
1958 return;
1959
1960 if (assembler_name_hash
1961 && TREE_CODE (decl) == FUNCTION_DECL
1962 && (node = cgraph_get_node_or_alias (decl)) != NULL)
1963 {
1964 tree old_name = DECL_ASSEMBLER_NAME (decl);
1965 slot = htab_find_slot_with_hash (assembler_name_hash, old_name,
1966 decl_assembler_name_hash (old_name),
1967 NO_INSERT);
1968 /* Inline clones are not hashed. */
1969 if (slot && *slot == node)
1970 htab_clear_slot (assembler_name_hash, slot);
1971 }
1972 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
1973 && DECL_RTL_SET_P (decl))
1974 warning (0, "%D renamed after being referenced in assembly", decl);
1975
1976 SET_DECL_ASSEMBLER_NAME (decl, name);
1977 }
1978 if (assembler_name_hash
1979 && TREE_CODE (decl) == FUNCTION_DECL
1980 && (node = cgraph_get_node_or_alias (decl)) != NULL)
1981 {
1982 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1983 decl_assembler_name_hash (name),
1984 INSERT);
1985 gcc_assert (!*slot);
1986 *slot = node;
1987 }
1988 }
1989
1990 /* Add a top-level asm statement to the list. */
1991
1992 struct cgraph_asm_node *
1993 cgraph_add_asm_node (tree asm_str)
1994 {
1995 struct cgraph_asm_node *node;
1996
1997 node = ggc_alloc_cleared_cgraph_asm_node ();
1998 node->asm_str = asm_str;
1999 node->order = cgraph_order++;
2000 node->next = NULL;
2001 if (cgraph_asm_nodes == NULL)
2002 cgraph_asm_nodes = node;
2003 else
2004 cgraph_asm_last_node->next = node;
2005 cgraph_asm_last_node = node;
2006 return node;
2007 }
2008
2009 /* Return true when the DECL can possibly be inlined. */
2010 bool
2011 cgraph_function_possibly_inlined_p (tree decl)
2012 {
2013 if (!cgraph_global_info_ready)
2014 return !DECL_UNINLINABLE (decl);
2015 return DECL_POSSIBLY_INLINED (decl);
2016 }
2017
2018 /* Create a clone of edge E in callgraph node N, using CALL_STMT as the call
     statement of the new edge. */
2019 struct cgraph_edge *
2020 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
2021 gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
2022 int freq_scale, bool update_original)
2023 {
2024 struct cgraph_edge *new_edge;
2025 gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
2026 gcov_type freq;
2027
2028 /* We do not want to ignore loop nest after frequency drops to 0. */
2029 if (!freq_scale)
2030 freq_scale = 1;
2031 freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
2032 if (freq > CGRAPH_FREQ_MAX)
2033 freq = CGRAPH_FREQ_MAX;
2034
2035 if (e->indirect_unknown_callee)
2036 {
2037 tree decl;
2038
2039 if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
2040 {
2041 struct cgraph_node *callee = cgraph_get_node (decl);
2042 gcc_checking_assert (callee);
2043 new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq);
2044 }
2045 else
2046 {
2047 new_edge = cgraph_create_indirect_edge (n, call_stmt,
2048 e->indirect_info->ecf_flags,
2049 count, freq);
2050 *new_edge->indirect_info = *e->indirect_info;
2051 }
2052 }
2053 else
2054 {
2055 new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq);
2056 if (e->indirect_info)
2057 {
2058 new_edge->indirect_info
2059 = ggc_alloc_cleared_cgraph_indirect_call_info ();
2060 *new_edge->indirect_info = *e->indirect_info;
2061 }
2062 }
2063
2064 new_edge->inline_failed = e->inline_failed;
2065 new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
2066 new_edge->lto_stmt_uid = stmt_uid;
2067 /* Clone flags that depend on call_stmt availability manually. */
2068 new_edge->can_throw_external = e->can_throw_external;
2069 new_edge->call_stmt_cannot_inline_p = e->call_stmt_cannot_inline_p;
2070 if (update_original)
2071 {
2072 e->count -= new_edge->count;
2073 if (e->count < 0)
2074 e->count = 0;
2075 }
2076 cgraph_call_edge_duplication_hooks (e, new_edge);
2077 return new_edge;
2078 }
2079
2080
2081 /* Create a node representing a clone of N executed COUNT times.  Decrease
2082    the execution counts from the original node too.
2083    The new clone will have decl set to DECL, which may or may not be the same
2084    as the decl of N.
2085 
2086    When UPDATE_ORIGINAL is true, the counts are subtracted from the original
2087    function's profile to reflect the fact that part of the execution is handled
2088    by the new node.
2089    When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
2090    the new clone.  Otherwise the caller is responsible for doing so later. */
2091
2092 struct cgraph_node *
2093 cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
2094 bool update_original,
2095 VEC(cgraph_edge_p,heap) *redirect_callers,
2096 bool call_duplication_hook)
2097 {
2098 struct cgraph_node *new_node = cgraph_create_node_1 ();
2099 struct cgraph_edge *e;
2100 gcov_type count_scale;
2101 unsigned i;
2102
2103 new_node->decl = decl;
2104 new_node->origin = n->origin;
2105 if (new_node->origin)
2106 {
2107 new_node->next_nested = new_node->origin->nested;
2108 new_node->origin->nested = new_node;
2109 }
2110 new_node->analyzed = n->analyzed;
2111 new_node->local = n->local;
2112 new_node->local.externally_visible = false;
2113 new_node->local.local = true;
2114 new_node->global = n->global;
2115 new_node->rtl = n->rtl;
2116 new_node->count = count;
2117 new_node->frequency = n->frequency;
2118 new_node->clone = n->clone;
2119 new_node->clone.tree_map = 0;
2120 if (n->count)
2121 {
2122 if (new_node->count > n->count)
2123 count_scale = REG_BR_PROB_BASE;
2124 else
2125 count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
2126 }
2127 else
2128 count_scale = 0;
2129 if (update_original)
2130 {
2131 n->count -= count;
2132 if (n->count < 0)
2133 n->count = 0;
2134 }
2135
2136 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
2137 {
2138 /* Redirect calls to the old version node to point to its new
2139 version. */
2140 cgraph_redirect_edge_callee (e, new_node);
2141 }
2142
2143
2144 for (e = n->callees;e; e=e->next_callee)
2145 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2146 count_scale, freq, update_original);
2147
2148 for (e = n->indirect_calls; e; e = e->next_callee)
2149 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2150 count_scale, freq, update_original);
2151 ipa_clone_references (new_node, NULL, &n->ref_list);
2152
2153 new_node->next_sibling_clone = n->clones;
2154 if (n->clones)
2155 n->clones->prev_sibling_clone = new_node;
2156 n->clones = new_node;
2157 new_node->clone_of = n;
2158
2159 if (n->decl != decl)
2160 {
2161 struct cgraph_node **slot;
2162 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, new_node, INSERT);
2163 gcc_assert (!*slot);
2164 *slot = new_node;
2165 if (assembler_name_hash)
2166 {
2167 void **aslot;
2168 tree name = DECL_ASSEMBLER_NAME (decl);
2169
2170 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
2171 decl_assembler_name_hash (name),
2172 INSERT);
2173 gcc_assert (!*aslot);
2174 *aslot = new_node;
2175 }
2176 }
2177
2178 if (call_duplication_hook)
2179 cgraph_call_node_duplication_hooks (n, new_node);
2180 return new_node;
2181 }
2182
2183 /* Create a new name for a clone of DECL by appending SUFFIX.  Returns an identifier. */
2184
2185 static GTY(()) unsigned int clone_fn_id_num;
2186
2187 tree
2188 clone_function_name (tree decl, const char *suffix)
2189 {
2190 tree name = DECL_ASSEMBLER_NAME (decl);
2191 size_t len = IDENTIFIER_LENGTH (name);
2192 char *tmp_name, *prefix;
2193
2194 prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
2195 memcpy (prefix, IDENTIFIER_POINTER (name), len);
2196 strcpy (prefix + len + 1, suffix);
2197 #ifndef NO_DOT_IN_LABEL
2198 prefix[len] = '.';
2199 #elif !defined NO_DOLLAR_IN_LABEL
2200 prefix[len] = '$';
2201 #else
2202 prefix[len] = '_';
2203 #endif
2204 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
2205 return get_identifier (tmp_name);
2206 }
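
/* An illustrative sketch (mirroring the use in cgraph_make_decl_local below):
   renaming a function for a local clone combines clone_function_name with
   change_decl_assembler_name so that the assembler name hash stays
   consistent:

     change_decl_assembler_name (decl, clone_function_name (decl, "local"));

   Each call yields a fresh private name because clone_fn_id_num is
   incremented on every use.  */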
2207
2208 /* Create a callgraph node clone with a new declaration.  The actual body
2209    will be copied later, at the compilation stage.
2210 
2211    TODO: after merging in ipa-sra, use function call notes instead of the
2212    args_to_skip bitmap interface.
2213    */
2214 struct cgraph_node *
2215 cgraph_create_virtual_clone (struct cgraph_node *old_node,
2216 VEC(cgraph_edge_p,heap) *redirect_callers,
2217 VEC(ipa_replace_map_p,gc) *tree_map,
2218 bitmap args_to_skip,
2219 const char * suffix)
2220 {
2221 tree old_decl = old_node->decl;
2222 struct cgraph_node *new_node = NULL;
2223 tree new_decl;
2224 size_t i;
2225 struct ipa_replace_map *map;
2226
2227 if (!flag_wpa)
2228 gcc_checking_assert (tree_versionable_function_p (old_decl));
2229
2230 gcc_assert (old_node->local.can_change_signature || !args_to_skip);
2231
2232 /* Make a new FUNCTION_DECL tree node */
2233 if (!args_to_skip)
2234 new_decl = copy_node (old_decl);
2235 else
2236 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
2237 DECL_STRUCT_FUNCTION (new_decl) = NULL;
2238
2239 /* Generate a new name for the new version. */
2240 DECL_NAME (new_decl) = clone_function_name (old_decl, suffix);
2241 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2242 SET_DECL_RTL (new_decl, NULL);
2243
2244 new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
2245 CGRAPH_FREQ_BASE, false,
2246 redirect_callers, false);
2247   /* Update the properties.
2248      Make the clone visible only within this translation unit.  Make sure
2249      it is not weak either.
2250      ??? We cannot use COMDAT linkage because there is no
2251      ABI support for this. */
2252 DECL_EXTERNAL (new_node->decl) = 0;
2253 if (DECL_ONE_ONLY (old_decl))
2254 DECL_SECTION_NAME (new_node->decl) = NULL;
2255 DECL_COMDAT_GROUP (new_node->decl) = 0;
2256 TREE_PUBLIC (new_node->decl) = 0;
2257 DECL_COMDAT (new_node->decl) = 0;
2258 DECL_WEAK (new_node->decl) = 0;
2259 DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
2260 DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;
2261 new_node->clone.tree_map = tree_map;
2262 new_node->clone.args_to_skip = args_to_skip;
2263 FOR_EACH_VEC_ELT (ipa_replace_map_p, tree_map, i, map)
2264 {
2265 tree var = map->new_tree;
2266
2267 STRIP_NOPS (var);
2268 if (TREE_CODE (var) != ADDR_EXPR)
2269 continue;
2270 var = get_base_var (var);
2271 if (!var)
2272 continue;
2273
2274 /* Record references of the future statement initializing the constant
2275 argument. */
2276 if (TREE_CODE (var) == FUNCTION_DECL)
2277 {
2278 struct cgraph_node *ref_node = cgraph_get_node (var);
2279 gcc_checking_assert (ref_node);
2280 ipa_record_reference (new_node, NULL, ref_node, NULL, IPA_REF_ADDR,
2281 NULL);
2282 }
2283 else if (TREE_CODE (var) == VAR_DECL)
2284 ipa_record_reference (new_node, NULL, NULL, varpool_node (var),
2285 IPA_REF_ADDR, NULL);
2286 }
2287 if (!args_to_skip)
2288 new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
2289 else if (old_node->clone.combined_args_to_skip)
2290 {
2291 int newi = 0, oldi = 0;
2292 tree arg;
2293 bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
2294 struct cgraph_node *orig_node;
2295 for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
2296 ;
2297 for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = DECL_CHAIN (arg), oldi++)
2298 {
2299 if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
2300 {
2301 bitmap_set_bit (new_args_to_skip, oldi);
2302 continue;
2303 }
2304 if (bitmap_bit_p (args_to_skip, newi))
2305 bitmap_set_bit (new_args_to_skip, oldi);
2306 newi++;
2307 }
2308 new_node->clone.combined_args_to_skip = new_args_to_skip;
2309 }
2310 else
2311 new_node->clone.combined_args_to_skip = args_to_skip;
2312 new_node->local.externally_visible = 0;
2313 new_node->local.local = 1;
2314 new_node->lowered = true;
2315 new_node->reachable = true;
2316
2317 cgraph_call_node_duplication_hooks (old_node, new_node);
2318
2319
2320 return new_node;
2321 }
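
/* An illustrative sketch (hypothetical; assumes NODE->local.can_change_signature
   is set, as the assert above requires): an IPA pass wanting a specialized
   copy of NODE with its first argument dropped might do roughly

     VEC (cgraph_edge_p, heap) *callers = collect_callers_of_node (node);
     bitmap args_to_skip = BITMAP_GGC_ALLOC ();
     struct cgraph_node *clone;

     bitmap_set_bit (args_to_skip, 0);
     clone = cgraph_create_virtual_clone (node, callers, NULL,
                                          args_to_skip, "example_clone");

   collect_callers_of_node is defined at the end of this file; the suffix
   "example_clone" is arbitrary.  */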
2322
2323 /* NODE is no longer a nested function; update the cgraph accordingly. */
2324 void
2325 cgraph_unnest_node (struct cgraph_node *node)
2326 {
2327   struct cgraph_node **node2;
2328   gcc_assert (node->origin);
2329   node2 = &node->origin->nested;
2330   while (*node2 != node)
2331     node2 = &(*node2)->next_nested;
2332 *node2 = node->next_nested;
2333 node->origin = NULL;
2334 }
2335
2336 /* Return function availability. See cgraph.h for description of individual
2337 return values. */
2338 enum availability
2339 cgraph_function_body_availability (struct cgraph_node *node)
2340 {
2341 enum availability avail;
2342 gcc_assert (cgraph_function_flags_ready);
2343 if (!node->analyzed)
2344 avail = AVAIL_NOT_AVAILABLE;
2345 else if (node->local.local)
2346 avail = AVAIL_LOCAL;
2347 else if (!node->local.externally_visible)
2348 avail = AVAIL_AVAILABLE;
2349   /* Inline functions are safe to be analyzed even if their symbol can
2350      be overwritten at runtime.  It is not meaningful to enforce any sane
2351      behaviour when an inline function is replaced by a different body. */
2352 else if (DECL_DECLARED_INLINE_P (node->decl))
2353 avail = AVAIL_AVAILABLE;
2354
2355   /* If the function can be overwritten, return OVERWRITABLE.  Take
2356      care at least of two notable extensions - the COMDAT functions
2357      used to share template instantiations in C++ (this is symmetric
2358      to cp_cannot_inline_tree_fn and should probably be shared, with
2359      the inlinability hooks eliminated completely).
2360 
2361      ??? Does the C++ one definition rule allow us to always return
2362      AVAIL_AVAILABLE here?  That would be a good reason to preserve this
2363      bit. */
2364
2365 else if (decl_replaceable_p (node->decl) && !DECL_EXTERNAL (node->decl))
2366 avail = AVAIL_OVERWRITABLE;
2367 else avail = AVAIL_AVAILABLE;
2368
2369 return avail;
2370 }
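
/* An illustrative sketch (hypothetical): IPA analyses typically consult the
   availability before trusting a callee's body, e.g.

     if (cgraph_function_body_availability (callee) > AVAIL_OVERWRITABLE)
       propagate_from_body (callee);

   where propagate_from_body stands for some analysis routine.  Bodies that
   are merely AVAIL_OVERWRITABLE may be replaced at link or run time, so
   only conservative facts should be derived from them.  */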
2371
2372 /* Add the function FNDECL to the call graph.
2373    Unlike cgraph_finalize_function, this function is intended to be used
2374    by the middle end and allows insertion of new functions at an arbitrary
2375    point of compilation.  The function can be in high, low or SSA GIMPLE
2376    form.
2377 
2378    The function is assumed to be reachable and to have its address taken
2379    (so no API-breaking optimizations are performed on it).
2380 
2381    The main work done by this function is to enqueue the function for later
2382    processing, to avoid the need for the passes to be re-entrant. */
2383
2384 void
2385 cgraph_add_new_function (tree fndecl, bool lowered)
2386 {
2387 struct cgraph_node *node;
2388 switch (cgraph_state)
2389 {
2390 case CGRAPH_STATE_CONSTRUCTION:
2391       /* Just enqueue the function to be processed at the nearest opportunity. */
2392 node = cgraph_create_node (fndecl);
2393 node->next_needed = cgraph_new_nodes;
2394 if (lowered)
2395 node->lowered = true;
2396 cgraph_new_nodes = node;
2397 break;
2398
2399 case CGRAPH_STATE_IPA:
2400 case CGRAPH_STATE_IPA_SSA:
2401 case CGRAPH_STATE_EXPANSION:
2402       /* Bring the function into finalized state and enqueue it for later
2403          analysis and compilation. */
2404 node = cgraph_get_create_node (fndecl);
2405 node->local.local = false;
2406 node->local.finalized = true;
2407 node->reachable = node->needed = true;
2408 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
2409 {
2410 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2411 current_function_decl = fndecl;
2412 gimple_register_cfg_hooks ();
2413 tree_lowering_passes (fndecl);
2414 bitmap_obstack_initialize (NULL);
2415 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2416 execute_pass_list (pass_early_local_passes.pass.sub);
2417 bitmap_obstack_release (NULL);
2418 pop_cfun ();
2419 current_function_decl = NULL;
2420
2421 lowered = true;
2422 }
2423 if (lowered)
2424 node->lowered = true;
2425 node->next_needed = cgraph_new_nodes;
2426 cgraph_new_nodes = node;
2427 break;
2428
2429 case CGRAPH_STATE_FINISHED:
2430 /* At the very end of compilation we have to do all the work up
2431 to expansion. */
2432 node = cgraph_create_node (fndecl);
2433 if (lowered)
2434 node->lowered = true;
2435 cgraph_analyze_function (node);
2436 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2437 current_function_decl = fndecl;
2438 gimple_register_cfg_hooks ();
2439 bitmap_obstack_initialize (NULL);
2440 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2441 execute_pass_list (pass_early_local_passes.pass.sub);
2442 bitmap_obstack_release (NULL);
2443 tree_rest_of_compilation (fndecl);
2444 pop_cfun ();
2445 current_function_decl = NULL;
2446 break;
2447 }
2448
2449 /* Set a personality if required and we already passed EH lowering. */
2450 if (lowered
2451 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
2452 == eh_personality_lang))
2453 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
2454 }
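
/* An illustrative usage sketch (hypothetical): a middle-end pass that has
   just materialized a new GIMPLE function body FNDECL registers it with the
   callgraph by calling

     cgraph_add_new_function (fndecl, true);

   passing false for the second argument instead when the body is still in
   high GIMPLE and the lowering passes need to be run on it.  */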
2455
2456 /* Worker for cgraph_node_can_be_local_p. */
2457 static bool
2458 cgraph_node_cannot_be_local_p_1 (struct cgraph_node *node,
2459 void *data ATTRIBUTE_UNUSED)
2460 {
2461 return !(!node->needed
2462 && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
2463 || !node->local.externally_visible));
2464 }
2465
2466 /* Return true if NODE can be made local (so that its API may be changed).
2467    Extern inline functions and C++ COMDAT functions can be made local
2468    at the expense of possible code size growth if the function is used in
2469    multiple compilation units. */
2470 bool
2471 cgraph_node_can_be_local_p (struct cgraph_node *node)
2472 {
2473 return (!node->address_taken
2474 && !cgraph_for_node_and_aliases (node,
2475 cgraph_node_cannot_be_local_p_1,
2476 NULL, true));
2477 }
2478
2479 /* Make DECL local. FIXME: We shouldn't need to mess with rtl this early,
2480 but other code such as notice_global_symbol generates rtl. */
2481 void
2482 cgraph_make_decl_local (tree decl)
2483 {
2484 rtx rtl, symbol;
2485
2486 if (TREE_CODE (decl) == VAR_DECL)
2487 DECL_COMMON (decl) = 0;
2488 else gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
2489
2490 if (DECL_COMDAT (decl))
2491 {
2492       /* It is possible that we are linking against a library defining the same
2493          COMDAT function.  To avoid a conflict we need to rename our local copy
2494          of the function in case WHOPR partitioning decides to make it hidden
2495          to avoid cross-partition references. */
2496 if (flag_wpa)
2497 {
2498 const char *old_name;
2499
2500 old_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2501 if (TREE_CODE (decl) == FUNCTION_DECL)
2502 {
2503 struct cgraph_node *node = cgraph_get_node_or_alias (decl);
2504 change_decl_assembler_name (decl,
2505 clone_function_name (decl, "local"));
2506 if (node->local.lto_file_data)
2507 lto_record_renamed_decl (node->local.lto_file_data,
2508 old_name,
2509 IDENTIFIER_POINTER
2510 (DECL_ASSEMBLER_NAME (decl)));
2511 }
2512 else if (TREE_CODE (decl) == VAR_DECL)
2513 {
2514 struct varpool_node *vnode = varpool_get_node (decl);
2515             /* change_decl_assembler_name will warn here on vtables because
2516                the C++ frontend still sets TREE_SYMBOL_REFERENCED on them. */
2517 SET_DECL_ASSEMBLER_NAME (decl,
2518 clone_function_name (decl, "local"));
2519 if (vnode->lto_file_data)
2520 lto_record_renamed_decl (vnode->lto_file_data,
2521 old_name,
2522 IDENTIFIER_POINTER
2523 (DECL_ASSEMBLER_NAME (decl)));
2524 }
2525 }
2526 DECL_SECTION_NAME (decl) = 0;
2527 DECL_COMDAT (decl) = 0;
2528 }
2529 DECL_COMDAT_GROUP (decl) = 0;
2530 DECL_WEAK (decl) = 0;
2531 DECL_EXTERNAL (decl) = 0;
2532 TREE_PUBLIC (decl) = 0;
2533 if (!DECL_RTL_SET_P (decl))
2534 return;
2535
2536 /* Update rtl flags. */
2537 make_decl_rtl (decl);
2538
2539 rtl = DECL_RTL (decl);
2540 if (!MEM_P (rtl))
2541 return;
2542
2543 symbol = XEXP (rtl, 0);
2544 if (GET_CODE (symbol) != SYMBOL_REF)
2545 return;
2546
2547 SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
2548 }
2549
2550 /* Call CALLBACK on NODE and on the thunks and aliases associated with NODE.
2551    When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2552    skipped. */
2553
2554 bool
2555 cgraph_for_node_thunks_and_aliases (struct cgraph_node *node,
2556 bool (*callback) (struct cgraph_node *, void *),
2557 void *data,
2558 bool include_overwritable)
2559 {
2560 struct cgraph_edge *e;
2561 int i;
2562 struct ipa_ref *ref;
2563
2564 if (callback (node, data))
2565 return true;
2566 for (e = node->callers; e; e = e->next_caller)
2567     if (e->caller->thunk.thunk_p
2568        && (include_overwritable
2569            || cgraph_function_body_availability (e->caller) > AVAIL_OVERWRITABLE))
2570 if (cgraph_for_node_thunks_and_aliases (e->caller, callback, data,
2571 include_overwritable))
2572 return true;
2573 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
2574 if (ref->use == IPA_REF_ALIAS)
2575 {
2576 struct cgraph_node *alias = ipa_ref_refering_node (ref);
2577 if (include_overwritable
2578 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2579 if (cgraph_for_node_thunks_and_aliases (alias, callback, data,
2580 include_overwritable))
2581 return true;
2582 }
2583 return false;
2584 }
2585
2586 /* Call CALLBACK on NODE and on the aliases associated with NODE.
2587    When INCLUDE_OVERWRITABLE is false, overwritable aliases are
2588    skipped. */
2589
2590 bool
2591 cgraph_for_node_and_aliases (struct cgraph_node *node,
2592 bool (*callback) (struct cgraph_node *, void *),
2593 void *data,
2594 bool include_overwritable)
2595 {
2596 int i;
2597 struct ipa_ref *ref;
2598
2599 if (callback (node, data))
2600 return true;
2601 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
2602 if (ref->use == IPA_REF_ALIAS)
2603 {
2604 struct cgraph_node *alias = ipa_ref_refering_node (ref);
2605 if (include_overwritable
2606 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2607 if (cgraph_for_node_and_aliases (alias, callback, data,
2608 include_overwritable))
2609 return true;
2610 }
2611 return false;
2612 }
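
/* An illustrative sketch (hypothetical): callbacks passed to the walkers
   above return true to stop the walk, false to continue, and the walker
   reports whether it was stopped.  For example, a helper counting a node
   together with its aliases:

     static bool
     count_one (struct cgraph_node *node ATTRIBUTE_UNUSED, void *data)
     {
       (*(int *) data)++;
       return false;
     }

     int n = 0;
     cgraph_for_node_and_aliases (node, count_one, &n, true);  */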
2613
2614 /* Worker to make NODE local. */
2615
2616 static bool
2617 cgraph_make_node_local_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2618 {
2619 gcc_checking_assert (cgraph_node_can_be_local_p (node));
2620 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2621 {
2622 cgraph_make_decl_local (node->decl);
2623
2624 node->local.externally_visible = false;
2625 node->local.local = true;
2626 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2627 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2628 }
2629 return false;
2630 }
2631
2632 /* Make NODE local. */
2633
2634 void
2635 cgraph_make_node_local (struct cgraph_node *node)
2636 {
2637 cgraph_for_node_thunks_and_aliases (node, cgraph_make_node_local_1,
2638 NULL, true);
2639 }
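
/* An illustrative sketch (hypothetical): localizing a function is normally
   guarded by the predicate defined earlier in this file:

     if (cgraph_node_can_be_local_p (node))
       cgraph_make_node_local (node);

   Thunks and aliases of NODE are localized together with it by the
   cgraph_for_node_thunks_and_aliases walk.  */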
2640
2641 /* Worker to set nothrow flag. */
2642
2643 static bool
2644 cgraph_set_nothrow_flag_1 (struct cgraph_node *node, void *data)
2645 {
2646 struct cgraph_edge *e;
2647
2648 TREE_NOTHROW (node->decl) = data != NULL;
2649
2650 if (data != NULL)
2651 for (e = node->callers; e; e = e->next_caller)
2652 e->can_throw_external = false;
2653 return false;
2654 }
2655
2656 /* Set TREE_NOTHROW on NODE's decl and on NODE's aliases,
2657    if any, to NOTHROW. */
2658
2659 void
2660 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2661 {
2662 cgraph_for_node_thunks_and_aliases (node, cgraph_set_nothrow_flag_1,
2663 (void *)(size_t)nothrow, false);
2664 }
2665
2666 /* Worker to set const flag. */
2667
2668 static bool
2669 cgraph_set_const_flag_1 (struct cgraph_node *node, void *data)
2670 {
2671 /* Static constructors and destructors without a side effect can be
2672 optimized out. */
2673 if (data && !((size_t)data & 2))
2674 {
2675 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2676 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2677 if (DECL_STATIC_DESTRUCTOR (node->decl))
2678 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2679 }
2680 TREE_READONLY (node->decl) = data != NULL;
2681 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2682 return false;
2683 }
2684
2685 /* Set TREE_READONLY on NODE's decl and on NODE's aliases,
2686    if any, to READONLY. */
2687
2688 void
2689 cgraph_set_const_flag (struct cgraph_node *node, bool readonly, bool looping)
2690 {
2691 cgraph_for_node_thunks_and_aliases (node, cgraph_set_const_flag_1,
2692 (void *)(size_t)(readonly + (int)looping * 2),
2693 false);
2694 }
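
/* Note on the DATA encoding used by this setter and by cgraph_set_pure_flag
   below (derived from the code above): bit 0 of the pointer value carries
   the READONLY/PURE flag and bit 1 carries LOOPING.  For example

     cgraph_set_const_flag (node, true, false);

   marks NODE's decl TREE_READONLY without DECL_LOOPING_CONST_OR_PURE_P.  */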
2695
2696 /* Worker to set pure flag. */
2697
2698 static bool
2699 cgraph_set_pure_flag_1 (struct cgraph_node *node, void *data)
2700 {
2701   /* Static constructors and destructors without side effects can be
2702      optimized out. */
2703 if (data && !((size_t)data & 2))
2704 {
2705 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2706 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2707 if (DECL_STATIC_DESTRUCTOR (node->decl))
2708 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2709 }
2710 DECL_PURE_P (node->decl) = data != NULL;
2711 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2712 return false;
2713 }
2714
2715 /* Set DECL_PURE_P on NODE's decl and on NODE's aliases,
2716    if any, to PURE. */
2717
2718 void
2719 cgraph_set_pure_flag (struct cgraph_node *node, bool pure, bool looping)
2720 {
2721 cgraph_for_node_thunks_and_aliases (node, cgraph_set_pure_flag_1,
2722 (void *)(size_t)(pure + (int)looping * 2),
2723 false);
2724 }
2725
2726 /* Data used by cgraph_propagate_frequency. */
2727
2728 struct cgraph_propagate_frequency_data
2729 {
2730 bool maybe_unlikely_executed;
2731 bool maybe_executed_once;
2732 bool only_called_at_startup;
2733 bool only_called_at_exit;
2734 };
2735
2736 /* Worker for cgraph_propagate_frequency. */
2737
2738 static bool
2739 cgraph_propagate_frequency_1 (struct cgraph_node *node, void *data)
2740 {
2741 struct cgraph_propagate_frequency_data *d;
2742 struct cgraph_edge *edge;
2743
2744 d = (struct cgraph_propagate_frequency_data *)data;
2745 for (edge = node->callers;
2746 edge && (d->maybe_unlikely_executed || d->maybe_executed_once
2747 || d->only_called_at_startup || d->only_called_at_exit);
2748 edge = edge->next_caller)
2749 {
2750 if (edge->caller != node)
2751 {
2752 d->only_called_at_startup &= edge->caller->only_called_at_startup;
2753           /* It makes sense to put main() together with the static constructors.
2754              It will be executed for sure, but the rest of the functions called
2755              from main are definitely not startup-only. */
2756 if (MAIN_NAME_P (DECL_NAME (edge->caller->decl)))
2757 d->only_called_at_startup = 0;
2758 d->only_called_at_exit &= edge->caller->only_called_at_exit;
2759 }
2760 if (!edge->frequency)
2761 continue;
2762 switch (edge->caller->frequency)
2763 {
2764 case NODE_FREQUENCY_UNLIKELY_EXECUTED:
2765 break;
2766 case NODE_FREQUENCY_EXECUTED_ONCE:
2767 if (dump_file && (dump_flags & TDF_DETAILS))
2768 fprintf (dump_file, " Called by %s that is executed once\n",
2769 cgraph_node_name (edge->caller));
2770 d->maybe_unlikely_executed = false;
2771 if (inline_edge_summary (edge)->loop_depth)
2772 {
2773 d->maybe_executed_once = false;
2774 if (dump_file && (dump_flags & TDF_DETAILS))
2775 fprintf (dump_file, " Called in loop\n");
2776 }
2777 break;
2778 case NODE_FREQUENCY_HOT:
2779 case NODE_FREQUENCY_NORMAL:
2780 if (dump_file && (dump_flags & TDF_DETAILS))
2781 fprintf (dump_file, " Called by %s that is normal or hot\n",
2782 cgraph_node_name (edge->caller));
2783 d->maybe_unlikely_executed = false;
2784 d->maybe_executed_once = false;
2785 break;
2786 }
2787 }
2788 return edge != NULL;
2789 }
2790
2791 /* See if the frequency of NODE can be updated based on frequencies of its
2792 callers. */
2793 bool
2794 cgraph_propagate_frequency (struct cgraph_node *node)
2795 {
2796 struct cgraph_propagate_frequency_data d = {true, true, true, true};
2797 bool changed = false;
2798
2799 if (!node->local.local)
2800 return false;
2801 gcc_assert (node->analyzed);
2802 if (dump_file && (dump_flags & TDF_DETAILS))
2803 fprintf (dump_file, "Processing frequency %s\n", cgraph_node_name (node));
2804
2805 cgraph_for_node_and_aliases (node, cgraph_propagate_frequency_1, &d, true);
2806
2807 if ((d.only_called_at_startup && !d.only_called_at_exit)
2808 && !node->only_called_at_startup)
2809 {
2810 node->only_called_at_startup = true;
2811 if (dump_file)
2812 fprintf (dump_file, "Node %s promoted to only called at startup.\n",
2813 cgraph_node_name (node));
2814 changed = true;
2815 }
2816 if ((d.only_called_at_exit && !d.only_called_at_startup)
2817 && !node->only_called_at_exit)
2818 {
2819 node->only_called_at_exit = true;
2820 if (dump_file)
2821 fprintf (dump_file, "Node %s promoted to only called at exit.\n",
2822 cgraph_node_name (node));
2823 changed = true;
2824 }
2825 /* These come either from profile or user hints; never update them. */
2826 if (node->frequency == NODE_FREQUENCY_HOT
2827 || node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2828 return changed;
2829 if (d.maybe_unlikely_executed)
2830 {
2831 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
2832 if (dump_file)
2833 fprintf (dump_file, "Node %s promoted to unlikely executed.\n",
2834 cgraph_node_name (node));
2835 changed = true;
2836 }
2837 else if (d.maybe_executed_once && node->frequency != NODE_FREQUENCY_EXECUTED_ONCE)
2838 {
2839 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2840 if (dump_file)
2841 fprintf (dump_file, "Node %s promoted to executed once.\n",
2842 cgraph_node_name (node));
2843 changed = true;
2844 }
2845 return changed;
2846 }
2847
2848 /* Return true when NODE cannot return or throw and thus
2849    it is safe to ignore its side effects for IPA analysis. */
2850
2851 bool
2852 cgraph_node_cannot_return (struct cgraph_node *node)
2853 {
2854 int flags = flags_from_decl_or_type (node->decl);
2855 if (!flag_exceptions)
2856 return (flags & ECF_NORETURN) != 0;
2857 else
2858 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2859 == (ECF_NORETURN | ECF_NOTHROW));
2860 }
2861
2862 /* Return true when a call along edge E cannot lead to a return from the
2863    caller and thus it is safe to ignore its side effects for IPA analysis
2864    when computing the side effects of the caller.
2865    FIXME: We could actually mark all edges that have no reaching
2866    path to EXIT_BLOCK_PTR or throw to get better results. */
2867 bool
2868 cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
2869 {
2870 if (cgraph_node_cannot_return (e->caller))
2871 return true;
2872 if (e->indirect_unknown_callee)
2873 {
2874 int flags = e->indirect_info->ecf_flags;
2875 if (!flag_exceptions)
2876 return (flags & ECF_NORETURN) != 0;
2877 else
2878 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2879 == (ECF_NORETURN | ECF_NOTHROW));
2880 }
2881 else
2882 return cgraph_node_cannot_return (e->callee);
2883 }
2884
2885 /* Return true when function NODE can be removed from callgraph
2886 if all direct calls are eliminated. */
2887
2888 bool
2889 cgraph_can_remove_if_no_direct_calls_and_refs_p (struct cgraph_node *node)
2890 {
2891 gcc_assert (!node->global.inlined_to);
2892 /* Extern inlines can always go, we will use the external definition. */
2893 if (DECL_EXTERNAL (node->decl))
2894 return true;
2895   /* When the function is needed, we cannot remove it. */
2896 if (node->needed || node->reachable_from_other_partition)
2897 return false;
2898 if (DECL_STATIC_CONSTRUCTOR (node->decl)
2899 || DECL_STATIC_DESTRUCTOR (node->decl))
2900 return false;
2901 /* Only COMDAT functions can be removed if externally visible. */
2902 if (node->local.externally_visible
2903 && (!DECL_COMDAT (node->decl)
2904 || cgraph_used_from_object_file_p (node)))
2905 return false;
2906 return true;
2907 }
2908
2909 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2910
2911 static bool
2912 nonremovable_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2913 {
2914 return !cgraph_can_remove_if_no_direct_calls_and_refs_p (node);
2915 }
2916
2917 /* Return true when function NODE and its aliases can be removed from callgraph
2918 if all direct calls are eliminated. */
2919
2920 bool
2921 cgraph_can_remove_if_no_direct_calls_p (struct cgraph_node *node)
2922 {
2923 /* Extern inlines can always go, we will use the external definition. */
2924 if (DECL_EXTERNAL (node->decl))
2925 return true;
2926 if (node->address_taken)
2927 return false;
2928 return !cgraph_for_node_and_aliases (node, nonremovable_p, NULL, true);
2929 }
2930
2931 /* Worker for cgraph_will_be_removed_from_program_if_no_direct_calls. */
2932
2933 static bool
2934 used_from_object_file_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2935 {
2936 return cgraph_used_from_object_file_p (node);
2937 }
2938
2939 /* Return true when function NODE can be expected to be removed
2940    from the program when direct calls in this compilation unit are removed.
2941 
2942    As a special case, COMDAT functions are
2943    cgraph_can_remove_if_no_direct_calls_p while they are not
2944    cgraph_only_called_directly_p (it is possible they are called from
2945    another unit).
2946 
2947    This function behaves as cgraph_only_called_directly_p because eliminating
2948    all uses of a COMDAT function does not necessarily make it disappear from
2949    the program unless we are compiling the whole program or doing LTO.  In
2950    that case we know we win, since dynamic linking will not really discard
2951    the linkonce section. */
2952
2953 bool
2954 cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
2955 {
2956 gcc_assert (!node->global.inlined_to);
2957 if (cgraph_for_node_and_aliases (node, used_from_object_file_p, NULL, true))
2958 return false;
2959 if (!in_lto_p && !flag_whole_program)
2960 return cgraph_only_called_directly_p (node);
2961 else
2962 {
2963 if (DECL_EXTERNAL (node->decl))
2964 return true;
2965 return cgraph_can_remove_if_no_direct_calls_p (node);
2966 }
2967 }
2968
2969 /* Return true when RESOLUTION indicates that the linker will use
2970    the symbol from non-LTO object files. */
2971
2972 bool
2973 resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
2974 {
2975 return (resolution == LDPR_PREVAILING_DEF
2976 || resolution == LDPR_PREEMPTED_REG
2977 || resolution == LDPR_RESOLVED_EXEC
2978 || resolution == LDPR_RESOLVED_DYN);
2979 }
2980
2981
2982 /* Return true when NODE is known to be used from other (non-LTO) object file.
2983 Known only when doing LTO via linker plugin. */
2984
2985 bool
2986 cgraph_used_from_object_file_p (struct cgraph_node *node)
2987 {
2988 gcc_assert (!node->global.inlined_to);
2989 if (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))
2990 return false;
2991 if (resolution_used_from_other_file_p (node->resolution))
2992 return true;
2993 return false;
2994 }
2995
2996 /* Worker for cgraph_only_called_directly_p. */
2997
2998 static bool
2999 cgraph_not_only_called_directly_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3000 {
3001 return !cgraph_only_called_directly_or_aliased_p (node);
3002 }
3003
3004 /* Return true when function NODE and all its aliases are only called
3005    directly, i.e. the function is not externally visible, its address is
3006    not taken and it is not used in any other non-standard way. */
3008
3009 bool
3010 cgraph_only_called_directly_p (struct cgraph_node *node)
3011 {
3012 gcc_assert (cgraph_function_or_thunk_node (node, NULL) == node);
3013 return !cgraph_for_node_and_aliases (node, cgraph_not_only_called_directly_p_1,
3014 NULL, true);
3015 }
3016
3017
3018 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
3019
3020 static bool
3021 collect_callers_of_node_1 (struct cgraph_node *node, void *data)
3022 {
3023 VEC (cgraph_edge_p, heap) ** redirect_callers = (VEC (cgraph_edge_p, heap) **)data;
3024 struct cgraph_edge *cs;
3025 enum availability avail;
3026 cgraph_function_or_thunk_node (node, &avail);
3027
3028 if (avail > AVAIL_OVERWRITABLE)
3029 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
3030 if (!cs->indirect_inlining_edge)
3031 VEC_safe_push (cgraph_edge_p, heap, *redirect_callers, cs);
3032 return false;
3033 }
3034
3035 /* Collect all callers of NODE and its aliases that are known to lead to NODE
3036 (i.e. are not overwritable). */
3037
3038 VEC (cgraph_edge_p, heap) *
3039 collect_callers_of_node (struct cgraph_node *node)
3040 {
3041 VEC (cgraph_edge_p, heap) * redirect_callers = NULL;
3042 cgraph_for_node_and_aliases (node, collect_callers_of_node_1,
3043 &redirect_callers, false);
3044 return redirect_callers;
3045 }
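
/* An illustrative sketch (hypothetical): the vector returned above is heap
   allocated, so callers iterate over it and then release it, e.g.

     struct cgraph_edge *cs;
     unsigned i;
     VEC (cgraph_edge_p, heap) *callers = collect_callers_of_node (node);

     FOR_EACH_VEC_ELT (cgraph_edge_p, callers, i, cs)
       cgraph_redirect_edge_callee (cs, new_node);
     VEC_free (cgraph_edge_p, heap, callers);

   where NEW_NODE is some previously created clone of NODE.  */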
3046
3047 #include "gt-cgraph.h"