1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file contains basic routines for manipulating the call graph.
23
24 The callgraph:
25
26 The call-graph is a data structure designed for inter-procedural optimization,
27 but it is also used in non-unit-at-a-time compilation to allow easier code
28 sharing.
29
30 The call-graph consists of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to a unique node.
32
33 The mapping from declarations to call-graph nodes is done using a hash table
34 based on DECL_UID. The call-graph nodes are created lazily by the
35 cgraph_node function when it is called for an unknown declaration.
36
37 The callgraph at the moment does not represent all indirect calls or calls
38 from other compilation units. Flag NEEDED is set for each node that may be
39 accessed in such an invisible way and it shall be considered an entry point
40 to the callgraph.
41
42 On the other hand, the callgraph currently does contain some edges for
43 indirect calls with unknown callees, which can be accessed through the
44 indirect_calls field of a node. Note, however, that at the moment only
45 calls which are potential candidates for indirect inlining are added
46 there.
47
48 Interprocedural information:
49
50 The callgraph is the place to store data needed for interprocedural optimization.
51 All data structures are divided into three components: local_info, which
52 is produced while analyzing the function; global_info, which is the result
53 of walking the whole callgraph at the end of compilation; and
54 rtl_info, used by the RTL backend to propagate data from already compiled
55 functions to their callers.
56
57 Moreover, each node has a uid which can be used to keep information in
58 on-the-side arrays. UIDs are reused and therefore reasonably dense.
59
60 Inlining plans:
61
62 The function inlining information is decided in advance and maintained
63 in the callgraph as a so-called inline plan.
64 For each inlined call, the callee's node is cloned to represent the
65 new function copy produced by the inliner.
66 Each inlined call gets a unique corresponding clone node of the callee
67 and the data structure is updated while inlining is performed, so
68 the clones are eliminated and their callee edges redirected to the
69 caller.
70
71 Each edge has an "inline_failed" field. When the field is set to NULL,
72 the call will be inlined. When it is non-NULL, it contains a reason
73 why inlining wasn't performed. */
74
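/* Illustrative sketch (not part of the original file, kept out of the build
   by #if 0): walking the callgraph described above.  cgraph_nodes is the
   head of the node list; each node carries a list of outgoing edges
   (callees) and incoming edges (callers).  The name example_walk_callgraph
   is hypothetical.  */
#if 0
static void
example_walk_callgraph (FILE *f)
{
  struct cgraph_node *node;
  struct cgraph_edge *e;

  for (node = cgraph_nodes; node; node = node->next)
    for (e = node->callees; e; e = e->next_callee)
      fprintf (f, "%s -> %s\n",
	       cgraph_node_name (e->caller), cgraph_node_name (e->callee));
}
#endif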
75 #include "config.h"
76 #include "system.h"
77 #include "coretypes.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "tree-inline.h"
81 #include "langhooks.h"
82 #include "hashtab.h"
83 #include "toplev.h"
84 #include "flags.h"
85 #include "ggc.h"
86 #include "debug.h"
87 #include "target.h"
88 #include "basic-block.h"
89 #include "cgraph.h"
90 #include "output.h"
91 #include "intl.h"
92 #include "gimple.h"
93 #include "tree-dump.h"
94 #include "tree-flow.h"
95 #include "value-prof.h"
96 #include "except.h"
97 #include "diagnostic-core.h"
98 #include "rtl.h"
99 #include "ipa-utils.h"
100 #include "lto-streamer.h"
101 #include "ipa-inline.h"
102
103 const char * const ld_plugin_symbol_resolution_names[]=
104 {
105 "",
106 "undef",
107 "prevailing_def",
108 "prevailing_def_ironly",
109 "preempted_reg",
110 "preempted_ir",
111 "resolved_ir",
112 "resolved_exec",
113 "resolved_dyn"
114 };
115
116 static void cgraph_node_remove_callers (struct cgraph_node *node);
117 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
118 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
119
120 /* Hash table used to convert declarations into nodes. */
121 static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
122 /* Hash table used to convert assembler names into nodes. */
123 static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;
124
125 /* The linked list of cgraph nodes. */
126 struct cgraph_node *cgraph_nodes;
127
128 /* Queue of cgraph nodes scheduled to be lowered. */
129 struct cgraph_node *cgraph_nodes_queue;
130
131 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
132 secondary queue used during optimization to accommodate passes that
133 may generate new functions that need to be optimized and expanded. */
134 struct cgraph_node *cgraph_new_nodes;
135
136 /* Number of nodes in existence. */
137 int cgraph_n_nodes;
138
139 /* Maximal uid used in cgraph nodes. */
140 int cgraph_max_uid;
141
142 /* Maximal uid used in cgraph edges. */
143 int cgraph_edge_max_uid;
144
145 /* Set when whole unit has been analyzed so we can access global info. */
146 bool cgraph_global_info_ready = false;
147
148 /* What state callgraph is in right now. */
149 enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
150
152 /* Set when the cgraph is fully built and the basic flags are computed. */
152 bool cgraph_function_flags_ready = false;
153
154 /* Linked list of cgraph asm nodes. */
155 struct cgraph_asm_node *cgraph_asm_nodes;
156
157 /* Last node in cgraph_asm_nodes. */
158 static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
159
160 /* The order index of the next cgraph node to be created. This is
161 used so that we can sort the cgraph nodes in order by when we saw
162 them, to support -fno-toplevel-reorder. */
163 int cgraph_order;
164
165 /* List of hooks triggered on cgraph_edge events. */
166 struct cgraph_edge_hook_list {
167 cgraph_edge_hook hook;
168 void *data;
169 struct cgraph_edge_hook_list *next;
170 };
171
172 /* List of hooks triggered on cgraph_node events. */
173 struct cgraph_node_hook_list {
174 cgraph_node_hook hook;
175 void *data;
176 struct cgraph_node_hook_list *next;
177 };
178
179 /* List of hooks triggered on events involving two cgraph_edges. */
180 struct cgraph_2edge_hook_list {
181 cgraph_2edge_hook hook;
182 void *data;
183 struct cgraph_2edge_hook_list *next;
184 };
185
186 /* List of hooks triggered on events involving two cgraph_nodes. */
187 struct cgraph_2node_hook_list {
188 cgraph_2node_hook hook;
189 void *data;
190 struct cgraph_2node_hook_list *next;
191 };
192
193 /* List of hooks triggered when an edge is removed. */
194 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
195 /* List of hooks triggered when a node is removed. */
196 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
197 /* List of hooks triggered when an edge is duplicated. */
198 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
199 /* List of hooks triggered when a node is duplicated. */
200 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
202 /* List of hooks triggered when a function is inserted. */
202 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
203
204 /* Head of a linked list of unused (freed) call graph nodes.
205 Do not GTY((delete)) this list so UIDs get reliably recycled. */
206 static GTY(()) struct cgraph_node *free_nodes;
207 /* Head of a linked list of unused (freed) call graph edges.
208 Do not GTY((delete)) this list so UIDs get reliably recycled. */
209 static GTY(()) struct cgraph_edge *free_edges;
210
211 /* Did process_same_body_aliases run? */
212 bool same_body_aliases_done;
213
214 /* Macros to access the next item in the list of free cgraph nodes and
215 edges. */
216 #define NEXT_FREE_NODE(NODE) (NODE)->next
217 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
218
219 /* Register HOOK to be called with DATA on each removed edge. */
220 struct cgraph_edge_hook_list *
221 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
222 {
223 struct cgraph_edge_hook_list *entry;
224 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
225
226 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
227 entry->hook = hook;
228 entry->data = data;
229 entry->next = NULL;
230 while (*ptr)
231 ptr = &(*ptr)->next;
232 *ptr = entry;
233 return entry;
234 }
235
236 /* Remove ENTRY from the list of hooks called on removing edges. */
237 void
238 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
239 {
240 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
241
242 while (*ptr != entry)
243 ptr = &(*ptr)->next;
244 *ptr = entry->next;
245 free (entry);
246 }
247
248 /* Call all edge removal hooks. */
249 static void
250 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
251 {
252 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
253 while (entry)
254 {
255 entry->hook (e, entry->data);
256 entry = entry->next;
257 }
258 }
259
260 /* Register HOOK to be called with DATA on each removed node. */
261 struct cgraph_node_hook_list *
262 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
263 {
264 struct cgraph_node_hook_list *entry;
265 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
266
267 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
268 entry->hook = hook;
269 entry->data = data;
270 entry->next = NULL;
271 while (*ptr)
272 ptr = &(*ptr)->next;
273 *ptr = entry;
274 return entry;
275 }
276
277 /* Remove ENTRY from the list of hooks called on removing nodes. */
278 void
279 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
280 {
281 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
282
283 while (*ptr != entry)
284 ptr = &(*ptr)->next;
285 *ptr = entry->next;
286 free (entry);
287 }
288
289 /* Call all node removal hooks. */
290 static void
291 cgraph_call_node_removal_hooks (struct cgraph_node *node)
292 {
293 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
294 while (entry)
295 {
296 entry->hook (node, entry->data);
297 entry = entry->next;
298 }
299 }
300
301 /* Register HOOK to be called with DATA on each inserted node. */
302 struct cgraph_node_hook_list *
303 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
304 {
305 struct cgraph_node_hook_list *entry;
306 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
307
308 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
309 entry->hook = hook;
310 entry->data = data;
311 entry->next = NULL;
312 while (*ptr)
313 ptr = &(*ptr)->next;
314 *ptr = entry;
315 return entry;
316 }
317
318 /* Remove ENTRY from the list of hooks called on inserted nodes. */
319 void
320 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
321 {
322 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
323
324 while (*ptr != entry)
325 ptr = &(*ptr)->next;
326 *ptr = entry->next;
327 free (entry);
328 }
329
330 /* Call all node insertion hooks. */
331 void
332 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
333 {
334 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
335 while (entry)
336 {
337 entry->hook (node, entry->data);
338 entry = entry->next;
339 }
340 }
341
342 /* Register HOOK to be called with DATA on each duplicated edge. */
343 struct cgraph_2edge_hook_list *
344 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
345 {
346 struct cgraph_2edge_hook_list *entry;
347 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
348
349 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
350 entry->hook = hook;
351 entry->data = data;
352 entry->next = NULL;
353 while (*ptr)
354 ptr = &(*ptr)->next;
355 *ptr = entry;
356 return entry;
357 }
358
359 /* Remove ENTRY from the list of hooks called on duplicating edges. */
360 void
361 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
362 {
363 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
364
365 while (*ptr != entry)
366 ptr = &(*ptr)->next;
367 *ptr = entry->next;
368 free (entry);
369 }
370
371 /* Call all edge duplication hooks. */
372 static void
373 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
374 struct cgraph_edge *cs2)
375 {
376 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
377 while (entry)
378 {
379 entry->hook (cs1, cs2, entry->data);
380 entry = entry->next;
381 }
382 }
383
384 /* Register HOOK to be called with DATA on each duplicated node. */
385 struct cgraph_2node_hook_list *
386 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
387 {
388 struct cgraph_2node_hook_list *entry;
389 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
390
391 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
392 entry->hook = hook;
393 entry->data = data;
394 entry->next = NULL;
395 while (*ptr)
396 ptr = &(*ptr)->next;
397 *ptr = entry;
398 return entry;
399 }
400
401 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
402 void
403 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
404 {
405 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
406
407 while (*ptr != entry)
408 ptr = &(*ptr)->next;
409 *ptr = entry->next;
410 free (entry);
411 }
412
413 /* Call all node duplication hooks. */
414 static void
415 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
416 struct cgraph_node *node2)
417 {
418 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
419 while (entry)
420 {
421 entry->hook (node1, node2, entry->data);
422 entry = entry->next;
423 }
424 }
425
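/* Illustrative sketch (not part of the original file, kept out of the build
   by #if 0): how a pass might use the hook machinery above, registering a
   node-removal hook and unregistering it when done.  The example_* names are
   hypothetical.  */
#if 0
static struct cgraph_node_hook_list *example_removal_hook_holder;

static void
example_note_node_removal (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  fprintf (stderr, "callgraph node %s is being removed\n",
	   cgraph_node_name (node));
}

static void
example_register_hooks (void)
{
  example_removal_hook_holder
    = cgraph_add_node_removal_hook (example_note_node_removal, NULL);
}

static void
example_unregister_hooks (void)
{
  cgraph_remove_node_removal_hook (example_removal_hook_holder);
  example_removal_hook_holder = NULL;
}
#endif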
426 /* Returns a hash code for P. */
427
428 static hashval_t
429 hash_node (const void *p)
430 {
431 const struct cgraph_node *n = (const struct cgraph_node *) p;
432 return (hashval_t) DECL_UID (n->decl);
433 }
434
435
436 /* Returns nonzero if P1 and P2 are equal. */
437
438 static int
439 eq_node (const void *p1, const void *p2)
440 {
441 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
442 const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
443 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
444 }
445
446 /* Allocate new callgraph node. */
447
448 static inline struct cgraph_node *
449 cgraph_allocate_node (void)
450 {
451 struct cgraph_node *node;
452
453 if (free_nodes)
454 {
455 node = free_nodes;
456 free_nodes = NEXT_FREE_NODE (node);
457 }
458 else
459 {
460 node = ggc_alloc_cleared_cgraph_node ();
461 node->uid = cgraph_max_uid++;
462 }
463
464 return node;
465 }
466
467 /* Allocate new callgraph node and insert it into basic data structures. */
468
469 static struct cgraph_node *
470 cgraph_create_node_1 (void)
471 {
472 struct cgraph_node *node = cgraph_allocate_node ();
473
474 node->next = cgraph_nodes;
475 node->order = cgraph_order++;
476 if (cgraph_nodes)
477 cgraph_nodes->previous = node;
478 node->previous = NULL;
479 node->frequency = NODE_FREQUENCY_NORMAL;
480 node->count_materialization_scale = REG_BR_PROB_BASE;
481 ipa_empty_ref_list (&node->ref_list);
482 cgraph_nodes = node;
483 cgraph_n_nodes++;
484 return node;
485 }
486
487 /* Create a new cgraph node for DECL, which must not already have one, and insert it into the hash table. */
488
489 struct cgraph_node *
490 cgraph_create_node (tree decl)
491 {
492 struct cgraph_node key, *node, **slot;
493
494 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
495
496 if (!cgraph_hash)
497 cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
498
499 key.decl = decl;
500 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
501 gcc_assert (!*slot);
502
503 node = cgraph_create_node_1 ();
504 node->decl = decl;
505 *slot = node;
506 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
507 {
508 node->origin = cgraph_get_create_node (DECL_CONTEXT (decl));
509 node->next_nested = node->origin->nested;
510 node->origin->nested = node;
511 }
512 if (assembler_name_hash)
513 {
514 void **aslot;
515 tree name = DECL_ASSEMBLER_NAME (decl);
516
517 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
518 decl_assembler_name_hash (name),
519 INSERT);
520 /* We can have multiple declarations with the same assembler name. For C++
521 it is __builtin_strlen and strlen, for instance. Do we need to
522 record them all? The original implementation marked just the first one,
523 so let's hope for the best. */
524 if (*aslot == NULL)
525 *aslot = node;
526 }
527 return node;
528 }
529
530 /* Try to find a call graph node for declaration DECL and if it does not exist,
531 create it. */
532
533 struct cgraph_node *
534 cgraph_get_create_node (tree decl)
535 {
536 struct cgraph_node *node;
537
538 node = cgraph_get_node (decl);
539 if (node)
540 return node;
541
542 return cgraph_create_node (decl);
543 }
544
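/* Illustrative sketch (not part of the original file, kept out of the build
   by #if 0): the usual way to obtain a node is cgraph_get_create_node, which
   looks DECL up and creates the node lazily.  The name
   example_decl_is_analyzed is hypothetical.  */
#if 0
static bool
example_decl_is_analyzed (tree fndecl)
{
  struct cgraph_node *node = cgraph_get_create_node (fndecl);
  return node->analyzed;
}
#endif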
545 /* Mark ALIAS as an alias to DECL. DECL_NODE is the cgraph node that
546 the function body is associated with (not necessarily cgraph_node (DECL)). */
547
548 struct cgraph_node *
549 cgraph_create_function_alias (tree alias, tree decl)
550 {
551 struct cgraph_node *alias_node;
552
553 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
554 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
555 alias_node = cgraph_get_create_node (alias);
556 gcc_assert (!alias_node->local.finalized);
557 alias_node->thunk.alias = decl;
558 alias_node->local.finalized = true;
559 alias_node->alias = 1;
560
561 if ((TREE_PUBLIC (alias) && !DECL_COMDAT (alias) && !DECL_EXTERNAL (alias))
562 || (DECL_VIRTUAL_P (alias)
563 && (DECL_COMDAT (alias) || DECL_EXTERNAL (alias))))
564 cgraph_mark_reachable_node (alias_node);
565 return alias_node;
566 }
567
568 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
569 and NULL otherwise.
570 Same body aliases are output whenever the body of DECL is output,
571 and cgraph_get_node (ALIAS) transparently returns cgraph_get_node (DECL). */
572
573 struct cgraph_node *
574 cgraph_same_body_alias (struct cgraph_node *decl_node ATTRIBUTE_UNUSED, tree alias, tree decl)
575 {
576 struct cgraph_node *n;
577 #ifndef ASM_OUTPUT_DEF
578 /* If aliases aren't supported by the assembler, fail. */
579 return NULL;
580 #endif
581 /* Langhooks can create same body aliases of symbols not defined.
582 Those are useless. Drop them on the floor. */
583 if (cgraph_global_info_ready)
584 return NULL;
585
586 n = cgraph_create_function_alias (alias, decl);
587 n->same_body_alias = true;
588 if (same_body_aliases_done)
589 ipa_record_reference (n, NULL, cgraph_get_node (decl), NULL, IPA_REF_ALIAS,
590 NULL);
591 return n;
592 }
593
594 /* Add a thunk alias into the callgraph. The alias declaration is ALIAS and it
595 aliases DECL with adjustments made to the first parameter.
596 See comments in thunk_adjust for details on the parameters. */
597
598 struct cgraph_node *
599 cgraph_add_thunk (struct cgraph_node *decl_node ATTRIBUTE_UNUSED,
600 tree alias, tree decl,
601 bool this_adjusting,
602 HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
603 tree virtual_offset,
604 tree real_alias)
605 {
606 struct cgraph_node *node;
607
608 node = cgraph_get_node (alias);
609 if (node)
610 {
611 gcc_assert (node->local.finalized);
612 gcc_assert (!node->alias);
613 gcc_assert (!node->thunk.thunk_p);
614 cgraph_remove_node (node);
615 }
616
617 node = cgraph_create_node (alias);
618 gcc_checking_assert (!virtual_offset
619 || double_int_equal_p
620 (tree_to_double_int (virtual_offset),
621 shwi_to_double_int (virtual_value)));
622 node->thunk.fixed_offset = fixed_offset;
623 node->thunk.this_adjusting = this_adjusting;
624 node->thunk.virtual_value = virtual_value;
625 node->thunk.virtual_offset_p = virtual_offset != NULL;
626 node->thunk.alias = real_alias;
627 node->thunk.thunk_p = true;
628 node->local.finalized = true;
629
630 if (cgraph_decide_is_function_needed (node, decl))
631 cgraph_mark_needed_node (node);
632
633 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
634 || (DECL_VIRTUAL_P (decl)
635 && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
636 cgraph_mark_reachable_node (node);
637
638 return node;
639 }
640
641 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
642 is assigned. */
643
644 struct cgraph_node *
645 cgraph_get_node (const_tree decl)
646 {
647 struct cgraph_node key, *node = NULL, **slot;
648
649 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
650
651 if (!cgraph_hash)
652 return NULL;
653
654 key.decl = CONST_CAST2 (tree, const_tree, decl);
655
656 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
657 NO_INSERT);
658
659 if (slot && *slot)
660 node = *slot;
661 return node;
662 }
663
664 /* Insert already constructed node into hashtable. */
665
666 void
667 cgraph_insert_node_to_hashtable (struct cgraph_node *node)
668 {
669 struct cgraph_node **slot;
670
671 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
672
673 gcc_assert (!*slot);
674 *slot = node;
675 }
676
677 /* Returns a hash code for P. */
678
679 static hashval_t
680 hash_node_by_assembler_name (const void *p)
681 {
682 const struct cgraph_node *n = (const struct cgraph_node *) p;
683 return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
684 }
685
686 /* Returns nonzero if P1 and P2 are equal. */
687
688 static int
689 eq_assembler_name (const void *p1, const void *p2)
690 {
691 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
692 const_tree name = (const_tree)p2;
693 return (decl_assembler_name_equal (n1->decl, name));
694 }
695
696 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
697 Return NULL if there's no such node. */
698
699 struct cgraph_node *
700 cgraph_node_for_asm (tree asmname)
701 {
702 struct cgraph_node *node;
703 void **slot;
704
705 if (!assembler_name_hash)
706 {
707 assembler_name_hash =
708 htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
709 NULL);
710 for (node = cgraph_nodes; node; node = node->next)
711 if (!node->global.inlined_to)
712 {
713 tree name = DECL_ASSEMBLER_NAME (node->decl);
714 slot = htab_find_slot_with_hash (assembler_name_hash, name,
715 decl_assembler_name_hash (name),
716 INSERT);
717 /* We can have multiple declarations with the same assembler name. For C++
718 it is __builtin_strlen and strlen, for instance. Do we need to
719 record them all? The original implementation marked just the first one,
720 so let's hope for the best. */
721 if (!*slot)
722 *slot = node;
723 }
724 }
725
726 slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
727 decl_assembler_name_hash (asmname),
728 NO_INSERT);
729
730 if (slot)
731 {
732 node = (struct cgraph_node *) *slot;
733 return node;
734 }
735 return NULL;
736 }
737
738 /* Returns a hash value for X (which really is a cgraph_edge), based on its call statement. */
739
740 static hashval_t
741 edge_hash (const void *x)
742 {
743 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
744 }
745
746 /* Return nonzero if the call statement of cgraph_edge X is the statement Y. */
747
748 static int
749 edge_eq (const void *x, const void *y)
750 {
751 return ((const struct cgraph_edge *) x)->call_stmt == y;
752 }
753
754 /* Add call graph edge E to call site hash of its caller. */
755
756 static inline void
757 cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
758 {
759 void **slot;
760 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
761 e->call_stmt,
762 htab_hash_pointer (e->call_stmt),
763 INSERT);
764 gcc_assert (!*slot);
765 *slot = e;
766 }
767
768 /* Return the callgraph edge representing the GIMPLE_CALL statement
769 CALL_STMT. */
770
771 struct cgraph_edge *
772 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
773 {
774 struct cgraph_edge *e, *e2;
775 int n = 0;
776
777 if (node->call_site_hash)
778 return (struct cgraph_edge *)
779 htab_find_with_hash (node->call_site_hash, call_stmt,
780 htab_hash_pointer (call_stmt));
781
782 /* This loop may turn out to be a performance problem. In such a case, adding
783 hashtables to call nodes with very many edges is probably the best
784 solution. It is not a good idea to add a pointer into the CALL_EXPR itself
785 because we want to make it possible to have multiple cgraph nodes representing
786 different clones of the same body before the body is actually cloned. */
787 for (e = node->callees; e; e = e->next_callee)
788 {
789 if (e->call_stmt == call_stmt)
790 break;
791 n++;
792 }
793
794 if (!e)
795 for (e = node->indirect_calls; e; e = e->next_callee)
796 {
797 if (e->call_stmt == call_stmt)
798 break;
799 n++;
800 }
801
802 if (n > 100)
803 {
804 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
805 for (e2 = node->callees; e2; e2 = e2->next_callee)
806 cgraph_add_edge_to_call_site_hash (e2);
807 for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
808 cgraph_add_edge_to_call_site_hash (e2);
809 }
810
811 return e;
812 }
813
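/* Illustrative sketch (not part of the original file, kept out of the build
   by #if 0): look up the edge representing CALL_STMT in CALLER via
   cgraph_edge and redirect it to NEW_CALLEE; the GIMPLE statement itself is
   left untouched.  The name example_redirect_call is hypothetical.  */
#if 0
static void
example_redirect_call (struct cgraph_node *caller, gimple call_stmt,
		       struct cgraph_node *new_callee)
{
  struct cgraph_edge *e = cgraph_edge (caller, call_stmt);

  if (e && !e->indirect_unknown_callee)
    cgraph_redirect_edge_callee (e, new_callee);
}
#endif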
814
815 /* Change field call_stmt of edge E to NEW_STMT. */
816
817 void
818 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
819 {
820 tree decl;
821
822 if (e->caller->call_site_hash)
823 {
824 htab_remove_elt_with_hash (e->caller->call_site_hash,
825 e->call_stmt,
826 htab_hash_pointer (e->call_stmt));
827 }
828
829 e->call_stmt = new_stmt;
830 if (e->indirect_unknown_callee
831 && (decl = gimple_call_fndecl (new_stmt)))
832 {
833 /* Constant propagation (and possibly also inlining?) can turn an
834 indirect call into a direct one. */
835 struct cgraph_node *new_callee = cgraph_get_node (decl);
836
837 gcc_checking_assert (new_callee);
838 cgraph_make_edge_direct (e, new_callee, 0);
839 }
840
841 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
842 e->can_throw_external = stmt_can_throw_external (new_stmt);
843 pop_cfun ();
844 if (e->caller->call_site_hash)
845 cgraph_add_edge_to_call_site_hash (e);
846 }
847
848 /* Like cgraph_set_call_stmt but walk the clone tree and update all
849 clones sharing the same function body. */
850
851 void
852 cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
853 gimple old_stmt, gimple new_stmt)
854 {
855 struct cgraph_node *node;
856 struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
857
858 if (edge)
859 cgraph_set_call_stmt (edge, new_stmt);
860
861 node = orig->clones;
862 if (node)
863 while (node != orig)
864 {
865 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
866 if (edge)
867 cgraph_set_call_stmt (edge, new_stmt);
868 if (node->clones)
869 node = node->clones;
870 else if (node->next_sibling_clone)
871 node = node->next_sibling_clone;
872 else
873 {
874 while (node != orig && !node->next_sibling_clone)
875 node = node->clone_of;
876 if (node != orig)
877 node = node->next_sibling_clone;
878 }
879 }
880 }
881
882 /* Like cgraph_create_edge, but walk the clone tree and update all clones sharing
883 the same function body. If clones already have an edge for OLD_STMT, only
884 update the edge the same way cgraph_set_call_stmt_including_clones does.
885
886 TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
887 frequencies of the clones. */
888
889 void
890 cgraph_create_edge_including_clones (struct cgraph_node *orig,
891 struct cgraph_node *callee,
892 gimple old_stmt,
893 gimple stmt, gcov_type count,
894 int freq,
895 cgraph_inline_failed_t reason)
896 {
897 struct cgraph_node *node;
898 struct cgraph_edge *edge;
899
900 if (!cgraph_edge (orig, stmt))
901 {
902 edge = cgraph_create_edge (orig, callee, stmt, count, freq);
903 edge->inline_failed = reason;
904 }
905
906 node = orig->clones;
907 if (node)
908 while (node != orig)
909 {
910 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
911
912 /* It is possible that clones already contain the edge while
913 the master didn't. Either we promoted an indirect call into a direct
914 call in the clone, or we are processing clones of an unreachable
915 master where edges have been removed. */
916 if (edge)
917 cgraph_set_call_stmt (edge, stmt);
918 else if (!cgraph_edge (node, stmt))
919 {
920 edge = cgraph_create_edge (node, callee, stmt, count,
921 freq);
922 edge->inline_failed = reason;
923 }
924
925 if (node->clones)
926 node = node->clones;
927 else if (node->next_sibling_clone)
928 node = node->next_sibling_clone;
929 else
930 {
931 while (node != orig && !node->next_sibling_clone)
932 node = node->clone_of;
933 if (node != orig)
934 node = node->next_sibling_clone;
935 }
936 }
937 }
938
939 /* Allocate a cgraph_edge structure and fill it with data according to the
940 parameters of which only CALLEE can be NULL (when creating an indirect call
941 edge). */
942
943 static struct cgraph_edge *
944 cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
945 gimple call_stmt, gcov_type count, int freq)
946 {
947 struct cgraph_edge *edge;
948
949 /* LTO does not actually have access to the call_stmt since these
950 have not been loaded yet. */
951 if (call_stmt)
952 {
953 /* This is a rather expensive check possibly triggering
954 construction of the call stmt hashtable. */
955 gcc_checking_assert (!cgraph_edge (caller, call_stmt));
956
957 gcc_assert (is_gimple_call (call_stmt));
958 }
959
960 if (free_edges)
961 {
962 edge = free_edges;
963 free_edges = NEXT_FREE_EDGE (edge);
964 }
965 else
966 {
967 edge = ggc_alloc_cgraph_edge ();
968 edge->uid = cgraph_edge_max_uid++;
969 }
970
971 edge->aux = NULL;
972 edge->caller = caller;
973 edge->callee = callee;
974 edge->prev_caller = NULL;
975 edge->next_caller = NULL;
976 edge->prev_callee = NULL;
977 edge->next_callee = NULL;
978
979 edge->count = count;
980 gcc_assert (count >= 0);
981 edge->frequency = freq;
982 gcc_assert (freq >= 0);
983 gcc_assert (freq <= CGRAPH_FREQ_MAX);
984
985 edge->call_stmt = call_stmt;
986 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
987 edge->can_throw_external
988 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
989 pop_cfun ();
990 edge->call_stmt_cannot_inline_p =
991 (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
992 if (call_stmt && caller->call_site_hash)
993 cgraph_add_edge_to_call_site_hash (edge);
994
995 edge->indirect_info = NULL;
996 edge->indirect_inlining_edge = 0;
997
998 return edge;
999 }
1000
1001 /* Create edge from CALLER to CALLEE in the cgraph. */
1002
1003 struct cgraph_edge *
1004 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
1005 gimple call_stmt, gcov_type count, int freq)
1006 {
1007 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
1008 count, freq);
1009
1010 edge->indirect_unknown_callee = 0;
1011 initialize_inline_failed (edge);
1012
1013 edge->next_caller = callee->callers;
1014 if (callee->callers)
1015 callee->callers->prev_caller = edge;
1016 edge->next_callee = caller->callees;
1017 if (caller->callees)
1018 caller->callees->prev_callee = edge;
1019 caller->callees = edge;
1020 callee->callers = edge;
1021
1022 return edge;
1023 }
1024
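/* Illustrative sketch (not part of the original file, kept out of the build
   by #if 0): mirror a newly created direct GIMPLE_CALL statement STMT in
   basic block BB in the callgraph, taking count and frequency from the
   block's profile.  The name example_record_new_call is hypothetical.  */
#if 0
static void
example_record_new_call (struct cgraph_node *caller, gimple stmt,
			 basic_block bb)
{
  struct cgraph_node *callee
    = cgraph_get_create_node (gimple_call_fndecl (stmt));
  struct cgraph_edge *e
    = cgraph_create_edge (caller, callee, stmt, bb->count,
			  compute_call_stmt_bb_frequency (caller->decl, bb));

  gcc_assert (e->inline_failed);
}
#endif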
1025 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
1026
1027 struct cgraph_indirect_call_info *
1028 cgraph_allocate_init_indirect_info (void)
1029 {
1030 struct cgraph_indirect_call_info *ii;
1031
1032 ii = ggc_alloc_cleared_cgraph_indirect_call_info ();
1033 ii->param_index = -1;
1034 return ii;
1035 }
1036
1037 /* Create an indirect edge with a yet-undetermined callee where the call
1038 statement destination is a formal parameter of the caller with index
1039 PARAM_INDEX. */
1040
1041 struct cgraph_edge *
1042 cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
1043 int ecf_flags,
1044 gcov_type count, int freq)
1045 {
1046 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
1047 count, freq);
1048
1049 edge->indirect_unknown_callee = 1;
1050 initialize_inline_failed (edge);
1051
1052 edge->indirect_info = cgraph_allocate_init_indirect_info ();
1053 edge->indirect_info->ecf_flags = ecf_flags;
1054
1055 edge->next_callee = caller->indirect_calls;
1056 if (caller->indirect_calls)
1057 caller->indirect_calls->prev_callee = edge;
1058 caller->indirect_calls = edge;
1059
1060 return edge;
1061 }
1062
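/* Illustrative sketch (not part of the original file, kept out of the build
   by #if 0): record a call through a function pointer as an indirect edge;
   the ECF flags are taken from the call statement.  The name
   example_record_indirect_call is hypothetical.  */
#if 0
static void
example_record_indirect_call (struct cgraph_node *caller, gimple stmt,
			      basic_block bb)
{
  cgraph_create_indirect_edge (caller, stmt, gimple_call_flags (stmt),
			       bb->count,
			       compute_call_stmt_bb_frequency (caller->decl,
							       bb));
}
#endif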
1063 /* Remove the edge E from the list of the callers of the callee. */
1064
1065 static inline void
1066 cgraph_edge_remove_callee (struct cgraph_edge *e)
1067 {
1068 gcc_assert (!e->indirect_unknown_callee);
1069 if (e->prev_caller)
1070 e->prev_caller->next_caller = e->next_caller;
1071 if (e->next_caller)
1072 e->next_caller->prev_caller = e->prev_caller;
1073 if (!e->prev_caller)
1074 e->callee->callers = e->next_caller;
1075 }
1076
1077 /* Remove the edge E from the list of the callees of the caller. */
1078
1079 static inline void
1080 cgraph_edge_remove_caller (struct cgraph_edge *e)
1081 {
1082 if (e->prev_callee)
1083 e->prev_callee->next_callee = e->next_callee;
1084 if (e->next_callee)
1085 e->next_callee->prev_callee = e->prev_callee;
1086 if (!e->prev_callee)
1087 {
1088 if (e->indirect_unknown_callee)
1089 e->caller->indirect_calls = e->next_callee;
1090 else
1091 e->caller->callees = e->next_callee;
1092 }
1093 if (e->caller->call_site_hash)
1094 htab_remove_elt_with_hash (e->caller->call_site_hash,
1095 e->call_stmt,
1096 htab_hash_pointer (e->call_stmt));
1097 }
1098
1099 /* Put the edge onto the free list. */
1100
1101 static void
1102 cgraph_free_edge (struct cgraph_edge *e)
1103 {
1104 int uid = e->uid;
1105
1106 /* Clear out the edge so we do not dangle pointers. */
1107 memset (e, 0, sizeof (*e));
1108 e->uid = uid;
1109 NEXT_FREE_EDGE (e) = free_edges;
1110 free_edges = e;
1111 }
1112
1113 /* Remove the edge E in the cgraph. */
1114
1115 void
1116 cgraph_remove_edge (struct cgraph_edge *e)
1117 {
1118 /* Call all edge removal hooks. */
1119 cgraph_call_edge_removal_hooks (e);
1120
1121 if (!e->indirect_unknown_callee)
1122 /* Remove from callers list of the callee. */
1123 cgraph_edge_remove_callee (e);
1124
1125 /* Remove from callees list of the callers. */
1126 cgraph_edge_remove_caller (e);
1127
1128 /* Put the edge onto the free list. */
1129 cgraph_free_edge (e);
1130 }
1131
1132 /* Set callee of call graph edge E and add it to the corresponding set of
1133 callers. */
1134
1135 static void
1136 cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1137 {
1138 e->prev_caller = NULL;
1139 if (n->callers)
1140 n->callers->prev_caller = e;
1141 e->next_caller = n->callers;
1142 n->callers = e;
1143 e->callee = n;
1144 }
1145
1146 /* Redirect the callee of E to N. The function does not update the
1147 underlying call expression. */
1148
1149 void
1150 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1151 {
1152 /* Remove from callers list of the current callee. */
1153 cgraph_edge_remove_callee (e);
1154
1155 /* Insert to callers list of the new callee. */
1156 cgraph_set_edge_callee (e, n);
1157 }
1158
1159 /* Make an indirect EDGE with an unknown callee an ordinary edge leading to
1160 CALLEE. DELTA is an integer constant that is to be added to the this
1161 pointer (first parameter) to compensate for skipping a thunk adjustment. */
1162
1163 void
1164 cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee,
1165 HOST_WIDE_INT delta)
1166 {
1167 edge->indirect_unknown_callee = 0;
1168 edge->indirect_info->thunk_delta = delta;
1169
1170 /* Get the edge out of the indirect edge list. */
1171 if (edge->prev_callee)
1172 edge->prev_callee->next_callee = edge->next_callee;
1173 if (edge->next_callee)
1174 edge->next_callee->prev_callee = edge->prev_callee;
1175 if (!edge->prev_callee)
1176 edge->caller->indirect_calls = edge->next_callee;
1177
1178 /* Put it into the normal callee list */
1179 edge->prev_callee = NULL;
1180 edge->next_callee = edge->caller->callees;
1181 if (edge->caller->callees)
1182 edge->caller->callees->prev_callee = edge;
1183 edge->caller->callees = edge;
1184
1185 /* Insert to callers list of the new callee. */
1186 cgraph_set_edge_callee (edge, callee);
1187
1188 /* We need to re-determine the inlining status of the edge. */
1189 initialize_inline_failed (edge);
1190 }
1191
1192
1193 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1194 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1195 of OLD_STMT if it was previously a call statement.
1196 If NEW_STMT is NULL, the call has been dropped without any
1197 replacement. */
1198
1199 static void
1200 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
1201 gimple old_stmt, tree old_call,
1202 gimple new_stmt)
1203 {
1204 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1205 ? gimple_call_fndecl (new_stmt) : 0;
1206
1207 /* If we are seeing only indirect calls, then there is nothing to update. */
1208 if (!new_call && !old_call)
1209 return;
1210 /* See if we turned an indirect call into a direct call or folded a call to one
1211 builtin into a different builtin. */
1212 if (old_call != new_call)
1213 {
1214 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
1215 struct cgraph_edge *ne = NULL;
1216 gcov_type count;
1217 int frequency;
1218
1219 if (e)
1220 {
1221 /* See if the edge is already there and has the correct callee. It
1222 might be so because indirect inlining has already updated
1223 it. We also might've cloned and redirected the edge. */
1224 if (new_call && e->callee)
1225 {
1226 struct cgraph_node *callee = e->callee;
1227 while (callee)
1228 {
1229 if (callee->decl == new_call
1230 || callee->former_clone_of == new_call)
1231 return;
1232 callee = callee->clone_of;
1233 }
1234 }
1235
1236 /* Otherwise remove the edge and create a new one; we can't simply redirect
1237 since the function has changed, so the inline plan and other information
1238 attached to the edge is invalid. */
1239 count = e->count;
1240 frequency = e->frequency;
1241 cgraph_remove_edge (e);
1242 }
1243 else if (new_call)
1244 {
1245 /* We are seeing new direct call; compute profile info based on BB. */
1246 basic_block bb = gimple_bb (new_stmt);
1247 count = bb->count;
1248 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1249 bb);
1250 }
1251
1252 if (new_call)
1253 {
1254 ne = cgraph_create_edge (node, cgraph_get_create_node (new_call),
1255 new_stmt, count, frequency);
1256 gcc_assert (ne->inline_failed);
1257 }
1258 }
1259 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1260 else if (old_stmt != new_stmt)
1261 cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
1262 }
1263
1264 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1265 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1266 of OLD_STMT before it was updated (updating can happen in place). */
1267
1268 void
1269 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
1270 {
1271 struct cgraph_node *orig = cgraph_get_node (cfun->decl);
1272 struct cgraph_node *node;
1273
1274 gcc_checking_assert (orig);
1275 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1276 if (orig->clones)
1277 for (node = orig->clones; node != orig;)
1278 {
1279 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1280 if (node->clones)
1281 node = node->clones;
1282 else if (node->next_sibling_clone)
1283 node = node->next_sibling_clone;
1284 else
1285 {
1286 while (node != orig && !node->next_sibling_clone)
1287 node = node->clone_of;
1288 if (node != orig)
1289 node = node->next_sibling_clone;
1290 }
1291 }
1292 }
1293
1294
1295 /* Remove all callees from the node. */
1296
1297 void
1298 cgraph_node_remove_callees (struct cgraph_node *node)
1299 {
1300 struct cgraph_edge *e, *f;
1301
1302 /* It is sufficient to remove the edges from the lists of callers of
1303 the callees. The callee list of the node can be zapped with one
1304 assignment. */
1305 for (e = node->callees; e; e = f)
1306 {
1307 f = e->next_callee;
1308 cgraph_call_edge_removal_hooks (e);
1309 if (!e->indirect_unknown_callee)
1310 cgraph_edge_remove_callee (e);
1311 cgraph_free_edge (e);
1312 }
1313 for (e = node->indirect_calls; e; e = f)
1314 {
1315 f = e->next_callee;
1316 cgraph_call_edge_removal_hooks (e);
1317 if (!e->indirect_unknown_callee)
1318 cgraph_edge_remove_callee (e);
1319 cgraph_free_edge (e);
1320 }
1321 node->indirect_calls = NULL;
1322 node->callees = NULL;
1323 if (node->call_site_hash)
1324 {
1325 htab_delete (node->call_site_hash);
1326 node->call_site_hash = NULL;
1327 }
1328 }
1329
1330 /* Remove all callers from the node. */
1331
1332 static void
1333 cgraph_node_remove_callers (struct cgraph_node *node)
1334 {
1335 struct cgraph_edge *e, *f;
1336
1337 /* It is sufficient to remove the edges from the lists of callees of
1338 the callers. The caller list of the node can be zapped with one
1339 assignment. */
1340 for (e = node->callers; e; e = f)
1341 {
1342 f = e->next_caller;
1343 cgraph_call_edge_removal_hooks (e);
1344 cgraph_edge_remove_caller (e);
1345 cgraph_free_edge (e);
1346 }
1347 node->callers = NULL;
1348 }
1349
1350 /* Release memory used to represent body of function NODE. */
1351
1352 void
1353 cgraph_release_function_body (struct cgraph_node *node)
1354 {
1355 if (DECL_STRUCT_FUNCTION (node->decl))
1356 {
1357 tree old_decl = current_function_decl;
1358 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1359 if (cfun->gimple_df)
1360 {
1361 current_function_decl = node->decl;
1362 delete_tree_ssa ();
1363 delete_tree_cfg_annotations ();
1364 cfun->eh = NULL;
1365 current_function_decl = old_decl;
1366 }
1367 if (cfun->cfg)
1368 {
1369 gcc_assert (dom_computed[0] == DOM_NONE);
1370 gcc_assert (dom_computed[1] == DOM_NONE);
1371 clear_edges ();
1372 }
1373 if (cfun->value_histograms)
1374 free_histograms ();
1375 gcc_assert (!current_loops);
1376 pop_cfun();
1377 gimple_set_body (node->decl, NULL);
1378 VEC_free (ipa_opt_pass, heap,
1379 node->ipa_transforms_to_apply);
1380 /* The struct function hangs on to a lot of data that would leak if we
1381 didn't remove all pointers to it. */
1382 ggc_free (DECL_STRUCT_FUNCTION (node->decl));
1383 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1384 }
1385 DECL_SAVED_TREE (node->decl) = NULL;
1386 /* If the node is abstract and needed, then do not clear DECL_INITIAL
1387 of its associated function declaration because it's
1388 needed to emit debug info later. */
1389 if (!node->abstract_and_needed)
1390 DECL_INITIAL (node->decl) = error_mark_node;
1391 }
1392
1393 /* Remove the node from cgraph. */
1394
1395 void
1396 cgraph_remove_node (struct cgraph_node *node)
1397 {
1398 void **slot;
1399 bool kill_body = false;
1400 struct cgraph_node *n;
1401 int uid = node->uid;
1402
1403 cgraph_call_node_removal_hooks (node);
1404 cgraph_node_remove_callers (node);
1405 cgraph_node_remove_callees (node);
1406 ipa_remove_all_references (&node->ref_list);
1407 ipa_remove_all_refering (&node->ref_list);
1408 VEC_free (ipa_opt_pass, heap,
1409 node->ipa_transforms_to_apply);
1410
1411 /* Incremental inlining accesses removed nodes stored in the postorder
1412 list. */
1413 node->needed = node->reachable = false;
1414 for (n = node->nested; n; n = n->next_nested)
1415 n->origin = NULL;
1416 node->nested = NULL;
1417 if (node->origin)
1418 {
1419 struct cgraph_node **node2 = &node->origin->nested;
1420
1421 while (*node2 != node)
1422 node2 = &(*node2)->next_nested;
1423 *node2 = node->next_nested;
1424 }
1425 if (node->previous)
1426 node->previous->next = node->next;
1427 else
1428 cgraph_nodes = node->next;
1429 if (node->next)
1430 node->next->previous = node->previous;
1431 node->next = NULL;
1432 node->previous = NULL;
1433 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1434 if (*slot == node)
1435 {
1436 struct cgraph_node *next_inline_clone;
1437
1438 for (next_inline_clone = node->clones;
1439 next_inline_clone && next_inline_clone->decl != node->decl;
1440 next_inline_clone = next_inline_clone->next_sibling_clone)
1441 ;
1442
1443 /* If there is an inline clone of the node being removed, we need
1444 to put it into the position of the removed node and reorganize all
1445 other clones to be based on it. */
1446 if (next_inline_clone)
1447 {
1448 struct cgraph_node *n;
1449 struct cgraph_node *new_clones;
1450
1451 *slot = next_inline_clone;
1452
1453 /* Unlink inline clone from the list of clones of removed node. */
1454 if (next_inline_clone->next_sibling_clone)
1455 next_inline_clone->next_sibling_clone->prev_sibling_clone
1456 = next_inline_clone->prev_sibling_clone;
1457 if (next_inline_clone->prev_sibling_clone)
1458 {
1459 gcc_assert (node->clones != next_inline_clone);
1460 next_inline_clone->prev_sibling_clone->next_sibling_clone
1461 = next_inline_clone->next_sibling_clone;
1462 }
1463 else
1464 {
1465 gcc_assert (node->clones == next_inline_clone);
1466 node->clones = next_inline_clone->next_sibling_clone;
1467 }
1468
1469 new_clones = node->clones;
1470 node->clones = NULL;
1471
1472 /* Copy clone info. */
1473 next_inline_clone->clone = node->clone;
1474
1475 /* Now place it into the clone tree at the same level as NODE. */
1476 next_inline_clone->clone_of = node->clone_of;
1477 next_inline_clone->prev_sibling_clone = NULL;
1478 next_inline_clone->next_sibling_clone = NULL;
1479 if (node->clone_of)
1480 {
1481 if (node->clone_of->clones)
1482 node->clone_of->clones->prev_sibling_clone = next_inline_clone;
1483 next_inline_clone->next_sibling_clone = node->clone_of->clones;
1484 node->clone_of->clones = next_inline_clone;
1485 }
1486
1487 /* Merge the clone list. */
1488 if (new_clones)
1489 {
1490 if (!next_inline_clone->clones)
1491 next_inline_clone->clones = new_clones;
1492 else
1493 {
1494 n = next_inline_clone->clones;
1495 while (n->next_sibling_clone)
1496 n = n->next_sibling_clone;
1497 n->next_sibling_clone = new_clones;
1498 new_clones->prev_sibling_clone = n;
1499 }
1500 }
1501
1502 /* Update clone_of pointers. */
1503 n = new_clones;
1504 while (n)
1505 {
1506 n->clone_of = next_inline_clone;
1507 n = n->next_sibling_clone;
1508 }
1509 }
1510 else
1511 {
1512 htab_clear_slot (cgraph_hash, slot);
1513 kill_body = true;
1514 }
1515
1516 }
1517 if (node->prev_sibling_clone)
1518 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1519 else if (node->clone_of)
1520 node->clone_of->clones = node->next_sibling_clone;
1521 if (node->next_sibling_clone)
1522 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1523 if (node->clones)
1524 {
1525 struct cgraph_node *n, *next;
1526
1527 if (node->clone_of)
1528 {
1529 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1530 n->clone_of = node->clone_of;
1531 n->clone_of = node->clone_of;
1532 n->next_sibling_clone = node->clone_of->clones;
1533 if (node->clone_of->clones)
1534 node->clone_of->clones->prev_sibling_clone = n;
1535 node->clone_of->clones = node->clones;
1536 }
1537 else
1538 {
1539 /* We are removing a node with clones. This makes the clones inconsistent,
1540 but assume they will be removed subsequently and just keep the clone
1541 tree intact. This can happen in unreachable function removal since
1542 we remove unreachable functions in random order, not by a bottom-up
1543 walk of clone trees. */
1544 for (n = node->clones; n; n = next)
1545 {
1546 next = n->next_sibling_clone;
1547 n->next_sibling_clone = NULL;
1548 n->prev_sibling_clone = NULL;
1549 n->clone_of = NULL;
1550 }
1551 }
1552 }
1553
1554 if (node->same_comdat_group)
1555 {
1556 struct cgraph_node *prev;
1557 for (prev = node->same_comdat_group;
1558 prev->same_comdat_group != node;
1559 prev = prev->same_comdat_group)
1560 ;
1561 if (node->same_comdat_group == prev)
1562 prev->same_comdat_group = NULL;
1563 else
1564 prev->same_comdat_group = node->same_comdat_group;
1565 node->same_comdat_group = NULL;
1566 }
1567
1568 /* While all the clones are removed after being processed, the function
1569 itself is kept in the cgraph even after it is compiled. Check whether
1570 we are done with this body and reclaim it proactively if this is the
1571 case. */
1572 if (!kill_body && *slot)
1573 {
1574 struct cgraph_node *n = (struct cgraph_node *) *slot;
1575 if (!n->clones && !n->clone_of && !n->global.inlined_to
1576 && (cgraph_global_info_ready
1577 && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl)
1578 || n->in_other_partition)))
1579 kill_body = true;
1580 }
1581 if (assembler_name_hash)
1582 {
1583 tree name = DECL_ASSEMBLER_NAME (node->decl);
1584 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1585 decl_assembler_name_hash (name),
1586 NO_INSERT);
1587 /* Inline clones are not hashed. */
1588 if (slot && *slot == node)
1589 htab_clear_slot (assembler_name_hash, slot);
1590 }
1591
1592 if (kill_body)
1593 cgraph_release_function_body (node);
1594 node->decl = NULL;
1595 if (node->call_site_hash)
1596 {
1597 htab_delete (node->call_site_hash);
1598 node->call_site_hash = NULL;
1599 }
1600 cgraph_n_nodes--;
1601
1602 /* Clear out the node to NULL all pointers and add the node to the free
1603 list. */
1604 memset (node, 0, sizeof(*node));
1605 node->uid = uid;
1606 NEXT_FREE_NODE (node) = free_nodes;
1607 free_nodes = node;
1608 }
1609
1610 /* Add NEW_ to the same comdat group that OLD is in. */
1611
1612 void
1613 cgraph_add_to_same_comdat_group (struct cgraph_node *new_,
1614 struct cgraph_node *old)
1615 {
1616 gcc_assert (DECL_ONE_ONLY (old->decl));
1617 gcc_assert (!new_->same_comdat_group);
1618 gcc_assert (new_ != old);
1619
1620 DECL_COMDAT_GROUP (new_->decl) = DECL_COMDAT_GROUP (old->decl);
1621 new_->same_comdat_group = old;
1622 if (!old->same_comdat_group)
1623 old->same_comdat_group = new_;
1624 else
1625 {
1626 struct cgraph_node *n;
1627 for (n = old->same_comdat_group;
1628 n->same_comdat_group != old;
1629 n = n->same_comdat_group)
1630 ;
1631 n->same_comdat_group = new_;
1632 }
1633 }
1634
1635 /* Remove NODE from the cgraph, together with all nodes inlined into it. */
1636
1637 void
1638 cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
1639 {
1640 struct cgraph_edge *e, *next;
1641 for (e = node->callees; e; e = next)
1642 {
1643 next = e->next_callee;
1644 if (!e->inline_failed)
1645 cgraph_remove_node_and_inline_clones (e->callee);
1646 }
1647 cgraph_remove_node (node);
1648 }
1649
1650 /* Notify finalize_compilation_unit that given node is reachable. */
1651
1652 void
1653 cgraph_mark_reachable_node (struct cgraph_node *node)
1654 {
1655 if (!node->reachable && node->local.finalized)
1656 {
1657 if (cgraph_global_info_ready)
1658 {
1659 /* Verify that the function does not appear to be needed out of the blue
1660 during the optimization process. This can happen for extern
1661 inlines when bodies were removed after inlining. */
1662 gcc_assert ((node->analyzed || node->in_other_partition
1663 || DECL_EXTERNAL (node->decl)));
1664 }
1665 else
1666 notice_global_symbol (node->decl);
1667 node->reachable = 1;
1668
1669 node->next_needed = cgraph_nodes_queue;
1670 cgraph_nodes_queue = node;
1671 }
1672 }
1673
1674 /* Likewise indicate that a node is needed, i.e. reachable via some
1675 external means. */
1676
1677 void
1678 cgraph_mark_needed_node (struct cgraph_node *node)
1679 {
1680 node->needed = 1;
1681 gcc_assert (!node->global.inlined_to);
1682 cgraph_mark_reachable_node (node);
1683 }
1684
1685 /* Likewise indicate that a node has its address taken. */
1686
1687 void
1688 cgraph_mark_address_taken_node (struct cgraph_node *node)
1689 {
1690 gcc_assert (!node->global.inlined_to);
1691 cgraph_mark_reachable_node (node);
1692 /* FIXME: the address_taken flag is used both as a shortcut for testing whether
1693 an IPA_REF_ADDR reference exists (and thus it should be set on the node
1694 representing the alias we take the address of) and as a test whether the address
1695 of the object was taken (and thus it should be set on the node the alias is
1696 referring to). We should remove the first use and then remove the
1697 following set. */
1698 node->address_taken = 1;
1699 node = cgraph_function_or_thunk_node (node, NULL);
1700 node->address_taken = 1;
1701 }
1702
1703 /* Return local info for the compiled function. */
1704
1705 struct cgraph_local_info *
1706 cgraph_local_info (tree decl)
1707 {
1708 struct cgraph_node *node;
1709
1710 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1711 node = cgraph_get_node (decl);
1712 if (!node)
1713 return NULL;
1714 return &node->local;
1715 }
1716
1717 /* Return global info for the compiled function. */
1718
1719 struct cgraph_global_info *
1720 cgraph_global_info (tree decl)
1721 {
1722 struct cgraph_node *node;
1723
1724 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1725 node = cgraph_get_node (decl);
1726 if (!node)
1727 return NULL;
1728 return &node->global;
1729 }
1730
1731 /* Return RTL info for the compiled function. */
1732
1733 struct cgraph_rtl_info *
1734 cgraph_rtl_info (tree decl)
1735 {
1736 struct cgraph_node *node;
1737
1738 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1739 node = cgraph_get_node (decl);
1740 if (!node
1741 || (decl != current_function_decl
1742 && !TREE_ASM_WRITTEN (node->decl)))
1743 return NULL;
1744 return &node->rtl;
1745 }
1746
1747 /* Return a string describing the failure REASON. */
1748
1749 const char*
1750 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1751 {
1752 #undef DEFCIFCODE
1753 #define DEFCIFCODE(code, string) string,
1754
1755 static const char *cif_string_table[CIF_N_REASONS] = {
1756 #include "cif-code.def"
1757 };
1758
1759 /* Signedness of an enum type is implementation defined, so cast it
1760 to unsigned before testing. */
1761 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1762 return cif_string_table[reason];
1763 }
1764
1765 /* Return name of the node used in debug output. */
1766 const char *
1767 cgraph_node_name (struct cgraph_node *node)
1768 {
1769 return lang_hooks.decl_printable_name (node->decl, 2);
1770 }
1771
1772 /* Names used to print out the availability enum. */
1773 const char * const cgraph_availability_names[] =
1774 {"unset", "not_available", "overwritable", "available", "local"};
1775
1776
1777 /* Dump call graph node NODE to file F. */
1778
1779 void
1780 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1781 {
1782 struct cgraph_edge *edge;
1783 int indirect_calls_count = 0;
1784
1785 fprintf (f, "%s/%i", cgraph_node_name (node), node->uid);
1786 dump_addr (f, " @", (void *)node);
1787 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
1788 fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
1789 if (node->global.inlined_to)
1790 fprintf (f, " (inline copy in %s/%i)",
1791 cgraph_node_name (node->global.inlined_to),
1792 node->global.inlined_to->uid);
1793 if (node->same_comdat_group)
1794 fprintf (f, " (same comdat group as %s/%i)",
1795 cgraph_node_name (node->same_comdat_group),
1796 node->same_comdat_group->uid);
1797 if (node->clone_of)
1798 fprintf (f, " (clone of %s/%i)",
1799 cgraph_node_name (node->clone_of),
1800 node->clone_of->uid);
1801 if (cgraph_function_flags_ready)
1802 fprintf (f, " availability:%s",
1803 cgraph_availability_names [cgraph_function_body_availability (node)]);
1804 if (node->analyzed)
1805 fprintf (f, " analyzed");
1806 if (node->in_other_partition)
1807 fprintf (f, " in_other_partition");
1808 if (node->count)
1809 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1810 (HOST_WIDEST_INT)node->count);
1811 if (node->origin)
1812 fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
1813 if (node->needed)
1814 fprintf (f, " needed");
1815 if (node->address_taken)
1816 fprintf (f, " address_taken");
1817 else if (node->reachable)
1818 fprintf (f, " reachable");
1819 else if (node->reachable_from_other_partition)
1820 fprintf (f, " reachable_from_other_partition");
1821 if (gimple_has_body_p (node->decl))
1822 fprintf (f, " body");
1823 if (node->process)
1824 fprintf (f, " process");
1825 if (node->local.local)
1826 fprintf (f, " local");
1827 if (node->local.externally_visible)
1828 fprintf (f, " externally_visible");
1829 if (node->resolution != LDPR_UNKNOWN)
1830 fprintf (f, " %s",
1831 ld_plugin_symbol_resolution_names[(int)node->resolution]);
1832 if (node->local.finalized)
1833 fprintf (f, " finalized");
1834 if (node->local.redefined_extern_inline)
1835 fprintf (f, " redefined_extern_inline");
1836 if (TREE_ASM_WRITTEN (node->decl))
1837 fprintf (f, " asm_written");
1838 if (node->only_called_at_startup)
1839 fprintf (f, " only_called_at_startup");
1840 if (node->only_called_at_exit)
1841 fprintf (f, " only_called_at_exit");
1842
1843 fprintf (f, "\n");
1844
1845 if (node->thunk.thunk_p)
1846 {
1847 fprintf (f, " thunk of %s (asm: %s) fixed offset %i virtual value %i has "
1848 "virtual offset %i)\n",
1849 lang_hooks.decl_printable_name (node->thunk.alias, 2),
1850 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)),
1851 (int)node->thunk.fixed_offset,
1852 (int)node->thunk.virtual_value,
1853 (int)node->thunk.virtual_offset_p);
1854 }
1855 if (node->alias && node->thunk.alias)
1856 {
1857 fprintf (f, " alias of %s",
1858 lang_hooks.decl_printable_name (node->thunk.alias, 2));
1859 if (DECL_ASSEMBLER_NAME_SET_P (node->thunk.alias))
1860 fprintf (f, " (asm: %s)",
1861 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
1862 fprintf (f, "\n");
1863 }
1864
1865 fprintf (f, " called by: ");
1866
1867 for (edge = node->callers; edge; edge = edge->next_caller)
1868 {
1869 fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
1870 edge->caller->uid);
1871 if (edge->count)
1872 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1873 (HOST_WIDEST_INT)edge->count);
1874 if (edge->frequency)
1875 fprintf (f, "(%.2f per call) ",
1876 edge->frequency / (double)CGRAPH_FREQ_BASE);
1877 if (!edge->inline_failed)
1878 fprintf(f, "(inlined) ");
1879 if (edge->indirect_inlining_edge)
1880 fprintf(f, "(indirect_inlining) ");
1881 if (edge->can_throw_external)
1882 fprintf(f, "(can throw external) ");
1883 }
1884
1885 fprintf (f, "\n calls: ");
1886 for (edge = node->callees; edge; edge = edge->next_callee)
1887 {
1888 fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
1889 edge->callee->uid);
1890 if (!edge->inline_failed)
1891 fprintf(f, "(inlined) ");
1892 if (edge->indirect_inlining_edge)
1893 fprintf(f, "(indirect_inlining) ");
1894 if (edge->count)
1895 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1896 (HOST_WIDEST_INT)edge->count);
1897 if (edge->frequency)
1898 fprintf (f, "(%.2f per call) ",
1899 edge->frequency / (double)CGRAPH_FREQ_BASE);
1900 if (edge->can_throw_external)
1901 fprintf(f, "(can throw external) ");
1902 }
1903 fprintf (f, "\n");
1904 fprintf (f, " References: ");
1905 ipa_dump_references (f, &node->ref_list);
1906 fprintf (f, " Refering this function: ");
1907 ipa_dump_refering (f, &node->ref_list);
1908
1909 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1910 indirect_calls_count++;
1911 if (indirect_calls_count)
1912 fprintf (f, " has %i outgoing edges for indirect calls.\n",
1913 indirect_calls_count);
1914 }
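/* Illustration (not part of the original source): the output produced above
   looks roughly like
     foo/12 @0x7f... availability:available analyzed needed reachable body
       called by: bar/3 (inlined) main/0 (2.00 per call)
       calls: baz/7
   where "foo/12" is the printable name and uid of the node; the exact set of
   fields depends on which flags are set.  */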
1915
1916
1917 /* Dump call graph node NODE to stderr. */
1918
1919 DEBUG_FUNCTION void
1920 debug_cgraph_node (struct cgraph_node *node)
1921 {
1922 dump_cgraph_node (stderr, node);
1923 }
1924
1925
1926 /* Dump the callgraph to file F. */
1927
1928 void
1929 dump_cgraph (FILE *f)
1930 {
1931 struct cgraph_node *node;
1932
1933 fprintf (f, "callgraph:\n\n");
1934 for (node = cgraph_nodes; node; node = node->next)
1935 dump_cgraph_node (f, node);
1936 }
1937
1938
1939 /* Dump the call graph to stderr. */
1940
1941 DEBUG_FUNCTION void
1942 debug_cgraph (void)
1943 {
1944 dump_cgraph (stderr);
1945 }
1946
1947
1948 /* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables. */
1949
1950 void
1951 change_decl_assembler_name (tree decl, tree name)
1952 {
1953 struct cgraph_node *node;
1954 void **slot;
1955 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
1956 SET_DECL_ASSEMBLER_NAME (decl, name);
1957 else
1958 {
1959 if (name == DECL_ASSEMBLER_NAME (decl))
1960 return;
1961
1962 if (assembler_name_hash
1963 && TREE_CODE (decl) == FUNCTION_DECL
1964 && (node = cgraph_get_node (decl)) != NULL)
1965 {
1966 tree old_name = DECL_ASSEMBLER_NAME (decl);
1967 slot = htab_find_slot_with_hash (assembler_name_hash, old_name,
1968 decl_assembler_name_hash (old_name),
1969 NO_INSERT);
1970 /* Inline clones are not hashed. */
1971 if (slot && *slot == node)
1972 htab_clear_slot (assembler_name_hash, slot);
1973 }
1974 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
1975 && DECL_RTL_SET_P (decl))
1976 warning (0, "%D renamed after being referenced in assembly", decl);
1977
1978 SET_DECL_ASSEMBLER_NAME (decl, name);
1979 }
1980 if (assembler_name_hash
1981 && TREE_CODE (decl) == FUNCTION_DECL
1982 && (node = cgraph_get_node (decl)) != NULL)
1983 {
1984 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1985 decl_assembler_name_hash (name),
1986 INSERT);
1987 gcc_assert (!*slot);
1988 *slot = node;
1989 }
1990 }
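/* Note (not part of the original source): the function above first removes
   NODE from assembler_name_hash under its old assembler name and then
   re-inserts it keyed by NAME, so that later lookups by assembler name
   (e.g. via cgraph_node_for_asm) keep finding the node.  */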
1991
1992 /* Add a top-level asm statement to the list. */
1993
1994 struct cgraph_asm_node *
1995 cgraph_add_asm_node (tree asm_str)
1996 {
1997 struct cgraph_asm_node *node;
1998
1999 node = ggc_alloc_cleared_cgraph_asm_node ();
2000 node->asm_str = asm_str;
2001 node->order = cgraph_order++;
2002 node->next = NULL;
2003 if (cgraph_asm_nodes == NULL)
2004 cgraph_asm_nodes = node;
2005 else
2006 cgraph_asm_last_node->next = node;
2007 cgraph_asm_last_node = node;
2008 return node;
2009 }
2010
2011 /* Return true when the DECL can possibly be inlined. */
2012 bool
2013 cgraph_function_possibly_inlined_p (tree decl)
2014 {
2015 if (!cgraph_global_info_ready)
2016 return !DECL_UNINLINABLE (decl);
2017 return DECL_POSSIBLY_INLINED (decl);
2018 }
2019
2020 /* Create a clone of edge E in node N, represented by CALL_STMT, in the callgraph. */
2021 struct cgraph_edge *
2022 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
2023 gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
2024 int freq_scale, bool update_original)
2025 {
2026 struct cgraph_edge *new_edge;
2027 gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
2028 gcov_type freq;
2029
2030 /* We do not want to ignore loop nest after frequency drops to 0. */
2031 if (!freq_scale)
2032 freq_scale = 1;
2033 freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
2034 if (freq > CGRAPH_FREQ_MAX)
2035 freq = CGRAPH_FREQ_MAX;
2036
2037 if (e->indirect_unknown_callee)
2038 {
2039 tree decl;
2040
2041 if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
2042 {
2043 struct cgraph_node *callee = cgraph_get_node (decl);
2044 gcc_checking_assert (callee);
2045 new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq);
2046 }
2047 else
2048 {
2049 new_edge = cgraph_create_indirect_edge (n, call_stmt,
2050 e->indirect_info->ecf_flags,
2051 count, freq);
2052 *new_edge->indirect_info = *e->indirect_info;
2053 }
2054 }
2055 else
2056 {
2057 new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq);
2058 if (e->indirect_info)
2059 {
2060 new_edge->indirect_info
2061 = ggc_alloc_cleared_cgraph_indirect_call_info ();
2062 *new_edge->indirect_info = *e->indirect_info;
2063 }
2064 }
2065
2066 new_edge->inline_failed = e->inline_failed;
2067 new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
2068 new_edge->lto_stmt_uid = stmt_uid;
2069 /* Clone flags that depend on call_stmt availability manually. */
2070 new_edge->can_throw_external = e->can_throw_external;
2071 new_edge->call_stmt_cannot_inline_p = e->call_stmt_cannot_inline_p;
2072 if (update_original)
2073 {
2074 e->count -= new_edge->count;
2075 if (e->count < 0)
2076 e->count = 0;
2077 }
2078 cgraph_call_edge_duplication_hooks (e, new_edge);
2079 return new_edge;
2080 }
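/* Worked example (not part of the original source): with REG_BR_PROB_BASE
   being 10000, an edge with e->count == 1000 cloned with count_scale == 5000
   gets new_edge->count == 1000 * 5000 / 10000 == 500, and when
   UPDATE_ORIGINAL is true the remaining 500 stay on the original edge.
   Frequencies are scaled analogously against CGRAPH_FREQ_BASE and capped at
   CGRAPH_FREQ_MAX.  */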
2081
2082
2083 /* Create node representing clone of N executed COUNT times. Decrease
2084 the execution counts from original node too.
2085 The new clone will have decl set to DECL that may or may not be the same
2086 as decl of N.
2087
2088 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
2089 function's profile to reflect the fact that part of execution is handled
2090 by the new node.
2091 When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
2092 the new clone. Otherwise the caller is responsible for doing so later. */
2093
2094 struct cgraph_node *
2095 cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
2096 bool update_original,
2097 VEC(cgraph_edge_p,heap) *redirect_callers,
2098 bool call_duplication_hook)
2099 {
2100 struct cgraph_node *new_node = cgraph_create_node_1 ();
2101 struct cgraph_edge *e;
2102 gcov_type count_scale;
2103 unsigned i;
2104
2105 new_node->decl = decl;
2106 new_node->origin = n->origin;
2107 if (new_node->origin)
2108 {
2109 new_node->next_nested = new_node->origin->nested;
2110 new_node->origin->nested = new_node;
2111 }
2112 new_node->analyzed = n->analyzed;
2113 new_node->local = n->local;
2114 new_node->local.externally_visible = false;
2115 new_node->local.local = true;
2116 new_node->global = n->global;
2117 new_node->rtl = n->rtl;
2118 new_node->count = count;
2119 new_node->frequency = n->frequency;
2120 new_node->clone = n->clone;
2121 new_node->clone.tree_map = 0;
2122 if (n->count)
2123 {
2124 if (new_node->count > n->count)
2125 count_scale = REG_BR_PROB_BASE;
2126 else
2127 count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
2128 }
2129 else
2130 count_scale = 0;
2131 if (update_original)
2132 {
2133 n->count -= count;
2134 if (n->count < 0)
2135 n->count = 0;
2136 }
2137
2138 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
2139 {
2140 /* Redirect calls to the old version node to point to its new
2141 version. */
2142 cgraph_redirect_edge_callee (e, new_node);
2143 }
2144
2145
2146 for (e = n->callees; e; e = e->next_callee)
2147 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2148 count_scale, freq, update_original);
2149
2150 for (e = n->indirect_calls; e; e = e->next_callee)
2151 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2152 count_scale, freq, update_original);
2153 ipa_clone_references (new_node, NULL, &n->ref_list);
2154
2155 new_node->next_sibling_clone = n->clones;
2156 if (n->clones)
2157 n->clones->prev_sibling_clone = new_node;
2158 n->clones = new_node;
2159 new_node->clone_of = n;
2160
2161 if (n->decl != decl)
2162 {
2163 struct cgraph_node **slot;
2164 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, new_node, INSERT);
2165 gcc_assert (!*slot);
2166 *slot = new_node;
2167 if (assembler_name_hash)
2168 {
2169 void **aslot;
2170 tree name = DECL_ASSEMBLER_NAME (decl);
2171
2172 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
2173 decl_assembler_name_hash (name),
2174 INSERT);
2175 gcc_assert (!*aslot);
2176 *aslot = new_node;
2177 }
2178 }
2179
2180 if (call_duplication_hook)
2181 cgraph_call_node_duplication_hooks (n, new_node);
2182 return new_node;
2183 }
2184
2185 /* Create a new name for a clone of DECL, appending SUFFIX. Return an identifier. */
2186
2187 static GTY(()) unsigned int clone_fn_id_num;
2188
2189 tree
2190 clone_function_name (tree decl, const char *suffix)
2191 {
2192 tree name = DECL_ASSEMBLER_NAME (decl);
2193 size_t len = IDENTIFIER_LENGTH (name);
2194 char *tmp_name, *prefix;
2195
2196 prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
2197 memcpy (prefix, IDENTIFIER_POINTER (name), len);
2198 strcpy (prefix + len + 1, suffix);
2199 #ifndef NO_DOT_IN_LABEL
2200 prefix[len] = '.';
2201 #elif !defined NO_DOLLAR_IN_LABEL
2202 prefix[len] = '$';
2203 #else
2204 prefix[len] = '_';
2205 #endif
2206 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
2207 return get_identifier (tmp_name);
2208 }
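/* Illustration (not part of the original source): for a declaration whose
   assembler name is "foo" and SUFFIX "constprop", the identifier returned is
   typically "foo.constprop.0" on targets that allow '.' in labels; the
   separator and the trailing counter come from the #if chain above and from
   ASM_FORMAT_PRIVATE_NAME, so the exact spelling is target dependent.  */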
2209
2210 /* Create a callgraph node clone with a new declaration. The actual body
2211 will be copied later at the compilation stage.
2212
2213 TODO: after merging in ipa-sra use function call notes instead of args_to_skip
2214 bitmap interface.
2215 */
2216 struct cgraph_node *
2217 cgraph_create_virtual_clone (struct cgraph_node *old_node,
2218 VEC(cgraph_edge_p,heap) *redirect_callers,
2219 VEC(ipa_replace_map_p,gc) *tree_map,
2220 bitmap args_to_skip,
2221 const char * suffix)
2222 {
2223 tree old_decl = old_node->decl;
2224 struct cgraph_node *new_node = NULL;
2225 tree new_decl;
2226 size_t i;
2227 struct ipa_replace_map *map;
2228
2229 if (!flag_wpa)
2230 gcc_checking_assert (tree_versionable_function_p (old_decl));
2231
2232 gcc_assert (old_node->local.can_change_signature || !args_to_skip);
2233
2234 /* Make a new FUNCTION_DECL tree node. */
2235 if (!args_to_skip)
2236 new_decl = copy_node (old_decl);
2237 else
2238 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
2239 DECL_STRUCT_FUNCTION (new_decl) = NULL;
2240
2241 /* Generate a new name for the new version. */
2242 DECL_NAME (new_decl) = clone_function_name (old_decl, suffix);
2243 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2244 SET_DECL_RTL (new_decl, NULL);
2245
2246 new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
2247 CGRAPH_FREQ_BASE, false,
2248 redirect_callers, false);
2249 /* Update the properties.
2250 Make the clone visible only within this translation unit. Also make
2251 sure it is not weak.
2252 ??? We cannot use COMDAT linkage because there is no
2253 ABI support for this. */
2254 DECL_EXTERNAL (new_node->decl) = 0;
2255 if (DECL_ONE_ONLY (old_decl))
2256 DECL_SECTION_NAME (new_node->decl) = NULL;
2257 DECL_COMDAT_GROUP (new_node->decl) = 0;
2258 TREE_PUBLIC (new_node->decl) = 0;
2259 DECL_COMDAT (new_node->decl) = 0;
2260 DECL_WEAK (new_node->decl) = 0;
2261 DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
2262 DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;
2263 new_node->clone.tree_map = tree_map;
2264 new_node->clone.args_to_skip = args_to_skip;
2265 FOR_EACH_VEC_ELT (ipa_replace_map_p, tree_map, i, map)
2266 {
2267 tree var = map->new_tree;
2268
2269 STRIP_NOPS (var);
2270 if (TREE_CODE (var) != ADDR_EXPR)
2271 continue;
2272 var = get_base_var (var);
2273 if (!var)
2274 continue;
2275
2276 /* Record references for the future statement that will initialize the
2277 constant argument. */
2278 if (TREE_CODE (var) == FUNCTION_DECL)
2279 {
2280 struct cgraph_node *ref_node = cgraph_get_node (var);
2281 gcc_checking_assert (ref_node);
2282 ipa_record_reference (new_node, NULL, ref_node, NULL, IPA_REF_ADDR,
2283 NULL);
2284 }
2285 else if (TREE_CODE (var) == VAR_DECL)
2286 ipa_record_reference (new_node, NULL, NULL, varpool_node (var),
2287 IPA_REF_ADDR, NULL);
2288 }
2289 if (!args_to_skip)
2290 new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
2291 else if (old_node->clone.combined_args_to_skip)
2292 {
2293 int newi = 0, oldi = 0;
2294 tree arg;
2295 bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
2296 struct cgraph_node *orig_node;
2297 for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
2298 ;
2299 for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = DECL_CHAIN (arg), oldi++)
2300 {
2301 if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
2302 {
2303 bitmap_set_bit (new_args_to_skip, oldi);
2304 continue;
2305 }
2306 if (bitmap_bit_p (args_to_skip, newi))
2307 bitmap_set_bit (new_args_to_skip, oldi);
2308 newi++;
2309 }
2310 new_node->clone.combined_args_to_skip = new_args_to_skip;
2311 }
2312 else
2313 new_node->clone.combined_args_to_skip = args_to_skip;
2314 new_node->local.externally_visible = 0;
2315 new_node->local.local = 1;
2316 new_node->lowered = true;
2317 new_node->reachable = true;
2318
2319 cgraph_call_node_duplication_hooks (old_node, new_node);
2320
2321
2322 return new_node;
2323 }
2324
2325 /* NODE is no longer a nested function; update the callgraph accordingly. */
2326 void
2327 cgraph_unnest_node (struct cgraph_node *node)
2328 {
2329 struct cgraph_node **node2;
2330 gcc_assert (node->origin);
2331 node2 = &node->origin->nested;
2332 while (*node2 != node)
2333 node2 = &(*node2)->next_nested;
2334 *node2 = node->next_nested;
2335 node->origin = NULL;
2336 }
2337
2338 /* Return function availability. See cgraph.h for description of individual
2339 return values. */
2340 enum availability
2341 cgraph_function_body_availability (struct cgraph_node *node)
2342 {
2343 enum availability avail;
2344 gcc_assert (cgraph_function_flags_ready);
2345 if (!node->analyzed)
2346 avail = AVAIL_NOT_AVAILABLE;
2347 else if (node->local.local)
2348 avail = AVAIL_LOCAL;
2349 else if (!node->local.externally_visible)
2350 avail = AVAIL_AVAILABLE;
2351 /* Inline functions are safe to analyze even if their symbol can be
2352 overwritten at runtime, since it is not meaningful to enforce any sane
2353 behaviour when an inline function is replaced by a different body. */
2354 else if (DECL_DECLARED_INLINE_P (node->decl))
2355 avail = AVAIL_AVAILABLE;
2356
2357 /* If the function can be overwritten, return OVERWRITABLE. Take
2358 care of notable extensions such as the COMDAT functions used to
2359 share template instantiations in C++ (this is symmetric to the code
2360 in cp_cannot_inline_tree_fn and probably should be shared, with the
2361 inlinability hooks eliminated completely).
2362
2363 ??? Does the C++ one definition rule allow us to always return
2364 AVAIL_AVAILABLE here? That would be a good reason to preserve this
2365 bit. */
2366
2367 else if (decl_replaceable_p (node->decl) && !DECL_EXTERNAL (node->decl))
2368 avail = AVAIL_OVERWRITABLE;
2369 else avail = AVAIL_AVAILABLE;
2370
2371 return avail;
2372 }
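/* Summary (not part of the original source): the values returned above form
   a lattice: AVAIL_NOT_AVAILABLE (no analyzed body) < AVAIL_OVERWRITABLE
   (body known but the symbol may be replaced at link or run time)
   < AVAIL_AVAILABLE (body known and safe to use) < AVAIL_LOCAL (body known
   and the function is local to this unit).  See cgraph.h for the
   authoritative description of each value.  */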
2373
2374 /* Add the function FNDECL to the call graph.
2375 Unlike cgraph_finalize_function, this function is intended to be used
2376 by the middle end and allows insertion of a new function at an
2377 arbitrary point of compilation. The function can be in high, low or
2378 SSA GIMPLE form.
2379
2380 The function is assumed to be reachable and to have its address taken
2381 (so no API-breaking optimizations are performed on it).
2382
2383 The main work done by this function is to enqueue the function for
2384 later processing, to avoid the need for the passes to be re-entrant. */
2385
2386 void
2387 cgraph_add_new_function (tree fndecl, bool lowered)
2388 {
2389 struct cgraph_node *node;
2390 switch (cgraph_state)
2391 {
2392 case CGRAPH_STATE_CONSTRUCTION:
2393 /* Just enqueue the function to be processed at the nearest occasion. */
2394 node = cgraph_create_node (fndecl);
2395 node->next_needed = cgraph_new_nodes;
2396 if (lowered)
2397 node->lowered = true;
2398 cgraph_new_nodes = node;
2399 break;
2400
2401 case CGRAPH_STATE_IPA:
2402 case CGRAPH_STATE_IPA_SSA:
2403 case CGRAPH_STATE_EXPANSION:
2404 /* Bring the function into finalized state and enqueue for later
2405 analysis and compilation. */
2406 node = cgraph_get_create_node (fndecl);
2407 node->local.local = false;
2408 node->local.finalized = true;
2409 node->reachable = node->needed = true;
2410 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
2411 {
2412 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2413 current_function_decl = fndecl;
2414 gimple_register_cfg_hooks ();
2415 tree_lowering_passes (fndecl);
2416 bitmap_obstack_initialize (NULL);
2417 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2418 execute_pass_list (pass_early_local_passes.pass.sub);
2419 bitmap_obstack_release (NULL);
2420 pop_cfun ();
2421 current_function_decl = NULL;
2422
2423 lowered = true;
2424 }
2425 if (lowered)
2426 node->lowered = true;
2427 node->next_needed = cgraph_new_nodes;
2428 cgraph_new_nodes = node;
2429 break;
2430
2431 case CGRAPH_STATE_FINISHED:
2432 /* At the very end of compilation we have to do all the work up
2433 to expansion. */
2434 node = cgraph_create_node (fndecl);
2435 if (lowered)
2436 node->lowered = true;
2437 cgraph_analyze_function (node);
2438 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2439 current_function_decl = fndecl;
2440 gimple_register_cfg_hooks ();
2441 bitmap_obstack_initialize (NULL);
2442 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2443 execute_pass_list (pass_early_local_passes.pass.sub);
2444 bitmap_obstack_release (NULL);
2445 tree_rest_of_compilation (fndecl);
2446 pop_cfun ();
2447 current_function_decl = NULL;
2448 break;
2449 }
2450
2451 /* Set a personality if required and we already passed EH lowering. */
2452 if (lowered
2453 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
2454 == eh_personality_lang))
2455 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
2456 }
2457
2458 /* Worker for cgraph_node_can_be_local_p. */
2459 static bool
2460 cgraph_node_cannot_be_local_p_1 (struct cgraph_node *node,
2461 void *data ATTRIBUTE_UNUSED)
2462 {
2463 return !(!node->needed
2464 && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
2465 || !node->local.externally_visible));
2466 }
2467
2468 /* Return true if NODE can be made local for an API change.
2469 Extern inline functions and C++ COMDAT functions can be made local
2470 at the expense of possible code size growth if the function is used
2471 in multiple compilation units. */
2472 bool
2473 cgraph_node_can_be_local_p (struct cgraph_node *node)
2474 {
2475 return (!node->address_taken
2476 && !cgraph_for_node_and_aliases (node,
2477 cgraph_node_cannot_be_local_p_1,
2478 NULL, true));
2479 }
2480
2481 /* Make DECL local. FIXME: We shouldn't need to mess with rtl this early,
2482 but other code such as notice_global_symbol generates rtl. */
2483 void
2484 cgraph_make_decl_local (tree decl)
2485 {
2486 rtx rtl, symbol;
2487
2488 if (TREE_CODE (decl) == VAR_DECL)
2489 DECL_COMMON (decl) = 0;
2490 else gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
2491
2492 if (DECL_ONE_ONLY (decl) || DECL_COMDAT (decl))
2493 {
2494 /* It is possible that we are linking against a library defining the same
2495 COMDAT function. To avoid a conflict we need to rename our local copy
2496 of the function in case WHOPR partitioning decides to make it hidden
2497 to avoid cross-partition references. */
2498 if (flag_wpa)
2499 {
2500 const char *old_name;
2501
2502 old_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2503 if (TREE_CODE (decl) == FUNCTION_DECL)
2504 {
2505 struct cgraph_node *node = cgraph_get_node (decl);
2506 change_decl_assembler_name (decl,
2507 clone_function_name (decl, "local"));
2508 if (node->local.lto_file_data)
2509 lto_record_renamed_decl (node->local.lto_file_data,
2510 old_name,
2511 IDENTIFIER_POINTER
2512 (DECL_ASSEMBLER_NAME (decl)));
2513 }
2514 else if (TREE_CODE (decl) == VAR_DECL)
2515 {
2516 struct varpool_node *vnode = varpool_get_node (decl);
2517 /* change_decl_assembler_name will warn here on vtables because
2518 C++ frontend still sets TREE_SYMBOL_REFERENCED on them. */
2519 SET_DECL_ASSEMBLER_NAME (decl,
2520 clone_function_name (decl, "local"));
2521 if (vnode->lto_file_data)
2522 lto_record_renamed_decl (vnode->lto_file_data,
2523 old_name,
2524 IDENTIFIER_POINTER
2525 (DECL_ASSEMBLER_NAME (decl)));
2526 }
2527 }
2528 DECL_SECTION_NAME (decl) = 0;
2529 DECL_COMDAT (decl) = 0;
2530 }
2531 DECL_COMDAT_GROUP (decl) = 0;
2532 DECL_WEAK (decl) = 0;
2533 DECL_EXTERNAL (decl) = 0;
2534 TREE_PUBLIC (decl) = 0;
2535 if (!DECL_RTL_SET_P (decl))
2536 return;
2537
2538 /* Update rtl flags. */
2539 make_decl_rtl (decl);
2540
2541 rtl = DECL_RTL (decl);
2542 if (!MEM_P (rtl))
2543 return;
2544
2545 symbol = XEXP (rtl, 0);
2546 if (GET_CODE (symbol) != SYMBOL_REF)
2547 return;
2548
2549 SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
2550 }
2551
2552 /* Call CALLBACK on NODE and on the thunks and aliases associated with NODE.
2553 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2554 skipped. */
2555
2556 bool
2557 cgraph_for_node_thunks_and_aliases (struct cgraph_node *node,
2558 bool (*callback) (struct cgraph_node *, void *),
2559 void *data,
2560 bool include_overwritable)
2561 {
2562 struct cgraph_edge *e;
2563 int i;
2564 struct ipa_ref *ref;
2565
2566 if (callback (node, data))
2567 return true;
2568 for (e = node->callers; e; e = e->next_caller)
2569 if (e->caller->thunk.thunk_p
2570 && (include_overwritable
2571 || cgraph_function_body_availability (e->caller)))
2572 if (cgraph_for_node_thunks_and_aliases (e->caller, callback, data,
2573 include_overwritable))
2574 return true;
2575 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
2576 if (ref->use == IPA_REF_ALIAS)
2577 {
2578 struct cgraph_node *alias = ipa_ref_refering_node (ref);
2579 if (include_overwritable
2580 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2581 if (cgraph_for_node_thunks_and_aliases (alias, callback, data,
2582 include_overwritable))
2583 return true;
2584 }
2585 return false;
2586 }
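/* Usage sketch (not part of the original source; "count_node" and "counter"
   are hypothetical names): a callback that merely counts the visited nodes
   could look like

     static bool
     count_node (struct cgraph_node *node ATTRIBUTE_UNUSED, void *data)
     {
       (*(int *) data)++;
       return false;
     }

   and be invoked as

     int counter = 0;
     cgraph_for_node_thunks_and_aliases (node, count_node, &counter, false);

   Returning true from the callback stops the walk early, and that value is
   then returned by cgraph_for_node_thunks_and_aliases itself.  */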
2587
2588 /* Call CALLBACK on NODE and on the aliases associated with NODE.
2589 When INCLUDE_OVERWRITABLE is false, overwritable aliases are
2590 skipped. */
2591
2592 bool
2593 cgraph_for_node_and_aliases (struct cgraph_node *node,
2594 bool (*callback) (struct cgraph_node *, void *),
2595 void *data,
2596 bool include_overwritable)
2597 {
2598 int i;
2599 struct ipa_ref *ref;
2600
2601 if (callback (node, data))
2602 return true;
2603 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
2604 if (ref->use == IPA_REF_ALIAS)
2605 {
2606 struct cgraph_node *alias = ipa_ref_refering_node (ref);
2607 if (include_overwritable
2608 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2609 if (cgraph_for_node_and_aliases (alias, callback, data,
2610 include_overwritable))
2611 return true;
2612 }
2613 return false;
2614 }
2615
2616 /* Worker to make NODE local. */
2617
2618 static bool
2619 cgraph_make_node_local_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2620 {
2621 gcc_checking_assert (cgraph_node_can_be_local_p (node));
2622 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2623 {
2624 cgraph_make_decl_local (node->decl);
2625
2626 node->local.externally_visible = false;
2627 node->local.local = true;
2628 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2629 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2630 }
2631 return false;
2632 }
2633
2634 /* Make NODE local. */
2635
2636 void
2637 cgraph_make_node_local (struct cgraph_node *node)
2638 {
2639 cgraph_for_node_thunks_and_aliases (node, cgraph_make_node_local_1,
2640 NULL, true);
2641 }
2642
2643 /* Worker to set nothrow flag. */
2644
2645 static bool
2646 cgraph_set_nothrow_flag_1 (struct cgraph_node *node, void *data)
2647 {
2648 struct cgraph_edge *e;
2649
2650 TREE_NOTHROW (node->decl) = data != NULL;
2651
2652 if (data != NULL)
2653 for (e = node->callers; e; e = e->next_caller)
2654 e->can_throw_external = false;
2655 return false;
2656 }
2657
2658 /* Set TREE_NOTHROW on NODE's decl and on aliases of NODE
2659 if any to NOTHROW. */
2660
2661 void
2662 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2663 {
2664 cgraph_for_node_thunks_and_aliases (node, cgraph_set_nothrow_flag_1,
2665 (void *)(size_t)nothrow, false);
2666 }
2667
2668 /* Worker to set const flag. */
2669
2670 static bool
2671 cgraph_set_const_flag_1 (struct cgraph_node *node, void *data)
2672 {
2673 /* Static constructors and destructors without a side effect can be
2674 optimized out. */
2675 if (data && !((size_t)data & 2))
2676 {
2677 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2678 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2679 if (DECL_STATIC_DESTRUCTOR (node->decl))
2680 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2681 }
2682 TREE_READONLY (node->decl) = data != NULL;
2683 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2684 return false;
2685 }
2686
2687 /* Set TREE_READONLY on NODE's decl and on aliases of NODE
2688 if any to READONLY. */
2689
2690 void
2691 cgraph_set_const_flag (struct cgraph_node *node, bool readonly, bool looping)
2692 {
2693 cgraph_for_node_thunks_and_aliases (node, cgraph_set_const_flag_1,
2694 (void *)(size_t)(readonly + (int)looping * 2),
2695 false);
2696 }
2697
2698 /* Worker to set pure flag. */
2699
2700 static bool
2701 cgraph_set_pure_flag_1 (struct cgraph_node *node, void *data)
2702 {
2703 /* Static constructors and destructors without a side effect can be
2704 optimized out. */
2705 if (data && !((size_t)data & 2))
2706 {
2707 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2708 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2709 if (DECL_STATIC_DESTRUCTOR (node->decl))
2710 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2711 }
2712 DECL_PURE_P (node->decl) = data != NULL;
2713 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2714 return false;
2715 }
2716
2717 /* Set DECL_PURE_P on NODE's decl and on aliases of NODE
2718 if any to PURE. */
2719
2720 void
2721 cgraph_set_pure_flag (struct cgraph_node *node, bool pure, bool looping)
2722 {
2723 cgraph_for_node_thunks_and_aliases (node, cgraph_set_pure_flag_1,
2724 (void *)(size_t)(pure + (int)looping * 2),
2725 false);
2726 }
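/* Note (not part of the original source): the DATA argument passed to the
   two workers above encodes two bits: bit 0 is the READONLY/PURE flag and
   bit 1 is LOOPING.  For example

     cgraph_set_const_flag (node, true, true);

   passes (void *) (size_t) 3, so the worker sets both TREE_READONLY and
   DECL_LOOPING_CONST_OR_PURE_P on the decl and on its aliases.  */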
2727
2728 /* Data used by cgraph_propagate_frequency. */
2729
2730 struct cgraph_propagate_frequency_data
2731 {
2732 bool maybe_unlikely_executed;
2733 bool maybe_executed_once;
2734 bool only_called_at_startup;
2735 bool only_called_at_exit;
2736 };
2737
2738 /* Worker for cgraph_propagate_frequency. */
2739
2740 static bool
2741 cgraph_propagate_frequency_1 (struct cgraph_node *node, void *data)
2742 {
2743 struct cgraph_propagate_frequency_data *d;
2744 struct cgraph_edge *edge;
2745
2746 d = (struct cgraph_propagate_frequency_data *)data;
2747 for (edge = node->callers;
2748 edge && (d->maybe_unlikely_executed || d->maybe_executed_once
2749 || d->only_called_at_startup || d->only_called_at_exit);
2750 edge = edge->next_caller)
2751 {
2752 if (edge->caller != node)
2753 {
2754 d->only_called_at_startup &= edge->caller->only_called_at_startup;
2755 /* It makes sense to put main() together with the static constructors.
2756 It will be executed for sure, but the rest of the functions called
2757 from main are definitely not startup-only. */
2758 if (MAIN_NAME_P (DECL_NAME (edge->caller->decl)))
2759 d->only_called_at_startup = 0;
2760 d->only_called_at_exit &= edge->caller->only_called_at_exit;
2761 }
2762 if (!edge->frequency)
2763 continue;
2764 switch (edge->caller->frequency)
2765 {
2766 case NODE_FREQUENCY_UNLIKELY_EXECUTED:
2767 break;
2768 case NODE_FREQUENCY_EXECUTED_ONCE:
2769 if (dump_file && (dump_flags & TDF_DETAILS))
2770 fprintf (dump_file, " Called by %s that is executed once\n",
2771 cgraph_node_name (edge->caller));
2772 d->maybe_unlikely_executed = false;
2773 if (inline_edge_summary (edge)->loop_depth)
2774 {
2775 d->maybe_executed_once = false;
2776 if (dump_file && (dump_flags & TDF_DETAILS))
2777 fprintf (dump_file, " Called in loop\n");
2778 }
2779 break;
2780 case NODE_FREQUENCY_HOT:
2781 case NODE_FREQUENCY_NORMAL:
2782 if (dump_file && (dump_flags & TDF_DETAILS))
2783 fprintf (dump_file, " Called by %s that is normal or hot\n",
2784 cgraph_node_name (edge->caller));
2785 d->maybe_unlikely_executed = false;
2786 d->maybe_executed_once = false;
2787 break;
2788 }
2789 }
2790 return edge != NULL;
2791 }
2792
2793 /* See if the frequency of NODE can be updated based on frequencies of its
2794 callers. */
2795 bool
2796 cgraph_propagate_frequency (struct cgraph_node *node)
2797 {
2798 struct cgraph_propagate_frequency_data d = {true, true, true, true};
2799 bool changed = false;
2800
2801 if (!node->local.local)
2802 return false;
2803 gcc_assert (node->analyzed);
2804 if (dump_file && (dump_flags & TDF_DETAILS))
2805 fprintf (dump_file, "Processing frequency %s\n", cgraph_node_name (node));
2806
2807 cgraph_for_node_and_aliases (node, cgraph_propagate_frequency_1, &d, true);
2808
2809 if ((d.only_called_at_startup && !d.only_called_at_exit)
2810 && !node->only_called_at_startup)
2811 {
2812 node->only_called_at_startup = true;
2813 if (dump_file)
2814 fprintf (dump_file, "Node %s promoted to only called at startup.\n",
2815 cgraph_node_name (node));
2816 changed = true;
2817 }
2818 if ((d.only_called_at_exit && !d.only_called_at_startup)
2819 && !node->only_called_at_exit)
2820 {
2821 node->only_called_at_exit = true;
2822 if (dump_file)
2823 fprintf (dump_file, "Node %s promoted to only called at exit.\n",
2824 cgraph_node_name (node));
2825 changed = true;
2826 }
2827 /* These come either from profile or user hints; never update them. */
2828 if (node->frequency == NODE_FREQUENCY_HOT
2829 || node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2830 return changed;
2831 if (d.maybe_unlikely_executed)
2832 {
2833 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
2834 if (dump_file)
2835 fprintf (dump_file, "Node %s promoted to unlikely executed.\n",
2836 cgraph_node_name (node));
2837 changed = true;
2838 }
2839 else if (d.maybe_executed_once && node->frequency != NODE_FREQUENCY_EXECUTED_ONCE)
2840 {
2841 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2842 if (dump_file)
2843 fprintf (dump_file, "Node %s promoted to executed once.\n",
2844 cgraph_node_name (node));
2845 changed = true;
2846 }
2847 return changed;
2848 }
2849
2850 /* Return true when NODE cannot return or throw and thus
2851 it is safe to ignore its side effects for IPA analysis. */
2852
2853 bool
2854 cgraph_node_cannot_return (struct cgraph_node *node)
2855 {
2856 int flags = flags_from_decl_or_type (node->decl);
2857 if (!flag_exceptions)
2858 return (flags & ECF_NORETURN) != 0;
2859 else
2860 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2861 == (ECF_NORETURN | ECF_NOTHROW));
2862 }
2863
2864 /* Return true when the call of E cannot lead to a return from the caller
2865 and thus it is safe to ignore its side effects for IPA analysis
2866 when computing side effects of the caller.
2867 FIXME: We could actually mark all edges that have no reaching
2868 path to EXIT_BLOCK_PTR or throw to get better results. */
2869 bool
2870 cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
2871 {
2872 if (cgraph_node_cannot_return (e->caller))
2873 return true;
2874 if (e->indirect_unknown_callee)
2875 {
2876 int flags = e->indirect_info->ecf_flags;
2877 if (!flag_exceptions)
2878 return (flags & ECF_NORETURN) != 0;
2879 else
2880 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2881 == (ECF_NORETURN | ECF_NOTHROW));
2882 }
2883 else
2884 return cgraph_node_cannot_return (e->callee);
2885 }
2886
2887 /* Return true when function NODE can be removed from callgraph
2888 if all direct calls are eliminated. */
2889
2890 bool
2891 cgraph_can_remove_if_no_direct_calls_and_refs_p (struct cgraph_node *node)
2892 {
2893 gcc_assert (!node->global.inlined_to);
2894 /* Extern inlines can always go; we will use the external definition. */
2895 if (DECL_EXTERNAL (node->decl))
2896 return true;
2897 /* When the function is needed, we cannot remove it. */
2898 if (node->needed || node->reachable_from_other_partition)
2899 return false;
2900 if (DECL_STATIC_CONSTRUCTOR (node->decl)
2901 || DECL_STATIC_DESTRUCTOR (node->decl))
2902 return false;
2903 /* Only COMDAT functions can be removed if externally visible. */
2904 if (node->local.externally_visible
2905 && (!DECL_COMDAT (node->decl)
2906 || cgraph_used_from_object_file_p (node)))
2907 return false;
2908 return true;
2909 }
2910
2911 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2912
2913 static bool
2914 nonremovable_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2915 {
2916 return !cgraph_can_remove_if_no_direct_calls_and_refs_p (node);
2917 }
2918
2919 /* Return true when function NODE and its aliases can be removed from callgraph
2920 if all direct calls are eliminated. */
2921
2922 bool
2923 cgraph_can_remove_if_no_direct_calls_p (struct cgraph_node *node)
2924 {
2925 /* Extern inlines can always go; we will use the external definition. */
2926 if (DECL_EXTERNAL (node->decl))
2927 return true;
2928 if (node->address_taken)
2929 return false;
2930 return !cgraph_for_node_and_aliases (node, nonremovable_p, NULL, true);
2931 }
2932
2933 /* Worker for cgraph_will_be_removed_from_program_if_no_direct_calls. */
2934
2935 static bool
2936 used_from_object_file_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2937 {
2938 return cgraph_used_from_object_file_p (node);
2939 }
2940
2941 /* Return true when function NODE can be expected to be removed
2942 from the program when direct calls in this compilation unit are removed.
2943
2944 As a special case, COMDAT functions are
2945 cgraph_can_remove_if_no_direct_calls_p while they are not
2946 cgraph_only_called_directly_p (it is possible they are called from
2947 another unit).
2948
2949 This function behaves as cgraph_only_called_directly_p because eliminating
2950 all uses of a COMDAT function does not necessarily make it disappear from
2951 the program unless we are compiling the whole program or doing LTO. In
2952 that case we know we win, since dynamic linking will not really discard
2953 the linkonce section. */
2954
2955 bool
2956 cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
2957 {
2958 gcc_assert (!node->global.inlined_to);
2959 if (cgraph_for_node_and_aliases (node, used_from_object_file_p, NULL, true))
2960 return false;
2961 if (!in_lto_p && !flag_whole_program)
2962 return cgraph_only_called_directly_p (node);
2963 else
2964 {
2965 if (DECL_EXTERNAL (node->decl))
2966 return true;
2967 return cgraph_can_remove_if_no_direct_calls_p (node);
2968 }
2969 }
2970
2971 /* Return true when RESOLUTION indicates that the linker will use
2972 the symbol from non-LTO object files. */
2973
2974 bool
2975 resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
2976 {
2977 return (resolution == LDPR_PREVAILING_DEF
2978 || resolution == LDPR_PREEMPTED_REG
2979 || resolution == LDPR_RESOLVED_EXEC
2980 || resolution == LDPR_RESOLVED_DYN);
2981 }
2982
2983
2984 /* Return true when NODE is known to be used from other (non-LTO) object file.
2985 Known only when doing LTO via linker plugin. */
2986
2987 bool
2988 cgraph_used_from_object_file_p (struct cgraph_node *node)
2989 {
2990 gcc_assert (!node->global.inlined_to);
2991 if (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))
2992 return false;
2993 if (resolution_used_from_other_file_p (node->resolution))
2994 return true;
2995 return false;
2996 }
2997
2998 /* Worker for cgraph_only_called_directly_p. */
2999
3000 static bool
3001 cgraph_not_only_called_directly_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3002 {
3003 return !cgraph_only_called_directly_or_aliased_p (node);
3004 }
3005
3006 /* Return true when function NODE and all its aliases are only called
3007 directly;
3008 i.e. it is not externally visible, its address was not taken and
3009 it is not used in any other non-standard way. */
3010
3011 bool
3012 cgraph_only_called_directly_p (struct cgraph_node *node)
3013 {
3014 gcc_assert (cgraph_function_or_thunk_node (node, NULL) == node);
3015 return !cgraph_for_node_and_aliases (node, cgraph_not_only_called_directly_p_1,
3016 NULL, true);
3017 }
3018
3019
3020 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
3021
3022 static bool
3023 collect_callers_of_node_1 (struct cgraph_node *node, void *data)
3024 {
3025 VEC (cgraph_edge_p, heap) ** redirect_callers = (VEC (cgraph_edge_p, heap) **)data;
3026 struct cgraph_edge *cs;
3027 enum availability avail;
3028 cgraph_function_or_thunk_node (node, &avail);
3029
3030 if (avail > AVAIL_OVERWRITABLE)
3031 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
3032 if (!cs->indirect_inlining_edge)
3033 VEC_safe_push (cgraph_edge_p, heap, *redirect_callers, cs);
3034 return false;
3035 }
3036
3037 /* Collect all callers of NODE and its aliases that are known to lead to NODE
3038 (i.e. are not overwritable). */
3039
3040 VEC (cgraph_edge_p, heap) *
3041 collect_callers_of_node (struct cgraph_node *node)
3042 {
3043 VEC (cgraph_edge_p, heap) * redirect_callers = NULL;
3044 cgraph_for_node_and_aliases (node, collect_callers_of_node_1,
3045 &redirect_callers, false);
3046 return redirect_callers;
3047 }
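/* Usage note (not part of the original source): the vector returned above is
   typically handed to cgraph_clone_node or cgraph_create_virtual_clone as
   REDIRECT_CALLERS so that the collected edges get redirected to the new
   clone; the caller owns the vector and frees it with VEC_free when done.  */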
3048
3049 #include "gt-cgraph.h"