1 /* Callgraph handling code.
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file contains basic routines for manipulating the call graph.
22
23 The call-graph is a data structure designed for inter-procedural optimization.
24 It represents a multi-graph where nodes are functions and edges are call sites. */
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "varasm.h"
32 #include "calls.h"
33 #include "print-tree.h"
34 #include "tree-inline.h"
35 #include "langhooks.h"
36 #include "hashtab.h"
37 #include "toplev.h"
38 #include "flags.h"
39 #include "debug.h"
40 #include "target.h"
41 #include "cgraph.h"
42 #include "intl.h"
43 #include "tree-ssa-alias.h"
44 #include "internal-fn.h"
45 #include "tree-eh.h"
46 #include "gimple-expr.h"
47 #include "gimple.h"
48 #include "gimple-iterator.h"
49 #include "timevar.h"
50 #include "dumpfile.h"
51 #include "gimple-ssa.h"
52 #include "cgraph.h"
53 #include "tree-cfg.h"
54 #include "tree-ssa.h"
55 #include "value-prof.h"
56 #include "except.h"
57 #include "diagnostic-core.h"
58 #include "rtl.h"
59 #include "ipa-utils.h"
60 #include "lto-streamer.h"
61 #include "ipa-inline.h"
62 #include "cfgloop.h"
63 #include "gimple-pretty-print.h"
64 #include "expr.h"
65 #include "tree-dfa.h"
66
67 /* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
68 #include "tree-pass.h"
69
70 static void cgraph_node_remove_callers (struct cgraph_node *node);
71 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
72 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
73
74 /* Queue of cgraph nodes scheduled to be lowered. */
75 symtab_node *x_cgraph_nodes_queue;
76 #define cgraph_nodes_queue ((struct cgraph_node *)x_cgraph_nodes_queue)
77
78 /* Number of nodes in existence. */
79 int cgraph_n_nodes;
80
81 /* Maximal uid used in cgraph nodes. */
82 int cgraph_max_uid;
83
84 /* Maximal uid used in cgraph edges. */
85 int cgraph_edge_max_uid;
86
87 /* Set when the whole unit has been analyzed so we can access global info. */
88 bool cgraph_global_info_ready = false;
89
90 /* What state callgraph is in right now. */
91 enum cgraph_state cgraph_state = CGRAPH_STATE_PARSING;
92
93 /* Set when the cgraph is fully built and the basic flags are computed. */
94 bool cgraph_function_flags_ready = false;
95
96 /* List of hooks triggered on cgraph_edge events. */
97 struct cgraph_edge_hook_list {
98 cgraph_edge_hook hook;
99 void *data;
100 struct cgraph_edge_hook_list *next;
101 };
102
103 /* List of hooks triggered on cgraph_node events. */
104 struct cgraph_node_hook_list {
105 cgraph_node_hook hook;
106 void *data;
107 struct cgraph_node_hook_list *next;
108 };
109
110 /* List of hooks triggered on events involving two cgraph_edges. */
111 struct cgraph_2edge_hook_list {
112 cgraph_2edge_hook hook;
113 void *data;
114 struct cgraph_2edge_hook_list *next;
115 };
116
117 /* List of hooks triggered on events involving two cgraph_nodes. */
118 struct cgraph_2node_hook_list {
119 cgraph_2node_hook hook;
120 void *data;
121 struct cgraph_2node_hook_list *next;
122 };
123
124 /* List of hooks triggered when an edge is removed. */
125 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
126 /* List of hooks triggered when a node is removed. */
127 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
128 /* List of hooks triggered when an edge is duplicated. */
129 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
130 /* List of hooks triggered when a node is duplicated. */
131 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
132 /* List of hooks triggered when a function is inserted. */
133 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
134
135 /* Head of a linked list of unused (freed) call graph nodes.
136 Do not GTY((delete)) this list so UIDs get reliably recycled. */
137 static GTY(()) struct cgraph_node *free_nodes;
138 /* Head of a linked list of unused (freed) call graph edges.
139 Do not GTY((delete)) this list so UIDs get reliably recycled. */
140 static GTY(()) struct cgraph_edge *free_edges;
141
142 /* Did process_same_body_aliases run? */
143 bool cpp_implicit_aliases_done;
144
145 /* Map a cgraph_node to cgraph_function_version_info using this htab.
146 The cgraph_function_version_info has a THIS_NODE field that is the
147 corresponding cgraph_node. */
148
149 static GTY((param_is (struct cgraph_function_version_info))) htab_t
150 cgraph_fnver_htab = NULL;
151
152 /* Hash function for cgraph_fnver_htab. */
153 static hashval_t
154 cgraph_fnver_htab_hash (const void *ptr)
155 {
156 int uid = ((const struct cgraph_function_version_info *)ptr)->this_node->uid;
157 return (hashval_t)(uid);
158 }
159
160 /* eq function for cgraph_fnver_htab. */
161 static int
162 cgraph_fnver_htab_eq (const void *p1, const void *p2)
163 {
164 const struct cgraph_function_version_info *n1
165 = (const struct cgraph_function_version_info *)p1;
166 const struct cgraph_function_version_info *n2
167 = (const struct cgraph_function_version_info *)p2;
168
169 return n1->this_node->uid == n2->this_node->uid;
170 }
171
172 /* GC root keeping the most recently allocated version info node alive. */
173 static GTY(()) struct cgraph_function_version_info *
174 version_info_node = NULL;
175
176 /* Get the cgraph_function_version_info node corresponding to node. */
177 struct cgraph_function_version_info *
178 get_cgraph_node_version (struct cgraph_node *node)
179 {
180 struct cgraph_function_version_info *ret;
181 struct cgraph_function_version_info key;
182 key.this_node = node;
183
184 if (cgraph_fnver_htab == NULL)
185 return NULL;
186
187 ret = (struct cgraph_function_version_info *)
188 htab_find (cgraph_fnver_htab, &key);
189
190 return ret;
191 }
192
193 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
194 corresponding to cgraph_node NODE. */
195 struct cgraph_function_version_info *
196 insert_new_cgraph_node_version (struct cgraph_node *node)
197 {
198 void **slot;
199
200 version_info_node = NULL;
201 version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
202 version_info_node->this_node = node;
203
204 if (cgraph_fnver_htab == NULL)
205 cgraph_fnver_htab = htab_create_ggc (2, cgraph_fnver_htab_hash,
206 cgraph_fnver_htab_eq, NULL);
207
208 slot = htab_find_slot (cgraph_fnver_htab, version_info_node, INSERT);
209 gcc_assert (slot != NULL);
210 *slot = version_info_node;
211 return version_info_node;
212 }
213
214 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
215 DECL is a duplicate declaration. */
216 void
217 delete_function_version (tree decl)
218 {
219 struct cgraph_node *decl_node = cgraph_get_node (decl);
220 struct cgraph_function_version_info *decl_v = NULL;
221
222 if (decl_node == NULL)
223 return;
224
225 decl_v = get_cgraph_node_version (decl_node);
226
227 if (decl_v == NULL)
228 return;
229
230 if (decl_v->prev != NULL)
231 decl_v->prev->next = decl_v->next;
232
233 if (decl_v->next != NULL)
234 decl_v->next->prev = decl_v->prev;
235
236 if (cgraph_fnver_htab != NULL)
237 htab_remove_elt (cgraph_fnver_htab, decl_v);
238
239 cgraph_remove_node (decl_node);
240 }
241
242 /* Record that DECL1 and DECL2 are semantically identical function
243 versions. */
244 void
245 record_function_versions (tree decl1, tree decl2)
246 {
247 struct cgraph_node *decl1_node = cgraph_get_create_node (decl1);
248 struct cgraph_node *decl2_node = cgraph_get_create_node (decl2);
249 struct cgraph_function_version_info *decl1_v = NULL;
250 struct cgraph_function_version_info *decl2_v = NULL;
251 struct cgraph_function_version_info *before;
252 struct cgraph_function_version_info *after;
253
254 gcc_assert (decl1_node != NULL && decl2_node != NULL);
255 decl1_v = get_cgraph_node_version (decl1_node);
256 decl2_v = get_cgraph_node_version (decl2_node);
257
258 if (decl1_v != NULL && decl2_v != NULL)
259 return;
260
261 if (decl1_v == NULL)
262 decl1_v = insert_new_cgraph_node_version (decl1_node);
263
264 if (decl2_v == NULL)
265 decl2_v = insert_new_cgraph_node_version (decl2_node);
266
267 /* Chain decl2_v and decl1_v. All semantically identical versions
268 will be chained together. */
269
270 before = decl1_v;
271 after = decl2_v;
272
273 while (before->next != NULL)
274 before = before->next;
275
276 while (after->prev != NULL)
277 after= after->prev;
278
279 before->next = after;
280 after->prev = before;
281 }
282
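/* A minimal usage sketch of the version chain maintained above: a front end
   holding two semantically identical declarations FN1 and FN2 (hypothetical
   names) records them and then walks the resulting chain.  Illustrative
   only; it simply exercises the interface defined in this file.  */

static void
sketch_dump_function_versions (tree fn1, tree fn2)
{
  /* Chain the two versions together; cgraph nodes are created on demand.  */
  record_function_versions (fn1, fn2);

  /* Rewind to the head of the chain and dump every version.  */
  struct cgraph_function_version_info *v
    = get_cgraph_node_version (cgraph_get_node (fn1));
  while (v && v->prev)
    v = v->prev;
  for (; v; v = v->next)
    if (dump_file)
      fprintf (dump_file, "version: %s/%i\n",
	       v->this_node->name (), v->this_node->order);
}
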
283 /* Macros to access the next item in the list of free cgraph nodes and
284 edges. */
285 #define NEXT_FREE_NODE(NODE) cgraph ((NODE)->next)
286 #define SET_NEXT_FREE_NODE(NODE,NODE2) ((NODE))->next = NODE2
287 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
288
289 /* Register HOOK to be called with DATA on each removed edge. */
290 struct cgraph_edge_hook_list *
291 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
292 {
293 struct cgraph_edge_hook_list *entry;
294 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
295
296 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
297 entry->hook = hook;
298 entry->data = data;
299 entry->next = NULL;
300 while (*ptr)
301 ptr = &(*ptr)->next;
302 *ptr = entry;
303 return entry;
304 }
305
306 /* Remove ENTRY from the list of hooks called on removing edges. */
307 void
308 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
309 {
310 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
311
312 while (*ptr != entry)
313 ptr = &(*ptr)->next;
314 *ptr = entry->next;
315 free (entry);
316 }
317
318 /* Call all edge removal hooks. */
319 static void
320 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
321 {
322 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
323 while (entry)
324 {
325 entry->hook (e, entry->data);
326 entry = entry->next;
327 }
328 }
329
330 /* Register HOOK to be called with DATA on each removed node. */
331 struct cgraph_node_hook_list *
332 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
333 {
334 struct cgraph_node_hook_list *entry;
335 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
336
337 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
338 entry->hook = hook;
339 entry->data = data;
340 entry->next = NULL;
341 while (*ptr)
342 ptr = &(*ptr)->next;
343 *ptr = entry;
344 return entry;
345 }
346
347 /* Remove ENTRY from the list of hooks called on removing nodes. */
348 void
349 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
350 {
351 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
352
353 while (*ptr != entry)
354 ptr = &(*ptr)->next;
355 *ptr = entry->next;
356 free (entry);
357 }
358
359 /* Call all node removal hooks. */
360 static void
361 cgraph_call_node_removal_hooks (struct cgraph_node *node)
362 {
363 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
364 while (entry)
365 {
366 entry->hook (node, entry->data);
367 entry = entry->next;
368 }
369 }
370
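/* Illustrative sketch of the hook lists above: an IPA pass that keeps
   per-node summaries (hypothetical) would register a removal hook so its
   summary can be dropped whenever a node disappears from the callgraph.
   Both helper names are made up for this example.  */

static void
sketch_note_node_removal (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (dump_file)
    fprintf (dump_file, "node %s/%i is being removed\n",
	     node->name (), node->order);
}

static struct cgraph_node_hook_list *
sketch_register_removal_hook (void)
{
  /* The returned entry must be remembered so the hook can later be
     unregistered with cgraph_remove_node_removal_hook.  */
  return cgraph_add_node_removal_hook (sketch_note_node_removal, NULL);
}
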
371 /* Register HOOK to be called with DATA on each inserted node. */
372 struct cgraph_node_hook_list *
373 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
374 {
375 struct cgraph_node_hook_list *entry;
376 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
377
378 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
379 entry->hook = hook;
380 entry->data = data;
381 entry->next = NULL;
382 while (*ptr)
383 ptr = &(*ptr)->next;
384 *ptr = entry;
385 return entry;
386 }
387
388 /* Remove ENTRY from the list of hooks called on inserted nodes. */
389 void
390 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
391 {
392 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
393
394 while (*ptr != entry)
395 ptr = &(*ptr)->next;
396 *ptr = entry->next;
397 free (entry);
398 }
399
400 /* Call all node insertion hooks. */
401 void
402 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
403 {
404 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
405 while (entry)
406 {
407 entry->hook (node, entry->data);
408 entry = entry->next;
409 }
410 }
411
412 /* Register HOOK to be called with DATA on each duplicated edge. */
413 struct cgraph_2edge_hook_list *
414 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
415 {
416 struct cgraph_2edge_hook_list *entry;
417 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
418
419 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
420 entry->hook = hook;
421 entry->data = data;
422 entry->next = NULL;
423 while (*ptr)
424 ptr = &(*ptr)->next;
425 *ptr = entry;
426 return entry;
427 }
428
429 /* Remove ENTRY from the list of hooks called on duplicating edges. */
430 void
431 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
432 {
433 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
434
435 while (*ptr != entry)
436 ptr = &(*ptr)->next;
437 *ptr = entry->next;
438 free (entry);
439 }
440
441 /* Call all edge duplication hooks. */
442 void
443 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
444 struct cgraph_edge *cs2)
445 {
446 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
447 while (entry)
448 {
449 entry->hook (cs1, cs2, entry->data);
450 entry = entry->next;
451 }
452 }
453
454 /* Register HOOK to be called with DATA on each duplicated node. */
455 struct cgraph_2node_hook_list *
456 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
457 {
458 struct cgraph_2node_hook_list *entry;
459 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
460
461 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
462 entry->hook = hook;
463 entry->data = data;
464 entry->next = NULL;
465 while (*ptr)
466 ptr = &(*ptr)->next;
467 *ptr = entry;
468 return entry;
469 }
470
471 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
472 void
473 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
474 {
475 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
476
477 while (*ptr != entry)
478 ptr = &(*ptr)->next;
479 *ptr = entry->next;
480 free (entry);
481 }
482
483 /* Call all node duplication hooks. */
484 void
485 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
486 struct cgraph_node *node2)
487 {
488 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
489 while (entry)
490 {
491 entry->hook (node1, node2, entry->data);
492 entry = entry->next;
493 }
494 }
495
496 /* Allocate new callgraph node. */
497
498 static inline struct cgraph_node *
499 cgraph_allocate_node (void)
500 {
501 struct cgraph_node *node;
502
503 if (free_nodes)
504 {
505 node = free_nodes;
506 free_nodes = NEXT_FREE_NODE (node);
507 }
508 else
509 {
510 node = ggc_cleared_alloc<cgraph_node> ();
511 node->uid = cgraph_max_uid++;
512 }
513
514 return node;
515 }
516
517 /* Allocate new callgraph node and insert it into basic data structures. */
518
519 struct cgraph_node *
520 cgraph_create_empty_node (void)
521 {
522 struct cgraph_node *node = cgraph_allocate_node ();
523
524 node->type = SYMTAB_FUNCTION;
525 node->frequency = NODE_FREQUENCY_NORMAL;
526 node->count_materialization_scale = REG_BR_PROB_BASE;
527 cgraph_n_nodes++;
528 return node;
529 }
530
531 /* Create a new cgraph node for DECL and register it in the symbol table. */
532
533 struct cgraph_node *
534 cgraph_create_node (tree decl)
535 {
536 struct cgraph_node *node = cgraph_create_empty_node ();
537 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
538
539 node->decl = decl;
540 symtab_register_node (node);
541
542 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
543 {
544 node->origin = cgraph_get_create_node (DECL_CONTEXT (decl));
545 node->next_nested = node->origin->nested;
546 node->origin->nested = node;
547 }
548 return node;
549 }
550
551 /* Try to find a call graph node for declaration DECL and if it does not exist
552 or if it corresponds to an inline clone, create a new one. */
553
554 struct cgraph_node *
555 cgraph_get_create_node (tree decl)
556 {
557 struct cgraph_node *first_clone = cgraph_get_node (decl);
558
559 if (first_clone && !first_clone->global.inlined_to)
560 return first_clone;
561
562 struct cgraph_node *node = cgraph_create_node (decl);
563 if (first_clone)
564 {
565 first_clone->clone_of = node;
566 node->clones = first_clone;
567 symtab_prevail_in_asm_name_hash (node);
568 symtab_insert_node_to_hashtable (node);
569 if (dump_file)
570 fprintf (dump_file, "Introduced new external node "
571 "(%s/%i) and turned into root of the clone tree.\n",
572 xstrdup (node->name ()), node->order);
573 }
574 else if (dump_file)
575 fprintf (dump_file, "Introduced new external node "
576 "(%s/%i).\n", xstrdup (node->name ()),
577 node->order);
578 return node;
579 }
580
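/* A short sketch of how the lookup/creation routines above are typically
   used: when a pass discovers a call to some FUNCTION_DECL it asks for the
   corresponding node, creating one if the declaration has not been seen
   yet.  FNDECL and the helper name are hypothetical.  */

static struct cgraph_node *
sketch_node_for_call_target (tree fndecl)
{
  gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);
  /* Unlike cgraph_get_node, this never returns NULL and never returns an
     inline clone; a fresh external node is created when necessary.  */
  return cgraph_get_create_node (fndecl);
}
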
581 /* Mark ALIAS as an alias to TARGET. TARGET is either the FUNCTION_DECL whose
582 body the alias resolves to, or an assembler name (IDENTIFIER_NODE). */
583
584 struct cgraph_node *
585 cgraph_create_function_alias (tree alias, tree target)
586 {
587 struct cgraph_node *alias_node;
588
589 gcc_assert (TREE_CODE (target) == FUNCTION_DECL
590 || TREE_CODE (target) == IDENTIFIER_NODE);
591 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
592 alias_node = cgraph_get_create_node (alias);
593 gcc_assert (!alias_node->definition);
594 alias_node->alias_target = target;
595 alias_node->definition = true;
596 alias_node->alias = true;
597 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
598 alias_node->weakref = true;
599 return alias_node;
600 }
601
602 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
603 and NULL otherwise.
604 Same body aliases are output whenever the body of DECL is output,
605 and cgraph_get_node (ALIAS) transparently returns cgraph_get_node (DECL). */
606
607 struct cgraph_node *
608 cgraph_same_body_alias (struct cgraph_node *decl_node ATTRIBUTE_UNUSED, tree alias, tree decl)
609 {
610 struct cgraph_node *n;
611 #ifndef ASM_OUTPUT_DEF
612 /* If aliases aren't supported by the assembler, fail. */
613 return NULL;
614 #endif
615 /* Langhooks can create same body aliases of symbols not defined.
616 Those are useless. Drop them on the floor. */
617 if (cgraph_global_info_ready)
618 return NULL;
619
620 n = cgraph_create_function_alias (alias, decl);
621 n->cpp_implicit_alias = true;
622 if (cpp_implicit_aliases_done)
623 symtab_resolve_alias (n,
624 cgraph_get_node (decl));
625 return n;
626 }
627
628 /* Add a thunk alias into the callgraph. The alias declaration is ALIAS and it
629 aliases DECL with adjustments made to the first parameter.
630 See comments in thunk_adjust for details on the parameters. */
631
632 struct cgraph_node *
633 cgraph_add_thunk (struct cgraph_node *decl_node ATTRIBUTE_UNUSED,
634 tree alias, tree decl ATTRIBUTE_UNUSED,
635 bool this_adjusting,
636 HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
637 tree virtual_offset,
638 tree real_alias)
639 {
640 struct cgraph_node *node;
641
642 node = cgraph_get_node (alias);
643 if (node)
644 {
645 gcc_assert (node->definition);
646 gcc_assert (!node->alias);
647 gcc_assert (!node->thunk.thunk_p);
648 cgraph_remove_node (node);
649 }
650
651 node = cgraph_create_node (alias);
652 gcc_checking_assert (!virtual_offset
653 || wi::eq_p (virtual_offset, virtual_value));
654 node->thunk.fixed_offset = fixed_offset;
655 node->thunk.this_adjusting = this_adjusting;
656 node->thunk.virtual_value = virtual_value;
657 node->thunk.virtual_offset_p = virtual_offset != NULL;
658 node->thunk.alias = real_alias;
659 node->thunk.thunk_p = true;
660 node->definition = true;
661
662 return node;
663 }
664
665 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
666 Return NULL if there's no such node. */
667
668 struct cgraph_node *
669 cgraph_node_for_asm (tree asmname)
670 {
671 /* We do not want to look at inline clones. */
672 for (symtab_node *node = symtab_node_for_asm (asmname);
673 node;
674 node = node->next_sharing_asm_name)
675 {
676 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
677 if (cn && !cn->global.inlined_to)
678 return cn;
679 }
680 return NULL;
681 }
682
683 /* Returns a hash value for X (which really is a cgraph_edge). */
684
685 static hashval_t
686 edge_hash (const void *x)
687 {
688 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
689 }
690
691 /* Return nonzero if the call_stmt of cgraph_edge X is the statement Y. */
692
693 static int
694 edge_eq (const void *x, const void *y)
695 {
696 return ((const struct cgraph_edge *) x)->call_stmt == y;
697 }
698
699 /* Update the entry for call graph edge E in the call site hash of its caller. */
700
701 static inline void
702 cgraph_update_edge_in_call_site_hash (struct cgraph_edge *e)
703 {
704 void **slot;
705 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
706 e->call_stmt,
707 htab_hash_pointer (e->call_stmt),
708 INSERT);
709 *slot = e;
710 }
711
712 /* Add call graph edge E to call site hash of its caller. */
713
714 static inline void
715 cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
716 {
717 void **slot;
718 /* There are two speculative edges for every statement (one direct,
719 one indirect); always hash the direct one. */
720 if (e->speculative && e->indirect_unknown_callee)
721 return;
722 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
723 e->call_stmt,
724 htab_hash_pointer (e->call_stmt),
725 INSERT);
726 if (*slot)
727 {
728 gcc_assert (((struct cgraph_edge *)*slot)->speculative);
729 if (e->callee)
730 *slot = e;
731 return;
732 }
733 gcc_assert (!*slot || e->speculative);
734 *slot = e;
735 }
736
737 /* Return the callgraph edge representing the GIMPLE_CALL statement
738 CALL_STMT. */
739
740 struct cgraph_edge *
741 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
742 {
743 struct cgraph_edge *e, *e2;
744 int n = 0;
745
746 if (node->call_site_hash)
747 return (struct cgraph_edge *)
748 htab_find_with_hash (node->call_site_hash, call_stmt,
749 htab_hash_pointer (call_stmt));
750
751 /* This loop may turn out to be a performance problem. In that case, adding
752 hash tables to call nodes with very many edges is probably the best
753 solution. It is not a good idea to add a pointer into the CALL_EXPR itself,
754 because we want to allow multiple cgraph nodes representing
755 different clones of the same body before the body is actually cloned. */
756 for (e = node->callees; e; e = e->next_callee)
757 {
758 if (e->call_stmt == call_stmt)
759 break;
760 n++;
761 }
762
763 if (!e)
764 for (e = node->indirect_calls; e; e = e->next_callee)
765 {
766 if (e->call_stmt == call_stmt)
767 break;
768 n++;
769 }
770
771 if (n > 100)
772 {
773 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
774 for (e2 = node->callees; e2; e2 = e2->next_callee)
775 cgraph_add_edge_to_call_site_hash (e2);
776 for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
777 cgraph_add_edge_to_call_site_hash (e2);
778 }
779
780 return e;
781 }
782
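/* Usage sketch for cgraph_edge above: map a GIMPLE call statement back to
   its callgraph edge inside the function currently being processed.  The
   lookup is linear for small callers and switches to the call-site hash
   once more than 100 edges have been scanned.  CALLER, STMT and the helper
   name are hypothetical.  */

static struct cgraph_edge *
sketch_edge_for_call (struct cgraph_node *caller, gimple stmt)
{
  gcc_assert (is_gimple_call (stmt));
  struct cgraph_edge *e = cgraph_edge (caller, stmt);
  if (e && dump_file)
    fprintf (dump_file, "call %s -> %s\n",
	     e->caller->name (),
	     e->callee ? e->callee->name () : "<indirect>");
  return e;
}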
783
784 /* Change field call_stmt of edge E to NEW_STMT.
785 If UPDATE_SPECULATIVE is true and E is any component of a speculative
786 edge, then update all components. */
787
788 void
789 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt,
790 bool update_speculative)
791 {
792 tree decl;
793
794 /* Speculative edges have three components; update all of them
795 when asked to. */
796 if (update_speculative && e->speculative)
797 {
798 struct cgraph_edge *direct, *indirect;
799 struct ipa_ref *ref;
800
801 cgraph_speculative_call_info (e, direct, indirect, ref);
802 cgraph_set_call_stmt (direct, new_stmt, false);
803 cgraph_set_call_stmt (indirect, new_stmt, false);
804 ref->stmt = new_stmt;
805 return;
806 }
807
808 /* Only direct speculative edges go to call_site_hash. */
809 if (e->caller->call_site_hash
810 && (!e->speculative || !e->indirect_unknown_callee))
811 {
812 htab_remove_elt_with_hash (e->caller->call_site_hash,
813 e->call_stmt,
814 htab_hash_pointer (e->call_stmt));
815 }
816
817 e->call_stmt = new_stmt;
818 if (e->indirect_unknown_callee
819 && (decl = gimple_call_fndecl (new_stmt)))
820 {
821 /* Constant propagation (and possibly also inlining?) can turn an
822 indirect call into a direct one. */
823 struct cgraph_node *new_callee = cgraph_get_node (decl);
824
825 gcc_checking_assert (new_callee);
826 e = cgraph_make_edge_direct (e, new_callee);
827 }
828
829 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
830 e->can_throw_external = stmt_can_throw_external (new_stmt);
831 pop_cfun ();
832 if (e->caller->call_site_hash)
833 cgraph_add_edge_to_call_site_hash (e);
834 }
835
836 /* Allocate a cgraph_edge structure and fill it with data according to the
837 parameters, of which only CALLEE can be NULL (when creating an indirect call
838 edge). */
839
840 static struct cgraph_edge *
841 cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
842 gimple call_stmt, gcov_type count, int freq,
843 bool indir_unknown_callee)
844 {
845 struct cgraph_edge *edge;
846
847 /* LTO does not actually have access to the call_stmt since these
848 have not been loaded yet. */
849 if (call_stmt)
850 {
851 /* This is a rather expensive check possibly triggering
852 construction of call stmt hashtable. */
853 #ifdef ENABLE_CHECKING
854 struct cgraph_edge *e;
855 gcc_checking_assert (!(e=cgraph_edge (caller, call_stmt)) || e->speculative);
856 #endif
857
858 gcc_assert (is_gimple_call (call_stmt));
859 }
860
861 if (free_edges)
862 {
863 edge = free_edges;
864 free_edges = NEXT_FREE_EDGE (edge);
865 }
866 else
867 {
868 edge = ggc_alloc<struct cgraph_edge> ();
869 edge->uid = cgraph_edge_max_uid++;
870 }
871
872 edge->aux = NULL;
873 edge->caller = caller;
874 edge->callee = callee;
875 edge->prev_caller = NULL;
876 edge->next_caller = NULL;
877 edge->prev_callee = NULL;
878 edge->next_callee = NULL;
879 edge->lto_stmt_uid = 0;
880
881 edge->count = count;
882 gcc_assert (count >= 0);
883 edge->frequency = freq;
884 gcc_assert (freq >= 0);
885 gcc_assert (freq <= CGRAPH_FREQ_MAX);
886
887 edge->call_stmt = call_stmt;
888 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
889 edge->can_throw_external
890 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
891 pop_cfun ();
892 if (call_stmt
893 && callee && callee->decl
894 && !gimple_check_call_matching_types (call_stmt, callee->decl,
895 false))
896 edge->call_stmt_cannot_inline_p = true;
897 else
898 edge->call_stmt_cannot_inline_p = false;
899
900 edge->indirect_info = NULL;
901 edge->indirect_inlining_edge = 0;
902 edge->speculative = false;
903 edge->indirect_unknown_callee = indir_unknown_callee;
904 if (call_stmt && caller->call_site_hash)
905 cgraph_add_edge_to_call_site_hash (edge);
906
907 return edge;
908 }
909
910 /* Create edge from CALLER to CALLEE in the cgraph. */
911
912 struct cgraph_edge *
913 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
914 gimple call_stmt, gcov_type count, int freq)
915 {
916 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
917 count, freq, false);
918
919 initialize_inline_failed (edge);
920
921 edge->next_caller = callee->callers;
922 if (callee->callers)
923 callee->callers->prev_caller = edge;
924 edge->next_callee = caller->callees;
925 if (caller->callees)
926 caller->callees->prev_callee = edge;
927 caller->callees = edge;
928 callee->callers = edge;
929
930 return edge;
931 }
932
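/* A hedged sketch of how a pass that materializes a new call statement
   would register it with the callgraph: the profile count and frequency
   are derived from the basic block holding the new statement, mirroring
   what cgraph_update_edges_for_call_stmt_node does later in this file.
   CALLER, NEW_STMT and the helper name are hypothetical.  */

static struct cgraph_edge *
sketch_record_new_call (struct cgraph_node *caller, gimple new_stmt)
{
  tree fndecl = gimple_call_fndecl (new_stmt);
  gcc_assert (fndecl != NULL_TREE);

  basic_block bb = gimple_bb (new_stmt);
  int freq = compute_call_stmt_bb_frequency (caller->decl, bb);
  return cgraph_create_edge (caller, cgraph_get_create_node (fndecl),
			     new_stmt, bb->count, freq);
}
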
933 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
934
935 struct cgraph_indirect_call_info *
936 cgraph_allocate_init_indirect_info (void)
937 {
938 struct cgraph_indirect_call_info *ii;
939
940 ii = ggc_cleared_alloc<cgraph_indirect_call_info> ();
941 ii->param_index = -1;
942 return ii;
943 }
944
945 /* Create an indirect edge from CALLER for call statement CALL_STMT with a
946 yet-undetermined callee. ECF_FLAGS are the ECF flags of the call.
947 Polymorphic call information is recorded when it can be determined. */
948
949 struct cgraph_edge *
950 cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
951 int ecf_flags,
952 gcov_type count, int freq)
953 {
954 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
955 count, freq, true);
956 tree target;
957
958 initialize_inline_failed (edge);
959
960 edge->indirect_info = cgraph_allocate_init_indirect_info ();
961 edge->indirect_info->ecf_flags = ecf_flags;
962
963 /* Record polymorphic call info. */
964 if (call_stmt
965 && (target = gimple_call_fn (call_stmt))
966 && virtual_method_call_p (target))
967 {
968 tree otr_type;
969 HOST_WIDE_INT otr_token;
970 ipa_polymorphic_call_context context;
971
972 get_polymorphic_call_info (caller->decl,
973 target,
974 &otr_type, &otr_token,
975 &context);
976
977 /* Only record types can have virtual calls. */
978 gcc_assert (TREE_CODE (otr_type) == RECORD_TYPE);
979 edge->indirect_info->polymorphic = true;
980 edge->indirect_info->param_index = -1;
981 edge->indirect_info->otr_token = otr_token;
982 edge->indirect_info->otr_type = otr_type;
983 edge->indirect_info->outer_type = context.outer_type;
984 edge->indirect_info->offset = context.offset;
985 edge->indirect_info->maybe_in_construction
986 = context.maybe_in_construction;
987 edge->indirect_info->maybe_derived_type = context.maybe_derived_type;
988 }
989
990 edge->next_callee = caller->indirect_calls;
991 if (caller->indirect_calls)
992 caller->indirect_calls->prev_callee = edge;
993 caller->indirect_calls = edge;
994
995 return edge;
996 }
997
998 /* Remove the edge E from the list of the callers of the callee. */
999
1000 static inline void
1001 cgraph_edge_remove_callee (struct cgraph_edge *e)
1002 {
1003 gcc_assert (!e->indirect_unknown_callee);
1004 if (e->prev_caller)
1005 e->prev_caller->next_caller = e->next_caller;
1006 if (e->next_caller)
1007 e->next_caller->prev_caller = e->prev_caller;
1008 if (!e->prev_caller)
1009 e->callee->callers = e->next_caller;
1010 }
1011
1012 /* Remove the edge E from the list of the callees of the caller. */
1013
1014 static inline void
1015 cgraph_edge_remove_caller (struct cgraph_edge *e)
1016 {
1017 if (e->prev_callee)
1018 e->prev_callee->next_callee = e->next_callee;
1019 if (e->next_callee)
1020 e->next_callee->prev_callee = e->prev_callee;
1021 if (!e->prev_callee)
1022 {
1023 if (e->indirect_unknown_callee)
1024 e->caller->indirect_calls = e->next_callee;
1025 else
1026 e->caller->callees = e->next_callee;
1027 }
1028 if (e->caller->call_site_hash)
1029 htab_remove_elt_with_hash (e->caller->call_site_hash,
1030 e->call_stmt,
1031 htab_hash_pointer (e->call_stmt));
1032 }
1033
1034 /* Put the edge onto the free list. */
1035
1036 static void
1037 cgraph_free_edge (struct cgraph_edge *e)
1038 {
1039 int uid = e->uid;
1040
1041 if (e->indirect_info)
1042 ggc_free (e->indirect_info);
1043
1044 /* Clear out the edge so we do not dangle pointers. */
1045 memset (e, 0, sizeof (*e));
1046 e->uid = uid;
1047 NEXT_FREE_EDGE (e) = free_edges;
1048 free_edges = e;
1049 }
1050
1051 /* Remove the edge E in the cgraph. */
1052
1053 void
1054 cgraph_remove_edge (struct cgraph_edge *e)
1055 {
1056 /* Call all edge removal hooks. */
1057 cgraph_call_edge_removal_hooks (e);
1058
1059 if (!e->indirect_unknown_callee)
1060 /* Remove from callers list of the callee. */
1061 cgraph_edge_remove_callee (e);
1062
1063 /* Remove from callees list of the caller. */
1064 cgraph_edge_remove_caller (e);
1065
1066 /* Put the edge onto the free list. */
1067 cgraph_free_edge (e);
1068 }
1069
1070 /* Set callee of call graph edge E and add it to the corresponding set of
1071 callers. */
1072
1073 static void
1074 cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1075 {
1076 e->prev_caller = NULL;
1077 if (n->callers)
1078 n->callers->prev_caller = e;
1079 e->next_caller = n->callers;
1080 n->callers = e;
1081 e->callee = n;
1082 }
1083
1084 /* Turn edge E into speculative call calling N2. Update
1085 the profile so the direct call is taken COUNT times
1086 with FREQUENCY.
1087
1088 At clone materialization time, the indirect call E will
1089 be expanded as:
1090
1091 if (call_dest == N2)
1092 n2 ();
1093 else
1094 call call_dest
1095
1096 At this time the function just creates the direct call and
1097 the reference representing the if conditional, and attaches
1098 them all to the original indirect call statement.
1099
1100 Return direct edge created. */
1101
1102 struct cgraph_edge *
1103 cgraph_turn_edge_to_speculative (struct cgraph_edge *e,
1104 struct cgraph_node *n2,
1105 gcov_type direct_count,
1106 int direct_frequency)
1107 {
1108 struct cgraph_node *n = e->caller;
1109 struct ipa_ref *ref;
1110 struct cgraph_edge *e2;
1111
1112 if (dump_file)
1113 {
1114 fprintf (dump_file, "Indirect call -> speculative call"
1115 " %s/%i => %s/%i\n",
1116 xstrdup (n->name ()), n->order,
1117 xstrdup (n2->name ()), n2->order);
1118 }
1119 e->speculative = true;
1120 e2 = cgraph_create_edge (n, n2, e->call_stmt, direct_count, direct_frequency);
1121 initialize_inline_failed (e2);
1122 e2->speculative = true;
1123 if (TREE_NOTHROW (n2->decl))
1124 e2->can_throw_external = false;
1125 else
1126 e2->can_throw_external = e->can_throw_external;
1127 e2->lto_stmt_uid = e->lto_stmt_uid;
1128 e->count -= e2->count;
1129 e->frequency -= e2->frequency;
1130 cgraph_call_edge_duplication_hooks (e, e2);
1131 ref = ipa_record_reference (n, n2,
1132 IPA_REF_ADDR, e->call_stmt);
1133 ref->lto_stmt_uid = e->lto_stmt_uid;
1134 ref->speculative = e->speculative;
1135 cgraph_mark_address_taken_node (n2);
1136 return e2;
1137 }
1138
1139 /* A speculative call consists of three components:
1140 1) an indirect edge representing the original call,
1141 2) a direct edge representing the new call,
1142 3) an ADDR_EXPR reference representing the speculative check.
1143 All three components are attached to a single statement (the indirect
1144 call) and if one of them exists, all of them must exist.
1145
1146 Given a speculative call edge E, return all three components.
1147 */
1148
1149 void
1150 cgraph_speculative_call_info (struct cgraph_edge *e,
1151 struct cgraph_edge *&direct,
1152 struct cgraph_edge *&indirect,
1153 struct ipa_ref *&reference)
1154 {
1155 struct ipa_ref *ref;
1156 int i;
1157 struct cgraph_edge *e2;
1158
1159 if (!e->indirect_unknown_callee)
1160 for (e2 = e->caller->indirect_calls;
1161 e2->call_stmt != e->call_stmt || e2->lto_stmt_uid != e->lto_stmt_uid;
1162 e2 = e2->next_callee)
1163 ;
1164 else
1165 {
1166 e2 = e;
1167 /* We can take advantage of the call stmt hash. */
1168 if (e2->call_stmt)
1169 {
1170 e = cgraph_edge (e->caller, e2->call_stmt);
1171 gcc_assert (e->speculative && !e->indirect_unknown_callee);
1172 }
1173 else
1174 for (e = e->caller->callees;
1175 e2->call_stmt != e->call_stmt
1176 || e2->lto_stmt_uid != e->lto_stmt_uid;
1177 e = e->next_callee)
1178 ;
1179 }
1180 gcc_assert (e->speculative && e2->speculative);
1181 direct = e;
1182 indirect = e2;
1183
1184 reference = NULL;
1185 for (i = 0; ipa_ref_list_reference_iterate (&e->caller->ref_list,
1186 i, ref); i++)
1187 if (ref->speculative
1188 && ((ref->stmt && ref->stmt == e->call_stmt)
1189 || (!ref->stmt && ref->lto_stmt_uid == e->lto_stmt_uid)))
1190 {
1191 reference = ref;
1192 break;
1193 }
1194
1195 /* A speculative edge always consists of all three components: the direct edge,
1196 the indirect edge and the reference. */
1197
1198 gcc_assert (e && e2 && ref);
1199 }
1200
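/* Usage sketch for cgraph_speculative_call_info above: given any one
   component of a speculative call, recover all three and dump the pair of
   caller and speculated target.  The helper name is hypothetical.  */

static void
sketch_dump_speculative_call (struct cgraph_edge *e)
{
  struct cgraph_edge *direct, *indirect;
  struct ipa_ref *ref;

  gcc_assert (e->speculative);
  cgraph_speculative_call_info (e, direct, indirect, ref);
  gcc_assert (direct && indirect && ref);
  if (dump_file)
    fprintf (dump_file, "speculative call in %s/%i: direct target %s/%i\n",
	     indirect->caller->name (), indirect->caller->order,
	     direct->callee->name (), direct->callee->order);
}
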
1201 /* Redirect the callee of E to N. The function does not update the underlying
1202 call expression. */
1203
1204 void
1205 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1206 {
1207 /* Remove from callers list of the current callee. */
1208 cgraph_edge_remove_callee (e);
1209
1210 /* Insert to callers list of the new callee. */
1211 cgraph_set_edge_callee (e, n);
1212 }
1213
1214 /* Speculative call EDGE turned out to be a direct call to CALLEE_DECL.
1215 Remove the speculative call sequence and return the edge representing the call.
1216 It is up to the caller to redirect the call as appropriate. */
1217
1218 struct cgraph_edge *
1219 cgraph_resolve_speculation (struct cgraph_edge *edge, tree callee_decl)
1220 {
1221 struct cgraph_edge *e2;
1222 struct ipa_ref *ref;
1223
1224 gcc_assert (edge->speculative);
1225 cgraph_speculative_call_info (edge, e2, edge, ref);
1226 if (!callee_decl
1227 || !symtab_semantically_equivalent_p (ref->referred,
1228 symtab_get_node (callee_decl)))
1229 {
1230 if (dump_file)
1231 {
1232 if (callee_decl)
1233 {
1234 fprintf (dump_file, "Speculative indirect call %s/%i => %s/%i has "
1235 "turned out to have contradicting known target ",
1236 xstrdup (edge->caller->name ()), edge->caller->order,
1237 xstrdup (e2->callee->name ()), e2->callee->order);
1238 print_generic_expr (dump_file, callee_decl, 0);
1239 fprintf (dump_file, "\n");
1240 }
1241 else
1242 {
1243 fprintf (dump_file, "Removing speculative call %s/%i => %s/%i\n",
1244 xstrdup (edge->caller->name ()), edge->caller->order,
1245 xstrdup (e2->callee->name ()), e2->callee->order);
1246 }
1247 }
1248 }
1249 else
1250 {
1251 struct cgraph_edge *tmp = edge;
1252 if (dump_file)
1253 fprintf (dump_file, "Speculative call turned into direct call.\n");
1254 edge = e2;
1255 e2 = tmp;
1256 /* FIXME: If EDGE is inlined, we should scale up the frequencies and counts
1257 in the functions inlined through it. */
1258 }
1259 edge->count += e2->count;
1260 edge->frequency += e2->frequency;
1261 if (edge->frequency > CGRAPH_FREQ_MAX)
1262 edge->frequency = CGRAPH_FREQ_MAX;
1263 edge->speculative = false;
1264 e2->speculative = false;
1265 ipa_remove_reference (ref);
1266 if (e2->indirect_unknown_callee || e2->inline_failed)
1267 cgraph_remove_edge (e2);
1268 else
1269 cgraph_remove_node_and_inline_clones (e2->callee, NULL);
1270 if (edge->caller->call_site_hash)
1271 cgraph_update_edge_in_call_site_hash (edge);
1272 return edge;
1273 }
1274
1275 /* Make an indirect EDGE with an unknown callee an ordinary edge leading to
1276 CALLEE. */
1278
1279 struct cgraph_edge *
1280 cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee)
1281 {
1282 gcc_assert (edge->indirect_unknown_callee);
1283
1284 /* If we are redirecting a speculative call, make it non-speculative. */
1285 if (edge->indirect_unknown_callee && edge->speculative)
1286 {
1287 edge = cgraph_resolve_speculation (edge, callee->decl);
1288
1289 /* On successful speculation just return the pre-existing direct edge. */
1290 if (!edge->indirect_unknown_callee)
1291 return edge;
1292 }
1293
1294 edge->indirect_unknown_callee = 0;
1295 ggc_free (edge->indirect_info);
1296 edge->indirect_info = NULL;
1297
1298 /* Get the edge out of the indirect edge list. */
1299 if (edge->prev_callee)
1300 edge->prev_callee->next_callee = edge->next_callee;
1301 if (edge->next_callee)
1302 edge->next_callee->prev_callee = edge->prev_callee;
1303 if (!edge->prev_callee)
1304 edge->caller->indirect_calls = edge->next_callee;
1305
1306 /* Put it into the normal callee list. */
1307 edge->prev_callee = NULL;
1308 edge->next_callee = edge->caller->callees;
1309 if (edge->caller->callees)
1310 edge->caller->callees->prev_callee = edge;
1311 edge->caller->callees = edge;
1312
1313 /* Insert to callers list of the new callee. */
1314 cgraph_set_edge_callee (edge, callee);
1315
1316 if (edge->call_stmt)
1317 edge->call_stmt_cannot_inline_p
1318 = !gimple_check_call_matching_types (edge->call_stmt, callee->decl,
1319 false);
1320
1321 /* We need to re-determine the inlining status of the edge. */
1322 initialize_inline_failed (edge);
1323 return edge;
1324 }
1325
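/* Sketch of the typical devirtualization flow built on the routine above:
   once an analysis determines the single possible target TARGET_DECL of an
   indirect EDGE (both hypothetical here), the edge is turned into an
   ordinary direct edge.  Speculative edges are resolved first inside
   cgraph_make_edge_direct itself.  */

static struct cgraph_edge *
sketch_devirtualize_edge (struct cgraph_edge *edge, tree target_decl)
{
  gcc_assert (edge->indirect_unknown_callee);
  struct cgraph_node *target = cgraph_get_create_node (target_decl);
  return cgraph_make_edge_direct (edge, target);
}
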
1326 /* If necessary, change the function declaration in the call statement
1327 associated with E so that it corresponds to the edge callee. */
1328
1329 gimple
1330 cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
1331 {
1332 tree decl = gimple_call_fndecl (e->call_stmt);
1333 tree lhs = gimple_call_lhs (e->call_stmt);
1334 gimple new_stmt;
1335 gimple_stmt_iterator gsi;
1336 #ifdef ENABLE_CHECKING
1337 struct cgraph_node *node;
1338 #endif
1339
1340 if (e->speculative)
1341 {
1342 struct cgraph_edge *e2;
1343 gimple new_stmt;
1344 struct ipa_ref *ref;
1345
1346 cgraph_speculative_call_info (e, e, e2, ref);
1347 /* If there already is a direct call (i.e. as a result of the inliner's
1348 substitution), forget about speculating. */
1349 if (decl)
1350 e = cgraph_resolve_speculation (e, decl);
1351 /* If types do not match, speculation was likely wrong.
1352 The direct edge was possibly redirected to a clone with a different
1353 signature. We did not update the call statement yet, so compare it
1354 with the reference that still points to the proper type. */
1355 else if (!gimple_check_call_matching_types (e->call_stmt,
1356 ref->referred->decl,
1357 true))
1358 {
1359 if (dump_file)
1360 fprintf (dump_file, "Not expanding speculative call of %s/%i -> %s/%i\n"
1361 "Type mismatch.\n",
1362 xstrdup (e->caller->name ()),
1363 e->caller->order,
1364 xstrdup (e->callee->name ()),
1365 e->callee->order);
1366 e = cgraph_resolve_speculation (e, NULL);
1367 /* We are producing the final function body and will throw away the
1368 callgraph edges really soon. Reset the counts/frequencies to
1369 keep verifier happy in the case of roundoff errors. */
1370 e->count = gimple_bb (e->call_stmt)->count;
1371 e->frequency = compute_call_stmt_bb_frequency
1372 (e->caller->decl, gimple_bb (e->call_stmt));
1373 }
1374 /* Expand speculation into GIMPLE code. */
1375 else
1376 {
1377 if (dump_file)
1378 fprintf (dump_file,
1379 "Expanding speculative call of %s/%i -> %s/%i count:"
1380 HOST_WIDEST_INT_PRINT_DEC"\n",
1381 xstrdup (e->caller->name ()),
1382 e->caller->order,
1383 xstrdup (e->callee->name ()),
1384 e->callee->order,
1385 (HOST_WIDEST_INT)e->count);
1386 gcc_assert (e2->speculative);
1387 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
1388 new_stmt = gimple_ic (e->call_stmt, cgraph (ref->referred),
1389 e->count || e2->count
1390 ? RDIV (e->count * REG_BR_PROB_BASE,
1391 e->count + e2->count)
1392 : e->frequency || e2->frequency
1393 ? RDIV (e->frequency * REG_BR_PROB_BASE,
1394 e->frequency + e2->frequency)
1395 : REG_BR_PROB_BASE / 2,
1396 e->count, e->count + e2->count);
1397 e->speculative = false;
1398 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt,
1399 new_stmt, false);
1400 e->frequency = compute_call_stmt_bb_frequency
1401 (e->caller->decl, gimple_bb (e->call_stmt));
1402 e2->frequency = compute_call_stmt_bb_frequency
1403 (e2->caller->decl, gimple_bb (e2->call_stmt));
1404 e2->speculative = false;
1405 ref->speculative = false;
1406 ref->stmt = NULL;
1407 /* Speculative indirect and direct edges are not both kept in the call
1408 site hash; make sure the entry for the remaining edge gets updated. */
1409 if (e->caller->call_site_hash)
1410 cgraph_update_edge_in_call_site_hash (e2);
1411 pop_cfun ();
1412 /* Continue redirecting E to proper target. */
1413 }
1414 }
1415
1416 if (e->indirect_unknown_callee
1417 || decl == e->callee->decl)
1418 return e->call_stmt;
1419
1420 #ifdef ENABLE_CHECKING
1421 if (decl)
1422 {
1423 node = cgraph_get_node (decl);
1424 gcc_assert (!node || !node->clone.combined_args_to_skip);
1425 }
1426 #endif
1427
1428 if (cgraph_dump_file)
1429 {
1430 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
1431 xstrdup (e->caller->name ()), e->caller->order,
1432 xstrdup (e->callee->name ()), e->callee->order);
1433 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
1434 if (e->callee->clone.combined_args_to_skip)
1435 {
1436 fprintf (cgraph_dump_file, " combined args to skip: ");
1437 dump_bitmap (cgraph_dump_file,
1438 e->callee->clone.combined_args_to_skip);
1439 }
1440 }
1441
1442 if (e->callee->clone.combined_args_to_skip)
1443 {
1444 int lp_nr;
1445
1446 new_stmt
1447 = gimple_call_copy_skip_args (e->call_stmt,
1448 e->callee->clone.combined_args_to_skip);
1449 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1450 gimple_call_set_fntype (new_stmt, gimple_call_fntype (e->call_stmt));
1451
1452 if (gimple_vdef (new_stmt)
1453 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1454 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1455
1456 gsi = gsi_for_stmt (e->call_stmt);
1457 gsi_replace (&gsi, new_stmt, false);
1458 /* We need to defer cleaning EH info on the new statement to
1459 fixup-cfg. We may not have dominator information at this point
1460 and thus would end up with unreachable blocks and have no way
1461 to communicate that we need to run CFG cleanup then. */
1462 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1463 if (lp_nr != 0)
1464 {
1465 remove_stmt_from_eh_lp (e->call_stmt);
1466 add_stmt_to_eh_lp (new_stmt, lp_nr);
1467 }
1468 }
1469 else
1470 {
1471 new_stmt = e->call_stmt;
1472 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1473 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1474 }
1475
1476 /* If the call becomes noreturn, remove the lhs. */
1477 if (lhs && (gimple_call_flags (new_stmt) & ECF_NORETURN))
1478 {
1479 if (TREE_CODE (lhs) == SSA_NAME)
1480 {
1481 tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl),
1482 TREE_TYPE (lhs), NULL);
1483 var = get_or_create_ssa_default_def
1484 (DECL_STRUCT_FUNCTION (e->caller->decl), var);
1485 gimple set_stmt = gimple_build_assign (lhs, var);
1486 gsi = gsi_for_stmt (new_stmt);
1487 gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
1488 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
1489 }
1490 gimple_call_set_lhs (new_stmt, NULL_TREE);
1491 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1492 }
1493
1494 /* If new callee has no static chain, remove it. */
1495 if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl))
1496 {
1497 gimple_call_set_chain (new_stmt, NULL);
1498 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1499 }
1500
1501 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt, false);
1502
1503 if (cgraph_dump_file)
1504 {
1505 fprintf (cgraph_dump_file, " updated to:");
1506 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
1507 }
1508 return new_stmt;
1509 }
1510
1511 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1512 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1513 of OLD_STMT if it was previously a call statement.
1514 If NEW_STMT is NULL, the call has been dropped without any
1515 replacement. */
1516
1517 static void
1518 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
1519 gimple old_stmt, tree old_call,
1520 gimple new_stmt)
1521 {
1522 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1523 ? gimple_call_fndecl (new_stmt) : 0;
1524
1525 /* If both the old and new statements are indirect calls, there is nothing to update. */
1526 if (!new_call && !old_call)
1527 return;
1528 /* See if we turned an indirect call into a direct call, or folded a call
1529 to one builtin into a different builtin. */
1530 if (old_call != new_call)
1531 {
1532 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
1533 struct cgraph_edge *ne = NULL;
1534 gcov_type count;
1535 int frequency;
1536
1537 if (e)
1538 {
1539 /* See if the edge is already there and has the correct callee. It
1540 might be so because indirect inlining has already updated
1541 it. We also might've cloned and redirected the edge. */
1542 if (new_call && e->callee)
1543 {
1544 struct cgraph_node *callee = e->callee;
1545 while (callee)
1546 {
1547 if (callee->decl == new_call
1548 || callee->former_clone_of == new_call)
1549 {
1550 cgraph_set_call_stmt (e, new_stmt);
1551 return;
1552 }
1553 callee = callee->clone_of;
1554 }
1555 }
1556
1557 /* Otherwise remove the edge and create a new one; we can't simply redirect
1558 it since the function has changed, so the inline plan and other information
1559 attached to the edge is invalid. */
1560 count = e->count;
1561 frequency = e->frequency;
1562 if (e->indirect_unknown_callee || e->inline_failed)
1563 cgraph_remove_edge (e);
1564 else
1565 cgraph_remove_node_and_inline_clones (e->callee, NULL);
1566 }
1567 else if (new_call)
1568 {
1569 /* We are seeing a new direct call; compute profile info based on BB. */
1570 basic_block bb = gimple_bb (new_stmt);
1571 count = bb->count;
1572 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1573 bb);
1574 }
1575
1576 if (new_call)
1577 {
1578 ne = cgraph_create_edge (node, cgraph_get_create_node (new_call),
1579 new_stmt, count, frequency);
1580 gcc_assert (ne->inline_failed);
1581 }
1582 }
1583 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1584 else if (old_stmt != new_stmt)
1585 cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
1586 }
1587
1588 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1589 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1590 of OLD_STMT before it was updated (updating can happen in place). */
1591
1592 void
1593 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
1594 {
1595 struct cgraph_node *orig = cgraph_get_node (cfun->decl);
1596 struct cgraph_node *node;
1597
1598 gcc_checking_assert (orig);
1599 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1600 if (orig->clones)
1601 for (node = orig->clones; node != orig;)
1602 {
1603 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1604 if (node->clones)
1605 node = node->clones;
1606 else if (node->next_sibling_clone)
1607 node = node->next_sibling_clone;
1608 else
1609 {
1610 while (node != orig && !node->next_sibling_clone)
1611 node = node->clone_of;
1612 if (node != orig)
1613 node = node->next_sibling_clone;
1614 }
1615 }
1616 }
1617
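/* Sketch of the contract of cgraph_update_edges_for_call_stmt above: a
   folder that rewrites a call statement must capture the old fndecl before
   rewriting so the callgraph can match the old edge.  OLD_STMT, NEW_STMT
   and the helper name are hypothetical.  */

static void
sketch_note_folded_call (gimple old_stmt, gimple new_stmt)
{
  /* Capture the callee of OLD_STMT before it is thrown away.  */
  tree old_decl = is_gimple_call (old_stmt)
		  ? gimple_call_fndecl (old_stmt) : NULL_TREE;
  cgraph_update_edges_for_call_stmt (old_stmt, old_decl, new_stmt);
}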
1618
1619 /* Remove all callees from the node. */
1620
1621 void
1622 cgraph_node_remove_callees (struct cgraph_node *node)
1623 {
1624 struct cgraph_edge *e, *f;
1625
1626 /* It is sufficient to remove the edges from the lists of callers of
1627 the callees. The callee list of the node can be zapped with one
1628 assignment. */
1629 for (e = node->callees; e; e = f)
1630 {
1631 f = e->next_callee;
1632 cgraph_call_edge_removal_hooks (e);
1633 if (!e->indirect_unknown_callee)
1634 cgraph_edge_remove_callee (e);
1635 cgraph_free_edge (e);
1636 }
1637 for (e = node->indirect_calls; e; e = f)
1638 {
1639 f = e->next_callee;
1640 cgraph_call_edge_removal_hooks (e);
1641 if (!e->indirect_unknown_callee)
1642 cgraph_edge_remove_callee (e);
1643 cgraph_free_edge (e);
1644 }
1645 node->indirect_calls = NULL;
1646 node->callees = NULL;
1647 if (node->call_site_hash)
1648 {
1649 htab_delete (node->call_site_hash);
1650 node->call_site_hash = NULL;
1651 }
1652 }
1653
1654 /* Remove all callers from the node. */
1655
1656 static void
1657 cgraph_node_remove_callers (struct cgraph_node *node)
1658 {
1659 struct cgraph_edge *e, *f;
1660
1661 /* It is sufficient to remove the edges from the lists of callees of
1662 the callers. The caller list of the node can be zapped with one
1663 assignment. */
1664 for (e = node->callers; e; e = f)
1665 {
1666 f = e->next_caller;
1667 cgraph_call_edge_removal_hooks (e);
1668 cgraph_edge_remove_caller (e);
1669 cgraph_free_edge (e);
1670 }
1671 node->callers = NULL;
1672 }
1673
1674 /* Helper function for cgraph_release_function_body and free_lang_data.
1675 It releases body from function DECL without having to inspect its
1676 possibly non-existent symtab node. */
1677
1678 void
1679 release_function_body (tree decl)
1680 {
1681 if (DECL_STRUCT_FUNCTION (decl))
1682 {
1683 push_cfun (DECL_STRUCT_FUNCTION (decl));
1684 if (cfun->cfg
1685 && current_loops)
1686 {
1687 cfun->curr_properties &= ~PROP_loops;
1688 loop_optimizer_finalize ();
1689 }
1690 if (cfun->gimple_df)
1691 {
1692 delete_tree_ssa ();
1693 delete_tree_cfg_annotations ();
1694 cfun->eh = NULL;
1695 }
1696 if (cfun->cfg)
1697 {
1698 gcc_assert (!dom_info_available_p (CDI_DOMINATORS));
1699 gcc_assert (!dom_info_available_p (CDI_POST_DOMINATORS));
1700 clear_edges ();
1701 cfun->cfg = NULL;
1702 }
1703 if (cfun->value_histograms)
1704 free_histograms ();
1705 pop_cfun ();
1706 gimple_set_body (decl, NULL);
1707 /* The struct function hangs onto a lot of data that would leak if we didn't
1708 remove all pointers to it. */
1709 ggc_free (DECL_STRUCT_FUNCTION (decl));
1710 DECL_STRUCT_FUNCTION (decl) = NULL;
1711 }
1712 DECL_SAVED_TREE (decl) = NULL;
1713 }
1714
1715 /* Release memory used to represent body of function NODE.
1716 Use this only for functions that are released before being translated to
1717 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1718 are freed in final.c via free_after_compilation (). */
1719
1720 void
1721 cgraph_release_function_body (struct cgraph_node *node)
1722 {
1723 node->ipa_transforms_to_apply.release ();
1724 if (!node->used_as_abstract_origin && cgraph_state != CGRAPH_STATE_PARSING)
1725 {
1726 DECL_RESULT (node->decl) = NULL;
1727 DECL_ARGUMENTS (node->decl) = NULL;
1728 }
1729 /* If the node is abstract and needed, then do not clear DECL_INITIAL
1730 of its associated function declaration because it's
1731 needed to emit debug info later. */
1732 if (!node->used_as_abstract_origin && DECL_INITIAL (node->decl))
1733 DECL_INITIAL (node->decl) = error_mark_node;
1734 release_function_body (node->decl);
1735 if (node->lto_file_data)
1736 lto_free_function_in_decl_state_for_node (node);
1737 }
1738
1739 /* Remove the node from cgraph. */
1740
1741 void
1742 cgraph_remove_node (struct cgraph_node *node)
1743 {
1744 struct cgraph_node *n;
1745 int uid = node->uid;
1746
1747 cgraph_call_node_removal_hooks (node);
1748 cgraph_node_remove_callers (node);
1749 cgraph_node_remove_callees (node);
1750 node->ipa_transforms_to_apply.release ();
1751
1752 /* Incremental inlining accesses removed nodes stored in the postorder
1753 list. */
1754 node->force_output = false;
1755 node->forced_by_abi = false;
1756 for (n = node->nested; n; n = n->next_nested)
1757 n->origin = NULL;
1758 node->nested = NULL;
1759 if (node->origin)
1760 {
1761 struct cgraph_node **node2 = &node->origin->nested;
1762
1763 while (*node2 != node)
1764 node2 = &(*node2)->next_nested;
1765 *node2 = node->next_nested;
1766 }
1767 symtab_unregister_node (node);
1768 if (node->prev_sibling_clone)
1769 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1770 else if (node->clone_of)
1771 node->clone_of->clones = node->next_sibling_clone;
1772 if (node->next_sibling_clone)
1773 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1774 if (node->clones)
1775 {
1776 struct cgraph_node *n, *next;
1777
1778 if (node->clone_of)
1779 {
1780 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1781 n->clone_of = node->clone_of;
1782 n->clone_of = node->clone_of;
1783 n->next_sibling_clone = node->clone_of->clones;
1784 if (node->clone_of->clones)
1785 node->clone_of->clones->prev_sibling_clone = n;
1786 node->clone_of->clones = node->clones;
1787 }
1788 else
1789 {
1790 /* We are removing a node with clones. This makes the clones inconsistent,
1791 but assume they will be removed subsequently and just keep the clone
1792 tree intact. This can happen in unreachable function removal since
1793 we remove unreachable functions in random order, not by a bottom-up
1794 walk of the clone trees. */
1795 for (n = node->clones; n; n = next)
1796 {
1797 next = n->next_sibling_clone;
1798 n->next_sibling_clone = NULL;
1799 n->prev_sibling_clone = NULL;
1800 n->clone_of = NULL;
1801 }
1802 }
1803 }
1804
1805 /* While all the clones are removed after being processed, the function
1806 itself is kept in the cgraph even after it is compiled. Check whether
1807 we are done with this body and reclaim it proactively if this is the case.
1808 */
1809 if (cgraph_state != CGRAPH_LTO_STREAMING)
1810 {
1811 n = cgraph_get_node (node->decl);
1812 if (!n
1813 || (!n->clones && !n->clone_of && !n->global.inlined_to
1814 && (cgraph_global_info_ready
1815 && (TREE_ASM_WRITTEN (n->decl)
1816 || DECL_EXTERNAL (n->decl)
1817 || !n->analyzed
1818 || (!flag_wpa && n->in_other_partition)))))
1819 cgraph_release_function_body (node);
1820 }
1821
1822 node->decl = NULL;
1823 if (node->call_site_hash)
1824 {
1825 htab_delete (node->call_site_hash);
1826 node->call_site_hash = NULL;
1827 }
1828 cgraph_n_nodes--;
1829
1830 /* Clear out the node to NULL all pointers and add the node to the free
1831 list. */
1832 memset (node, 0, sizeof (*node));
1833 node->type = SYMTAB_FUNCTION;
1834 node->uid = uid;
1835 SET_NEXT_FREE_NODE (node, free_nodes);
1836 free_nodes = node;
1837 }
1838
1839 /* Likewise indicate that NODE has its address taken. */
1840
1841 void
1842 cgraph_mark_address_taken_node (struct cgraph_node *node)
1843 {
1844 /* Indirect inlining can figure out that all uses of the address are
1845 inlined. */
1846 if (node->global.inlined_to)
1847 {
1848 gcc_assert (cfun->after_inlining);
1849 gcc_assert (node->callers->indirect_inlining_edge);
1850 return;
1851 }
1852 /* FIXME: the address_taken flag is used both as a shortcut for testing
1853 whether an IPA_REF_ADDR reference exists (and thus it should be set on
1854 the node representing the alias we take the address of) and as a test
1855 whether the address of the object was taken (and thus it should be set
1856 on the node the alias refers to). We should remove the first use and
1857 then remove the following set. */
1858 node->address_taken = 1;
1859 node = cgraph_function_or_thunk_node (node, NULL);
1860 node->address_taken = 1;
1861 }
1862
1863 /* Return local info for the compiled function. */
1864
1865 struct cgraph_local_info *
1866 cgraph_local_info (tree decl)
1867 {
1868 struct cgraph_node *node;
1869
1870 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1871 node = cgraph_get_node (decl);
1872 if (!node)
1873 return NULL;
1874 return &node->local;
1875 }
1876
1877 /* Return global info for the compiled function. */
1878
1879 struct cgraph_global_info *
1880 cgraph_global_info (tree decl)
1881 {
1882 struct cgraph_node *node;
1883
1884 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1885 node = cgraph_get_node (decl);
1886 if (!node)
1887 return NULL;
1888 return &node->global;
1889 }
1890
1891 /* Return RTL info for the compiled function. */
1892
1893 struct cgraph_rtl_info *
1894 cgraph_rtl_info (tree decl)
1895 {
1896 struct cgraph_node *node;
1897
1898 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1899 node = cgraph_get_node (decl);
1900 if (!node
1901 || (decl != current_function_decl
1902 && !TREE_ASM_WRITTEN (node->decl)))
1903 return NULL;
1904 return &node->rtl;
1905 }
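
/* Illustrative sketch, not part of the original cgraph.c: the accessors
   above let code query per-function data starting from a FUNCTION_DECL
   without touching cgraph_node directly.  The helper name is made up.  */

static bool
example_decl_is_local_function (tree decl)
{
  /* DECL must be a FUNCTION_DECL; cgraph_local_info asserts this.  */
  struct cgraph_local_info *info = cgraph_local_info (decl);

  /* A missing node means no callgraph information is available.  */
  return info != NULL && info->local;
}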
1906
1907 /* Return a string describing the failure REASON. */
1908
1909 const char*
1910 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1911 {
1912 #undef DEFCIFCODE
1913 #define DEFCIFCODE(code, type, string) string,
1914
1915 static const char *cif_string_table[CIF_N_REASONS] = {
1916 #include "cif-code.def"
1917 };
1918
1919 /* Signedness of an enum type is implementation defined, so cast it
1920 to unsigned before testing. */
1921 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1922 return cif_string_table[reason];
1923 }
1924
1925 /* Return a type describing the failure REASON. */
1926
1927 cgraph_inline_failed_type_t
1928 cgraph_inline_failed_type (cgraph_inline_failed_t reason)
1929 {
1930 #undef DEFCIFCODE
1931 #define DEFCIFCODE(code, type, string) type,
1932
1933 static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
1934 #include "cif-code.def"
1935 };
1936
1937 /* Signedness of an enum type is implementation defined, so cast it
1938 to unsigned before testing. */
1939 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1940 return cif_type_table[reason];
1941 }
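
/* Illustrative sketch, not part of the original cgraph.c: a dumper could
   consult the DEFCIFCODE-generated tables above to report why an edge was
   not inlined.  The helper name is made up.  */

static void
example_dump_inline_failure (FILE *f, struct cgraph_edge *e)
{
  /* inline_failed is zero for edges that were successfully inlined.  */
  if (e->inline_failed)
    fprintf (f, "not inlined: %s\n",
	     cgraph_inline_failed_string (e->inline_failed));
}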
1942
1943 /* Names used to print out the availability enum. */
1944 const char * const cgraph_availability_names[] =
1945 {"unset", "not_available", "overwritable", "available", "local"};
1946
1947
1948 /* Dump call graph node NODE to file F. */
1949
1950 void
1951 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1952 {
1953 struct cgraph_edge *edge;
1954 int indirect_calls_count = 0;
1955
1956 dump_symtab_base (f, node);
1957
1958 if (node->global.inlined_to)
1959 fprintf (f, " Function %s/%i is inline copy in %s/%i\n",
1960 xstrdup (node->name ()),
1961 node->order,
1962 xstrdup (node->global.inlined_to->name ()),
1963 node->global.inlined_to->order);
1964 if (node->clone_of)
1965 fprintf (f, " Clone of %s/%i\n",
1966 node->clone_of->asm_name (),
1967 node->clone_of->order);
1968 if (cgraph_function_flags_ready)
1969 fprintf (f, " Availability: %s\n",
1970 cgraph_availability_names [cgraph_function_body_availability (node)]);
1971
1972 if (node->profile_id)
1973 fprintf (f, " Profile id: %i\n",
1974 node->profile_id);
1975 fprintf (f, " First run: %i\n", node->tp_first_run);
1976 fprintf (f, " Function flags:");
1977 if (node->count)
1978 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1979 (HOST_WIDEST_INT)node->count);
1980 if (node->origin)
1981 fprintf (f, " nested in: %s", node->origin->asm_name ());
1982 if (gimple_has_body_p (node->decl))
1983 fprintf (f, " body");
1984 if (node->process)
1985 fprintf (f, " process");
1986 if (node->local.local)
1987 fprintf (f, " local");
1988 if (node->local.redefined_extern_inline)
1989 fprintf (f, " redefined_extern_inline");
1990 if (node->only_called_at_startup)
1991 fprintf (f, " only_called_at_startup");
1992 if (node->only_called_at_exit)
1993 fprintf (f, " only_called_at_exit");
1994 if (node->tm_clone)
1995 fprintf (f, " tm_clone");
1996
1997 fprintf (f, "\n");
1998
1999 if (node->thunk.thunk_p)
2000 {
2001 fprintf (f, " Thunk");
2002 if (node->thunk.alias)
2003 fprintf (f, " of %s (asm: %s)",
2004 lang_hooks.decl_printable_name (node->thunk.alias, 2),
2005 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
2006 fprintf (f, " fixed offset %i virtual value %i has "
2007 "virtual offset %i)\n",
2008 (int)node->thunk.fixed_offset,
2009 (int)node->thunk.virtual_value,
2010 (int)node->thunk.virtual_offset_p);
2011 }
2012 if (node->alias && node->thunk.alias
2013 && DECL_P (node->thunk.alias))
2014 {
2015 fprintf (f, " Alias of %s",
2016 lang_hooks.decl_printable_name (node->thunk.alias, 2));
2017 if (DECL_ASSEMBLER_NAME_SET_P (node->thunk.alias))
2018 fprintf (f, " (asm: %s)",
2019 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
2020 fprintf (f, "\n");
2021 }
2022
2023 fprintf (f, " Called by: ");
2024
2025 for (edge = node->callers; edge; edge = edge->next_caller)
2026 {
2027 fprintf (f, "%s/%i ", edge->caller->asm_name (),
2028 edge->caller->order);
2029 if (edge->count)
2030 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
2031 (HOST_WIDEST_INT)edge->count);
2032 if (edge->frequency)
2033 fprintf (f, "(%.2f per call) ",
2034 edge->frequency / (double)CGRAPH_FREQ_BASE);
2035 if (edge->speculative)
2036 fprintf (f, "(speculative) ");
2037 if (!edge->inline_failed)
2038 fprintf (f, "(inlined) ");
2039 if (edge->indirect_inlining_edge)
2040 fprintf (f, "(indirect_inlining) ");
2041 if (edge->can_throw_external)
2042 fprintf (f, "(can throw external) ");
2043 }
2044
2045 fprintf (f, "\n Calls: ");
2046 for (edge = node->callees; edge; edge = edge->next_callee)
2047 {
2048 fprintf (f, "%s/%i ", edge->callee->asm_name (),
2049 edge->callee->order);
2050 if (edge->speculative)
2051 fprintf (f, "(speculative) ");
2052 if (!edge->inline_failed)
2053 fprintf (f, "(inlined) ");
2054 if (edge->indirect_inlining_edge)
2055 fprintf (f, "(indirect_inlining) ");
2056 if (edge->count)
2057 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
2058 (HOST_WIDEST_INT)edge->count);
2059 if (edge->frequency)
2060 fprintf (f, "(%.2f per call) ",
2061 edge->frequency / (double)CGRAPH_FREQ_BASE);
2062 if (edge->can_throw_external)
2063 fprintf (f, "(can throw external) ");
2064 }
2065 fprintf (f, "\n");
2066
2067 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
2068 indirect_calls_count++;
2069 if (indirect_calls_count)
2070 fprintf (f, " Has %i outgoing edges for indirect calls.\n",
2071 indirect_calls_count);
2072 }
2073
2074
2075 /* Dump call graph node NODE to stderr. */
2076
2077 DEBUG_FUNCTION void
2078 debug_cgraph_node (struct cgraph_node *node)
2079 {
2080 dump_cgraph_node (stderr, node);
2081 }
2082
2083
2084 /* Dump the callgraph to file F. */
2085
2086 void
2087 dump_cgraph (FILE *f)
2088 {
2089 struct cgraph_node *node;
2090
2091 fprintf (f, "callgraph:\n\n");
2092 FOR_EACH_FUNCTION (node)
2093 dump_cgraph_node (f, node);
2094 }
2095
2096
2097 /* Dump the call graph to stderr. */
2098
2099 DEBUG_FUNCTION void
2100 debug_cgraph (void)
2101 {
2102 dump_cgraph (stderr);
2103 }
2104
2105 /* Return true when the DECL can possibly be inlined. */
2106 bool
2107 cgraph_function_possibly_inlined_p (tree decl)
2108 {
2109 if (!cgraph_global_info_ready)
2110 return !DECL_UNINLINABLE (decl);
2111 return DECL_POSSIBLY_INLINED (decl);
2112 }
2113
2114 /* NODE is no longer a nested function; update cgraph accordingly. */
2115 void
2116 cgraph_unnest_node (struct cgraph_node *node)
2117 {
2118 struct cgraph_node **node2 = &node->origin->nested;
2119 gcc_assert (node->origin);
2120
2121 while (*node2 != node)
2122 node2 = &(*node2)->next_nested;
2123 *node2 = node->next_nested;
2124 node->origin = NULL;
2125 }
2126
2127 /* Return function availability. See cgraph.h for description of individual
2128 return values. */
2129 enum availability
2130 cgraph_function_body_availability (struct cgraph_node *node)
2131 {
2132 enum availability avail;
2133 if (!node->analyzed)
2134 avail = AVAIL_NOT_AVAILABLE;
2135 else if (node->local.local)
2136 avail = AVAIL_LOCAL;
2137 else if (node->alias && node->weakref)
2138 cgraph_function_or_thunk_node (node, &avail);
2139 else if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (node->decl)))
2140 avail = AVAIL_OVERWRITABLE;
2141 else if (!node->externally_visible)
2142 avail = AVAIL_AVAILABLE;
2143 /* Inline functions are safe to be analyzed even if their symbol can
2144 be overwritten at runtime. It is not meaningful to enforce any sane
2145 behaviour when an inline function is replaced by a different body. */
2146 else if (DECL_DECLARED_INLINE_P (node->decl))
2147 avail = AVAIL_AVAILABLE;
2148
2149 /* If the function can be overwritten, return OVERWRITABLE. Take
2150 care at least of the notable extension of COMDAT functions
2151 used to share template instantiations in C++ (this is symmetric
2152 to the code in cp_cannot_inline_tree_fn and should probably be shared,
2153 with the inlinability hooks completely eliminated).
2154
2155 ??? Does the C++ one definition rule allow us to always return
2156 AVAIL_AVAILABLE here? That would be good reason to preserve this
2157 bit. */
2158
2159 else if (decl_replaceable_p (node->decl)
2160 && !DECL_EXTERNAL (node->decl))
2161 avail = AVAIL_OVERWRITABLE;
2162 else avail = AVAIL_AVAILABLE;
2163
2164 return avail;
2165 }
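
/* Illustrative sketch, not part of the original cgraph.c: IPA analyses
   typically resolve the alias/thunk chain first and test the availability
   computed above before relying on the body they reach.  The helper name
   is made up.  */

static bool
example_body_is_trustworthy (struct cgraph_node *node)
{
  enum availability avail;

  /* Walk to the function the chain finally refers to, picking up the
     weakest availability seen along the way.  */
  cgraph_function_or_thunk_node (node, &avail);
  return avail >= AVAIL_AVAILABLE;
}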
2166
2167 /* Worker for cgraph_node_can_be_local_p. */
2168 static bool
2169 cgraph_node_cannot_be_local_p_1 (struct cgraph_node *node,
2170 void *data ATTRIBUTE_UNUSED)
2171 {
2172 return !(!node->force_output
2173 && ((DECL_COMDAT (node->decl)
2174 && !node->forced_by_abi
2175 && !symtab_used_from_object_file_p (node)
2176 && !node->same_comdat_group)
2177 || !node->externally_visible));
2178 }
2179
2180 /* Return true if NODE can be made local by an API change.
2181 Extern inline functions and C++ COMDAT functions can be made local
2182 at the expense of possible code size growth if the function is used in
2183 multiple compilation units. */
2184 bool
2185 cgraph_node_can_be_local_p (struct cgraph_node *node)
2186 {
2187 return (!node->address_taken
2188 && !cgraph_for_node_and_aliases (node,
2189 cgraph_node_cannot_be_local_p_1,
2190 NULL, true));
2191 }
2192
2193 /* Call CALLBACK on NODE and on the thunks and aliases associated with
2194 NODE. When INCLUDE_OVERWRITABLE is false, overwritable aliases and
2195 thunks are skipped. */
2196
2197 bool
2198 cgraph_for_node_thunks_and_aliases (struct cgraph_node *node,
2199 bool (*callback) (struct cgraph_node *, void *),
2200 void *data,
2201 bool include_overwritable)
2202 {
2203 struct cgraph_edge *e;
2204 int i;
2205 struct ipa_ref *ref;
2206
2207 if (callback (node, data))
2208 return true;
2209 for (e = node->callers; e; e = e->next_caller)
2210 if (e->caller->thunk.thunk_p
2211 && (include_overwritable
2212 || cgraph_function_body_availability (e->caller) > AVAIL_OVERWRITABLE))
2213 if (cgraph_for_node_thunks_and_aliases (e->caller, callback, data,
2214 include_overwritable))
2215 return true;
2216 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list, i, ref); i++)
2217 if (ref->use == IPA_REF_ALIAS)
2218 {
2219 struct cgraph_node *alias = ipa_ref_referring_node (ref);
2220 if (include_overwritable
2221 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2222 if (cgraph_for_node_thunks_and_aliases (alias, callback, data,
2223 include_overwritable))
2224 return true;
2225 }
2226 return false;
2227 }
2228
2229 /* Call CALLBACK on NODE and on the aliases associated with NODE.
2230 When INCLUDE_OVERWRITABLE is false, overwritable aliases are
2231 skipped. */
2232
2233 bool
2234 cgraph_for_node_and_aliases (struct cgraph_node *node,
2235 bool (*callback) (struct cgraph_node *, void *),
2236 void *data,
2237 bool include_overwritable)
2238 {
2239 int i;
2240 struct ipa_ref *ref;
2241
2242 if (callback (node, data))
2243 return true;
2244 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list, i, ref); i++)
2245 if (ref->use == IPA_REF_ALIAS)
2246 {
2247 struct cgraph_node *alias = ipa_ref_referring_node (ref);
2248 if (include_overwritable
2249 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2250 if (cgraph_for_node_and_aliases (alias, callback, data,
2251 include_overwritable))
2252 return true;
2253 }
2254 return false;
2255 }
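
/* Illustrative sketch, not part of the original cgraph.c: callers of the
   two walkers above supply a worker of this shape and thread their state
   through the DATA pointer; returning false keeps the walk going.  Both
   names are made up.  */

static bool
example_count_node_1 (struct cgraph_node *node ATTRIBUTE_UNUSED, void *data)
{
  ++*(int *) data;
  return false;
}

static int
example_count_node_and_aliases (struct cgraph_node *node)
{
  int count = 0;

  cgraph_for_node_and_aliases (node, example_count_node_1, &count, true);
  return count;
}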
2256
2257 /* Worker to make NODE local. */
2258
2259 static bool
2260 cgraph_make_node_local_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2261 {
2262 gcc_checking_assert (cgraph_node_can_be_local_p (node));
2263 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2264 {
2265 symtab_make_decl_local (node->decl);
2266
2267 node->externally_visible = false;
2268 node->forced_by_abi = false;
2269 node->local.local = true;
2270 node->unique_name = (node->resolution == LDPR_PREVAILING_DEF_IRONLY
2271 || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP);
2272 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2273 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2274 }
2275 return false;
2276 }
2277
2278 /* Make NODE local. */
2279
2280 void
2281 cgraph_make_node_local (struct cgraph_node *node)
2282 {
2283 cgraph_for_node_thunks_and_aliases (node, cgraph_make_node_local_1,
2284 NULL, true);
2285 }
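
/* Illustrative sketch, not part of the original cgraph.c: a pass would
   normally pair the predicate and the mutator as shown.  The helper name
   is made up.  */

static bool
example_try_localize (struct cgraph_node *node)
{
  if (!cgraph_node_can_be_local_p (node))
    return false;
  cgraph_make_node_local (node);
  return true;
}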
2286
2287 /* Worker to set nothrow flag. */
2288
2289 static bool
2290 cgraph_set_nothrow_flag_1 (struct cgraph_node *node, void *data)
2291 {
2292 struct cgraph_edge *e;
2293
2294 TREE_NOTHROW (node->decl) = data != NULL;
2295
2296 if (data != NULL)
2297 for (e = node->callers; e; e = e->next_caller)
2298 e->can_throw_external = false;
2299 return false;
2300 }
2301
2302 /* Set TREE_NOTHROW on NODE's decl and on aliases of NODE,
2303 if any, to NOTHROW. */
2304
2305 void
2306 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2307 {
2308 cgraph_for_node_thunks_and_aliases (node, cgraph_set_nothrow_flag_1,
2309 (void *)(size_t)nothrow, false);
2310 }
2311
2312 /* Worker to set const flag. */
2313
2314 static bool
2315 cgraph_set_const_flag_1 (struct cgraph_node *node, void *data)
2316 {
2317 /* Static constructors and destructors without a side effect can be
2318 optimized out. */
2319 if (data && !((size_t)data & 2))
2320 {
2321 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2322 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2323 if (DECL_STATIC_DESTRUCTOR (node->decl))
2324 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2325 }
2326 TREE_READONLY (node->decl) = data != NULL;
2327 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2328 return false;
2329 }
2330
2331 /* Set TREE_READONLY on NODE's decl and on aliases of NODE,
2332 if any, to READONLY. */
2333
2334 void
2335 cgraph_set_const_flag (struct cgraph_node *node, bool readonly, bool looping)
2336 {
2337 cgraph_for_node_thunks_and_aliases (node, cgraph_set_const_flag_1,
2338 (void *)(size_t)(readonly + (int)looping * 2),
2339 false);
2340 }
2341
2342 /* Worker to set pure flag. */
2343
2344 static bool
2345 cgraph_set_pure_flag_1 (struct cgraph_node *node, void *data)
2346 {
2347 /* Static constructors and destructors without a side effect can be
2348 optimized out. */
2349 if (data && !((size_t)data & 2))
2350 {
2351 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2352 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2353 if (DECL_STATIC_DESTRUCTOR (node->decl))
2354 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2355 }
2356 DECL_PURE_P (node->decl) = data != NULL;
2357 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2358 return false;
2359 }
2360
2361 /* Set DECL_PURE_P on NODE's decl and on aliases of NODE,
2362 if any, to PURE. */
2363
2364 void
2365 cgraph_set_pure_flag (struct cgraph_node *node, bool pure, bool looping)
2366 {
2367 cgraph_for_node_thunks_and_aliases (node, cgraph_set_pure_flag_1,
2368 (void *)(size_t)(pure + (int)looping * 2),
2369 false);
2370 }
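
/* Illustrative sketch, not part of the original cgraph.c: the two setters
   above pack their boolean arguments into the worker's DATA pointer as
   (flag + looping * 2); the workers read the flag as "DATA is non-NULL"
   and LOOPING from bit 1, as mirrored by this made-up helper.  */

static void
example_decode_flag_data (void *data, bool *flag, bool *looping)
{
  *flag = data != NULL;
  *looping = ((size_t) data & 2) != 0;
}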
2371
2372 /* Return true when NODE cannot return or throw and thus
2373 it is safe to ignore its side effects for IPA analysis. */
2374
2375 bool
2376 cgraph_node_cannot_return (struct cgraph_node *node)
2377 {
2378 int flags = flags_from_decl_or_type (node->decl);
2379 if (!flag_exceptions)
2380 return (flags & ECF_NORETURN) != 0;
2381 else
2382 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2383 == (ECF_NORETURN | ECF_NOTHROW));
2384 }
2385
2386 /* Return true when the call of E cannot lead to a return from the caller
2387 and thus it is safe to ignore its side effects for IPA analysis
2388 when computing side effects of the caller.
2389 FIXME: We could actually mark all edges that have no reaching
2390 path to the exit block or throw to get better results. */
2391 bool
2392 cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
2393 {
2394 if (cgraph_node_cannot_return (e->caller))
2395 return true;
2396 if (e->indirect_unknown_callee)
2397 {
2398 int flags = e->indirect_info->ecf_flags;
2399 if (!flag_exceptions)
2400 return (flags & ECF_NORETURN) != 0;
2401 else
2402 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2403 == (ECF_NORETURN | ECF_NOTHROW));
2404 }
2405 else
2406 return cgraph_node_cannot_return (e->callee);
2407 }
2408
2409 /* Return true when function NODE can be removed from callgraph
2410 if all direct calls are eliminated. */
2411
2412 bool
2413 cgraph_can_remove_if_no_direct_calls_and_refs_p (struct cgraph_node *node)
2414 {
2415 gcc_assert (!node->global.inlined_to);
2416 /* Extern inlines can always go; we will use the external definition. */
2417 if (DECL_EXTERNAL (node->decl))
2418 return true;
2419 /* When the function is needed, we cannot remove it. */
2420 if (node->force_output || node->used_from_other_partition)
2421 return false;
2422 if (DECL_STATIC_CONSTRUCTOR (node->decl)
2423 || DECL_STATIC_DESTRUCTOR (node->decl))
2424 return false;
2425 /* Only COMDAT functions can be removed if externally visible. */
2426 if (node->externally_visible
2427 && (!DECL_COMDAT (node->decl)
2428 || node->forced_by_abi
2429 || symtab_used_from_object_file_p (node)))
2430 return false;
2431 return true;
2432 }
2433
2434 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2435
2436 static bool
2437 nonremovable_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2438 {
2439 return !cgraph_can_remove_if_no_direct_calls_and_refs_p (node);
2440 }
2441
2442 /* Return true when function NODE and its aliases can be removed from callgraph
2443 if all direct calls are eliminated. */
2444
2445 bool
2446 cgraph_can_remove_if_no_direct_calls_p (struct cgraph_node *node)
2447 {
2448 /* Extern inlines can always go; we will use the external definition. */
2449 if (DECL_EXTERNAL (node->decl))
2450 return true;
2451 if (node->address_taken)
2452 return false;
2453 return !cgraph_for_node_and_aliases (node, nonremovable_p, NULL, true);
2454 }
2455
2456 /* Worker for cgraph_will_be_removed_from_program_if_no_direct_calls. */
2457
2458 static bool
2459 used_from_object_file_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2460 {
2461 return symtab_used_from_object_file_p (node);
2462 }
2463
2464 /* Return true when function NODE can be expected to be removed
2465 from the program when direct calls in this compilation unit are removed.
2466 
2467 As a special case, COMDAT functions are
2468 cgraph_can_remove_if_no_direct_calls_p while they are not
2469 cgraph_only_called_directly_p (it is possible they are called from another
2470 unit).
2471 
2472 This function behaves as cgraph_only_called_directly_p because eliminating
2473 all uses of a COMDAT function does not necessarily make it disappear from
2474 the program unless we are compiling the whole program or we do LTO. In
2475 that case we know we win since dynamic linking will not really discard the
2476 linkonce section. */
2477
2478 bool
2479 cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
2480 {
2481 gcc_assert (!node->global.inlined_to);
2482 if (cgraph_for_node_and_aliases (node, used_from_object_file_p, NULL, true))
2483 return false;
2484 if (!in_lto_p && !flag_whole_program)
2485 return cgraph_only_called_directly_p (node);
2486 else
2487 {
2488 if (DECL_EXTERNAL (node->decl))
2489 return true;
2490 return cgraph_can_remove_if_no_direct_calls_p (node);
2491 }
2492 }
2493
2494
2495 /* Worker for cgraph_only_called_directly_p. */
2496
2497 static bool
2498 cgraph_not_only_called_directly_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2499 {
2500 return !cgraph_only_called_directly_or_aliased_p (node);
2501 }
2502
2503 /* Return true when function NODE and all its aliases are only called
2504 directly,
2505 i.e. it is not externally visible, its address was not taken and
2506 it is not used in any other non-standard way. */
2507
2508 bool
2509 cgraph_only_called_directly_p (struct cgraph_node *node)
2510 {
2511 gcc_assert (cgraph_function_or_thunk_node (node, NULL) == node);
2512 return !cgraph_for_node_and_aliases (node, cgraph_not_only_called_directly_p_1,
2513 NULL, true);
2514 }
2515
2516
2517 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
2518
2519 static bool
2520 collect_callers_of_node_1 (struct cgraph_node *node, void *data)
2521 {
2522 vec<cgraph_edge_p> *redirect_callers = (vec<cgraph_edge_p> *)data;
2523 struct cgraph_edge *cs;
2524 enum availability avail;
2525 cgraph_function_or_thunk_node (node, &avail);
2526
2527 if (avail > AVAIL_OVERWRITABLE)
2528 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
2529 if (!cs->indirect_inlining_edge)
2530 redirect_callers->safe_push (cs);
2531 return false;
2532 }
2533
2534 /* Collect all callers of NODE and its aliases that are known to lead to NODE
2535 (i.e. are not overwritable). */
2536
2537 vec<cgraph_edge_p>
2538 collect_callers_of_node (struct cgraph_node *node)
2539 {
2540 vec<cgraph_edge_p> redirect_callers = vNULL;
2541 cgraph_for_node_and_aliases (node, collect_callers_of_node_1,
2542 &redirect_callers, false);
2543 return redirect_callers;
2544 }
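
/* Illustrative sketch, not part of the original cgraph.c: cloning code
   typically gathers the callers first and then redirects each collected
   edge; here we only count them and release the vector.  The helper name
   is made up.  */

static unsigned
example_count_redirectable_callers (struct cgraph_node *node)
{
  vec<cgraph_edge_p> callers = collect_callers_of_node (node);
  unsigned n = callers.length ();

  callers.release ();
  return n;
}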
2545
2546 /* Return TRUE if NODE2 is a clone of NODE or is equivalent to it. */
2547
2548 static bool
2549 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
2550 {
2551 bool skipped_thunk = false;
2552 node = cgraph_function_or_thunk_node (node, NULL);
2553 node2 = cgraph_function_or_thunk_node (node2, NULL);
2554
2555 /* There are no virtual clones of thunks so check former_clone_of or whether
2556 we might have skipped thunks because these adjustments are no longer
2557 necessary. */
2558 while (node->thunk.thunk_p)
2559 {
2560 if (node2->former_clone_of == node->decl)
2561 return true;
2562 if (!node->thunk.this_adjusting)
2563 return false;
2564 node = cgraph_function_or_thunk_node (node->callees->callee, NULL);
2565 skipped_thunk = true;
2566 }
2567
2568 if (skipped_thunk
2569 && (!node2->clone_of
2570 || !node2->clone.args_to_skip
2571 || !bitmap_bit_p (node2->clone.args_to_skip, 0)))
2572 return false;
2573
2574 while (node != node2 && node2)
2575 node2 = node2->clone_of;
2576 return node2 != NULL;
2577 }
2578
2579 /* Verify edge E count and frequency. */
2580
2581 static bool
2582 verify_edge_count_and_frequency (struct cgraph_edge *e)
2583 {
2584 bool error_found = false;
2585 if (e->count < 0)
2586 {
2587 error ("caller edge count is negative");
2588 error_found = true;
2589 }
2590 if (e->frequency < 0)
2591 {
2592 error ("caller edge frequency is negative");
2593 error_found = true;
2594 }
2595 if (e->frequency > CGRAPH_FREQ_MAX)
2596 {
2597 error ("caller edge frequency is too large");
2598 error_found = true;
2599 }
2600 if (gimple_has_body_p (e->caller->decl)
2601 && !e->caller->global.inlined_to
2602 && !e->speculative
2603 /* FIXME: Inline-analysis sets frequency to 0 when edge is optimized out.
2604 Remove this once edges are actually removed from the function at that time. */
2605 && (e->frequency
2606 || (inline_edge_summary_vec.exists ()
2607 && ((inline_edge_summary_vec.length () <= (unsigned) e->uid)
2608 || !inline_edge_summary (e)->predicate)))
2609 && (e->frequency
2610 != compute_call_stmt_bb_frequency (e->caller->decl,
2611 gimple_bb (e->call_stmt))))
2612 {
2613 error ("caller edge frequency %i does not match BB frequency %i",
2614 e->frequency,
2615 compute_call_stmt_bb_frequency (e->caller->decl,
2616 gimple_bb (e->call_stmt)));
2617 error_found = true;
2618 }
2619 return error_found;
2620 }
2621
2622 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
2623 static void
2624 cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
2625 {
2626 bool fndecl_was_null = false;
2627 /* debug_gimple_stmt needs correct cfun */
2628 if (cfun != this_cfun)
2629 set_cfun (this_cfun);
2630 /* ...and an actual current_function_decl */
2631 if (!current_function_decl)
2632 {
2633 current_function_decl = this_cfun->decl;
2634 fndecl_was_null = true;
2635 }
2636 debug_gimple_stmt (stmt);
2637 if (fndecl_was_null)
2638 current_function_decl = NULL;
2639 }
2640
2641 /* Verify that call graph edge E corresponds to DECL from the associated
2642 statement. Return true if the verification should fail. */
2643
2644 static bool
2645 verify_edge_corresponds_to_fndecl (struct cgraph_edge *e, tree decl)
2646 {
2647 struct cgraph_node *node;
2648
2649 if (!decl || e->callee->global.inlined_to)
2650 return false;
2651 if (cgraph_state == CGRAPH_LTO_STREAMING)
2652 return false;
2653 node = cgraph_get_node (decl);
2654
2655 /* We do not know if a node from a different partition is an alias or what it
2656 aliases and therefore cannot do the former_clone_of check reliably. When
2657 body_removed is set, we have lost all information about what the node was
2658 an alias or thunk of and also cannot proceed. */
2659 if (!node
2660 || node->body_removed
2661 || node->in_other_partition
2662 || e->callee->in_other_partition)
2663 return false;
2664
2665 /* Optimizers can redirect unreachable calls or calls triggering undefined
2666 behaviour to builtin_unreachable. */
2667 if (DECL_BUILT_IN_CLASS (e->callee->decl) == BUILT_IN_NORMAL
2668 && DECL_FUNCTION_CODE (e->callee->decl) == BUILT_IN_UNREACHABLE)
2669 return false;
2670 node = cgraph_function_or_thunk_node (node, NULL);
2671
2672 if (e->callee->former_clone_of != node->decl
2673 && (node != cgraph_function_or_thunk_node (e->callee, NULL))
2674 && !clone_of_p (node, e->callee))
2675 return true;
2676 else
2677 return false;
2678 }
2679
2680 /* Verify the consistency of the given cgraph node. */
2681 DEBUG_FUNCTION void
2682 verify_cgraph_node (struct cgraph_node *node)
2683 {
2684 struct cgraph_edge *e;
2685 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
2686 basic_block this_block;
2687 gimple_stmt_iterator gsi;
2688 bool error_found = false;
2689
2690 if (seen_error ())
2691 return;
2692
2693 timevar_push (TV_CGRAPH_VERIFY);
2694 error_found |= verify_symtab_base (node);
2695 for (e = node->callees; e; e = e->next_callee)
2696 if (e->aux)
2697 {
2698 error ("aux field set for edge %s->%s",
2699 identifier_to_locale (e->caller->name ()),
2700 identifier_to_locale (e->callee->name ()));
2701 error_found = true;
2702 }
2703 if (node->count < 0)
2704 {
2705 error ("execution count is negative");
2706 error_found = true;
2707 }
2708 if (node->global.inlined_to && node->same_comdat_group)
2709 {
2710 error ("inline clone in same comdat group list");
2711 error_found = true;
2712 }
2713 if (!node->definition && !node->in_other_partition && node->local.local)
2714 {
2715 error ("local symbols must be defined");
2716 error_found = true;
2717 }
2718 if (node->global.inlined_to && node->externally_visible)
2719 {
2720 error ("externally visible inline clone");
2721 error_found = true;
2722 }
2723 if (node->global.inlined_to && node->address_taken)
2724 {
2725 error ("inline clone with address taken");
2726 error_found = true;
2727 }
2728 if (node->global.inlined_to && node->force_output)
2729 {
2730 error ("inline clone is forced to output");
2731 error_found = true;
2732 }
2733 for (e = node->indirect_calls; e; e = e->next_callee)
2734 {
2735 if (e->aux)
2736 {
2737 error ("aux field set for indirect edge from %s",
2738 identifier_to_locale (e->caller->name ()));
2739 error_found = true;
2740 }
2741 if (!e->indirect_unknown_callee
2742 || !e->indirect_info)
2743 {
2744 error ("An indirect edge from %s is not marked as indirect or has no "
2745 "associated indirect_info, the corresponding statement is: ",
2746 identifier_to_locale (e->caller->name ()));
2747 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
2748 error_found = true;
2749 }
2750 }
2751 bool check_comdat = symtab_comdat_local_p (node);
2752 for (e = node->callers; e; e = e->next_caller)
2753 {
2754 if (verify_edge_count_and_frequency (e))
2755 error_found = true;
2756 if (check_comdat
2757 && !symtab_in_same_comdat_p (e->caller, node))
2758 {
2759 error ("comdat-local function called by %s outside its comdat",
2760 identifier_to_locale (e->caller->name ()));
2761 error_found = true;
2762 }
2763 if (!e->inline_failed)
2764 {
2765 if (node->global.inlined_to
2766 != (e->caller->global.inlined_to
2767 ? e->caller->global.inlined_to : e->caller))
2768 {
2769 error ("inlined_to pointer is wrong");
2770 error_found = true;
2771 }
2772 if (node->callers->next_caller)
2773 {
2774 error ("multiple inline callers");
2775 error_found = true;
2776 }
2777 }
2778 else
2779 if (node->global.inlined_to)
2780 {
2781 error ("inlined_to pointer set for noninline callers");
2782 error_found = true;
2783 }
2784 }
2785 for (e = node->indirect_calls; e; e = e->next_callee)
2786 if (verify_edge_count_and_frequency (e))
2787 error_found = true;
2788 if (!node->callers && node->global.inlined_to)
2789 {
2790 error ("inlined_to pointer is set but no predecessors found");
2791 error_found = true;
2792 }
2793 if (node->global.inlined_to == node)
2794 {
2795 error ("inlined_to pointer refers to itself");
2796 error_found = true;
2797 }
2798
2799 if (node->clone_of)
2800 {
2801 struct cgraph_node *n;
2802 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
2803 if (n == node)
2804 break;
2805 if (!n)
2806 {
2807 error ("node has wrong clone_of");
2808 error_found = true;
2809 }
2810 }
2811 if (node->clones)
2812 {
2813 struct cgraph_node *n;
2814 for (n = node->clones; n; n = n->next_sibling_clone)
2815 if (n->clone_of != node)
2816 break;
2817 if (n)
2818 {
2819 error ("node has wrong clone list");
2820 error_found = true;
2821 }
2822 }
2823 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
2824 {
2825 error ("node is in clone list but it is not clone");
2826 error_found = true;
2827 }
2828 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
2829 {
2830 error ("node has wrong prev_clone pointer");
2831 error_found = true;
2832 }
2833 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
2834 {
2835 error ("double linked list of clones corrupted");
2836 error_found = true;
2837 }
2838
2839 if (node->analyzed && node->alias)
2840 {
2841 bool ref_found = false;
2842 int i;
2843 struct ipa_ref *ref;
2844
2845 if (node->callees)
2846 {
2847 error ("Alias has call edges");
2848 error_found = true;
2849 }
2850 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list,
2851 i, ref); i++)
2852 if (ref->use != IPA_REF_ALIAS)
2853 {
2854 error ("Alias has non-alias reference");
2855 error_found = true;
2856 }
2857 else if (ref_found)
2858 {
2859 error ("Alias has more than one alias reference");
2860 error_found = true;
2861 }
2862 else
2863 ref_found = true;
2864 if (!ref_found)
2865 {
2866 error ("Analyzed alias has no reference");
2867 error_found = true;
2868 }
2869 }
2870 if (node->analyzed && node->thunk.thunk_p)
2871 {
2872 if (!node->callees)
2873 {
2874 error ("No edge out of thunk node");
2875 error_found = true;
2876 }
2877 else if (node->callees->next_callee)
2878 {
2879 error ("More than one edge out of thunk node");
2880 error_found = true;
2881 }
2882 if (gimple_has_body_p (node->decl))
2883 {
2884 error ("Thunk is not supposed to have body");
2885 error_found = true;
2886 }
2887 }
2888 else if (node->analyzed && gimple_has_body_p (node->decl)
2889 && !TREE_ASM_WRITTEN (node->decl)
2890 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
2891 && !flag_wpa)
2892 {
2893 if (this_cfun->cfg)
2894 {
2895 pointer_set_t *stmts = pointer_set_create ();
2896 int i;
2897 struct ipa_ref *ref;
2898
2899 /* Reach the trees by walking over the CFG, and note the
2900 enclosing basic-blocks in the call edges. */
2901 FOR_EACH_BB_FN (this_block, this_cfun)
2902 {
2903 for (gsi = gsi_start_phis (this_block);
2904 !gsi_end_p (gsi); gsi_next (&gsi))
2905 pointer_set_insert (stmts, gsi_stmt (gsi));
2906 for (gsi = gsi_start_bb (this_block);
2907 !gsi_end_p (gsi);
2908 gsi_next (&gsi))
2909 {
2910 gimple stmt = gsi_stmt (gsi);
2911 pointer_set_insert (stmts, stmt);
2912 if (is_gimple_call (stmt))
2913 {
2914 struct cgraph_edge *e = cgraph_edge (node, stmt);
2915 tree decl = gimple_call_fndecl (stmt);
2916 if (e)
2917 {
2918 if (e->aux)
2919 {
2920 error ("shared call_stmt:");
2921 cgraph_debug_gimple_stmt (this_cfun, stmt);
2922 error_found = true;
2923 }
2924 if (!e->indirect_unknown_callee)
2925 {
2926 if (verify_edge_corresponds_to_fndecl (e, decl))
2927 {
2928 error ("edge points to wrong declaration:");
2929 debug_tree (e->callee->decl);
2930 fprintf (stderr," Instead of:");
2931 debug_tree (decl);
2932 error_found = true;
2933 }
2934 }
2935 else if (decl)
2936 {
2937 error ("an indirect edge with unknown callee "
2938 "corresponding to a call_stmt with "
2939 "a known declaration:");
2940 error_found = true;
2941 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
2942 }
2943 e->aux = (void *)1;
2944 }
2945 else if (decl)
2946 {
2947 error ("missing callgraph edge for call stmt:");
2948 cgraph_debug_gimple_stmt (this_cfun, stmt);
2949 error_found = true;
2950 }
2951 }
2952 }
2953 }
2954 for (i = 0;
2955 ipa_ref_list_reference_iterate (&node->ref_list, i, ref);
2956 i++)
2957 if (ref->stmt && !pointer_set_contains (stmts, ref->stmt))
2958 {
2959 error ("reference to dead statement");
2960 cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
2961 error_found = true;
2962 }
2963 pointer_set_destroy (stmts);
2964 }
2965 else
2966 /* No CFG available?! */
2967 gcc_unreachable ();
2968
2969 for (e = node->callees; e; e = e->next_callee)
2970 {
2971 if (!e->aux)
2972 {
2973 error ("edge %s->%s has no corresponding call_stmt",
2974 identifier_to_locale (e->caller->name ()),
2975 identifier_to_locale (e->callee->name ()));
2976 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
2977 error_found = true;
2978 }
2979 e->aux = 0;
2980 }
2981 for (e = node->indirect_calls; e; e = e->next_callee)
2982 {
2983 if (!e->aux && !e->speculative)
2984 {
2985 error ("an indirect edge from %s has no corresponding call_stmt",
2986 identifier_to_locale (e->caller->name ()));
2987 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
2988 error_found = true;
2989 }
2990 e->aux = 0;
2991 }
2992 }
2993 if (error_found)
2994 {
2995 dump_cgraph_node (stderr, node);
2996 internal_error ("verify_cgraph_node failed");
2997 }
2998 timevar_pop (TV_CGRAPH_VERIFY);
2999 }
3000
3001 /* Verify whole cgraph structure. */
3002 DEBUG_FUNCTION void
3003 verify_cgraph (void)
3004 {
3005 struct cgraph_node *node;
3006
3007 if (seen_error ())
3008 return;
3009
3010 FOR_EACH_FUNCTION (node)
3011 verify_cgraph_node (node);
3012 }
3013
3014 /* Given NODE, walk the alias chain to return the function NODE is an alias of.
3015 Walk through thunks, too.
3016 When AVAILABILITY is non-NULL, get the minimal availability in the chain. */
3017
3018 struct cgraph_node *
3019 cgraph_function_node (struct cgraph_node *node, enum availability *availability)
3020 {
3021 do
3022 {
3023 node = cgraph_function_or_thunk_node (node, availability);
3024 if (node->thunk.thunk_p)
3025 {
3026 node = node->callees->callee;
3027 if (availability)
3028 {
3029 enum availability a;
3030 a = cgraph_function_body_availability (node);
3031 if (a < *availability)
3032 *availability = a;
3033 }
3034 node = cgraph_function_or_thunk_node (node, availability);
3035 }
3036 } while (node && node->thunk.thunk_p);
3037 return node;
3038 }
3039
3040 /* When doing LTO, read NODE's body from disk if it is not already present. */
3041
3042 bool
3043 cgraph_get_body (struct cgraph_node *node)
3044 {
3045 struct lto_file_decl_data *file_data;
3046 const char *data, *name;
3047 size_t len;
3048 tree decl = node->decl;
3049
3050 if (DECL_RESULT (decl))
3051 return false;
3052
3053 gcc_assert (in_lto_p);
3054
3055 file_data = node->lto_file_data;
3056 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3057
3058 /* We may have renamed the declaration, e.g., a static function. */
3059 name = lto_get_decl_name_mapping (file_data, name);
3060
3061 data = lto_get_section_data (file_data, LTO_section_function_body,
3062 name, &len);
3063 if (!data)
3064 {
3065 dump_cgraph_node (stderr, node);
3066 fatal_error ("%s: section %s is missing",
3067 file_data->file_name,
3068 name);
3069 }
3070
3071 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
3072
3073 lto_input_function_body (file_data, node, data);
3074 lto_stats.num_function_bodies++;
3075 lto_free_section_data (file_data, LTO_section_function_body, name,
3076 data, len);
3077 lto_free_function_in_decl_state_for_node (node);
3078 return true;
3079 }
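
/* Illustrative sketch, not part of the original cgraph.c: in LTO mode,
   passes that need gimple for NODE call cgraph_get_body first; bodies that
   were already read in make it return without doing any work.  The helper
   name is made up.  */

static void
example_ensure_body_available (struct cgraph_node *node)
{
  if (in_lto_p && node->lto_file_data)
    cgraph_get_body (node);
}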
3080
3081 /* Verify whether the types of the arguments of STMT match those of the
3082 function declaration FNDECL. If we cannot verify this or there is a
3083 mismatch, return false. */
3084
3085 static bool
3086 gimple_check_call_args (gimple stmt, tree fndecl, bool args_count_match)
3087 {
3088 tree parms, p;
3089 unsigned int i, nargs;
3090
3091 /* Calls to internal functions always match their signature. */
3092 if (gimple_call_internal_p (stmt))
3093 return true;
3094
3095 nargs = gimple_call_num_args (stmt);
3096
3097 /* Get argument types for verification. */
3098 if (fndecl)
3099 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3100 else
3101 parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
3102
3103 /* Verify if the type of the argument matches that of the function
3104 declaration. If we cannot verify this or there is a mismatch,
3105 return false. */
3106 if (fndecl && DECL_ARGUMENTS (fndecl))
3107 {
3108 for (i = 0, p = DECL_ARGUMENTS (fndecl);
3109 i < nargs;
3110 i++, p = DECL_CHAIN (p))
3111 {
3112 tree arg;
3113 /* We cannot distinguish a varargs function from the case
3114 of excess parameters; still, deferring the inlining decision
3115 to the callee is possible. */
3116 if (!p)
3117 break;
3118 arg = gimple_call_arg (stmt, i);
3119 if (p == error_mark_node
3120 || DECL_ARG_TYPE (p) == error_mark_node
3121 || arg == error_mark_node
3122 || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
3123 && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
3124 return false;
3125 }
3126 if (args_count_match && p)
3127 return false;
3128 }
3129 else if (parms)
3130 {
3131 for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
3132 {
3133 tree arg;
3134 /* If this is a varargs function, defer the inlining decision
3135 to the callee. */
3136 if (!p)
3137 break;
3138 arg = gimple_call_arg (stmt, i);
3139 if (TREE_VALUE (p) == error_mark_node
3140 || arg == error_mark_node
3141 || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
3142 || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
3143 && !fold_convertible_p (TREE_VALUE (p), arg)))
3144 return false;
3145 }
3146 }
3147 else
3148 {
3149 if (nargs != 0)
3150 return false;
3151 }
3152 return true;
3153 }
3154
3155 /* Verify whether the types of the arguments and the lhs of CALL_STMT match
3156 those of the function declaration CALLEE. If ARGS_COUNT_MATCH is
3157 true, the argument count needs to be the same.
3158 If we cannot verify this or there is a mismatch, return false. */
3159
3160 bool
3161 gimple_check_call_matching_types (gimple call_stmt, tree callee,
3162 bool args_count_match)
3163 {
3164 tree lhs;
3165
3166 if ((DECL_RESULT (callee)
3167 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
3168 && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
3169 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
3170 TREE_TYPE (lhs))
3171 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
3172 || !gimple_check_call_args (call_stmt, callee, args_count_match))
3173 return false;
3174 return true;
3175 }
3176
3177 #include "gt-cgraph.h"