1 /* Callgraph handling code.
2 Copyright (C) 2003-2020 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file contains basic routines manipulating the call graph.
22
23 The call-graph is a data structure designed for inter-procedural
24 optimization. It represents a multi-graph where nodes are functions
25 (symbols within symbol table) and edges are call sites. */
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "backend.h"
31 #include "target.h"
32 #include "rtl.h"
33 #include "tree.h"
34 #include "gimple.h"
35 #include "predict.h"
36 #include "alloc-pool.h"
37 #include "gimple-ssa.h"
38 #include "cgraph.h"
39 #include "lto-streamer.h"
40 #include "fold-const.h"
41 #include "varasm.h"
42 #include "calls.h"
43 #include "print-tree.h"
44 #include "langhooks.h"
45 #include "intl.h"
46 #include "tree-eh.h"
47 #include "gimple-iterator.h"
48 #include "tree-cfg.h"
49 #include "tree-ssa.h"
50 #include "value-prof.h"
51 #include "ipa-utils.h"
52 #include "symbol-summary.h"
53 #include "tree-vrp.h"
54 #include "ipa-prop.h"
55 #include "ipa-fnsummary.h"
56 #include "cfgloop.h"
57 #include "gimple-pretty-print.h"
58 #include "tree-dfa.h"
59 #include "profile.h"
60 #include "context.h"
61 #include "gimplify.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "selftest.h"
65 #include "tree-into-ssa.h"
66
67 /* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
68 #include "tree-pass.h"
69
70 /* Queue of cgraph nodes scheduled to be lowered. */
71 symtab_node *x_cgraph_nodes_queue;
72 #define cgraph_nodes_queue ((cgraph_node *)x_cgraph_nodes_queue)
73
74 /* Symbol table global context. */
75 symbol_table *symtab;
76
77 /* List of hooks triggered on cgraph_edge events. */
78 struct cgraph_edge_hook_list {
79 cgraph_edge_hook hook;
80 void *data;
81 struct cgraph_edge_hook_list *next;
82 };
83
84 /* List of hooks triggered on cgraph_node events. */
85 struct cgraph_node_hook_list {
86 cgraph_node_hook hook;
87 void *data;
88 struct cgraph_node_hook_list *next;
89 };
90
91 /* List of hooks triggered on events involving two cgraph_edges. */
92 struct cgraph_2edge_hook_list {
93 cgraph_2edge_hook hook;
94 void *data;
95 struct cgraph_2edge_hook_list *next;
96 };
97
98 /* List of hooks triggered on events involving two cgraph_nodes. */
99 struct cgraph_2node_hook_list {
100 cgraph_2node_hook hook;
101 void *data;
102 struct cgraph_2node_hook_list *next;
103 };
104
105 /* Hash descriptor for cgraph_function_version_info. */
106
107 struct function_version_hasher : ggc_ptr_hash<cgraph_function_version_info>
108 {
109 static hashval_t hash (cgraph_function_version_info *);
110 static bool equal (cgraph_function_version_info *,
111 cgraph_function_version_info *);
112 };
113
114 /* Map a cgraph_node to cgraph_function_version_info using this htab.
115 The cgraph_function_version_info has a THIS_NODE field that is the
116 corresponding cgraph_node. */
117
118 static GTY(()) hash_table<function_version_hasher> *cgraph_fnver_htab = NULL;
119
120 /* Hash function for cgraph_fnver_htab. */
121 hashval_t
122 function_version_hasher::hash (cgraph_function_version_info *ptr)
123 {
124 int uid = ptr->this_node->get_uid ();
125 return (hashval_t)(uid);
126 }
127
128 /* eq function for cgraph_fnver_htab. */
129 bool
130 function_version_hasher::equal (cgraph_function_version_info *n1,
131 cgraph_function_version_info *n2)
132 {
133 return n1->this_node->get_uid () == n2->this_node->get_uid ();
134 }
135
136 /* GC root holding the most recently allocated version info node. */
137 static GTY(()) struct cgraph_function_version_info *
138 version_info_node = NULL;
139
140 /* Return true if NODE's address can be compared. */
141
142 bool
143 symtab_node::address_can_be_compared_p ()
144 {
145 /* Address of virtual tables and functions is never compared. */
146 if (DECL_VIRTUAL_P (decl))
147 return false;
148 /* Address of C++ cdtors is never compared. */
149 if (is_a <cgraph_node *> (this)
150 && (DECL_CXX_CONSTRUCTOR_P (decl)
151 || DECL_CXX_DESTRUCTOR_P (decl)))
152 return false;
153 /* Addresses of constant pool symbols are never compared;
154 flag_merge_constants permits us to assume the same for readonly vars. */
155 if (is_a <varpool_node *> (this)
156 && (DECL_IN_CONSTANT_POOL (decl)
157 || (flag_merge_constants >= 2
158 && TREE_READONLY (decl) && !TREE_THIS_VOLATILE (decl))))
159 return false;
160 return true;
161 }
162
163 /* Get the cgraph_function_version_info node corresponding to node. */
164 cgraph_function_version_info *
165 cgraph_node::function_version (void)
166 {
167 cgraph_function_version_info key;
168 key.this_node = this;
169
170 if (cgraph_fnver_htab == NULL)
171 return NULL;
172
173 return cgraph_fnver_htab->find (&key);
174 }
175
176 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
177 corresponding to cgraph_node NODE. */
178 cgraph_function_version_info *
179 cgraph_node::insert_new_function_version (void)
180 {
181 version_info_node = NULL;
182 version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
183 version_info_node->this_node = this;
184
185 if (cgraph_fnver_htab == NULL)
186 cgraph_fnver_htab = hash_table<function_version_hasher>::create_ggc (2);
187
188 *cgraph_fnver_htab->find_slot (version_info_node, INSERT)
189 = version_info_node;
190 return version_info_node;
191 }
192
193 /* Remove the cgraph_function_version_info node given by DECL_V. */
194 static void
195 delete_function_version (cgraph_function_version_info *decl_v)
196 {
197 if (decl_v == NULL)
198 return;
199
200 if (version_info_node == decl_v)
201 version_info_node = NULL;
202
203 if (decl_v->prev != NULL)
204 decl_v->prev->next = decl_v->next;
205
206 if (decl_v->next != NULL)
207 decl_v->next->prev = decl_v->prev;
208
209 if (cgraph_fnver_htab != NULL)
210 cgraph_fnver_htab->remove_elt (decl_v);
211 }
212
213 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
214 DECL is a duplicate declaration. */
215 void
216 cgraph_node::delete_function_version_by_decl (tree decl)
217 {
218 cgraph_node *decl_node = cgraph_node::get (decl);
219
220 if (decl_node == NULL)
221 return;
222
223 delete_function_version (decl_node->function_version ());
224
225 decl_node->remove ();
226 }
227
228 /* Record that DECL1 and DECL2 are semantically identical function
229 versions. */
230 void
231 cgraph_node::record_function_versions (tree decl1, tree decl2)
232 {
233 cgraph_node *decl1_node = cgraph_node::get_create (decl1);
234 cgraph_node *decl2_node = cgraph_node::get_create (decl2);
235 cgraph_function_version_info *decl1_v = NULL;
236 cgraph_function_version_info *decl2_v = NULL;
237 cgraph_function_version_info *before;
238 cgraph_function_version_info *after;
239
240 gcc_assert (decl1_node != NULL && decl2_node != NULL);
241 decl1_v = decl1_node->function_version ();
242 decl2_v = decl2_node->function_version ();
243
244 if (decl1_v != NULL && decl2_v != NULL)
245 return;
246
247 if (decl1_v == NULL)
248 decl1_v = decl1_node->insert_new_function_version ();
249
250 if (decl2_v == NULL)
251 decl2_v = decl2_node->insert_new_function_version ();
252
253 /* Chain decl2_v and decl1_v. All semantically identical versions
254 will be chained together. */
255
256 before = decl1_v;
257 after = decl2_v;
258
259 while (before->next != NULL)
260 before = before->next;
261
262 while (after->prev != NULL)
263 after = after->prev;
264
265 before->next = after;
266 after->prev = before;
267 }
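
/* A hedged illustration (assumed example, not part of the original sources):
   for a multiversioned function such as

     __attribute__ ((target ("default"))) int foo (void);
     __attribute__ ((target ("avx2"))) int foo (void);

   the front end is expected to call record_function_versions on each pair of
   semantically identical declarations.  That links their
   cgraph_function_version_info records into a single prev/next chain, so
   every version is reachable from any other via function_version ().  */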
268
269 /* Initialize callgraph dump file. */
270
271 void
272 symbol_table::initialize (void)
273 {
274 if (!dump_file)
275 dump_file = dump_begin (TDI_cgraph, NULL);
276
277 if (!ipa_clones_dump_file)
278 ipa_clones_dump_file = dump_begin (TDI_clones, NULL);
279 }
280
281 /* Allocate new callgraph node and insert it into basic data structures. */
282
283 cgraph_node *
284 symbol_table::create_empty (void)
285 {
286 cgraph_count++;
287 return new (ggc_alloc<cgraph_node> ()) cgraph_node (cgraph_max_uid++);
288 }
289
290 /* Register HOOK to be called with DATA on each removed edge. */
291 cgraph_edge_hook_list *
292 symbol_table::add_edge_removal_hook (cgraph_edge_hook hook, void *data)
293 {
294 cgraph_edge_hook_list *entry;
295 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
296
297 entry = (cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
298 entry->hook = hook;
299 entry->data = data;
300 entry->next = NULL;
301 while (*ptr)
302 ptr = &(*ptr)->next;
303 *ptr = entry;
304 return entry;
305 }
306
307 /* Remove ENTRY from the list of hooks called on removing edges. */
308 void
309 symbol_table::remove_edge_removal_hook (cgraph_edge_hook_list *entry)
310 {
311 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
312
313 while (*ptr != entry)
314 ptr = &(*ptr)->next;
315 *ptr = entry->next;
316 free (entry);
317 }
318
319 /* Call all edge removal hooks. */
320 void
321 symbol_table::call_edge_removal_hooks (cgraph_edge *e)
322 {
323 cgraph_edge_hook_list *entry = m_first_edge_removal_hook;
324 while (entry)
325 {
326 entry->hook (e, entry->data);
327 entry = entry->next;
328 }
329 }
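
/* A hedged usage sketch (the callback and data names below are hypothetical,
   not taken from this file): an IPA pass keeping per-edge data in a side
   table could register an edge removal hook as

     static void
     my_edge_removal_callback (cgraph_edge *e, void *data)
     {
       // Forget whatever the pass recorded about E in DATA.
     }

     cgraph_edge_hook_list *hook
       = symtab->add_edge_removal_hook (my_edge_removal_callback, my_data);

   and later unregister it with symtab->remove_edge_removal_hook (hook).
   The node/edge insertion and duplication hooks below follow the same
   register/remove/call pattern.  */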
330
331 /* Register HOOK to be called with DATA on each removed node. */
332 cgraph_node_hook_list *
333 symbol_table::add_cgraph_removal_hook (cgraph_node_hook hook, void *data)
334 {
335 cgraph_node_hook_list *entry;
336 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
337
338 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
339 entry->hook = hook;
340 entry->data = data;
341 entry->next = NULL;
342 while (*ptr)
343 ptr = &(*ptr)->next;
344 *ptr = entry;
345 return entry;
346 }
347
348 /* Remove ENTRY from the list of hooks called on removing nodes. */
349 void
350 symbol_table::remove_cgraph_removal_hook (cgraph_node_hook_list *entry)
351 {
352 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
353
354 while (*ptr != entry)
355 ptr = &(*ptr)->next;
356 *ptr = entry->next;
357 free (entry);
358 }
359
360 /* Call all node removal hooks. */
361 void
362 symbol_table::call_cgraph_removal_hooks (cgraph_node *node)
363 {
364 cgraph_node_hook_list *entry = m_first_cgraph_removal_hook;
365 while (entry)
366 {
367 entry->hook (node, entry->data);
368 entry = entry->next;
369 }
370 }
371
372 /* Call all node insertion hooks. */
373 void
374 symbol_table::call_cgraph_insertion_hooks (cgraph_node *node)
375 {
376 cgraph_node_hook_list *entry = m_first_cgraph_insertion_hook;
377 while (entry)
378 {
379 entry->hook (node, entry->data);
380 entry = entry->next;
381 }
382 }
383
384
385 /* Register HOOK to be called with DATA on each inserted node. */
386 cgraph_node_hook_list *
387 symbol_table::add_cgraph_insertion_hook (cgraph_node_hook hook, void *data)
388 {
389 cgraph_node_hook_list *entry;
390 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
391
392 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
393 entry->hook = hook;
394 entry->data = data;
395 entry->next = NULL;
396 while (*ptr)
397 ptr = &(*ptr)->next;
398 *ptr = entry;
399 return entry;
400 }
401
402 /* Remove ENTRY from the list of hooks called on inserted nodes. */
403 void
404 symbol_table::remove_cgraph_insertion_hook (cgraph_node_hook_list *entry)
405 {
406 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
407
408 while (*ptr != entry)
409 ptr = &(*ptr)->next;
410 *ptr = entry->next;
411 free (entry);
412 }
413
414 /* Register HOOK to be called with DATA on each duplicated edge. */
415 cgraph_2edge_hook_list *
416 symbol_table::add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
417 {
418 cgraph_2edge_hook_list *entry;
419 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
420
421 entry = (cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
422 entry->hook = hook;
423 entry->data = data;
424 entry->next = NULL;
425 while (*ptr)
426 ptr = &(*ptr)->next;
427 *ptr = entry;
428 return entry;
429 }
430
431 /* Remove ENTRY from the list of hooks called on duplicating edges. */
432 void
433 symbol_table::remove_edge_duplication_hook (cgraph_2edge_hook_list *entry)
434 {
435 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
436
437 while (*ptr != entry)
438 ptr = &(*ptr)->next;
439 *ptr = entry->next;
440 free (entry);
441 }
442
443 /* Call all edge duplication hooks. */
444 void
445 symbol_table::call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2)
446 {
447 cgraph_2edge_hook_list *entry = m_first_edge_duplicated_hook;
448 while (entry)
449 {
450 entry->hook (cs1, cs2, entry->data);
451 entry = entry->next;
452 }
453 }
454
455 /* Register HOOK to be called with DATA on each duplicated node. */
456 cgraph_2node_hook_list *
457 symbol_table::add_cgraph_duplication_hook (cgraph_2node_hook hook, void *data)
458 {
459 cgraph_2node_hook_list *entry;
460 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
461
462 entry = (cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
463 entry->hook = hook;
464 entry->data = data;
465 entry->next = NULL;
466 while (*ptr)
467 ptr = &(*ptr)->next;
468 *ptr = entry;
469 return entry;
470 }
471
472 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
473 void
474 symbol_table::remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry)
475 {
476 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
477
478 while (*ptr != entry)
479 ptr = &(*ptr)->next;
480 *ptr = entry->next;
481 free (entry);
482 }
483
484 /* Call all node duplication hooks. */
485 void
486 symbol_table::call_cgraph_duplication_hooks (cgraph_node *node,
487 cgraph_node *node2)
488 {
489 cgraph_2node_hook_list *entry = m_first_cgraph_duplicated_hook;
490 while (entry)
491 {
492 entry->hook (node, node2, entry->data);
493 entry = entry->next;
494 }
495 }
496
497 /* Return cgraph node assigned to DECL. Create new one when needed. */
498
499 cgraph_node *
500 cgraph_node::create (tree decl)
501 {
502 cgraph_node *node = symtab->create_empty ();
503 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
504
505 node->decl = decl;
506
507 if ((flag_openacc || flag_openmp)
508 && lookup_attribute ("omp declare target", DECL_ATTRIBUTES (decl)))
509 {
510 node->offloadable = 1;
511 if (ENABLE_OFFLOADING)
512 g->have_offload = true;
513 }
514
515 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
516 node->ifunc_resolver = true;
517
518 node->register_symbol ();
519
520 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
521 {
522 node->origin = cgraph_node::get_create (DECL_CONTEXT (decl));
523 node->next_nested = node->origin->nested;
524 node->origin->nested = node;
525 }
526 return node;
527 }
528
529 /* Try to find a call graph node for declaration DECL and if it does not exist
530 or if it corresponds to an inline clone, create a new one. */
531
532 cgraph_node *
533 cgraph_node::get_create (tree decl)
534 {
535 cgraph_node *first_clone = cgraph_node::get (decl);
536
537 if (first_clone && !first_clone->inlined_to)
538 return first_clone;
539
540 cgraph_node *node = cgraph_node::create (decl);
541 if (first_clone)
542 {
543 first_clone->clone_of = node;
544 node->clones = first_clone;
545 node->order = first_clone->order;
546 symtab->symtab_prevail_in_asm_name_hash (node);
547 node->decl->decl_with_vis.symtab_node = node;
548 if (dump_file)
549 fprintf (dump_file, "Introduced new external node "
550 "(%s) and turned into root of the clone tree.\n",
551 node->dump_name ());
552 }
553 else if (dump_file)
554 fprintf (dump_file, "Introduced new external node "
555 "(%s).\n", node->dump_name ());
556 return node;
557 }
558
559 /* Mark ALIAS as an alias to TARGET. TARGET is either a FUNCTION_DECL or
560 an IDENTIFIER_NODE (an assembler name) the alias is associated with. */
561
562 cgraph_node *
563 cgraph_node::create_alias (tree alias, tree target)
564 {
565 cgraph_node *alias_node;
566
567 gcc_assert (TREE_CODE (target) == FUNCTION_DECL
568 || TREE_CODE (target) == IDENTIFIER_NODE);
569 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
570 alias_node = cgraph_node::get_create (alias);
571 gcc_assert (!alias_node->definition);
572 alias_node->alias_target = target;
573 alias_node->definition = true;
574 alias_node->alias = true;
575 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
576 alias_node->transparent_alias = alias_node->weakref = true;
577 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (alias)))
578 alias_node->ifunc_resolver = true;
579 return alias_node;
580 }
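
/* A hedged example (assumed, for illustration only): a source-level alias
   such as

     int impl (void) { return 0; }
     int alias_fn (void) __attribute__ ((alias ("impl")));

   is expected to reach the callgraph as create_alias (<decl of alias_fn>,
   <decl of impl>); if the alias additionally carries the "weakref"
   attribute, the transparent_alias and weakref flags are set as done
   above.  */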
581
582 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
583 and NULL otherwise.
584 Same body aliases are output whenever the body of DECL is output,
585 and cgraph_node::get (ALIAS) transparently returns
586 cgraph_node::get (DECL). */
587
588 cgraph_node *
589 cgraph_node::create_same_body_alias (tree alias, tree decl)
590 {
591 cgraph_node *n;
592
593 /* If aliases aren't supported by the assembler, fail. */
594 if (!TARGET_SUPPORTS_ALIASES)
595 return NULL;
596
597 /* Langhooks can create same body aliases of symbols not defined.
598 Those are useless. Drop them on the floor. */
599 if (symtab->global_info_ready)
600 return NULL;
601
602 n = cgraph_node::create_alias (alias, decl);
603 n->cpp_implicit_alias = true;
604 if (symtab->cpp_implicit_aliases_done)
605 n->resolve_alias (cgraph_node::get (decl));
606 return n;
607 }
608
609 /* Add thunk alias into callgraph. The alias declaration is ALIAS and it
610 aliases DECL with adjustments made to the first parameter.
611 See comments in struct cgraph_thunk_info for details on the parameters. */
612
613 cgraph_node *
614 cgraph_node::create_thunk (tree alias, tree, bool this_adjusting,
615 HOST_WIDE_INT fixed_offset,
616 HOST_WIDE_INT virtual_value,
617 HOST_WIDE_INT indirect_offset,
618 tree virtual_offset,
619 tree real_alias)
620 {
621 cgraph_node *node;
622
623 node = cgraph_node::get (alias);
624 if (node)
625 node->reset ();
626 else
627 node = cgraph_node::create (alias);
628
629 /* Make sure that VIRTUAL_OFFSET is in sync with VIRTUAL_VALUE. */
630 gcc_checking_assert (virtual_offset
631 ? virtual_value == wi::to_wide (virtual_offset)
632 : virtual_value == 0);
633
634 node->thunk.fixed_offset = fixed_offset;
635 node->thunk.virtual_value = virtual_value;
636 node->thunk.indirect_offset = indirect_offset;
637 node->thunk.alias = real_alias;
638 node->thunk.this_adjusting = this_adjusting;
639 node->thunk.virtual_offset_p = virtual_offset != NULL;
640 node->thunk.thunk_p = true;
641 node->definition = true;
642
643 return node;
644 }
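
/* A hedged illustration (assumed C++ example, not from this file): thunks of
   this kind typically come from the C++ front end for multiple inheritance,
   e.g. for

     struct A { virtual void f (); };
     struct B { virtual void g (); };
     struct C : A, B { void g (); };

   calling C::g through a B* needs a this-adjusting thunk that shifts the
   incoming this pointer by the offset of the B base before transferring
   control to the real C::g body; FIXED_OFFSET carries that adjustment.  */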
645
646 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
647 Return NULL if there's no such node. */
648
649 cgraph_node *
650 cgraph_node::get_for_asmname (tree asmname)
651 {
652 /* We do not want to look at inline clones. */
653 for (symtab_node *node = symtab_node::get_for_asmname (asmname);
654 node;
655 node = node->next_sharing_asm_name)
656 {
657 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
658 if (cn && !cn->inlined_to)
659 return cn;
660 }
661 return NULL;
662 }
663
664 /* Returns a hash value for X (which really is a cgraph_edge). */
665
666 hashval_t
667 cgraph_edge_hasher::hash (cgraph_edge *e)
668 {
669 /* This is a really poor hash function, but it is what htab_hash_pointer
670 uses. */
671 return (hashval_t) ((intptr_t)e->call_stmt >> 3);
672 }
673
674 /* Returns a hash value for the call statement CALL_STMT. */
675
676 hashval_t
677 cgraph_edge_hasher::hash (gimple *call_stmt)
678 {
679 /* This is a really poor hash function, but it is what htab_hash_pointer
680 uses. */
681 return (hashval_t) ((intptr_t)call_stmt >> 3);
682 }
683
684 /* Return true if the call_stmt of cgraph_edge X is the statement Y. */
685
686 inline bool
687 cgraph_edge_hasher::equal (cgraph_edge *x, gimple *y)
688 {
689 return x->call_stmt == y;
690 }
691
692 /* Update call graph edge E in the call site hash of its caller. */
693
694 static inline void
695 cgraph_update_edge_in_call_site_hash (cgraph_edge *e)
696 {
697 gimple *call = e->call_stmt;
698 *e->caller->call_site_hash->find_slot_with_hash
699 (call, cgraph_edge_hasher::hash (call), INSERT) = e;
700 }
701
702 /* Add call graph edge E to call site hash of its caller. */
703
704 static inline void
705 cgraph_add_edge_to_call_site_hash (cgraph_edge *e)
706 {
707 /* There are two speculative edges for every statement (one direct,
708 one indirect); always hash the direct one. */
709 if (e->speculative && e->indirect_unknown_callee)
710 return;
711 cgraph_edge **slot = e->caller->call_site_hash->find_slot_with_hash
712 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt), INSERT);
713 if (*slot)
714 {
715 gcc_assert (((cgraph_edge *)*slot)->speculative);
716 if (e->callee)
717 *slot = e;
718 return;
719 }
720 gcc_assert (!*slot || e->speculative);
721 *slot = e;
722 }
723
724 /* Return the callgraph edge representing the GIMPLE_CALL statement
725 CALL_STMT. */
726
727 cgraph_edge *
728 cgraph_node::get_edge (gimple *call_stmt)
729 {
730 cgraph_edge *e, *e2;
731 int n = 0;
732
733 if (call_site_hash)
734 return call_site_hash->find_with_hash
735 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
736
737 /* This loop may turn out to be a performance problem. In that case, adding
738 hash tables into call nodes with very many edges is probably the best
739 solution. It is not a good idea to add a pointer into the CALL_EXPR itself
740 because we want to make it possible to have multiple cgraph nodes representing
741 different clones of the same body before the body is actually cloned. */
742 for (e = callees; e; e = e->next_callee)
743 {
744 if (e->call_stmt == call_stmt)
745 break;
746 n++;
747 }
748
749 if (!e)
750 for (e = indirect_calls; e; e = e->next_callee)
751 {
752 if (e->call_stmt == call_stmt)
753 break;
754 n++;
755 }
756
757 if (n > 100)
758 {
759 call_site_hash = hash_table<cgraph_edge_hasher>::create_ggc (120);
760 for (e2 = callees; e2; e2 = e2->next_callee)
761 cgraph_add_edge_to_call_site_hash (e2);
762 for (e2 = indirect_calls; e2; e2 = e2->next_callee)
763 cgraph_add_edge_to_call_site_hash (e2);
764 }
765
766 return e;
767 }
768
769
770 /* Change field call_stmt of edge E to NEW_STMT. If UPDATE_SPECULATIVE and E
771 is any component of speculative edge, then update all components.
772 Speculations can be resolved in the process and EDGE can be removed and
773 deallocated. Return the edge that now represents the call. */
774
775 cgraph_edge *
776 cgraph_edge::set_call_stmt (cgraph_edge *e, gcall *new_stmt,
777 bool update_speculative)
778 {
779 tree decl;
780
781 /* A speculative edge has three components; update all of them
782 when asked to. */
783 if (update_speculative && e->speculative)
784 {
785 cgraph_edge *direct, *indirect;
786 ipa_ref *ref;
787 bool e_indirect = e->indirect_unknown_callee;
788
789 e->speculative_call_info (direct, indirect, ref);
790 ref->stmt = new_stmt;
791 cgraph_edge *d2 = set_call_stmt (direct, new_stmt, false);
792 gcc_assert (direct == d2);
793 indirect = set_call_stmt (indirect, new_stmt, false);
794 return e_indirect ? indirect : direct;
795 }
796
797 /* Only direct speculative edges go to call_site_hash. */
798 if (e->caller->call_site_hash
799 && (!e->speculative || !e->indirect_unknown_callee))
800 e->caller->call_site_hash->remove_elt_with_hash
801 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt));
802
803 e->call_stmt = new_stmt;
804 if (e->indirect_unknown_callee
805 && (decl = gimple_call_fndecl (new_stmt)))
806 {
807 /* Constant propagation (and possibly also inlining?) can turn an
808 indirect call into a direct one. */
809 cgraph_node *new_callee = cgraph_node::get (decl);
810
811 gcc_checking_assert (new_callee);
812 e = make_direct (e, new_callee);
813 }
814
815 function *fun = DECL_STRUCT_FUNCTION (e->caller->decl);
816 e->can_throw_external = stmt_can_throw_external (fun, new_stmt);
817 if (e->caller->call_site_hash)
818 cgraph_add_edge_to_call_site_hash (e);
819 return e;
820 }
821
822 /* Allocate a cgraph_edge structure and fill it with data according to the
823 parameters of which only CALLEE can be NULL (when creating an indirect call
824 edge). CLONING_P should be set if properties that are copied from an
825 original edge should not be calculated. */
826
827 cgraph_edge *
828 symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
829 gcall *call_stmt, profile_count count,
830 bool indir_unknown_callee, bool cloning_p)
831 {
832 cgraph_edge *edge;
833
834 /* LTO does not actually have access to the call_stmt since these
835 have not been loaded yet. */
836 if (call_stmt)
837 {
838 /* This is a rather expensive check possibly triggering
839 construction of call stmt hashtable. */
840 cgraph_edge *e;
841 gcc_checking_assert (!(e = caller->get_edge (call_stmt))
842 || e->speculative);
843
844 gcc_assert (is_gimple_call (call_stmt));
845 }
846
847 edge = ggc_alloc<cgraph_edge> ();
848 edge->m_summary_id = -1;
849 edges_count++;
850
851 gcc_assert (++edges_max_uid != 0);
852 edge->m_uid = edges_max_uid;
853 edge->aux = NULL;
854 edge->caller = caller;
855 edge->callee = callee;
856 edge->prev_caller = NULL;
857 edge->next_caller = NULL;
858 edge->prev_callee = NULL;
859 edge->next_callee = NULL;
860 edge->lto_stmt_uid = 0;
861 edge->target_prob = 0;
862 edge->speculative_id = 0;
863
864 edge->count = count;
865 edge->call_stmt = call_stmt;
866 edge->indirect_info = NULL;
867 edge->indirect_inlining_edge = 0;
868 edge->speculative = false;
869 edge->indirect_unknown_callee = indir_unknown_callee;
870 if (call_stmt && caller->call_site_hash)
871 cgraph_add_edge_to_call_site_hash (edge);
872
873 if (cloning_p)
874 return edge;
875
876 edge->can_throw_external
877 = call_stmt ? stmt_can_throw_external (DECL_STRUCT_FUNCTION (caller->decl),
878 call_stmt) : false;
879 edge->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
880 edge->call_stmt_cannot_inline_p = false;
881
882 if (opt_for_fn (edge->caller->decl, flag_devirtualize)
883 && call_stmt && DECL_STRUCT_FUNCTION (caller->decl))
884 edge->in_polymorphic_cdtor
885 = decl_maybe_in_construction_p (NULL, NULL, call_stmt,
886 caller->decl);
887 else
888 edge->in_polymorphic_cdtor = caller->thunk.thunk_p;
889
890 return edge;
891 }
892
893 /* Create edge from a given function to CALLEE in the cgraph. CLONING_P should
894 be set if properties that are copied from an original edge should not be
895 calculated. */
896
897 cgraph_edge *
898 cgraph_node::create_edge (cgraph_node *callee,
899 gcall *call_stmt, profile_count count, bool cloning_p)
900 {
901 cgraph_edge *edge = symtab->create_edge (this, callee, call_stmt, count,
902 false, cloning_p);
903
904 if (!cloning_p)
905 initialize_inline_failed (edge);
906
907 edge->next_caller = callee->callers;
908 if (callee->callers)
909 callee->callers->prev_caller = edge;
910 edge->next_callee = callees;
911 if (callees)
912 callees->prev_callee = edge;
913 callees = edge;
914 callee->callers = edge;
915
916 return edge;
917 }
918
919 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
920
921 cgraph_indirect_call_info *
922 cgraph_allocate_init_indirect_info (void)
923 {
924 cgraph_indirect_call_info *ii;
925
926 ii = ggc_cleared_alloc<cgraph_indirect_call_info> ();
927 ii->param_index = -1;
928 return ii;
929 }
930
931 /* Create an indirect edge with a yet-undetermined callee where the call
932 statement destination is a formal parameter of the caller with index
933 PARAM_INDEX. CLONING_P should be set if properties that are copied from an
934 original edge should not be calculated and indirect_info structure should
935 not be calculated. */
936
937 cgraph_edge *
938 cgraph_node::create_indirect_edge (gcall *call_stmt, int ecf_flags,
939 profile_count count,
940 bool cloning_p)
941 {
942 cgraph_edge *edge = symtab->create_edge (this, NULL, call_stmt, count, true,
943 cloning_p);
944 tree target;
945
946 if (!cloning_p)
947 initialize_inline_failed (edge);
948
949 edge->indirect_info = cgraph_allocate_init_indirect_info ();
950 edge->indirect_info->ecf_flags = ecf_flags;
951 edge->indirect_info->vptr_changed = true;
952
953 /* Record polymorphic call info. */
954 if (!cloning_p
955 && call_stmt
956 && (target = gimple_call_fn (call_stmt))
957 && virtual_method_call_p (target))
958 {
959 ipa_polymorphic_call_context context (decl, target, call_stmt);
960
961 /* Only record types can have virtual calls. */
962 edge->indirect_info->polymorphic = true;
963 edge->indirect_info->param_index = -1;
964 edge->indirect_info->otr_token
965 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
966 edge->indirect_info->otr_type = obj_type_ref_class (target);
967 gcc_assert (TREE_CODE (edge->indirect_info->otr_type) == RECORD_TYPE);
968 edge->indirect_info->context = context;
969 }
970
971 edge->next_callee = indirect_calls;
972 if (indirect_calls)
973 indirect_calls->prev_callee = edge;
974 indirect_calls = edge;
975
976 return edge;
977 }
978
979 /* Remove the edge from the list of the callees of the caller. */
980
981 void
982 cgraph_edge::remove_caller (void)
983 {
984 if (prev_callee)
985 prev_callee->next_callee = next_callee;
986 if (next_callee)
987 next_callee->prev_callee = prev_callee;
988 if (!prev_callee)
989 {
990 if (indirect_unknown_callee)
991 caller->indirect_calls = next_callee;
992 else
993 caller->callees = next_callee;
994 }
995 if (caller->call_site_hash)
996 caller->call_site_hash->remove_elt_with_hash
997 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
998 }
999
1000 /* Put the edge onto the free list. */
1001
1002 void
1003 symbol_table::free_edge (cgraph_edge *e)
1004 {
1005 edges_count--;
1006 if (e->m_summary_id != -1)
1007 edge_released_summary_ids.safe_push (e->m_summary_id);
1008
1009 if (e->indirect_info)
1010 ggc_free (e->indirect_info);
1011 ggc_free (e);
1012 }
1013
1014 /* Remove the edge in the cgraph. */
1015
1016 void
1017 cgraph_edge::remove (cgraph_edge *edge)
1018 {
1019 /* Call all edge removal hooks. */
1020 symtab->call_edge_removal_hooks (edge);
1021
1022 if (!edge->indirect_unknown_callee)
1023 /* Remove from callers list of the callee. */
1024 edge->remove_callee ();
1025
1026 /* Remove from callees list of the callers. */
1027 edge->remove_caller ();
1028
1029 /* Put the edge onto the free list. */
1030 symtab->free_edge (edge);
1031 }
1032
1033 /* Turn the edge into a speculative call to N2. Update
1034 the profile so the direct call is taken DIRECT_COUNT
1035 times.
1036
1037 At clone materialization time, the indirect call E will
1038 be expanded as:
1039
1040 if (call_dest == N2)
1041 n2 ();
1042 else
1043 call_dest ();
1044
1045 At this time the function just creates the direct call,
1046 the reference representing the if conditional and attaches
1047 them all to the original indirect call statement.
1048
1049 speculative_id is used to link direct calls with their corresponding
1050 IPA_REF_ADDR references when representing speculative calls.
1051
1052 target_prob is the probability of the speculative call.
1053
1054 Return direct edge created. */
1055
1056 cgraph_edge *
1057 cgraph_edge::make_speculative (cgraph_node *n2, profile_count direct_count,
1058 unsigned int speculative_id, int target_prob)
1059 {
1060 cgraph_node *n = caller;
1061 ipa_ref *ref = NULL;
1062 cgraph_edge *e2;
1063
1064 if (dump_file)
1065 fprintf (dump_file, "Indirect call -> speculative call %s => %s\n",
1066 n->dump_name (), n2->dump_name ());
1067 speculative = true;
1068 e2 = n->create_edge (n2, call_stmt, direct_count);
1069 initialize_inline_failed (e2);
1070 e2->speculative = true;
1071 if (TREE_NOTHROW (n2->decl))
1072 e2->can_throw_external = false;
1073 else
1074 e2->can_throw_external = can_throw_external;
1075 e2->lto_stmt_uid = lto_stmt_uid;
1076 e2->speculative_id = speculative_id;
1077 e2->target_prob = target_prob;
1078 e2->in_polymorphic_cdtor = in_polymorphic_cdtor;
1079 count -= e2->count;
1080 symtab->call_edge_duplication_hooks (this, e2);
1081 ref = n->create_reference (n2, IPA_REF_ADDR, call_stmt);
1082 ref->lto_stmt_uid = lto_stmt_uid;
1083 ref->speculative_id = speculative_id;
1084 ref->speculative = speculative;
1085 n2->mark_address_taken ();
1086 return e2;
1087 }
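
/* A hedged usage sketch (the variable names and the count split below are
   illustrative assumptions, not taken from this file): a pass that has
   determined that indirect edge E most likely calls LIKELY_TARGET could
   speculate it roughly as

     if (e->indirect_unknown_callee && likely_target->definition)
       {
         cgraph_edge *direct
           = e->make_speculative (likely_target,
                                  e->count.apply_scale (8, 10), 0, 0);
       }

   which keeps E as the indirect fallback and creates the direct edge, the
   IPA_REF_ADDR reference and the profile split described above.  */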
1088
1089 /* Speculative calls represent a transformation of indirect calls
1090 which may be later inserted into gimple in the following form:
1091
1092 if (call_dest == target1)
1093 target1 ();
1094 else if (call_dest == target2)
1095 target2 ();
1096 else
1097 call_dest ();
1098
1099 This is a win in the case when target1 and target2 are common values for
1100 call_dest as determined by ipa-devirt or indirect call profiling.
1101 In particular this may enable inlining and other optimizations.
1102
1103 Speculative call consists of the following main components:
1104
1105 1) One or more "speculative" direct calls (num_speculative_call_targets on
1106 the speculative indirect call records how many of them belong to it)
1107 2) One or more IPA_REF_ADDR references (representing the fact that code above
1108 takes address of target1 and target2)
1109 3) The fallback "speculative" indirect call
1110
1111 Direct calls and corresponding references are linked by
1112 speculative_id.
1113
1114 speculative_call_info returns triple
1115 (direct_call, indirect call, IPA_REF_ADDR reference)
1116 when called on one edge participating in the speculative call:
1117
1118 1) If called on direct call, its corresponding IPA_REF_ADDR and related
1119 indirect call are returned.
1120
1121 2) If called on indirect call, it will return one of direct edges and its
1122 matching IPA_REF_ADDR.
1123 */
1124
1125 void
1126 cgraph_edge::speculative_call_info (cgraph_edge *&direct,
1127 cgraph_edge *&indirect,
1128 ipa_ref *&reference)
1129 {
1130 ipa_ref *ref;
1131 int i;
1132 cgraph_edge *e2;
1133 cgraph_edge *e = this;
1134
1135 if (!e->indirect_unknown_callee)
1136 for (e2 = e->caller->indirect_calls;
1137 e2->call_stmt != e->call_stmt || e2->lto_stmt_uid != e->lto_stmt_uid;
1138 e2 = e2->next_callee)
1139 ;
1140 else
1141 {
1142 e2 = e;
1143 /* We can take advantage of the call stmt hash. */
1144 if (e2->call_stmt)
1145 {
1146 e = e->caller->get_edge (e2->call_stmt);
1147 gcc_assert (e->speculative && !e->indirect_unknown_callee);
1148 }
1149 else
1150 for (e = e->caller->callees;
1151 e2->call_stmt != e->call_stmt
1152 || e2->lto_stmt_uid != e->lto_stmt_uid;
1153 e = e->next_callee)
1154 ;
1155 }
1156 gcc_assert (e->speculative && e2->speculative);
1157 direct = e;
1158 indirect = e2;
1159
1160 reference = NULL;
1161 for (i = 0; e->caller->iterate_reference (i, ref); i++)
1162 if (ref->speculative && ref->speculative_id == e->speculative_id
1163 && ((ref->stmt && ref->stmt == e->call_stmt)
1164 || (!ref->stmt && ref->lto_stmt_uid == e->lto_stmt_uid)))
1165 {
1166 reference = ref;
1167 break;
1168 }
1169
1170 /* A speculative edge always consists of all three components - the direct
1171 edge, the indirect edge and the reference. */
1172
1173 gcc_assert (e && e2 && ref);
1174 }
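
/* A hedged example of consuming the triple above (illustrative only):

     cgraph_edge *direct, *indirect;
     ipa_ref *ref;
     e->speculative_call_info (direct, indirect, ref);

   regardless of whether E itself is the direct or the indirect component,
   DIRECT ends up pointing at a speculative direct edge, INDIRECT at the
   fallback indirect edge, and REF at the IPA_REF_ADDR reference that keeps
   the speculated target's address live.  */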
1175
1176 /* Speculative call EDGE turned out to be a direct call to CALLEE_DECL.
1177 Remove the speculative call sequence; the original EDGE can be removed and
1178 deallocated. Return the edge that now represents the call.
1179
1180 For a "speculative" indirect call that contains multiple "speculative"
1181 targets (i.e. edge->indirect_info->num_speculative_call_targets > 1),
1182 decrease the count and only remove the current direct edge.
1183
1184 If no speculative direct call is left for the speculative indirect call,
1185 clear the speculative flag of both the indirect call and the corresponding
1186 direct edge.
1187
1188 It is up to the caller to iteratively resolve each "speculative" direct
1189 call and redirect the call as appropriate. */
1190
1191 cgraph_edge *
1192 cgraph_edge::resolve_speculation (cgraph_edge *edge, tree callee_decl)
1193 {
1194 cgraph_edge *e2;
1195 ipa_ref *ref;
1196
1197 gcc_assert (edge->speculative);
1198 edge->speculative_call_info (e2, edge, ref);
1199 if (!callee_decl
1200 || !ref->referred->semantically_equivalent_p
1201 (symtab_node::get (callee_decl)))
1202 {
1203 if (dump_file)
1204 {
1205 if (callee_decl)
1206 {
1207 fprintf (dump_file, "Speculative indirect call %s => %s has "
1208 "turned out to have contradicting known target ",
1209 edge->caller->dump_name (),
1210 e2->callee->dump_name ());
1211 print_generic_expr (dump_file, callee_decl);
1212 fprintf (dump_file, "\n");
1213 }
1214 else
1215 {
1216 fprintf (dump_file, "Removing speculative call %s => %s\n",
1217 edge->caller->dump_name (),
1218 e2->callee->dump_name ());
1219 }
1220 }
1221 }
1222 else
1223 {
1224 cgraph_edge *tmp = edge;
1225 if (dump_file)
1226 fprintf (dump_file, "Speculative call turned into direct call.\n");
1227 edge = e2;
1228 e2 = tmp;
1229 /* FIXME: If EDGE is inlined, we should scale up the frequencies and counts
1230 in the functions inlined through it. */
1231 }
1232 edge->count += e2->count;
1233 if (edge->num_speculative_call_targets_p ())
1234 {
1235 /* The indirect edge has multiple speculative targets, don't remove
1236 speculative until all related direct edges are resolved. */
1237 edge->indirect_info->num_speculative_call_targets--;
1238 if (!edge->indirect_info->num_speculative_call_targets)
1239 edge->speculative = false;
1240 }
1241 else
1242 edge->speculative = false;
1243 e2->speculative = false;
1244 ref->remove_reference ();
1245 if (e2->indirect_unknown_callee || e2->inline_failed)
1246 remove (e2);
1247 else
1248 e2->callee->remove_symbol_and_inline_clones ();
1249 if (edge->caller->call_site_hash)
1250 cgraph_update_edge_in_call_site_hash (edge);
1251 return edge;
1252 }
1253
1254 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1255 CALLEE. Speculations can be resolved in the process and EDGE can be removed
1256 and deallocated. Return the edge that now represents the call. */
1257
1258 cgraph_edge *
1259 cgraph_edge::make_direct (cgraph_edge *edge, cgraph_node *callee)
1260 {
1261 gcc_assert (edge->indirect_unknown_callee);
1262
1263 /* If we are redirecting speculative call, make it non-speculative. */
1264 if (edge->speculative)
1265 {
1266 edge = resolve_speculation (edge, callee->decl);
1267
1268 /* On successful speculation just return the pre-existing direct edge. */
1269 if (!edge->indirect_unknown_callee)
1270 return edge;
1271 }
1272
1273 edge->indirect_unknown_callee = 0;
1274 ggc_free (edge->indirect_info);
1275 edge->indirect_info = NULL;
1276
1277 /* Get the edge out of the indirect edge list. */
1278 if (edge->prev_callee)
1279 edge->prev_callee->next_callee = edge->next_callee;
1280 if (edge->next_callee)
1281 edge->next_callee->prev_callee = edge->prev_callee;
1282 if (!edge->prev_callee)
1283 edge->caller->indirect_calls = edge->next_callee;
1284
1285 /* Put it into the normal callee list. */
1286 edge->prev_callee = NULL;
1287 edge->next_callee = edge->caller->callees;
1288 if (edge->caller->callees)
1289 edge->caller->callees->prev_callee = edge;
1290 edge->caller->callees = edge;
1291
1292 /* Insert to callers list of the new callee. */
1293 edge->set_callee (callee);
1294
1295 /* We need to re-determine the inlining status of the edge. */
1296 initialize_inline_failed (edge);
1297 return edge;
1298 }
1299
1300 /* If necessary, change the function declaration in the call statement
1301 associated with E so that it corresponds to the edge callee. Speculations
1302 can be resolved in the process and EDGE can be removed and deallocated.
1303
1304 The edge could be one of the speculative direct calls generated from a
1305 speculative indirect call. In this circumstance, decrease the speculative
1306 targets count (i.e. num_speculative_call_targets) and redirect the call
1307 stmt to the corresponding i-th target. If no speculative direct call is
1308 left for the speculative indirect call, clear "speculative" of the indirect
1309 call and also redirect the stmt to its final direct target.
1310
1311 It is up to caller to iteratively transform each "speculative"
1312 direct call as appropriate. */
1313
1314 gimple *
1315 cgraph_edge::redirect_call_stmt_to_callee (cgraph_edge *e)
1316 {
1317 tree decl = gimple_call_fndecl (e->call_stmt);
1318 gcall *new_stmt;
1319 gimple_stmt_iterator gsi;
1320
1321 if (e->speculative)
1322 {
1323 cgraph_edge *e2;
1324 gcall *new_stmt;
1325 ipa_ref *ref;
1326
1327 e->speculative_call_info (e, e2, ref);
1328 /* If there already is a direct call (i.e. as a result of the inliner's
1329 substitution), forget about speculating. */
1330 if (decl)
1331 e = resolve_speculation (e, decl);
1332 else
1333 {
1334 /* Expand speculation into GIMPLE code. */
1335 if (dump_file)
1336 {
1337 fprintf (dump_file,
1338 "Expanding speculative call of %s -> %s count: ",
1339 e->caller->dump_name (),
1340 e->callee->dump_name ());
1341 e->count.dump (dump_file);
1342 fprintf (dump_file, "\n");
1343 }
1344 gcc_assert (e2->speculative);
1345 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
1346
1347 profile_probability prob = e->count.probability_in (e->count
1348 + e2->count);
1349 if (!prob.initialized_p ())
1350 prob = profile_probability::even ();
1351 new_stmt = gimple_ic (e->call_stmt,
1352 dyn_cast<cgraph_node *> (ref->referred),
1353 prob);
1354 e->speculative = false;
1355 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt,
1356 false);
1357 e->count = gimple_bb (e->call_stmt)->count;
1358 if (e2->num_speculative_call_targets_p ())
1359 {
1360 /* The indirect edge has multiple speculative targets, don't
1361 remove speculative until all related direct edges are
1362 redirected. */
1363 e2->indirect_info->num_speculative_call_targets--;
1364 if (!e2->indirect_info->num_speculative_call_targets)
1365 e2->speculative = false;
1366 }
1367 else
1368 e2->speculative = false;
1369 e2->count = gimple_bb (e2->call_stmt)->count;
1370 ref->speculative = false;
1371 ref->stmt = NULL;
1372 /* The direct and indirect speculative edges are not both kept in the call
1373 site hash; get it updated to the remaining indirect edge. */
1374 if (e->caller->call_site_hash)
1375 cgraph_update_edge_in_call_site_hash (e2);
1376 pop_cfun ();
1377 /* Continue redirecting E to proper target. */
1378 }
1379 }
1380
1381
1382 if (e->indirect_unknown_callee
1383 || decl == e->callee->decl)
1384 return e->call_stmt;
1385
1386 if (flag_checking && decl)
1387 {
1388 cgraph_node *node = cgraph_node::get (decl);
1389 gcc_assert (!node || !node->clone.param_adjustments);
1390 }
1391
1392 if (symtab->dump_file)
1393 {
1394 fprintf (symtab->dump_file, "updating call of %s -> %s: ",
1395 e->caller->dump_name (), e->callee->dump_name ());
1396 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1397 if (e->callee->clone.param_adjustments)
1398 e->callee->clone.param_adjustments->dump (symtab->dump_file);
1399 unsigned performed_len
1400 = vec_safe_length (e->caller->clone.performed_splits);
1401 if (performed_len > 0)
1402 fprintf (symtab->dump_file, "Performed splits records:\n");
1403 for (unsigned i = 0; i < performed_len; i++)
1404 {
1405 ipa_param_performed_split *sm
1406 = &(*e->caller->clone.performed_splits)[i];
1407 print_node_brief (symtab->dump_file, " dummy_decl: ", sm->dummy_decl,
1408 TDF_UID);
1409 fprintf (symtab->dump_file, ", unit_offset: %u\n", sm->unit_offset);
1410 }
1411 }
1412
1413 if (ipa_param_adjustments *padjs = e->callee->clone.param_adjustments)
1414 {
1415 /* We need to defer cleaning EH info on the new statement to
1416 fixup-cfg. We may not have dominator information at this point
1417 and thus would end up with unreachable blocks and have no way
1418 to communicate that we need to run CFG cleanup then. */
1419 int lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1420 if (lp_nr != 0)
1421 remove_stmt_from_eh_lp (e->call_stmt);
1422
1423 tree old_fntype = gimple_call_fntype (e->call_stmt);
1424 new_stmt = padjs->modify_call (e->call_stmt,
1425 e->caller->clone.performed_splits,
1426 e->callee->decl, false);
1427 cgraph_node *origin = e->callee;
1428 while (origin->clone_of)
1429 origin = origin->clone_of;
1430
1431 if ((origin->former_clone_of
1432 && old_fntype == TREE_TYPE (origin->former_clone_of))
1433 || old_fntype == TREE_TYPE (origin->decl))
1434 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1435 else
1436 {
1437 tree new_fntype = padjs->build_new_function_type (old_fntype, true);
1438 gimple_call_set_fntype (new_stmt, new_fntype);
1439 }
1440
1441 if (lp_nr != 0)
1442 add_stmt_to_eh_lp (new_stmt, lp_nr);
1443 }
1444 else
1445 {
1446 new_stmt = e->call_stmt;
1447 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1448 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1449 }
1450
1451 /* If changing the call to __cxa_pure_virtual or similar noreturn function,
1452 adjust gimple_call_fntype too. */
1453 if (gimple_call_noreturn_p (new_stmt)
1454 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (e->callee->decl)))
1455 && TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl))
1456 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl)))
1457 == void_type_node))
1458 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1459
1460 /* If the call becomes noreturn, remove the LHS if possible. */
1461 tree lhs = gimple_call_lhs (new_stmt);
1462 if (lhs
1463 && gimple_call_noreturn_p (new_stmt)
1464 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (new_stmt)))
1465 || should_remove_lhs_p (lhs)))
1466 {
1467 if (TREE_CODE (lhs) == SSA_NAME)
1468 {
1469 tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl),
1470 TREE_TYPE (lhs), NULL);
1471 var = get_or_create_ssa_default_def
1472 (DECL_STRUCT_FUNCTION (e->caller->decl), var);
1473 gimple *set_stmt = gimple_build_assign (lhs, var);
1474 gsi = gsi_for_stmt (new_stmt);
1475 gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
1476 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
1477 }
1478 gimple_call_set_lhs (new_stmt, NULL_TREE);
1479 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1480 }
1481
1482 /* If new callee has no static chain, remove it. */
1483 if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl))
1484 {
1485 gimple_call_set_chain (new_stmt, NULL);
1486 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1487 }
1488
1489 maybe_remove_unused_call_args (DECL_STRUCT_FUNCTION (e->caller->decl),
1490 new_stmt);
1491
1492 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt, false);
1493
1494 if (symtab->dump_file)
1495 {
1496 fprintf (symtab->dump_file, " updated to:");
1497 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1498 }
1499 return new_stmt;
1500 }
1501
1502 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1503 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1504 of OLD_STMT if it was previously a call statement.
1505 If NEW_STMT is NULL, the call has been dropped without any
1506 replacement. */
1507
1508 static void
1509 cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
1510 gimple *old_stmt, tree old_call,
1511 gimple *new_stmt)
1512 {
1513 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1514 ? gimple_call_fndecl (new_stmt) : 0;
1515
1516 /* If we are only seeing indirect calls, then there is nothing to update. */
1517 if (!new_call && !old_call)
1518 return;
1519 /* See if we turned an indirect call into a direct call or folded a call to
1520 one builtin into a different builtin. */
1521 if (old_call != new_call)
1522 {
1523 cgraph_edge *e = node->get_edge (old_stmt);
1524 cgraph_edge *ne = NULL;
1525 profile_count count;
1526
1527 if (e)
1528 {
1529 /* Keep calls marked as dead dead. */
1530 if (new_stmt && is_gimple_call (new_stmt) && e->callee
1531 && fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
1532 {
1533 cgraph_edge::set_call_stmt (node->get_edge (old_stmt),
1534 as_a <gcall *> (new_stmt));
1535 return;
1536 }
1537 /* See if the edge is already there and has the correct callee. It
1538 might be so because indirect inlining has already updated
1539 it. We also might have cloned and redirected the edge. */
1540 if (new_call && e->callee)
1541 {
1542 cgraph_node *callee = e->callee;
1543 while (callee)
1544 {
1545 if (callee->decl == new_call
1546 || callee->former_clone_of == new_call)
1547 {
1548 cgraph_edge::set_call_stmt (e, as_a <gcall *> (new_stmt));
1549 return;
1550 }
1551 callee = callee->clone_of;
1552 }
1553 }
1554
1555 /* Otherwise remove edge and create new one; we can't simply redirect
1556 since function has changed, so inline plan and other information
1557 attached to edge is invalid. */
1558 count = e->count;
1559 if (e->indirect_unknown_callee || e->inline_failed)
1560 cgraph_edge::remove (e);
1561 else
1562 e->callee->remove_symbol_and_inline_clones ();
1563 }
1564 else if (new_call)
1565 {
1566 /* We are seeing new direct call; compute profile info based on BB. */
1567 basic_block bb = gimple_bb (new_stmt);
1568 count = bb->count;
1569 }
1570
1571 if (new_call)
1572 {
1573 ne = node->create_edge (cgraph_node::get_create (new_call),
1574 as_a <gcall *> (new_stmt), count);
1575 gcc_assert (ne->inline_failed);
1576 }
1577 }
1578 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1579 else if (old_stmt != new_stmt)
1580 cgraph_edge::set_call_stmt (node->get_edge (old_stmt),
1581 as_a <gcall *> (new_stmt));
1582 }
1583
1584 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1585 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1586 of OLD_STMT before it was updated (updating can happen in place). */
1587
1588 void
1589 cgraph_update_edges_for_call_stmt (gimple *old_stmt, tree old_decl,
1590 gimple *new_stmt)
1591 {
1592 cgraph_node *orig = cgraph_node::get (cfun->decl);
1593 cgraph_node *node;
1594
1595 gcc_checking_assert (orig);
1596 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1597 if (orig->clones)
1598 for (node = orig->clones; node != orig;)
1599 {
1600 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1601 if (node->clones)
1602 node = node->clones;
1603 else if (node->next_sibling_clone)
1604 node = node->next_sibling_clone;
1605 else
1606 {
1607 while (node != orig && !node->next_sibling_clone)
1608 node = node->clone_of;
1609 if (node != orig)
1610 node = node->next_sibling_clone;
1611 }
1612 }
1613 }
1614
1615
1616 /* Remove all callees from the node. */
1617
1618 void
1619 cgraph_node::remove_callees (void)
1620 {
1621 cgraph_edge *e, *f;
1622
1623 /* It is sufficient to remove the edges from the lists of callers of
1624 the callees. The callee list of the node can be zapped with one
1625 assignment. */
1626 for (e = callees; e; e = f)
1627 {
1628 f = e->next_callee;
1629 symtab->call_edge_removal_hooks (e);
1630 if (!e->indirect_unknown_callee)
1631 e->remove_callee ();
1632 symtab->free_edge (e);
1633 }
1634 for (e = indirect_calls; e; e = f)
1635 {
1636 f = e->next_callee;
1637 symtab->call_edge_removal_hooks (e);
1638 if (!e->indirect_unknown_callee)
1639 e->remove_callee ();
1640 symtab->free_edge (e);
1641 }
1642 indirect_calls = NULL;
1643 callees = NULL;
1644 if (call_site_hash)
1645 {
1646 call_site_hash->empty ();
1647 call_site_hash = NULL;
1648 }
1649 }
1650
1651 /* Remove all callers from the node. */
1652
1653 void
1654 cgraph_node::remove_callers (void)
1655 {
1656 cgraph_edge *e, *f;
1657
1658 /* It is sufficient to remove the edges from the lists of callees of
1659 the callers. The caller list of the node can be zapped with one
1660 assignment. */
1661 for (e = callers; e; e = f)
1662 {
1663 f = e->next_caller;
1664 symtab->call_edge_removal_hooks (e);
1665 e->remove_caller ();
1666 symtab->free_edge (e);
1667 }
1668 callers = NULL;
1669 }
1670
1671 /* Helper function for cgraph_release_function_body and free_lang_data.
1672 It releases body from function DECL without having to inspect its
1673 possibly non-existent symtab node. */
1674
1675 void
1676 release_function_body (tree decl)
1677 {
1678 function *fn = DECL_STRUCT_FUNCTION (decl);
1679 if (fn)
1680 {
1681 if (fn->cfg
1682 && loops_for_fn (fn))
1683 {
1684 fn->curr_properties &= ~PROP_loops;
1685 loop_optimizer_finalize (fn);
1686 }
1687 if (fn->gimple_df)
1688 {
1689 delete_tree_ssa (fn);
1690 fn->eh = NULL;
1691 }
1692 if (fn->cfg)
1693 {
1694 gcc_assert (!dom_info_available_p (fn, CDI_DOMINATORS));
1695 gcc_assert (!dom_info_available_p (fn, CDI_POST_DOMINATORS));
1696 delete_tree_cfg_annotations (fn);
1697 clear_edges (fn);
1698 fn->cfg = NULL;
1699 }
1700 if (fn->value_histograms)
1701 free_histograms (fn);
1702 gimple_set_body (decl, NULL);
1703 /* The struct function hangs onto a lot of data that would leak if we
1704 didn't remove all pointers to it. */
1705 ggc_free (fn);
1706 DECL_STRUCT_FUNCTION (decl) = NULL;
1707 }
1708 DECL_SAVED_TREE (decl) = NULL;
1709 }
1710
1711 /* Release memory used to represent body of function.
1712 Use this only for functions that are released before being translated to
1713 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1714 are free'd in final.c via free_after_compilation().
1715 KEEP_ARGUMENTS is useful only if you want to rebuild the body as a thunk. */
1716
1717 void
1718 cgraph_node::release_body (bool keep_arguments)
1719 {
1720 ipa_transforms_to_apply.release ();
1721 if (!used_as_abstract_origin && symtab->state != PARSING)
1722 {
1723 DECL_RESULT (decl) = NULL;
1724
1725 if (!keep_arguments)
1726 DECL_ARGUMENTS (decl) = NULL;
1727 }
1728 /* If the node is abstract and needed, then do not clear
1729 DECL_INITIAL of its associated function declaration because it's
1730 needed to emit debug info later. */
1731 if (!used_as_abstract_origin && DECL_INITIAL (decl))
1732 DECL_INITIAL (decl) = error_mark_node;
1733 release_function_body (decl);
1734 if (lto_file_data)
1735 {
1736 lto_free_function_in_decl_state_for_node (this);
1737 lto_file_data = NULL;
1738 }
1739 }
1740
1741 /* Remove function from symbol table. */
1742
1743 void
1744 cgraph_node::remove (void)
1745 {
1746 if (symtab->ipa_clones_dump_file && symtab->cloned_nodes.contains (this))
1747 fprintf (symtab->ipa_clones_dump_file,
1748 "Callgraph removal;%s;%d;%s;%d;%d\n", asm_name (), order,
1749 DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl),
1750 DECL_SOURCE_COLUMN (decl));
1751
1752 symtab->call_cgraph_removal_hooks (this);
1753 remove_callers ();
1754 remove_callees ();
1755 ipa_transforms_to_apply.release ();
1756 delete_function_version (function_version ());
1757
1758 /* Incremental inlining accesses removed nodes stored in the postorder
1759 list. */
1760 force_output = false;
1761 forced_by_abi = false;
1762 cgraph_node *next;
1763 for (cgraph_node *n = nested; n; n = next)
1764 {
1765 next = n->next_nested;
1766 n->origin = NULL;
1767 n->next_nested = NULL;
1768 }
1769 nested = NULL;
1770 if (origin)
1771 {
1772 cgraph_node **node2 = &origin->nested;
1773
1774 while (*node2 != this)
1775 node2 = &(*node2)->next_nested;
1776 *node2 = next_nested;
1777 }
1778 unregister ();
1779 if (prev_sibling_clone)
1780 prev_sibling_clone->next_sibling_clone = next_sibling_clone;
1781 else if (clone_of)
1782 clone_of->clones = next_sibling_clone;
1783 if (next_sibling_clone)
1784 next_sibling_clone->prev_sibling_clone = prev_sibling_clone;
1785 if (clones)
1786 {
1787 cgraph_node *n, *next;
1788
1789 if (clone_of)
1790 {
1791 for (n = clones; n->next_sibling_clone; n = n->next_sibling_clone)
1792 n->clone_of = clone_of;
1793 n->clone_of = clone_of;
1794 n->next_sibling_clone = clone_of->clones;
1795 if (clone_of->clones)
1796 clone_of->clones->prev_sibling_clone = n;
1797 clone_of->clones = clones;
1798 }
1799 else
1800 {
1801 /* We are removing node with clones. This makes clones inconsistent,
1802 but assume they will be removed subsequently and just keep clone
1803 tree intact. This can happen in unreachable function removal since
1804 we remove unreachable functions in random order, not by bottom-up
1805 walk of clone trees. */
1806 for (n = clones; n; n = next)
1807 {
1808 next = n->next_sibling_clone;
1809 n->next_sibling_clone = NULL;
1810 n->prev_sibling_clone = NULL;
1811 n->clone_of = NULL;
1812 }
1813 }
1814 }
1815
1816 /* While all the clones are removed after being processed, the function
1817 itself is kept in the cgraph even after it is compiled. Check whether
1818 we are done with this body and reclaim it proactively if this is the case.
1819 */
1820 if (symtab->state != LTO_STREAMING)
1821 {
1822 cgraph_node *n = cgraph_node::get (decl);
1823 if (!n
1824 || (!n->clones && !n->clone_of && !n->inlined_to
1825 && ((symtab->global_info_ready || in_lto_p)
1826 && (TREE_ASM_WRITTEN (n->decl)
1827 || DECL_EXTERNAL (n->decl)
1828 || !n->analyzed
1829 || (!flag_wpa && n->in_other_partition)))))
1830 release_body ();
1831 }
1832 else
1833 {
1834 lto_free_function_in_decl_state_for_node (this);
1835 lto_file_data = NULL;
1836 }
1837
1838 decl = NULL;
1839 if (call_site_hash)
1840 {
1841 call_site_hash->empty ();
1842 call_site_hash = NULL;
1843 }
1844
1845 symtab->release_symbol (this);
1846 }
1847
1848 /* Indicate that the node has had its address taken. */
1849
1850 void
1851 cgraph_node::mark_address_taken (void)
1852 {
1853 /* Indirect inlining can figure out that all uses of the address are
1854 inlined. */
1855 if (inlined_to)
1856 {
1857 gcc_assert (cfun->after_inlining);
1858 gcc_assert (callers->indirect_inlining_edge);
1859 return;
1860 }
1861 /* FIXME: the address_taken flag is used both as a shortcut for testing whether
1862 an IPA_REF_ADDR reference exists (and thus it should be set on the node
1863 representing the alias we take the address of) and as a test whether the
1864 address of the object was taken (and thus it should be set on the node the
1865 alias is referring to). We should remove the first use and then remove the
1866 following set. */
1867 address_taken = 1;
1868 cgraph_node *node = ultimate_alias_target ();
1869 node->address_taken = 1;
1870 }
1871
1872 /* Return local info node for the compiled function. */
1873
1874 cgraph_node *
1875 cgraph_node::local_info_node (tree decl)
1876 {
1877 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1878 cgraph_node *node = get (decl);
1879 if (!node)
1880 return NULL;
1881 return node->ultimate_alias_target ();
1882 }
1883
1884 /* Return RTL info for the compiled function. */
1885
1886 cgraph_rtl_info *
1887 cgraph_node::rtl_info (const_tree decl)
1888 {
1889 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1890 cgraph_node *node = get (decl);
1891 if (!node)
1892 return NULL;
1893 enum availability avail;
1894 node = node->ultimate_alias_target (&avail);
1895 if (decl != current_function_decl
1896 && (avail < AVAIL_AVAILABLE
1897 || (node->decl != current_function_decl
1898 && !TREE_ASM_WRITTEN (node->decl))))
1899 return NULL;
1900 /* Allocate if it doesn't exist. */
1901 if (node->rtl == NULL)
1902 {
1903 node->rtl = ggc_cleared_alloc<cgraph_rtl_info> ();
1904 SET_HARD_REG_SET (node->rtl->function_used_regs);
1905 }
1906 return node->rtl;
1907 }
1908
1909 /* Return a string describing the failure REASON. */
1910
1911 const char*
1912 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1913 {
1914 #undef DEFCIFCODE
1915 #define DEFCIFCODE(code, type, string) string,
1916
1917 static const char *cif_string_table[CIF_N_REASONS] = {
1918 #include "cif-code.def"
1919 };
1920
1921 /* Signedness of an enum type is implementation defined, so cast it
1922 to unsigned before testing. */
1923 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1924 return cif_string_table[reason];
1925 }
1926
1927 /* Return a type describing the failure REASON. */
1928
1929 cgraph_inline_failed_type_t
1930 cgraph_inline_failed_type (cgraph_inline_failed_t reason)
1931 {
1932 #undef DEFCIFCODE
1933 #define DEFCIFCODE(code, type, string) type,
1934
1935 static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
1936 #include "cif-code.def"
1937 };
1938
1939 /* Signedness of an enum type is implementation defined, so cast it
1940 to unsigned before testing. */
1941 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1942 return cif_type_table[reason];
1943 }
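/* The two functions above rely on the ".def file" (X-macro) idiom:
   cif-code.def lists every DEFCIFCODE (code, type, string) entry exactly
   once, and each consumer redefines DEFCIFCODE to extract the column it
   needs before including the file, so the enum and the parallel tables can
   never get out of sync. A minimal standalone sketch of the same idiom,
   using hypothetical names (colors.def, DEFCOLOR) rather than anything
   present in GCC:

   // colors.def
   DEFCOLOR (RED,   "red")
   DEFCOLOR (GREEN, "green")

   // consumer
   enum color {
   #define DEFCOLOR(code, name) code,
   #include "colors.def"
   #undef DEFCOLOR
     N_COLORS
   };

   static const char *color_names[N_COLORS] = {
   #define DEFCOLOR(code, name) name,
   #include "colors.def"
   #undef DEFCOLOR
   };  */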
1944
1945 /* Names used to print out the availability enum. */
1946 const char * const cgraph_availability_names[] =
1947 {"unset", "not_available", "overwritable", "available", "local"};
1948
1949 /* Output flags of edge to a file F. */
1950
1951 void
1952 cgraph_edge::dump_edge_flags (FILE *f)
1953 {
1954 if (speculative)
1955 fprintf (f, "(speculative) ");
1956 if (!inline_failed)
1957 fprintf (f, "(inlined) ");
1958 if (call_stmt_cannot_inline_p)
1959 fprintf (f, "(call_stmt_cannot_inline_p) ");
1960 if (indirect_inlining_edge)
1961 fprintf (f, "(indirect_inlining) ");
1962 if (count.initialized_p ())
1963 {
1964 fprintf (f, "(");
1965 count.dump (f);
1966 fprintf (f, ",");
1967 fprintf (f, "%.2f per call) ", sreal_frequency ().to_double ());
1968 }
1969 if (can_throw_external)
1970 fprintf (f, "(can throw external) ");
1971 }
1972
1973 /* Dump call graph node to file F. */
1974
1975 void
1976 cgraph_node::dump (FILE *f)
1977 {
1978 cgraph_edge *edge;
1979
1980 dump_base (f);
1981
1982 if (inlined_to)
1983 fprintf (f, " Function %s is inline copy in %s\n",
1984 dump_name (),
1985 inlined_to->dump_name ());
1986 if (clone_of)
1987 fprintf (f, " Clone of %s\n", clone_of->dump_asm_name ());
1988 if (symtab->function_flags_ready)
1989 fprintf (f, " Availability: %s\n",
1990 cgraph_availability_names [get_availability ()]);
1991
1992 if (profile_id)
1993 fprintf (f, " Profile id: %i\n",
1994 profile_id);
1995 if (unit_id)
1996 fprintf (f, " Unit id: %i\n",
1997 unit_id);
1998 cgraph_function_version_info *vi = function_version ();
1999 if (vi != NULL)
2000 {
2001 fprintf (f, " Version info: ");
2002 if (vi->prev != NULL)
2003 {
2004 fprintf (f, "prev: ");
2005 fprintf (f, "%s ", vi->prev->this_node->dump_asm_name ());
2006 }
2007 if (vi->next != NULL)
2008 {
2009 fprintf (f, "next: ");
2010 fprintf (f, "%s ", vi->next->this_node->dump_asm_name ());
2011 }
2012 if (vi->dispatcher_resolver != NULL_TREE)
2013 fprintf (f, "dispatcher: %s",
2014 lang_hooks.decl_printable_name (vi->dispatcher_resolver, 2));
2015
2016 fprintf (f, "\n");
2017 }
2018 fprintf (f, " Function flags:");
2019 if (count.initialized_p ())
2020 {
2021 fprintf (f, " count:");
2022 count.dump (f);
2023 }
2024 if (tp_first_run > 0)
2025 fprintf (f, " first_run:%" PRId64, (int64_t) tp_first_run);
2026 if (origin)
2027 fprintf (f, " nested in:%s", origin->dump_asm_name ());
2028 if (gimple_has_body_p (decl))
2029 fprintf (f, " body");
2030 if (process)
2031 fprintf (f, " process");
2032 if (local)
2033 fprintf (f, " local");
2034 if (redefined_extern_inline)
2035 fprintf (f, " redefined_extern_inline");
2036 if (only_called_at_startup)
2037 fprintf (f, " only_called_at_startup");
2038 if (only_called_at_exit)
2039 fprintf (f, " only_called_at_exit");
2040 if (tm_clone)
2041 fprintf (f, " tm_clone");
2042 if (calls_comdat_local)
2043 fprintf (f, " calls_comdat_local");
2044 if (icf_merged)
2045 fprintf (f, " icf_merged");
2046 if (merged_comdat)
2047 fprintf (f, " merged_comdat");
2048 if (merged_extern_inline)
2049 fprintf (f, " merged_extern_inline");
2050 if (split_part)
2051 fprintf (f, " split_part");
2052 if (indirect_call_target)
2053 fprintf (f, " indirect_call_target");
2054 if (nonfreeing_fn)
2055 fprintf (f, " nonfreeing_fn");
2056 if (DECL_STATIC_CONSTRUCTOR (decl))
2057 fprintf (f," static_constructor (priority:%i)", get_init_priority ());
2058 if (DECL_STATIC_DESTRUCTOR (decl))
2059 fprintf (f," static_destructor (priority:%i)", get_fini_priority ());
2060 if (frequency == NODE_FREQUENCY_HOT)
2061 fprintf (f, " hot");
2062 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2063 fprintf (f, " unlikely_executed");
2064 if (frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2065 fprintf (f, " executed_once");
2066 if (opt_for_fn (decl, optimize_size))
2067 fprintf (f, " optimize_size");
2068 if (parallelized_function)
2069 fprintf (f, " parallelized_function");
2070 if (DECL_IS_OPERATOR_NEW_P (decl))
2071 fprintf (f, " operator_new");
2072 if (DECL_IS_OPERATOR_DELETE_P (decl))
2073 fprintf (f, " operator_delete");
2074
2075
2076 fprintf (f, "\n");
2077
2078 if (thunk.thunk_p)
2079 {
2080 fprintf (f, " Thunk");
2081 if (thunk.alias)
2082 fprintf (f, " of %s (asm:%s)",
2083 lang_hooks.decl_printable_name (thunk.alias, 2),
2084 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2085 fprintf (f, " fixed offset %i virtual value %i indirect_offset %i "
2086 "has virtual offset %i\n",
2087 (int)thunk.fixed_offset,
2088 (int)thunk.virtual_value,
2089 (int)thunk.indirect_offset,
2090 (int)thunk.virtual_offset_p);
2091 }
2092 else if (former_thunk_p ())
2093 fprintf (f, " Former thunk fixed offset %i virtual value %i "
2094 "indirect_offset %i has virtual offset %i\n",
2095 (int)thunk.fixed_offset,
2096 (int)thunk.virtual_value,
2097 (int)thunk.indirect_offset,
2098 (int)thunk.virtual_offset_p);
2099 if (alias && thunk.alias
2100 && DECL_P (thunk.alias))
2101 {
2102 fprintf (f, " Alias of %s",
2103 lang_hooks.decl_printable_name (thunk.alias, 2));
2104 if (DECL_ASSEMBLER_NAME_SET_P (thunk.alias))
2105 fprintf (f, " (asm:%s)",
2106 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2107 fprintf (f, "\n");
2108 }
2109
2110 fprintf (f, " Called by: ");
2111
2112 profile_count sum = profile_count::zero ();
2113 for (edge = callers; edge; edge = edge->next_caller)
2114 {
2115 fprintf (f, "%s ", edge->caller->dump_asm_name ());
2116 edge->dump_edge_flags (f);
2117 if (edge->count.initialized_p ())
2118 sum += edge->count.ipa ();
2119 }
2120
2121 fprintf (f, "\n Calls: ");
2122 for (edge = callees; edge; edge = edge->next_callee)
2123 {
2124 fprintf (f, "%s ", edge->callee->dump_asm_name ());
2125 edge->dump_edge_flags (f);
2126 }
2127 fprintf (f, "\n");
2128
2129 if (count.ipa ().initialized_p ())
2130 {
2131 bool ok = true;
2132 bool min = false;
2133 ipa_ref *ref;
2134
2135 FOR_EACH_ALIAS (this, ref)
2136 if (dyn_cast <cgraph_node *> (ref->referring)->count.initialized_p ())
2137 sum += dyn_cast <cgraph_node *> (ref->referring)->count.ipa ();
2138
2139 if (inlined_to
2140 || (symtab->state < EXPANSION
2141 && ultimate_alias_target () == this && only_called_directly_p ()))
2142 ok = !count.ipa ().differs_from_p (sum);
2143 else if (count.ipa () > profile_count::from_gcov_type (100)
2144 && count.ipa () < sum.apply_scale (99, 100))
2145 ok = false, min = true;
2146 if (!ok)
2147 {
2148 fprintf (f, " Invalid sum of caller counts ");
2149 sum.dump (f);
2150 if (min)
2151 fprintf (f, ", should be at most ");
2152 else
2153 fprintf (f, ", should be ");
2154 count.ipa ().dump (f);
2155 fprintf (f, "\n");
2156 }
2157 }
2158
2159 for (edge = indirect_calls; edge; edge = edge->next_callee)
2160 {
2161 if (edge->indirect_info->polymorphic)
2162 {
2163 fprintf (f, " Polymorphic indirect call of type ");
2164 print_generic_expr (f, edge->indirect_info->otr_type, TDF_SLIM);
2165 fprintf (f, " token:%i", (int) edge->indirect_info->otr_token);
2166 }
2167 else
2168 fprintf (f, " Indirect call");
2169 edge->dump_edge_flags (f);
2170 if (edge->indirect_info->param_index != -1)
2171 {
2172 fprintf (f, " of param:%i", edge->indirect_info->param_index);
2173 if (edge->indirect_info->agg_contents)
2174 fprintf (f, " loaded from %s %s at offset %i",
2175 edge->indirect_info->member_ptr ? "member ptr" : "aggregate",
2176 edge->indirect_info->by_ref ? "passed by reference":"",
2177 (int)edge->indirect_info->offset);
2178 if (edge->indirect_info->vptr_changed)
2179 fprintf (f, " (vptr maybe changed)");
2180 }
2181 fprintf (f, " Num speculative call targets: %i",
2182 edge->indirect_info->num_speculative_call_targets);
2183 fprintf (f, "\n");
2184 if (edge->indirect_info->polymorphic)
2185 edge->indirect_info->context.dump (f);
2186 }
2187 }
2188
2189 /* Dump call graph node to file F in graphviz format. */
2190
2191 void
2192 cgraph_node::dump_graphviz (FILE *f)
2193 {
2194 cgraph_edge *edge;
2195
2196 for (edge = callees; edge; edge = edge->next_callee)
2197 {
2198 cgraph_node *callee = edge->callee;
2199
2200 fprintf (f, "\t\"%s\" -> \"%s\"\n", dump_name (), callee->dump_name ());
2201 }
2202 }
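/* An illustrative sketch (not part of GCC): dump_graphviz only emits the
   edge lines for a single node, so a caller is expected to provide the
   surrounding "digraph" block. A hypothetical whole-graph dumper built on
   top of it could look like:

   void
   dump_callgraph_graphviz (FILE *f)
   {
     cgraph_node *node;

     fprintf (f, "digraph callgraph {\n");
     FOR_EACH_FUNCTION (node)
       node->dump_graphviz (f);
     fprintf (f, "}\n");
   }

   The resulting file can then be rendered with, e.g., "dot -Tsvg".  */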
2203
2204
2205 /* Dump call graph node NODE to stderr. */
2206
2207 DEBUG_FUNCTION void
2208 cgraph_node::debug (void)
2209 {
2210 dump (stderr);
2211 }
2212
2213 /* Dump the callgraph to file F. */
2214
2215 void
2216 cgraph_node::dump_cgraph (FILE *f)
2217 {
2218 cgraph_node *node;
2219
2220 fprintf (f, "callgraph:\n\n");
2221 FOR_EACH_FUNCTION (node)
2222 node->dump (f);
2223 }
2224
2225 /* Return true when the DECL can possibly be inlined. */
2226
2227 bool
2228 cgraph_function_possibly_inlined_p (tree decl)
2229 {
2230 if (!symtab->global_info_ready)
2231 return !DECL_UNINLINABLE (decl);
2232 return DECL_POSSIBLY_INLINED (decl);
2233 }
2234
2235 /* cgraph_node is no longer a nested function; update the cgraph accordingly. */
2236 void
2237 cgraph_node::unnest (void)
2238 {
2239 gcc_assert (origin);
2240 cgraph_node **node2 = &origin->nested;
2241
2242 while (*node2 != this)
2243 node2 = &(*node2)->next_nested;
2244 *node2 = next_nested;
2245 origin = NULL;
2246 }
2247
2248 /* Return function availability. See cgraph.h for description of individual
2249 return values. */
2250 enum availability
2251 cgraph_node::get_availability (symtab_node *ref)
2252 {
2253 if (ref)
2254 {
2255 cgraph_node *cref = dyn_cast <cgraph_node *> (ref);
2256 if (cref)
2257 ref = cref->inlined_to;
2258 }
2259 enum availability avail;
2260 if (!analyzed)
2261 avail = AVAIL_NOT_AVAILABLE;
2262 else if (local)
2263 avail = AVAIL_LOCAL;
2264 else if (inlined_to)
2265 avail = AVAIL_AVAILABLE;
2266 else if (transparent_alias)
2267 ultimate_alias_target (&avail, ref);
2268 else if (ifunc_resolver
2269 || lookup_attribute ("noipa", DECL_ATTRIBUTES (decl)))
2270 avail = AVAIL_INTERPOSABLE;
2271 else if (!externally_visible)
2272 avail = AVAIL_AVAILABLE;
2273 /* If this is a reference from symbol itself and there are no aliases, we
2274 may be sure that the symbol was not interposed by something else because
2275 the symbol itself would be unreachable otherwise.
2276
2277 Also comdat groups are always resolved in groups. */
2278 else if ((this == ref && !has_aliases_p ())
2279 || (ref && get_comdat_group ()
2280 && get_comdat_group () == ref->get_comdat_group ()))
2281 avail = AVAIL_AVAILABLE;
2282 /* Inline functions are safe to be analyzed even if their symbol can
2283 be overwritten at runtime. It is not meaningful to enforce any sane
2284 behavior on replacing an inline function with a different body. */
2285 else if (DECL_DECLARED_INLINE_P (decl))
2286 avail = AVAIL_AVAILABLE;
2287
2288 /* If the function can be overwritten, return OVERWRITABLE. Take
2289 care at least of two notable extensions - the COMDAT functions
2290 used to share template instantiations in C++ (this is symmetric
2291 to code cp_cannot_inline_tree_fn and probably should be shared with
2292 it, and the inlinability hooks completely eliminated). */
2293
2294 else if (decl_replaceable_p (decl) && !DECL_EXTERNAL (decl))
2295 avail = AVAIL_INTERPOSABLE;
2296 else avail = AVAIL_AVAILABLE;
2297
2298 return avail;
2299 }
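/* An illustrative sketch (not part of GCC): a typical IPA consumer of the
   availability lattice asks for the ultimate alias target and only trusts
   the body when the result cannot be interposed; the two helpers named
   below are hypothetical.

   enum availability avail;
   cgraph_node *target = node->ultimate_alias_target (&avail);

   if (avail >= AVAIL_AVAILABLE)
     analyze_known_body (target);	  // body cannot change behind our back
   else
     use_conservative_summary (target);  // may be interposed at link/run time
*/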
2300
2301 /* Worker for cgraph_node_can_be_local_p. */
2302 static bool
2303 cgraph_node_cannot_be_local_p_1 (cgraph_node *node, void *)
2304 {
2305 return !(!node->force_output
2306 && !node->ifunc_resolver
2307 /* Limitation of gas requires us to output targets of symver aliases
2308 as global symbols. This is binutils PR 25295. */
2309 && !node->symver
2310 && ((DECL_COMDAT (node->decl)
2311 && !node->forced_by_abi
2312 && !node->used_from_object_file_p ()
2313 && !node->same_comdat_group)
2314 || !node->externally_visible));
2315 }
2316
2317 /* Return true if cgraph_node can be made local for API change.
2318 Extern inline functions and C++ COMDAT functions can be made local
2319 at the expense of possible code size growth if function is used in multiple
2320 compilation units. */
2321 bool
2322 cgraph_node::can_be_local_p (void)
2323 {
2324 return (!address_taken
2325 && !call_for_symbol_thunks_and_aliases (cgraph_node_cannot_be_local_p_1,
2326 NULL, true));
2327 }
2328
2329 /* Call callback on cgraph_node, thunks and aliases associated with cgraph_node.
2330 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
2331 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
2332 skipped. */
2333 bool
2334 cgraph_node::call_for_symbol_thunks_and_aliases (bool (*callback)
2335 (cgraph_node *, void *),
2336 void *data,
2337 bool include_overwritable,
2338 bool exclude_virtual_thunks)
2339 {
2340 cgraph_edge *e;
2341 ipa_ref *ref;
2342 enum availability avail = AVAIL_AVAILABLE;
2343
2344 if (include_overwritable
2345 || (avail = get_availability ()) > AVAIL_INTERPOSABLE)
2346 {
2347 if (callback (this, data))
2348 return true;
2349 }
2350 FOR_EACH_ALIAS (this, ref)
2351 {
2352 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2353 if (include_overwritable
2354 || alias->get_availability () > AVAIL_INTERPOSABLE)
2355 if (alias->call_for_symbol_thunks_and_aliases (callback, data,
2356 include_overwritable,
2357 exclude_virtual_thunks))
2358 return true;
2359 }
2360 if (avail <= AVAIL_INTERPOSABLE)
2361 return false;
2362 for (e = callers; e; e = e->next_caller)
2363 if (e->caller->thunk.thunk_p
2364 && (include_overwritable
2365 || e->caller->get_availability () > AVAIL_INTERPOSABLE)
2366 && !(exclude_virtual_thunks
2367 && e->caller->thunk.virtual_offset_p))
2368 if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
2369 include_overwritable,
2370 exclude_virtual_thunks))
2371 return true;
2372
2373 return false;
2374 }
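/* An illustrative sketch (not part of GCC): the walker above is normally
   used with a small worker whose "return true" stops the walk, so a
   predicate over a node together with its aliases and thunks reads as
   "does any of them ...?". Both functions below are hypothetical.

   static bool
   address_taken_p_1 (cgraph_node *n, void *)
   {
     return n->address_taken;
   }

   static bool
   symbol_thunk_or_alias_address_taken_p (cgraph_node *node)
   {
     return node->call_for_symbol_thunks_and_aliases (address_taken_p_1,
						      NULL, true);
   }
*/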
2375
2376 /* Worker to bring NODE local. */
2377
2378 bool
2379 cgraph_node::make_local (cgraph_node *node, void *)
2380 {
2381 gcc_checking_assert (node->can_be_local_p ());
2382 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2383 {
2384 node->make_decl_local ();
2385 node->set_section (NULL);
2386 node->set_comdat_group (NULL);
2387 node->externally_visible = false;
2388 node->forced_by_abi = false;
2389 node->local = true;
2390 node->set_section (NULL);
2391 node->unique_name = ((node->resolution == LDPR_PREVAILING_DEF_IRONLY
2392 || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
2393 && !flag_incremental_link);
2394 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2395 gcc_assert (node->get_availability () == AVAIL_LOCAL);
2396 }
2397 return false;
2398 }
2399
2400 /* Bring cgraph node local. */
2401
2402 void
2403 cgraph_node::make_local (void)
2404 {
2405 call_for_symbol_thunks_and_aliases (cgraph_node::make_local, NULL, true);
2406 }
2407
2408 /* Worker to set nothrow flag. */
2409
2410 static void
2411 set_nothrow_flag_1 (cgraph_node *node, bool nothrow, bool non_call,
2412 bool *changed)
2413 {
2414 cgraph_edge *e;
2415
2416 if (nothrow && !TREE_NOTHROW (node->decl))
2417 {
2418 /* With non-call exceptions we can't say for sure if other function body
2419 was not possibly optimized to still throw. */
2420 if (!non_call || node->binds_to_current_def_p ())
2421 {
2422 TREE_NOTHROW (node->decl) = true;
2423 *changed = true;
2424 for (e = node->callers; e; e = e->next_caller)
2425 e->can_throw_external = false;
2426 }
2427 }
2428 else if (!nothrow && TREE_NOTHROW (node->decl))
2429 {
2430 TREE_NOTHROW (node->decl) = false;
2431 *changed = true;
2432 }
2433 ipa_ref *ref;
2434 FOR_EACH_ALIAS (node, ref)
2435 {
2436 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2437 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2438 set_nothrow_flag_1 (alias, nothrow, non_call, changed);
2439 }
2440 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2441 if (e->caller->thunk.thunk_p
2442 && (!nothrow || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2443 set_nothrow_flag_1 (e->caller, nothrow, non_call, changed);
2444 }
2445
2446 /* Set TREE_NOTHROW to NOTHROW on cgraph_node's decl and on its aliases,
2447 if any. Return true if any change was done. */
2448
2449 bool
2450 cgraph_node::set_nothrow_flag (bool nothrow)
2451 {
2452 bool changed = false;
2453 bool non_call = opt_for_fn (decl, flag_non_call_exceptions);
2454
2455 if (!nothrow || get_availability () > AVAIL_INTERPOSABLE)
2456 set_nothrow_flag_1 (this, nothrow, non_call, &changed);
2457 else
2458 {
2459 ipa_ref *ref;
2460
2461 FOR_EACH_ALIAS (this, ref)
2462 {
2463 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2464 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2465 set_nothrow_flag_1 (alias, nothrow, non_call, &changed);
2466 }
2467 }
2468 return changed;
2469 }
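/* An illustrative sketch (not part of GCC): a propagation pass typically
   uses the return value of set_nothrow_flag only to drive dump output,
   e.g.:

   if (node->set_nothrow_flag (true) && dump_file)
     fprintf (dump_file, "Function found to be nothrow: %s\n",
	      node->dump_name ());
*/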
2470
2471 /* Worker to set malloc flag. */
2472 static void
2473 set_malloc_flag_1 (cgraph_node *node, bool malloc_p, bool *changed)
2474 {
2475 if (malloc_p && !DECL_IS_MALLOC (node->decl))
2476 {
2477 DECL_IS_MALLOC (node->decl) = true;
2478 *changed = true;
2479 }
2480
2481 ipa_ref *ref;
2482 FOR_EACH_ALIAS (node, ref)
2483 {
2484 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2485 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2486 set_malloc_flag_1 (alias, malloc_p, changed);
2487 }
2488
2489 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2490 if (e->caller->thunk.thunk_p
2491 && (!malloc_p || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2492 set_malloc_flag_1 (e->caller, malloc_p, changed);
2493 }
2494
2495 /* Set DECL_IS_MALLOC on NODE's decl and on NODE's aliases if any. */
2496
2497 bool
2498 cgraph_node::set_malloc_flag (bool malloc_p)
2499 {
2500 bool changed = false;
2501
2502 if (!malloc_p || get_availability () > AVAIL_INTERPOSABLE)
2503 set_malloc_flag_1 (this, malloc_p, &changed);
2504 else
2505 {
2506 ipa_ref *ref;
2507
2508 FOR_EACH_ALIAS (this, ref)
2509 {
2510 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2511 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2512 set_malloc_flag_1 (alias, malloc_p, &changed);
2513 }
2514 }
2515 return changed;
2516 }
2517
2518 /* Worker to set_const_flag. */
2519
2520 static void
2521 set_const_flag_1 (cgraph_node *node, bool set_const, bool looping,
2522 bool *changed)
2523 {
2524 /* Static constructors and destructors without a side effect can be
2525 optimized out. */
2526 if (set_const && !looping)
2527 {
2528 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2529 {
2530 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2531 *changed = true;
2532 }
2533 if (DECL_STATIC_DESTRUCTOR (node->decl))
2534 {
2535 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2536 *changed = true;
2537 }
2538 }
2539 if (!set_const)
2540 {
2541 if (TREE_READONLY (node->decl))
2542 {
2543 TREE_READONLY (node->decl) = 0;
2544 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2545 *changed = true;
2546 }
2547 }
2548 else
2549 {
2550 /* Consider function:
2551
2552 bool a(int *p)
2553 {
2554 return *p==*p;
2555 }
2556
2557 During early optimization we will turn this into:
2558
2559 bool a(int *p)
2560 {
2561 return true;
2562 }
2563
2564 Now if this function is detected as CONST, an interposed definition
2565 may still end up being merely pure. We must always assume the worst
2566 scenario here. */
2567 if (TREE_READONLY (node->decl))
2568 {
2569 if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2570 {
2571 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2572 *changed = true;
2573 }
2574 }
2575 else if (node->binds_to_current_def_p ())
2576 {
2577 TREE_READONLY (node->decl) = true;
2578 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2579 DECL_PURE_P (node->decl) = false;
2580 *changed = true;
2581 }
2582 else
2583 {
2584 if (dump_file && (dump_flags & TDF_DETAILS))
2585 fprintf (dump_file, "Dropping state to PURE because function does "
2586 "not bind to current def.\n");
2587 if (!DECL_PURE_P (node->decl))
2588 {
2589 DECL_PURE_P (node->decl) = true;
2590 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2591 *changed = true;
2592 }
2593 else if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2594 {
2595 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2596 *changed = true;
2597 }
2598 }
2599 }
2600
2601 ipa_ref *ref;
2602 FOR_EACH_ALIAS (node, ref)
2603 {
2604 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2605 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2606 set_const_flag_1 (alias, set_const, looping, changed);
2607 }
2608 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2609 if (e->caller->thunk.thunk_p
2610 && (!set_const || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2611 {
2612 /* Virtual thunks access virtual offset in the vtable, so they can
2613 only be pure, never const. */
2614 if (set_const
2615 && (e->caller->thunk.virtual_offset_p
2616 || !node->binds_to_current_def_p (e->caller)))
2617 *changed |= e->caller->set_pure_flag (true, looping);
2618 else
2619 set_const_flag_1 (e->caller, set_const, looping, changed);
2620 }
2621 }
2622
2623 /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
2624 If SET_CONST is false, clear the flag.
2625
2626 When setting the flag be careful about possible interposition: do not
2627 set the flag for functions that can be interposed, and only set the pure
2628 flag for functions that may bind to a different definition.
2629
2630 Return true if any change was done. */
2631
2632 bool
2633 cgraph_node::set_const_flag (bool set_const, bool looping)
2634 {
2635 bool changed = false;
2636 if (!set_const || get_availability () > AVAIL_INTERPOSABLE)
2637 set_const_flag_1 (this, set_const, looping, &changed);
2638 else
2639 {
2640 ipa_ref *ref;
2641
2642 FOR_EACH_ALIAS (this, ref)
2643 {
2644 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2645 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2646 set_const_flag_1 (alias, set_const, looping, &changed);
2647 }
2648 }
2649 return changed;
2650 }
2651
2652 /* Info used by set_pure_flag_1. */
2653
2654 struct set_pure_flag_info
2655 {
2656 bool pure;
2657 bool looping;
2658 bool changed;
2659 };
2660
2661 /* Worker to set_pure_flag. */
2662
2663 static bool
2664 set_pure_flag_1 (cgraph_node *node, void *data)
2665 {
2666 struct set_pure_flag_info *info = (struct set_pure_flag_info *)data;
2667 /* Static constructors and destructors without a side effect can be
2668 optimized out. */
2669 if (info->pure && !info->looping)
2670 {
2671 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2672 {
2673 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2674 info->changed = true;
2675 }
2676 if (DECL_STATIC_DESTRUCTOR (node->decl))
2677 {
2678 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2679 info->changed = true;
2680 }
2681 }
2682 if (info->pure)
2683 {
2684 if (!DECL_PURE_P (node->decl) && !TREE_READONLY (node->decl))
2685 {
2686 DECL_PURE_P (node->decl) = true;
2687 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = info->looping;
2688 info->changed = true;
2689 }
2690 else if (DECL_LOOPING_CONST_OR_PURE_P (node->decl)
2691 && !info->looping)
2692 {
2693 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2694 info->changed = true;
2695 }
2696 }
2697 else
2698 {
2699 if (DECL_PURE_P (node->decl))
2700 {
2701 DECL_PURE_P (node->decl) = false;
2702 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2703 info->changed = true;
2704 }
2705 }
2706 return false;
2707 }
2708
2709 /* Set DECL_PURE_P to PURE on cgraph_node's decl and on the decls of
2710 aliases of the node, if any.
2711
2712 When setting the flag, be careful about possible interposition.
2713 Return true if any change was done. */
2714
2715 bool
2716 cgraph_node::set_pure_flag (bool pure, bool looping)
2717 {
2718 struct set_pure_flag_info info = {pure, looping, false};
2719 call_for_symbol_thunks_and_aliases (set_pure_flag_1, &info, !pure, true);
2720 return info.changed;
2721 }
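/* Unlike set_nothrow_flag and set_malloc_flag above, which open-code their
   walks, set_pure_flag packs its arguments into set_pure_flag_info so the
   generic call_for_symbol_thunks_and_aliases walker can be reused; the
   callback signature only has room for a single void * argument. An
   illustrative caller (not part of GCC):

   bool looping = true;
   if (node->set_pure_flag (true, looping) && dump_file)
     fprintf (dump_file, "Function %s found to be %spure\n",
	      node->dump_name (), looping ? "looping " : "");
*/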
2722
2723 /* Return true when cgraph_node cannot return or throw and thus
2724 it is safe to ignore its side effects for IPA analysis. */
2725
2726 bool
2727 cgraph_node::cannot_return_p (void)
2728 {
2729 int flags = flags_from_decl_or_type (decl);
2730 if (!opt_for_fn (decl, flag_exceptions))
2731 return (flags & ECF_NORETURN) != 0;
2732 else
2733 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2734 == (ECF_NORETURN | ECF_NOTHROW));
2735 }
2736
2737 /* Return true when call of edge cannot lead to return from caller
2738 and thus it is safe to ignore its side effects for IPA analysis
2739 when computing side effects of the caller.
2740 FIXME: We could actually mark all edges that have no reaching
2741 path to the exit block or throw to get better results.
2742 bool
2743 cgraph_edge::cannot_lead_to_return_p (void)
2744 {
2745 if (caller->cannot_return_p ())
2746 return true;
2747 if (indirect_unknown_callee)
2748 {
2749 int flags = indirect_info->ecf_flags;
2750 if (!opt_for_fn (caller->decl, flag_exceptions))
2751 return (flags & ECF_NORETURN) != 0;
2752 else
2753 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2754 == (ECF_NORETURN | ECF_NOTHROW));
2755 }
2756 else
2757 return callee->cannot_return_p ();
2758 }
2759
2760 /* Return true if the edge may be considered hot. */
2761
2762 bool
2763 cgraph_edge::maybe_hot_p (void)
2764 {
2765 if (!maybe_hot_count_p (NULL, count.ipa ()))
2766 return false;
2767 if (caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
2768 || (callee
2769 && callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
2770 return false;
2771 if (caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
2772 && (callee
2773 && callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE))
2774 return false;
2775 if (opt_for_fn (caller->decl, optimize_size))
2776 return false;
2777 if (caller->frequency == NODE_FREQUENCY_HOT)
2778 return true;
2779 if (!count.initialized_p ())
2780 return true;
2781 cgraph_node *where = caller->inlined_to ? caller->inlined_to : caller;
2782 if (!where->count.initialized_p ())
2783 return false;
2784 if (caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2785 {
2786 if (count.apply_scale (2, 1) < where->count.apply_scale (3, 1))
2787 return false;
2788 }
2789 else if (count.apply_scale (param_hot_bb_frequency_fraction , 1)
2790 < where->count)
2791 return false;
2792 return true;
2793 }
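/* A worked example of the scaling tests above (illustrative): for a
   NODE_FREQUENCY_EXECUTED_ONCE caller the edge is hot only when
   2 * edge_count >= 3 * function_count, i.e. the call executes at least 1.5
   times per invocation of its caller. For other callers, with
   param_hot_bb_frequency_fraction at its usual default of 1000, an edge in
   a function entered 1,000,000 times must itself execute at least 1,000
   times to pass the final test.  */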
2794
2795 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2796
2797 static bool
2798 nonremovable_p (cgraph_node *node, void *)
2799 {
2800 return !node->can_remove_if_no_direct_calls_and_refs_p ();
2801 }
2802
2803 /* Return true if whole comdat group can be removed if there are no direct
2804 calls to THIS. */
2805
2806 bool
2807 cgraph_node::can_remove_if_no_direct_calls_p (bool will_inline)
2808 {
2809 struct ipa_ref *ref;
2810
2811 /* For local symbols or non-comdat group it is the same as
2812 can_remove_if_no_direct_calls_p. */
2813 if (!externally_visible || !same_comdat_group)
2814 {
2815 if (DECL_EXTERNAL (decl))
2816 return true;
2817 if (address_taken)
2818 return false;
2819 return !call_for_symbol_and_aliases (nonremovable_p, NULL, true);
2820 }
2821
2822 if (will_inline && address_taken)
2823 return false;
2824
2825 /* Otherwise check if we can remove the symbol itself and then verify
2826 that the only uses of the comdat group are direct calls to THIS
2827 or its aliases. */
2828 if (!can_remove_if_no_direct_calls_and_refs_p ())
2829 return false;
2830
2831 /* Check that all refs come from within the comdat group. */
2832 for (int i = 0; iterate_referring (i, ref); i++)
2833 if (ref->referring->get_comdat_group () != get_comdat_group ())
2834 return false;
2835
2836 struct cgraph_node *target = ultimate_alias_target ();
2837 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2838 next != this; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2839 {
2840 if (!externally_visible)
2841 continue;
2842 if (!next->alias
2843 && !next->can_remove_if_no_direct_calls_and_refs_p ())
2844 return false;
2845
2846 /* If we see different symbol than THIS, be sure to check calls. */
2847 if (next->ultimate_alias_target () != target)
2848 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2849 if (e->caller->get_comdat_group () != get_comdat_group ()
2850 || will_inline)
2851 return false;
2852
2853 /* If function is not being inlined, we care only about
2854 references outside of the comdat group. */
2855 if (!will_inline)
2856 for (int i = 0; next->iterate_referring (i, ref); i++)
2857 if (ref->referring->get_comdat_group () != get_comdat_group ())
2858 return false;
2859 }
2860 return true;
2861 }
2862
2863 /* Return true when function cgraph_node can be expected to be removed
2864 from program when direct calls in this compilation unit are removed.
2865
2866 As a special case COMDAT functions are
2867 cgraph_can_remove_if_no_direct_calls_p while they are not
2868 cgraph_only_called_directly_p (it is possible they are called from another
2869 unit).
2870
2871 This function behaves as cgraph_only_called_directly_p because eliminating
2872 all uses of a COMDAT function does not necessarily make it disappear from
2873 the program unless we are compiling whole program or we do LTO. In this
2874 case we know we win since dynamic linking will not really discard the
2875 linkonce section. */
2876
2877 bool
2878 cgraph_node::will_be_removed_from_program_if_no_direct_calls_p
2879 (bool will_inline)
2880 {
2881 gcc_assert (!inlined_to);
2882 if (DECL_EXTERNAL (decl))
2883 return true;
2884
2885 if (!in_lto_p && !flag_whole_program)
2886 {
2887 /* If the symbol is in a comdat group, we need to verify that the whole
2888 comdat group becomes unreachable. Technically we could skip references from
2889 within the group, too. */
2890 if (!only_called_directly_p ())
2891 return false;
2892 if (same_comdat_group && externally_visible)
2893 {
2894 struct cgraph_node *target = ultimate_alias_target ();
2895
2896 if (will_inline && address_taken)
2897 return true;
2898 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2899 next != this;
2900 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2901 {
2902 if (!externally_visible)
2903 continue;
2904 if (!next->alias
2905 && !next->only_called_directly_p ())
2906 return false;
2907
2908 /* If we see different symbol than THIS,
2909 be sure to check calls. */
2910 if (next->ultimate_alias_target () != target)
2911 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2912 if (e->caller->get_comdat_group () != get_comdat_group ()
2913 || will_inline)
2914 return false;
2915 }
2916 }
2917 return true;
2918 }
2919 else
2920 return can_remove_if_no_direct_calls_p (will_inline);
2921 }
2922
2923
2924 /* Worker for cgraph_only_called_directly_p. */
2925
2926 static bool
2927 cgraph_not_only_called_directly_p_1 (cgraph_node *node, void *)
2928 {
2929 return !node->only_called_directly_or_aliased_p ();
2930 }
2931
2932 /* Return true when function cgraph_node and all its aliases are only called
2933 directly.
2934 i.e. it is not externally visible, its address was not taken, and
2935 it is not used in any other non-standard way. */
2936
2937 bool
2938 cgraph_node::only_called_directly_p (void)
2939 {
2940 gcc_assert (ultimate_alias_target () == this);
2941 return !call_for_symbol_and_aliases (cgraph_not_only_called_directly_p_1,
2942 NULL, true);
2943 }
2944
2945
2946 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
2947
2948 static bool
2949 collect_callers_of_node_1 (cgraph_node *node, void *data)
2950 {
2951 vec<cgraph_edge *> *redirect_callers = (vec<cgraph_edge *> *)data;
2952 cgraph_edge *cs;
2953 enum availability avail;
2954 node->ultimate_alias_target (&avail);
2955
2956 if (avail > AVAIL_INTERPOSABLE)
2957 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
2958 if (!cs->indirect_inlining_edge
2959 && !cs->caller->thunk.thunk_p)
2960 redirect_callers->safe_push (cs);
2961 return false;
2962 }
2963
2964 /* Collect all callers of cgraph_node and its aliases that are known to lead to
2965 cgraph_node (i.e. are not overwritable). */
2966
2967 vec<cgraph_edge *>
2968 cgraph_node::collect_callers (void)
2969 {
2970 vec<cgraph_edge *> redirect_callers = vNULL;
2971 call_for_symbol_thunks_and_aliases (collect_callers_of_node_1,
2972 &redirect_callers, false);
2973 return redirect_callers;
2974 }
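/* An illustrative sketch (not part of GCC): the returned vector is heap
   allocated, so callers iterate it and release it explicitly. The clone
   node below is hypothetical.

   vec<cgraph_edge *> callers = node->collect_callers ();
   unsigned i;
   cgraph_edge *cs;

   FOR_EACH_VEC_ELT (callers, i, cs)
     cs->redirect_callee (new_clone);	// hypothetical replacement callee
   callers.release ();
*/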
2975
2976
2977 /* Return TRUE if NODE2 is a clone of NODE or is equivalent to it. Return
2978 optimistically true if this cannot be determined. */
2979
2980 static bool
2981 clone_of_p (cgraph_node *node, cgraph_node *node2)
2982 {
2983 node = node->ultimate_alias_target ();
2984 node2 = node2->ultimate_alias_target ();
2985
2986 if (node2->clone_of == node
2987 || node2->former_clone_of == node->decl)
2988 return true;
2989
2990 if (!node->thunk.thunk_p && !node->former_thunk_p ())
2991 {
2992 while (node2 && node->decl != node2->decl)
2993 node2 = node2->clone_of;
2994 return node2 != NULL;
2995 }
2996
2997 /* There are no virtual clones of thunks so check former_clone_of or if we
2998 might have skipped thunks because the this-pointer adjustments are no longer
2999 necessary. */
3000 while (node->thunk.thunk_p || node->former_thunk_p ())
3001 {
3002 if (!node->thunk.this_adjusting)
3003 return false;
3004 /* In case of instrumented expanded thunks, which can have multiple calls
3005 in them, we do not know how to continue and just have to be
3006 optimistic. */
3007 if (node->callees->next_callee)
3008 return true;
3009 node = node->callees->callee->ultimate_alias_target ();
3010
3011 if (!node2->clone.param_adjustments
3012 || node2->clone.param_adjustments->first_param_intact_p ())
3013 return false;
3014 if (node2->former_clone_of == node->decl)
3015 return true;
3016
3017 cgraph_node *n2 = node2;
3018 while (n2 && node->decl != n2->decl)
3019 n2 = n2->clone_of;
3020 if (n2)
3021 return true;
3022 }
3023
3024 return false;
3025 }
3026
3027 /* Verify edge count and frequency. */
3028
3029 bool
3030 cgraph_edge::verify_count ()
3031 {
3032 bool error_found = false;
3033 if (!count.verify ())
3034 {
3035 error ("caller edge count invalid");
3036 error_found = true;
3037 }
3038 return error_found;
3039 }
3040
3041 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
3042 static void
3043 cgraph_debug_gimple_stmt (function *this_cfun, gimple *stmt)
3044 {
3045 bool fndecl_was_null = false;
3046 /* debug_gimple_stmt needs correct cfun */
3047 if (cfun != this_cfun)
3048 set_cfun (this_cfun);
3049 /* ...and an actual current_function_decl */
3050 if (!current_function_decl)
3051 {
3052 current_function_decl = this_cfun->decl;
3053 fndecl_was_null = true;
3054 }
3055 debug_gimple_stmt (stmt);
3056 if (fndecl_was_null)
3057 current_function_decl = NULL;
3058 }
3059
3060 /* Verify that call graph edge corresponds to DECL from the associated
3061 statement. Return true if the verification should fail. */
3062
3063 bool
3064 cgraph_edge::verify_corresponds_to_fndecl (tree decl)
3065 {
3066 cgraph_node *node;
3067
3068 if (!decl || callee->inlined_to)
3069 return false;
3070 if (symtab->state == LTO_STREAMING)
3071 return false;
3072 node = cgraph_node::get (decl);
3073
3074 /* We do not know if a node from a different partition is an alias or what it
3075 aliases and therefore cannot do the former_clone_of check reliably. When
3076 body_removed is set, we have lost all information about what was alias or
3077 thunk of and also cannot proceed. */
3078 if (!node
3079 || node->body_removed
3080 || node->in_other_partition
3081 || callee->icf_merged
3082 || callee->in_other_partition)
3083 return false;
3084
3085 node = node->ultimate_alias_target ();
3086
3087 /* Optimizers can redirect unreachable calls or calls triggering undefined
3088 behavior to builtin_unreachable. */
3089
3090 if (fndecl_built_in_p (callee->decl, BUILT_IN_UNREACHABLE))
3091 return false;
3092
3093 if (callee->former_clone_of != node->decl
3094 && (node != callee->ultimate_alias_target ())
3095 && !clone_of_p (node, callee))
3096 return true;
3097 else
3098 return false;
3099 }
3100
3101 /* Disable warnings about missing quoting in GCC diagnostics for
3102 the verification errors. Their format strings don't follow GCC
3103 diagnostic conventions and the calls are ultimately followed by
3104 one to internal_error. */
3105 #if __GNUC__ >= 10
3106 # pragma GCC diagnostic push
3107 # pragma GCC diagnostic ignored "-Wformat-diag"
3108 #endif
3109
3110 /* Verify internal consistency of the given cgraph node. */
3111 DEBUG_FUNCTION void
3112 cgraph_node::verify_node (void)
3113 {
3114 cgraph_edge *e;
3115 function *this_cfun = DECL_STRUCT_FUNCTION (decl);
3116 basic_block this_block;
3117 gimple_stmt_iterator gsi;
3118 bool error_found = false;
3119
3120 if (seen_error ())
3121 return;
3122
3123 timevar_push (TV_CGRAPH_VERIFY);
3124 error_found |= verify_base ();
3125 for (e = callees; e; e = e->next_callee)
3126 if (e->aux)
3127 {
3128 error ("aux field set for edge %s->%s",
3129 identifier_to_locale (e->caller->name ()),
3130 identifier_to_locale (e->callee->name ()));
3131 error_found = true;
3132 }
3133 if (!count.verify ())
3134 {
3135 error ("cgraph count invalid");
3136 error_found = true;
3137 }
3138 if (inlined_to && same_comdat_group)
3139 {
3140 error ("inline clone in same comdat group list");
3141 error_found = true;
3142 }
3143 if (inlined_to && !count.compatible_p (inlined_to->count))
3144 {
3145 error ("inline clone count is not compatible");
3146 count.debug ();
3147 inlined_to->count.debug ();
3148 error_found = true;
3149 }
3150 if (tp_first_run < 0)
3151 {
3152 error ("tp_first_run must be non-negative");
3153 error_found = true;
3154 }
3155 if (!definition && !in_other_partition && local)
3156 {
3157 error ("local symbols must be defined");
3158 error_found = true;
3159 }
3160 if (inlined_to && externally_visible)
3161 {
3162 error ("externally visible inline clone");
3163 error_found = true;
3164 }
3165 if (inlined_to && address_taken)
3166 {
3167 error ("inline clone with address taken");
3168 error_found = true;
3169 }
3170 if (inlined_to && force_output)
3171 {
3172 error ("inline clone is forced to output");
3173 error_found = true;
3174 }
3175 if (calls_comdat_local && !same_comdat_group)
3176 {
3177 error ("calls_comdat_local is set outside of a comdat group");
3178 error_found = true;
3179 }
3180 for (e = indirect_calls; e; e = e->next_callee)
3181 {
3182 if (e->aux)
3183 {
3184 error ("aux field set for indirect edge from %s",
3185 identifier_to_locale (e->caller->name ()));
3186 error_found = true;
3187 }
3188 if (!e->count.compatible_p (count))
3189 {
3190 error ("edge count is not compatible with function count");
3191 e->count.debug ();
3192 count.debug ();
3193 error_found = true;
3194 }
3195 if (!e->indirect_unknown_callee
3196 || !e->indirect_info)
3197 {
3198 error ("An indirect edge from %s is not marked as indirect or has "
3199 "associated indirect_info, the corresponding statement is: ",
3200 identifier_to_locale (e->caller->name ()));
3201 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3202 error_found = true;
3203 }
3204 }
3205 bool check_comdat = comdat_local_p ();
3206 for (e = callers; e; e = e->next_caller)
3207 {
3208 if (e->verify_count ())
3209 error_found = true;
3210 if (check_comdat
3211 && !in_same_comdat_group_p (e->caller))
3212 {
3213 error ("comdat-local function called by %s outside its comdat",
3214 identifier_to_locale (e->caller->name ()));
3215 error_found = true;
3216 }
3217 if (!e->inline_failed)
3218 {
3219 if (inlined_to
3220 != (e->caller->inlined_to
3221 ? e->caller->inlined_to : e->caller))
3222 {
3223 error ("inlined_to pointer is wrong");
3224 error_found = true;
3225 }
3226 if (callers->next_caller)
3227 {
3228 error ("multiple inline callers");
3229 error_found = true;
3230 }
3231 }
3232 else
3233 if (inlined_to)
3234 {
3235 error ("inlined_to pointer set for noninline callers");
3236 error_found = true;
3237 }
3238 }
3239 for (e = callees; e; e = e->next_callee)
3240 {
3241 if (e->verify_count ())
3242 error_found = true;
3243 if (!e->count.compatible_p (count))
3244 {
3245 error ("edge count is not compatible with function count");
3246 e->count.debug ();
3247 count.debug ();
3248 error_found = true;
3249 }
3250 if (gimple_has_body_p (e->caller->decl)
3251 && !e->caller->inlined_to
3252 && !e->speculative
3253 /* Optimized out calls are redirected to __builtin_unreachable. */
3254 && (e->count.nonzero_p ()
3255 || ! e->callee->decl
3256 || !fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
3257 && count
3258 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3259 && (!e->count.ipa_p ()
3260 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3261 {
3262 error ("caller edge count does not match BB count");
3263 fprintf (stderr, "edge count: ");
3264 e->count.dump (stderr);
3265 fprintf (stderr, "\n bb count: ");
3266 gimple_bb (e->call_stmt)->count.dump (stderr);
3267 fprintf (stderr, "\n");
3268 error_found = true;
3269 }
3270 }
3271 for (e = indirect_calls; e; e = e->next_callee)
3272 {
3273 if (e->verify_count ())
3274 error_found = true;
3275 if (gimple_has_body_p (e->caller->decl)
3276 && !e->caller->inlined_to
3277 && !e->speculative
3278 && e->count.ipa_p ()
3279 && count
3280 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3281 && (!e->count.ipa_p ()
3282 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3283 {
3284 error ("indirect call count does not match BB count");
3285 fprintf (stderr, "edge count: ");
3286 e->count.dump (stderr);
3287 fprintf (stderr, "\n bb count: ");
3288 gimple_bb (e->call_stmt)->count.dump (stderr);
3289 fprintf (stderr, "\n");
3290 error_found = true;
3291 }
3292 }
3293 if (!callers && inlined_to)
3294 {
3295 error ("inlined_to pointer is set but no predecessors found");
3296 error_found = true;
3297 }
3298 if (inlined_to == this)
3299 {
3300 error ("inlined_to pointer refers to itself");
3301 error_found = true;
3302 }
3303
3304 if (clone_of)
3305 {
3306 cgraph_node *first_clone = clone_of->clones;
3307 if (first_clone != this)
3308 {
3309 if (prev_sibling_clone->clone_of != clone_of)
3310 {
3311 error ("cgraph_node has wrong clone_of");
3312 error_found = true;
3313 }
3314 }
3315 }
3316 if (clones)
3317 {
3318 cgraph_node *n;
3319 for (n = clones; n; n = n->next_sibling_clone)
3320 if (n->clone_of != this)
3321 break;
3322 if (n)
3323 {
3324 error ("cgraph_node has wrong clone list");
3325 error_found = true;
3326 }
3327 }
3328 if ((prev_sibling_clone || next_sibling_clone) && !clone_of)
3329 {
3330 error ("cgraph_node is in clone list but it is not clone");
3331 error_found = true;
3332 }
3333 if (!prev_sibling_clone && clone_of && clone_of->clones != this)
3334 {
3335 error ("cgraph_node has wrong prev_clone pointer");
3336 error_found = true;
3337 }
3338 if (prev_sibling_clone && prev_sibling_clone->next_sibling_clone != this)
3339 {
3340 error ("double linked list of clones corrupted");
3341 error_found = true;
3342 }
3343
3344 if (analyzed && alias)
3345 {
3346 bool ref_found = false;
3347 int i;
3348 ipa_ref *ref = NULL;
3349
3350 if (callees)
3351 {
3352 error ("Alias has call edges");
3353 error_found = true;
3354 }
3355 for (i = 0; iterate_reference (i, ref); i++)
3356 if (ref->use != IPA_REF_ALIAS)
3357 {
3358 error ("Alias has non-alias reference");
3359 error_found = true;
3360 }
3361 else if (ref_found)
3362 {
3363 error ("Alias has more than one alias reference");
3364 error_found = true;
3365 }
3366 else
3367 ref_found = true;
3368 if (!ref_found)
3369 {
3370 error ("Analyzed alias has no reference");
3371 error_found = true;
3372 }
3373 }
3374
3375 if (analyzed && thunk.thunk_p)
3376 {
3377 if (!callees)
3378 {
3379 error ("No edge out of thunk node");
3380 error_found = true;
3381 }
3382 else if (callees->next_callee)
3383 {
3384 error ("More than one edge out of thunk node");
3385 error_found = true;
3386 }
3387 if (gimple_has_body_p (decl) && !inlined_to)
3388 {
3389 error ("Thunk is not supposed to have body");
3390 error_found = true;
3391 }
3392 }
3393 else if (analyzed && gimple_has_body_p (decl)
3394 && !TREE_ASM_WRITTEN (decl)
3395 && (!DECL_EXTERNAL (decl) || inlined_to)
3396 && !flag_wpa)
3397 {
3398 if (this_cfun->cfg)
3399 {
3400 hash_set<gimple *> stmts;
3401 int i;
3402 ipa_ref *ref = NULL;
3403
3404 /* Reach the trees by walking over the CFG, and note the
3405 enclosing basic-blocks in the call edges. */
3406 FOR_EACH_BB_FN (this_block, this_cfun)
3407 {
3408 for (gsi = gsi_start_phis (this_block);
3409 !gsi_end_p (gsi); gsi_next (&gsi))
3410 stmts.add (gsi_stmt (gsi));
3411 for (gsi = gsi_start_bb (this_block);
3412 !gsi_end_p (gsi);
3413 gsi_next (&gsi))
3414 {
3415 gimple *stmt = gsi_stmt (gsi);
3416 stmts.add (stmt);
3417 if (is_gimple_call (stmt))
3418 {
3419 cgraph_edge *e = get_edge (stmt);
3420 tree decl = gimple_call_fndecl (stmt);
3421 if (e)
3422 {
3423 if (e->aux)
3424 {
3425 error ("shared call_stmt:");
3426 cgraph_debug_gimple_stmt (this_cfun, stmt);
3427 error_found = true;
3428 }
3429 if (!e->indirect_unknown_callee)
3430 {
3431 if (e->verify_corresponds_to_fndecl (decl))
3432 {
3433 error ("edge points to wrong declaration:");
3434 debug_tree (e->callee->decl);
3435 fprintf (stderr," Instead of:");
3436 debug_tree (decl);
3437 error_found = true;
3438 }
3439 }
3440 else if (decl)
3441 {
3442 error ("an indirect edge with unknown callee "
3443 "corresponding to a call_stmt with "
3444 "a known declaration:");
3445 error_found = true;
3446 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3447 }
3448 e->aux = (void *)1;
3449 }
3450 else if (decl)
3451 {
3452 error ("missing callgraph edge for call stmt:");
3453 cgraph_debug_gimple_stmt (this_cfun, stmt);
3454 error_found = true;
3455 }
3456 }
3457 }
3458 }
3459 for (i = 0; iterate_reference (i, ref); i++)
3460 if (ref->stmt && !stmts.contains (ref->stmt))
3461 {
3462 error ("reference to dead statement");
3463 cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
3464 error_found = true;
3465 }
3466 }
3467 else
3468 /* No CFG available?! */
3469 gcc_unreachable ();
3470
3471 for (e = callees; e; e = e->next_callee)
3472 {
3473 if (!e->aux && !e->speculative)
3474 {
3475 error ("edge %s->%s has no corresponding call_stmt",
3476 identifier_to_locale (e->caller->name ()),
3477 identifier_to_locale (e->callee->name ()));
3478 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3479 error_found = true;
3480 }
3481 e->aux = 0;
3482 }
3483 for (e = indirect_calls; e; e = e->next_callee)
3484 {
3485 if (!e->aux && !e->speculative)
3486 {
3487 error ("an indirect edge from %s has no corresponding call_stmt",
3488 identifier_to_locale (e->caller->name ()));
3489 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3490 error_found = true;
3491 }
3492 e->aux = 0;
3493 }
3494 }
3495
3496 if (nested != NULL)
3497 {
3498 for (cgraph_node *n = nested; n != NULL; n = n->next_nested)
3499 {
3500 if (n->origin == NULL)
3501 {
3502 error ("missing origin for a node in a nested list");
3503 error_found = true;
3504 }
3505 else if (n->origin != this)
3506 {
3507 error ("origin points to a different parent");
3508 error_found = true;
3509 break;
3510 }
3511 }
3512 }
3513 if (next_nested != NULL && origin == NULL)
3514 {
3515 error ("missing origin for a node in a nested list");
3516 error_found = true;
3517 }
3518
3519 if (error_found)
3520 {
3521 dump (stderr);
3522 internal_error ("verify_cgraph_node failed");
3523 }
3524 timevar_pop (TV_CGRAPH_VERIFY);
3525 }
3526
3527 /* Verify whole cgraph structure. */
3528 DEBUG_FUNCTION void
3529 cgraph_node::verify_cgraph_nodes (void)
3530 {
3531 cgraph_node *node;
3532
3533 if (seen_error ())
3534 return;
3535
3536 FOR_EACH_FUNCTION (node)
3537 node->verify ();
3538 }
3539
3540 #if __GNUC__ >= 10
3541 # pragma GCC diagnostic pop
3542 #endif
3543
3544 /* Walk the alias chain to return the function cgraph_node is an alias of.
3545 Walk through thunks, too.
3546 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3547 When REF is non-NULL, assume that reference happens in symbol REF
3548 when determining the availability. */
3549
3550 cgraph_node *
3551 cgraph_node::function_symbol (enum availability *availability,
3552 struct symtab_node *ref)
3553 {
3554 cgraph_node *node = ultimate_alias_target (availability, ref);
3555
3556 while (node->thunk.thunk_p)
3557 {
3558 ref = node;
3559 node = node->callees->callee;
3560 if (availability)
3561 {
3562 enum availability a;
3563 a = node->get_availability (ref);
3564 if (a < *availability)
3565 *availability = a;
3566 }
3567 node = node->ultimate_alias_target (availability, ref);
3568 }
3569 return node;
3570 }
3571
3572 /* Walk the alias chain to return the function cgraph_node is an alias of.
3573 Walk through non virtual thunks, too. Thus we return either a function
3574 or a virtual thunk node.
3575 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3576 When REF is non-NULL, assume that reference happens in symbol REF
3577 when determining the availability. */
3578
3579 cgraph_node *
3580 cgraph_node::function_or_virtual_thunk_symbol
3581 (enum availability *availability,
3582 struct symtab_node *ref)
3583 {
3584 cgraph_node *node = ultimate_alias_target (availability, ref);
3585
3586 while (node->thunk.thunk_p && !node->thunk.virtual_offset_p)
3587 {
3588 ref = node;
3589 node = node->callees->callee;
3590 if (availability)
3591 {
3592 enum availability a;
3593 a = node->get_availability (ref);
3594 if (a < *availability)
3595 *availability = a;
3596 }
3597 node = node->ultimate_alias_target (availability, ref);
3598 }
3599 return node;
3600 }
3601
3602 /* When doing LTO, read cgraph_node's body from disk if it is not already
3603 present. */
3604
3605 bool
3606 cgraph_node::get_untransformed_body (void)
3607 {
3608 lto_file_decl_data *file_data;
3609 const char *data, *name;
3610 size_t len;
3611 tree decl = this->decl;
3612
3613 /* Check if body is already there. Either we have gimple body or
3614 the function is a thunk and in that case we set DECL_ARGUMENTS. */
3615 if (DECL_ARGUMENTS (decl) || gimple_has_body_p (decl))
3616 return false;
3617
3618 gcc_assert (in_lto_p && !DECL_RESULT (decl));
3619
3620 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3621
3622 file_data = lto_file_data;
3623 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3624
3625 /* We may have renamed the declaration, e.g., a static function. */
3626 name = lto_get_decl_name_mapping (file_data, name);
3627 struct lto_in_decl_state *decl_state
3628 = lto_get_function_in_decl_state (file_data, decl);
3629
3630 cgraph_node *origin = this;
3631 while (origin->clone_of)
3632 origin = origin->clone_of;
3633
3634 int stream_order = origin->order - file_data->order_base;
3635 data = lto_get_section_data (file_data, LTO_section_function_body,
3636 name, stream_order, &len,
3637 decl_state->compressed);
3638 if (!data)
3639 fatal_error (input_location, "%s: section %s.%d is missing",
3640 file_data->file_name, name, stream_order);
3641
3642 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
3643
3644 if (!quiet_flag)
3645 fprintf (stderr, " in:%s", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
3646 lto_input_function_body (file_data, this, data);
3647 lto_stats.num_function_bodies++;
3648 lto_free_section_data (file_data, LTO_section_function_body, name,
3649 data, len, decl_state->compressed);
3650 lto_free_function_in_decl_state_for_node (this);
3651 /* Keep lto file data so ipa-inline-analysis knows about cross module
3652 inlining. */
3653
3654 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3655
3656 return true;
3657 }
3658
3659 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
3660 if it is not already present. When some IPA transformations are scheduled,
3661 apply them. */
3662
3663 bool
3664 cgraph_node::get_body (void)
3665 {
3666 bool updated;
3667
3668 updated = get_untransformed_body ();
3669
3670 /* Getting transformed body makes no sense for inline clones;
3671 we should never use this on real clones because they are materialized
3672 early.
3673 TODO: Materializing clones here will likely lead to smaller LTRANS
3674 footprint. */
3675 gcc_assert (!inlined_to && !clone_of);
3676 if (ipa_transforms_to_apply.exists ())
3677 {
3678 opt_pass *saved_current_pass = current_pass;
3679 FILE *saved_dump_file = dump_file;
3680 const char *saved_dump_file_name = dump_file_name;
3681 dump_flags_t saved_dump_flags = dump_flags;
3682 dump_file_name = NULL;
3683 set_dump_file (NULL);
3684
3685 push_cfun (DECL_STRUCT_FUNCTION (decl));
3686
3687 update_ssa (TODO_update_ssa_only_virtuals);
3688 execute_all_ipa_transforms (true);
3689 cgraph_edge::rebuild_edges ();
3690 free_dominance_info (CDI_DOMINATORS);
3691 free_dominance_info (CDI_POST_DOMINATORS);
3692 pop_cfun ();
3693 updated = true;
3694
3695 current_pass = saved_current_pass;
3696 set_dump_file (saved_dump_file);
3697 dump_file_name = saved_dump_file_name;
3698 dump_flags = saved_dump_flags;
3699 }
3700 return updated;
3701 }
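/* An illustrative sketch (not part of GCC): late IPA code that wants to
   inspect or edit a function first makes sure the gimple body has been read
   in, then switches cfun around the work.

   node->get_untransformed_body ();
   push_cfun (DECL_STRUCT_FUNCTION (node->decl));
   // ... walk or rewrite the gimple body of the current function ...
   pop_cfun ();
*/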
3702
3703 /* Return the DECL_STRUCT_FUNCTION of the function. */
3704
3705 struct function *
3706 cgraph_node::get_fun () const
3707 {
3708 const cgraph_node *node = this;
3709 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
3710
3711 while (!fun && node->clone_of)
3712 {
3713 node = node->clone_of;
3714 fun = DECL_STRUCT_FUNCTION (node->decl);
3715 }
3716
3717 return fun;
3718 }
3719
3720 /* Reset all state within cgraph.c so that we can rerun the compiler
3721 within the same process. For use by toplev::finalize. */
3722
3723 void
3724 cgraph_c_finalize (void)
3725 {
3726 symtab = NULL;
3727
3728 x_cgraph_nodes_queue = NULL;
3729
3730 cgraph_fnver_htab = NULL;
3731 version_info_node = NULL;
3732 }
3733
3734 /* A worker for call_for_symbol_and_aliases. */
3735
3736 bool
3737 cgraph_node::call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
3738 void *),
3739 void *data,
3740 bool include_overwritable)
3741 {
3742 ipa_ref *ref;
3743 FOR_EACH_ALIAS (this, ref)
3744 {
3745 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
3746 if (include_overwritable
3747 || alias->get_availability () > AVAIL_INTERPOSABLE)
3748 if (alias->call_for_symbol_and_aliases (callback, data,
3749 include_overwritable))
3750 return true;
3751 }
3752 return false;
3753 }
3754
3755 /* Return true if NODE has a thunk, i.e. some caller of NODE is a thunk.  */
3756
3757 bool
3758 cgraph_node::has_thunk_p (cgraph_node *node, void *)
3759 {
3760 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
3761 if (e->caller->thunk.thunk_p)
3762 return true;
3763 return false;
3764 }
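/* has_thunk_p matches the callback signature of call_for_symbol_and_aliases,
   so a caller elsewhere can test whether NODE or any of its aliases is
   reached through a thunk roughly as follows (illustrative sketch only):

     if (node->call_for_symbol_and_aliases (cgraph_node::has_thunk_p,
                                            NULL, true))
       ... NODE or one of its aliases has a thunk ...  */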
3765
3766 /* Expected number of executions of the call per execution of the caller.  */
3767
3768 sreal
3769 cgraph_edge::sreal_frequency ()
3770 {
3771 return count.to_sreal_scale (caller->inlined_to
3772 ? caller->inlined_to->count
3773 : caller->count);
3774 }
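/* For example, with an edge count of 50 and a caller count of 200 the result
   is 0.25, i.e. the call is expected to execute about once per four
   invocations of the caller; sreal arithmetic keeps the ratio in
   extended-precision form so very hot or very cold edges neither overflow
   nor collapse to zero.  (The numbers are illustrative.)  */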
3775
3776
3777 /* During LTO stream-in this can be used to check whether the call can
3778 possibly be internal to the current translation unit.  */
3779
3780 bool
3781 cgraph_edge::possibly_call_in_translation_unit_p (void)
3782 {
3783 gcc_checking_assert (in_lto_p && caller->prevailing_p ());
3784
3785 /* With incremental linking we may end up getting the function body later.  */
3786 if (flag_incremental_link == INCREMENTAL_LINK_LTO)
3787 return true;
3788
3789 /* We could be smarter here and avoid streaming in indirect calls we cannot
3790 track, but that would require arranging to stream the indirect call
3791 summaries first.  */
3792 if (!callee)
3793 return true;
3794
3795 /* If callee is local to the original translation unit, it will be
3796 defined. */
3797 if (!TREE_PUBLIC (callee->decl) && !DECL_EXTERNAL (callee->decl))
3798 return true;
3799
3800 /* Otherwise we need to look up the prevailing symbol (the symbol table is
3801 not merged yet) and see whether it is a definition.  In fact we may also
3802 resolve aliases, but that is probably not too important.  */
3803 symtab_node *node = callee;
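/* Walk at most ten steps of the previous_sharing_asm_name chain by hand;
   if the chain is longer than that, fall back to the assembler-name hash
   lookup below, which yields the head of the chain directly.  */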
3804 for (int n = 10; node->previous_sharing_asm_name && n ; n--)
3805 node = node->previous_sharing_asm_name;
3806 if (node->previous_sharing_asm_name)
3807 node = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (callee->decl));
3808 gcc_assert (TREE_PUBLIC (node->decl));
3809 return node->get_availability () >= AVAIL_INTERPOSABLE;
3810 }
3811
3812 /* Return the number of speculative call targets of this edge.  */
3813
3814 int
3815 cgraph_edge::num_speculative_call_targets_p (void)
3816 {
3817 return indirect_info ? indirect_info->num_speculative_call_targets : 0;
3818 }
3819
3820 /* A stashed copy of "symtab" for use by selftest::symbol_table_test.
3821 This needs to be a global so that it can be a GC root, and thus
3822 prevent the stashed copy from being garbage-collected if the GC runs
3823 during a symbol_table_test. */
3824
3825 symbol_table *saved_symtab;
3826
3827 #if CHECKING_P
3828
3829 namespace selftest {
3830
3831 /* class selftest::symbol_table_test. */
3832
3833 /* Constructor. Store the old value of symtab, and create a new one. */
3834
3835 symbol_table_test::symbol_table_test ()
3836 {
3837 gcc_assert (saved_symtab == NULL);
3838 saved_symtab = symtab;
3839 symtab = new (ggc_alloc<symbol_table> ()) symbol_table ();
3840 }
3841
3842 /* Destructor. Restore the old value of symtab. */
3843
3844 symbol_table_test::~symbol_table_test ()
3845 {
3846 gcc_assert (saved_symtab != NULL);
3847 symtab = saved_symtab;
3848 saved_symtab = NULL;
3849 }
3850
3851 /* Verify that symbol_table_test works. */
3852
3853 static void
3854 test_symbol_table_test ()
3855 {
3856 /* Simulate running two selftests involving symbol tables. */
3857 for (int i = 0; i < 2; i++)
3858 {
3859 symbol_table_test stt;
3860 tree test_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
3861 get_identifier ("test_decl"),
3862 build_function_type_list (void_type_node,
3863 NULL_TREE));
3864 cgraph_node *node = cgraph_node::get_create (test_decl);
3865 gcc_assert (node);
3866
3867 /* Verify that the node has order 0 on both iterations,
3868 and thus that nodes have predictable dump names in selftests. */
3869 ASSERT_EQ (node->order, 0);
3870 ASSERT_STREQ (node->dump_name (), "test_decl/0");
3871 }
3872 }
3873
3874 /* Run all of the selftests within this file. */
3875
3876 void
3877 cgraph_c_tests ()
3878 {
3879 test_symbol_table_test ();
3880 }
3881
3882 } // namespace selftest
3883
3884 #endif /* CHECKING_P */
3885
3886 #include "gt-cgraph.h"