gcc/cgraph.c
1 /* Callgraph handling code.
2 Copyright (C) 2003-2020 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file contains basic routines manipulating the call graph.
22
23 The call-graph is a data structure designed for inter-procedural
24 optimization. It represents a multi-graph where nodes are functions
25 (symbols within symbol table) and edges are call sites. */
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "backend.h"
31 #include "target.h"
32 #include "rtl.h"
33 #include "tree.h"
34 #include "gimple.h"
35 #include "predict.h"
36 #include "alloc-pool.h"
37 #include "gimple-ssa.h"
38 #include "cgraph.h"
39 #include "lto-streamer.h"
40 #include "fold-const.h"
41 #include "varasm.h"
42 #include "calls.h"
43 #include "print-tree.h"
44 #include "langhooks.h"
45 #include "intl.h"
46 #include "tree-eh.h"
47 #include "gimple-iterator.h"
48 #include "tree-cfg.h"
49 #include "tree-ssa.h"
50 #include "value-prof.h"
51 #include "ipa-utils.h"
52 #include "symbol-summary.h"
53 #include "tree-vrp.h"
54 #include "ipa-prop.h"
55 #include "ipa-fnsummary.h"
56 #include "cfgloop.h"
57 #include "gimple-pretty-print.h"
58 #include "tree-dfa.h"
59 #include "profile.h"
60 #include "context.h"
61 #include "gimplify.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "selftest.h"
65 #include "tree-into-ssa.h"
66
67 /* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
68 #include "tree-pass.h"
69
70 /* Queue of cgraph nodes scheduled to be lowered. */
71 symtab_node *x_cgraph_nodes_queue;
72 #define cgraph_nodes_queue ((cgraph_node *)x_cgraph_nodes_queue)
73
74 /* Symbol table global context. */
75 symbol_table *symtab;
76
77 /* List of hooks triggered on cgraph_edge events. */
78 struct cgraph_edge_hook_list {
79 cgraph_edge_hook hook;
80 void *data;
81 struct cgraph_edge_hook_list *next;
82 };
83
84 /* List of hooks triggered on cgraph_node events. */
85 struct cgraph_node_hook_list {
86 cgraph_node_hook hook;
87 void *data;
88 struct cgraph_node_hook_list *next;
89 };
90
91 /* List of hooks triggered on events involving two cgraph_edges. */
92 struct cgraph_2edge_hook_list {
93 cgraph_2edge_hook hook;
94 void *data;
95 struct cgraph_2edge_hook_list *next;
96 };
97
98 /* List of hooks triggered on events involving two cgraph_nodes. */
99 struct cgraph_2node_hook_list {
100 cgraph_2node_hook hook;
101 void *data;
102 struct cgraph_2node_hook_list *next;
103 };
104
105 /* Hash descriptor for cgraph_function_version_info. */
106
107 struct function_version_hasher : ggc_ptr_hash<cgraph_function_version_info>
108 {
109 static hashval_t hash (cgraph_function_version_info *);
110 static bool equal (cgraph_function_version_info *,
111 cgraph_function_version_info *);
112 };
113
114 /* Map a cgraph_node to cgraph_function_version_info using this htab.
115 The cgraph_function_version_info has a THIS_NODE field that is the
116 corresponding cgraph_node. */
117
118 static GTY(()) hash_table<function_version_hasher> *cgraph_fnver_htab = NULL;
119
120 /* Hash function for cgraph_fnver_htab. */
121 hashval_t
122 function_version_hasher::hash (cgraph_function_version_info *ptr)
123 {
124 int uid = ptr->this_node->get_uid ();
125 return (hashval_t)(uid);
126 }
127
128 /* eq function for cgraph_fnver_htab. */
129 bool
130 function_version_hasher::equal (cgraph_function_version_info *n1,
131 cgraph_function_version_info *n2)
132 {
133 return n1->this_node->get_uid () == n2->this_node->get_uid ();
134 }
135
136 /* Mark as GC root all allocated nodes. */
137 static GTY(()) struct cgraph_function_version_info *
138 version_info_node = NULL;
139
140 /* Return true if NODE's address can be compared. */
141
142 bool
143 symtab_node::address_can_be_compared_p ()
144 {
145 /* Address of virtual tables and functions is never compared. */
146 if (DECL_VIRTUAL_P (decl))
147 return false;
148 /* Address of C++ cdtors is never compared. */
149 if (is_a <cgraph_node *> (this)
150 && (DECL_CXX_CONSTRUCTOR_P (decl)
151 || DECL_CXX_DESTRUCTOR_P (decl)))
152 return false;
153 /* Constant pool symbols addresses are never compared.
154 flag_merge_constants permits us to assume the same on readonly vars. */
155 if (is_a <varpool_node *> (this)
156 && (DECL_IN_CONSTANT_POOL (decl)
157 || (flag_merge_constants >= 2
158 && TREE_READONLY (decl) && !TREE_THIS_VOLATILE (decl))))
159 return false;
160 return true;
161 }
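/* Illustrative sketch (hypothetical example, not taken from this file): with
   -fmerge-all-constants (flag_merge_constants >= 2) the two read-only
   variables below may be given a single address, so comparing their addresses
   is not meaningful and address_can_be_compared_p returns false for them:

     static const int a = 42;
     static const int b = 42;
     int same_p (void) { return &a == &b; }   // may evaluate to 1
*/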
162
163 /* Get the cgraph_function_version_info node corresponding to node. */
164 cgraph_function_version_info *
165 cgraph_node::function_version (void)
166 {
167 cgraph_function_version_info key;
168 key.this_node = this;
169
170 if (cgraph_fnver_htab == NULL)
171 return NULL;
172
173 return cgraph_fnver_htab->find (&key);
174 }
175
176 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
177 corresponding to cgraph_node NODE. */
178 cgraph_function_version_info *
179 cgraph_node::insert_new_function_version (void)
180 {
181 version_info_node = NULL;
182 version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
183 version_info_node->this_node = this;
184
185 if (cgraph_fnver_htab == NULL)
186 cgraph_fnver_htab = hash_table<function_version_hasher>::create_ggc (2);
187
188 *cgraph_fnver_htab->find_slot (version_info_node, INSERT)
189 = version_info_node;
190 return version_info_node;
191 }
192
193 /* Remove the cgraph_function_version_info node given by DECL_V. */
194 static void
195 delete_function_version (cgraph_function_version_info *decl_v)
196 {
197 if (decl_v == NULL)
198 return;
199
200 if (version_info_node == decl_v)
201 version_info_node = NULL;
202
203 if (decl_v->prev != NULL)
204 decl_v->prev->next = decl_v->next;
205
206 if (decl_v->next != NULL)
207 decl_v->next->prev = decl_v->prev;
208
209 if (cgraph_fnver_htab != NULL)
210 cgraph_fnver_htab->remove_elt (decl_v);
211 }
212
213 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
214 DECL is a duplicate declaration. */
215 void
216 cgraph_node::delete_function_version_by_decl (tree decl)
217 {
218 cgraph_node *decl_node = cgraph_node::get (decl);
219
220 if (decl_node == NULL)
221 return;
222
223 delete_function_version (decl_node->function_version ());
224
225 decl_node->remove ();
226 }
227
228 /* Record that DECL1 and DECL2 are semantically identical function
229 versions. */
230 void
231 cgraph_node::record_function_versions (tree decl1, tree decl2)
232 {
233 cgraph_node *decl1_node = cgraph_node::get_create (decl1);
234 cgraph_node *decl2_node = cgraph_node::get_create (decl2);
235 cgraph_function_version_info *decl1_v = NULL;
236 cgraph_function_version_info *decl2_v = NULL;
237 cgraph_function_version_info *before;
238 cgraph_function_version_info *after;
239
240 gcc_assert (decl1_node != NULL && decl2_node != NULL);
241 decl1_v = decl1_node->function_version ();
242 decl2_v = decl2_node->function_version ();
243
244 if (decl1_v != NULL && decl2_v != NULL)
245 return;
246
247 if (decl1_v == NULL)
248 decl1_v = decl1_node->insert_new_function_version ();
249
250 if (decl2_v == NULL)
251 decl2_v = decl2_node->insert_new_function_version ();
252
253 /* Chain decl2_v and decl1_v. All semantically identical versions
254 will be chained together. */
255
256 before = decl1_v;
257 after = decl2_v;
258
259 while (before->next != NULL)
260 before = before->next;
261
262 while (after->prev != NULL)
263 after = after->prev;
264
265 before->next = after;
266 after->prev = before;
267 }
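/* Illustrative sketch: roughly how a front end handling function
   multiversioning might use the routine above; the decls are hypothetical.

     tree default_decl = ...;   // int foo (void) -- default version
     tree avx2_decl = ...;      // int foo.avx2 (void) -- specialized version
     cgraph_node::record_function_versions (default_decl, avx2_decl);

   Afterwards the cgraph_function_version_info of both decls sit on one
   prev/next chain, reachable via cgraph_node::function_version (). */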
268
269 /* Initialize callgraph dump file. */
270
271 void
272 symbol_table::initialize (void)
273 {
274 if (!dump_file)
275 dump_file = dump_begin (TDI_cgraph, NULL);
276
277 if (!ipa_clones_dump_file)
278 ipa_clones_dump_file = dump_begin (TDI_clones, NULL);
279 }
280
281 /* Allocate a new callgraph node and insert it into basic data structures. */
282
283 cgraph_node *
284 symbol_table::create_empty (void)
285 {
286 cgraph_count++;
287 return new (ggc_alloc<cgraph_node> ()) cgraph_node (cgraph_max_uid++);
288 }
289
290 /* Register HOOK to be called with DATA on each removed edge. */
291 cgraph_edge_hook_list *
292 symbol_table::add_edge_removal_hook (cgraph_edge_hook hook, void *data)
293 {
294 cgraph_edge_hook_list *entry;
295 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
296
297 entry = (cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
298 entry->hook = hook;
299 entry->data = data;
300 entry->next = NULL;
301 while (*ptr)
302 ptr = &(*ptr)->next;
303 *ptr = entry;
304 return entry;
305 }
306
307 /* Remove ENTRY from the list of hooks called on removing edges. */
308 void
309 symbol_table::remove_edge_removal_hook (cgraph_edge_hook_list *entry)
310 {
311 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
312
313 while (*ptr != entry)
314 ptr = &(*ptr)->next;
315 *ptr = entry->next;
316 free (entry);
317 }
318
319 /* Call all edge removal hooks. */
320 void
321 symbol_table::call_edge_removal_hooks (cgraph_edge *e)
322 {
323 cgraph_edge_hook_list *entry = m_first_edge_removal_hook;
324 while (entry)
325 {
326 entry->hook (e, entry->data);
327 entry = entry->next;
328 }
329 }
330
331 /* Register HOOK to be called with DATA on each removed node. */
332 cgraph_node_hook_list *
333 symbol_table::add_cgraph_removal_hook (cgraph_node_hook hook, void *data)
334 {
335 cgraph_node_hook_list *entry;
336 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
337
338 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
339 entry->hook = hook;
340 entry->data = data;
341 entry->next = NULL;
342 while (*ptr)
343 ptr = &(*ptr)->next;
344 *ptr = entry;
345 return entry;
346 }
347
348 /* Remove ENTRY from the list of hooks called on removing nodes. */
349 void
350 symbol_table::remove_cgraph_removal_hook (cgraph_node_hook_list *entry)
351 {
352 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
353
354 while (*ptr != entry)
355 ptr = &(*ptr)->next;
356 *ptr = entry->next;
357 free (entry);
358 }
359
360 /* Call all node removal hooks. */
361 void
362 symbol_table::call_cgraph_removal_hooks (cgraph_node *node)
363 {
364 cgraph_node_hook_list *entry = m_first_cgraph_removal_hook;
365 while (entry)
366 {
367 entry->hook (node, entry->data);
368 entry = entry->next;
369 }
370 }
371
372 /* Call all node insertion hooks. */
373 void
374 symbol_table::call_cgraph_insertion_hooks (cgraph_node *node)
375 {
376 cgraph_node_hook_list *entry = m_first_cgraph_insertion_hook;
377 while (entry)
378 {
379 entry->hook (node, entry->data);
380 entry = entry->next;
381 }
382 }
383
384
385 /* Register HOOK to be called with DATA on each inserted node. */
386 cgraph_node_hook_list *
387 symbol_table::add_cgraph_insertion_hook (cgraph_node_hook hook, void *data)
388 {
389 cgraph_node_hook_list *entry;
390 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
391
392 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
393 entry->hook = hook;
394 entry->data = data;
395 entry->next = NULL;
396 while (*ptr)
397 ptr = &(*ptr)->next;
398 *ptr = entry;
399 return entry;
400 }
401
402 /* Remove ENTRY from the list of hooks called on inserted nodes. */
403 void
404 symbol_table::remove_cgraph_insertion_hook (cgraph_node_hook_list *entry)
405 {
406 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
407
408 while (*ptr != entry)
409 ptr = &(*ptr)->next;
410 *ptr = entry->next;
411 free (entry);
412 }
413
414 /* Register HOOK to be called with DATA on each duplicated edge. */
415 cgraph_2edge_hook_list *
416 symbol_table::add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
417 {
418 cgraph_2edge_hook_list *entry;
419 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
420
421 entry = (cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
422 entry->hook = hook;
423 entry->data = data;
424 entry->next = NULL;
425 while (*ptr)
426 ptr = &(*ptr)->next;
427 *ptr = entry;
428 return entry;
429 }
430
431 /* Remove ENTRY from the list of hooks called on duplicating edges. */
432 void
433 symbol_table::remove_edge_duplication_hook (cgraph_2edge_hook_list *entry)
434 {
435 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
436
437 while (*ptr != entry)
438 ptr = &(*ptr)->next;
439 *ptr = entry->next;
440 free (entry);
441 }
442
443 /* Call all edge duplication hooks. */
444 void
445 symbol_table::call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2)
446 {
447 cgraph_2edge_hook_list *entry = m_first_edge_duplicated_hook;
448 while (entry)
449 {
450 entry->hook (cs1, cs2, entry->data);
451 entry = entry->next;
452 }
453 }
454
455 /* Register HOOK to be called with DATA on each duplicated node. */
456 cgraph_2node_hook_list *
457 symbol_table::add_cgraph_duplication_hook (cgraph_2node_hook hook, void *data)
458 {
459 cgraph_2node_hook_list *entry;
460 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
461
462 entry = (cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
463 entry->hook = hook;
464 entry->data = data;
465 entry->next = NULL;
466 while (*ptr)
467 ptr = &(*ptr)->next;
468 *ptr = entry;
469 return entry;
470 }
471
472 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
473 void
474 symbol_table::remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry)
475 {
476 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
477
478 while (*ptr != entry)
479 ptr = &(*ptr)->next;
480 *ptr = entry->next;
481 free (entry);
482 }
483
484 /* Call all node duplication hooks. */
485 void
486 symbol_table::call_cgraph_duplication_hooks (cgraph_node *node,
487 cgraph_node *node2)
488 {
489 cgraph_2node_hook_list *entry = m_first_cgraph_duplicated_hook;
490 while (entry)
491 {
492 entry->hook (node, node2, entry->data);
493 entry = entry->next;
494 }
495 }
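/* Illustrative sketch: roughly how an IPA pass might keep its per-node data
   in sync using the hook machinery above.  The callback and holder names
   are hypothetical.

     static void
     my_pass_node_removal_hook (cgraph_node *node, void *data)
     {
       // Drop any summary my pass keeps for NODE.
     }

     static cgraph_node_hook_list *my_pass_removal_holder;

     void
     my_pass_register_hooks (void)
     {
       my_pass_removal_holder
	 = symtab->add_cgraph_removal_hook (my_pass_node_removal_hook, NULL);
     }

     void
     my_pass_unregister_hooks (void)
     {
       symtab->remove_cgraph_removal_hook (my_pass_removal_holder);
     }
*/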
496
497 /* Return cgraph node assigned to DECL. Create new one when needed. */
498
499 cgraph_node *
500 cgraph_node::create (tree decl)
501 {
502 cgraph_node *node = symtab->create_empty ();
503 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
504
505 node->decl = decl;
506
507 if ((flag_openacc || flag_openmp)
508 && lookup_attribute ("omp declare target", DECL_ATTRIBUTES (decl)))
509 {
510 node->offloadable = 1;
511 if (ENABLE_OFFLOADING)
512 g->have_offload = true;
513 }
514
515 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
516 node->ifunc_resolver = true;
517
518 node->register_symbol ();
519
520 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
521 {
522 node->origin = cgraph_node::get_create (DECL_CONTEXT (decl));
523 node->next_nested = node->origin->nested;
524 node->origin->nested = node;
525 }
526 return node;
527 }
528
529 /* Try to find a call graph node for declaration DECL and if it does not exist
530 or if it corresponds to an inline clone, create a new one. */
531
532 cgraph_node *
533 cgraph_node::get_create (tree decl)
534 {
535 cgraph_node *first_clone = cgraph_node::get (decl);
536
537 if (first_clone && !first_clone->inlined_to)
538 return first_clone;
539
540 cgraph_node *node = cgraph_node::create (decl);
541 if (first_clone)
542 {
543 first_clone->clone_of = node;
544 node->clones = first_clone;
545 node->order = first_clone->order;
546 symtab->symtab_prevail_in_asm_name_hash (node);
547 node->decl->decl_with_vis.symtab_node = node;
548 if (dump_file)
549 fprintf (dump_file, "Introduced new external node "
550 "(%s) and turned into root of the clone tree.\n",
551 node->dump_name ());
552 }
553 else if (dump_file)
554 fprintf (dump_file, "Introduced new external node "
555 "(%s).\n", node->dump_name ());
556 return node;
557 }
558
559 /* Mark ALIAS as an alias to TARGET, which may be either a FUNCTION_DECL
560 or an assembler name (IDENTIFIER_NODE). Return the cgraph node
561 created for ALIAS. */
562
563 cgraph_node *
564 cgraph_node::create_alias (tree alias, tree target)
565 {
566 cgraph_node *alias_node;
567
568 gcc_assert (TREE_CODE (target) == FUNCTION_DECL
569 || TREE_CODE (target) == IDENTIFIER_NODE);
570 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
571 alias_node = cgraph_node::get_create (alias);
572 gcc_assert (!alias_node->definition);
573 alias_node->alias_target = target;
574 alias_node->definition = true;
575 alias_node->alias = true;
576 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
577 alias_node->transparent_alias = alias_node->weakref = true;
578 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (alias)))
579 alias_node->ifunc_resolver = true;
580 return alias_node;
581 }
582
583 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
584 and NULL otherwise.
585 Same body aliases are output whenever the body of DECL is output,
586 and cgraph_node::get (ALIAS) transparently returns
587 cgraph_node::get (DECL). */
588
589 cgraph_node *
590 cgraph_node::create_same_body_alias (tree alias, tree decl)
591 {
592 cgraph_node *n;
593
594 /* If aliases aren't supported by the assembler, fail. */
595 if (!TARGET_SUPPORTS_ALIASES)
596 return NULL;
597
598 /* Langhooks can create same body aliases of symbols not defined.
599 Those are useless. Drop them on the floor. */
600 if (symtab->global_info_ready)
601 return NULL;
602
603 n = cgraph_node::create_alias (alias, decl);
604 n->cpp_implicit_alias = true;
605 if (symtab->cpp_implicit_aliases_done)
606 n->resolve_alias (cgraph_node::get (decl));
607 return n;
608 }
609
610 /* Add a thunk alias into the callgraph. The alias declaration is ALIAS and it
611 aliases DECL with adjustments made to the first parameter.
612 See the comments in struct cgraph_thunk_info for details on the parameters. */
613
614 cgraph_node *
615 cgraph_node::create_thunk (tree alias, tree, bool this_adjusting,
616 HOST_WIDE_INT fixed_offset,
617 HOST_WIDE_INT virtual_value,
618 HOST_WIDE_INT indirect_offset,
619 tree virtual_offset,
620 tree real_alias)
621 {
622 cgraph_node *node;
623
624 node = cgraph_node::get (alias);
625 if (node)
626 node->reset ();
627 else
628 node = cgraph_node::create (alias);
629
630 /* Make sure that VIRTUAL_OFFSET is in sync with VIRTUAL_VALUE. */
631 gcc_checking_assert (virtual_offset
632 ? virtual_value == wi::to_wide (virtual_offset)
633 : virtual_value == 0);
634
635 node->thunk.fixed_offset = fixed_offset;
636 node->thunk.virtual_value = virtual_value;
637 node->thunk.indirect_offset = indirect_offset;
638 node->thunk.alias = real_alias;
639 node->thunk.this_adjusting = this_adjusting;
640 node->thunk.virtual_offset_p = virtual_offset != NULL;
641 node->thunk.thunk_p = true;
642 node->definition = true;
643
644 return node;
645 }
646
647 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
648 Return NULL if there's no such node. */
649
650 cgraph_node *
651 cgraph_node::get_for_asmname (tree asmname)
652 {
653 /* We do not want to look at inline clones. */
654 for (symtab_node *node = symtab_node::get_for_asmname (asmname);
655 node;
656 node = node->next_sharing_asm_name)
657 {
658 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
659 if (cn && !cn->inlined_to)
660 return cn;
661 }
662 return NULL;
663 }
664
665 /* Returns a hash value for X (which really is a cgraph_edge). */
666
667 hashval_t
668 cgraph_edge_hasher::hash (cgraph_edge *e)
669 {
670 /* This is a really poor hash function, but it is what htab_hash_pointer
671 uses. */
672 return (hashval_t) ((intptr_t)e->call_stmt >> 3);
673 }
674
675 /* Returns a hash value for X (which really is a cgraph_edge). */
676
677 hashval_t
678 cgraph_edge_hasher::hash (gimple *call_stmt)
679 {
680 /* This is a really poor hash function, but it is what htab_hash_pointer
681 uses. */
682 return (hashval_t) ((intptr_t)call_stmt >> 3);
683 }
684
685 /* Return nonzero if the call_stmt of cgraph_edge X is stmt *Y. */
686
687 inline bool
688 cgraph_edge_hasher::equal (cgraph_edge *x, gimple *y)
689 {
690 return x->call_stmt == y;
691 }
692
693 /* Add call graph edge E to call site hash of its caller. */
694
695 static inline void
696 cgraph_update_edge_in_call_site_hash (cgraph_edge *e)
697 {
698 gimple *call = e->call_stmt;
699 *e->caller->call_site_hash->find_slot_with_hash
700 (call, cgraph_edge_hasher::hash (call), INSERT) = e;
701 }
702
703 /* Add call graph edge E to call site hash of its caller. */
704
705 static inline void
706 cgraph_add_edge_to_call_site_hash (cgraph_edge *e)
707 {
708 /* There are two speculative edges for every statement (one direct,
709 one indirect); always hash the direct one. */
710 if (e->speculative && e->indirect_unknown_callee)
711 return;
712 cgraph_edge **slot = e->caller->call_site_hash->find_slot_with_hash
713 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt), INSERT);
714 if (*slot)
715 {
716 gcc_assert (((cgraph_edge *)*slot)->speculative);
717 if (e->callee && (!e->prev_callee
718 || !e->prev_callee->speculative
719 || e->prev_callee->call_stmt != e->call_stmt))
720 *slot = e;
721 return;
722 }
723 gcc_assert (!*slot || e->speculative);
724 *slot = e;
725 }
726
727 /* Return the callgraph edge representing the GIMPLE_CALL statement
728 CALL_STMT. */
729
730 cgraph_edge *
731 cgraph_node::get_edge (gimple *call_stmt)
732 {
733 cgraph_edge *e, *e2;
734 int n = 0;
735
736 if (call_site_hash)
737 return call_site_hash->find_with_hash
738 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
739
740 /* This loop may turn out to be a performance problem. In that case, adding
741 hash tables into call nodes with very many edges is probably the best
742 solution. It is not a good idea to add a pointer into the CALL_EXPR itself
743 because we want to make it possible to have multiple cgraph nodes representing
744 different clones of the same body before the body is actually cloned. */
745 for (e = callees; e; e = e->next_callee)
746 {
747 if (e->call_stmt == call_stmt)
748 break;
749 n++;
750 }
751
752 if (!e)
753 for (e = indirect_calls; e; e = e->next_callee)
754 {
755 if (e->call_stmt == call_stmt)
756 break;
757 n++;
758 }
759
760 if (n > 100)
761 {
762 call_site_hash = hash_table<cgraph_edge_hasher>::create_ggc (120);
763 for (e2 = callees; e2; e2 = e2->next_callee)
764 cgraph_add_edge_to_call_site_hash (e2);
765 for (e2 = indirect_calls; e2; e2 = e2->next_callee)
766 cgraph_add_edge_to_call_site_hash (e2);
767 }
768
769 return e;
770 }
771
772
773 /* Change field call_stmt of edge E to NEW_STMT. If UPDATE_SPECULATIVE and E
774 is any component of a speculative edge, then update all components.
775 Speculations can be resolved in the process and EDGE can be removed and
776 deallocated. Return the edge that now represents the call. */
777
778 cgraph_edge *
779 cgraph_edge::set_call_stmt (cgraph_edge *e, gcall *new_stmt,
780 bool update_speculative)
781 {
782 tree decl;
783
784 /* Speculative edges have three components; update all of them
785 when asked to. */
786 if (update_speculative && e->speculative)
787 {
788 cgraph_edge *direct, *indirect, *next;
789 ipa_ref *ref;
790 bool e_indirect = e->indirect_unknown_callee;
791 int n = 0;
792
793 direct = e->first_speculative_call_target ();
794 indirect = e->speculative_call_indirect_edge ();
795
796 gcall *old_stmt = direct->call_stmt;
797 for (cgraph_edge *d = direct; d; d = next)
798 {
799 next = d->next_speculative_call_target ();
800 cgraph_edge *d2 = set_call_stmt (d, new_stmt, false);
801 gcc_assert (d2 == d);
802 n++;
803 }
804 gcc_checking_assert (indirect->num_speculative_call_targets_p () == n);
805 for (unsigned int i = 0; e->caller->iterate_reference (i, ref); i++)
806 if (ref->speculative && ref->stmt == old_stmt)
807 {
808 ref->stmt = new_stmt;
809 n--;
810 }
811
812 indirect = set_call_stmt (indirect, new_stmt, false);
813 return e_indirect ? indirect : direct;
814 }
815
816 /* Only direct speculative edges go to call_site_hash. */
817 if (e->caller->call_site_hash
818 && (!e->speculative || !e->indirect_unknown_callee)
819 /* It is possible that the edge was previously speculative. In that case
820 the call stmt hash contains a different value which needs to be preserved. */
821 && e->caller->get_edge (e->call_stmt) == e)
822 e->caller->call_site_hash->remove_elt_with_hash
823 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt));
824
825 e->call_stmt = new_stmt;
826 if (e->indirect_unknown_callee
827 && (decl = gimple_call_fndecl (new_stmt)))
828 {
829 /* Constant propagation (and possibly also inlining?) can turn an
830 indirect call into a direct one. */
831 cgraph_node *new_callee = cgraph_node::get (decl);
832
833 gcc_checking_assert (new_callee);
834 e = make_direct (e, new_callee);
835 }
836
837 function *fun = DECL_STRUCT_FUNCTION (e->caller->decl);
838 e->can_throw_external = stmt_can_throw_external (fun, new_stmt);
839 /* Update the call site hash. For speculative calls we only record the first
840 direct edge. */
841 if (e->caller->call_site_hash
842 && (!e->speculative
843 || (e->callee
844 && (!e->prev_callee || !e->prev_callee->speculative
845 || e->prev_callee->call_stmt != e->call_stmt))
846 || (e->speculative && !e->callee)))
847 cgraph_add_edge_to_call_site_hash (e);
848 return e;
849 }
850
851 /* Allocate a cgraph_edge structure and fill it with data according to the
852 parameters of which only CALLEE can be NULL (when creating an indirect call
853 edge). CLONING_P should be set if properties that are copied from an
854 original edge should not be calculated. */
855
856 cgraph_edge *
857 symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
858 gcall *call_stmt, profile_count count,
859 bool indir_unknown_callee, bool cloning_p)
860 {
861 cgraph_edge *edge;
862
863 /* LTO does not actually have access to the call_stmt since these
864 have not been loaded yet. */
865 if (call_stmt)
866 {
867 /* This is a rather expensive check possibly triggering
868 construction of call stmt hashtable. */
869 cgraph_edge *e;
870 gcc_checking_assert (!(e = caller->get_edge (call_stmt))
871 || e->speculative);
872
873 gcc_assert (is_gimple_call (call_stmt));
874 }
875
876 edge = ggc_alloc<cgraph_edge> ();
877 edge->m_summary_id = -1;
878 edges_count++;
879
880 gcc_assert (++edges_max_uid != 0);
881 edge->m_uid = edges_max_uid;
882 edge->aux = NULL;
883 edge->caller = caller;
884 edge->callee = callee;
885 edge->prev_caller = NULL;
886 edge->next_caller = NULL;
887 edge->prev_callee = NULL;
888 edge->next_callee = NULL;
889 edge->lto_stmt_uid = 0;
890 edge->speculative_id = 0;
891
892 edge->count = count;
893 edge->call_stmt = call_stmt;
894 edge->indirect_info = NULL;
895 edge->indirect_inlining_edge = 0;
896 edge->speculative = false;
897 edge->indirect_unknown_callee = indir_unknown_callee;
898 if (call_stmt && caller->call_site_hash)
899 cgraph_add_edge_to_call_site_hash (edge);
900
901 if (cloning_p)
902 return edge;
903
904 edge->can_throw_external
905 = call_stmt ? stmt_can_throw_external (DECL_STRUCT_FUNCTION (caller->decl),
906 call_stmt) : false;
907 edge->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
908 edge->call_stmt_cannot_inline_p = false;
909
910 if (opt_for_fn (edge->caller->decl, flag_devirtualize)
911 && call_stmt && DECL_STRUCT_FUNCTION (caller->decl))
912 edge->in_polymorphic_cdtor
913 = decl_maybe_in_construction_p (NULL, NULL, call_stmt,
914 caller->decl);
915 else
916 edge->in_polymorphic_cdtor = caller->thunk.thunk_p;
917
918 if (callee && symtab->state != LTO_STREAMING
919 && edge->callee->comdat_local_p ())
920 edge->caller->calls_comdat_local = true;
921
922 return edge;
923 }
924
925 /* Create edge from a given function to CALLEE in the cgraph. CLONING_P should
926 be set if properties that are copied from an original edge should not be
927 calculated. */
928
929 cgraph_edge *
930 cgraph_node::create_edge (cgraph_node *callee,
931 gcall *call_stmt, profile_count count, bool cloning_p)
932 {
933 cgraph_edge *edge = symtab->create_edge (this, callee, call_stmt, count,
934 false, cloning_p);
935
936 if (!cloning_p)
937 initialize_inline_failed (edge);
938
939 edge->next_caller = callee->callers;
940 if (callee->callers)
941 callee->callers->prev_caller = edge;
942 edge->next_callee = callees;
943 if (callees)
944 callees->prev_callee = edge;
945 callees = edge;
946 callee->callers = edge;
947
948 return edge;
949 }
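/* Illustrative sketch: roughly how a caller builds a direct call edge when a
   new call statement appears; this mirrors what
   cgraph_update_edges_for_call_stmt_node does later in this file.  NODE,
   FNDECL and CALL_STMT are hypothetical locals.

     cgraph_edge *e
       = node->create_edge (cgraph_node::get_create (fndecl),
			    call_stmt, gimple_bb (call_stmt)->count);
*/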
950
951 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
952
953 cgraph_indirect_call_info *
954 cgraph_allocate_init_indirect_info (void)
955 {
956 cgraph_indirect_call_info *ii;
957
958 ii = ggc_cleared_alloc<cgraph_indirect_call_info> ();
959 ii->param_index = -1;
960 return ii;
961 }
962
963 /* Create an indirect edge with a yet-undetermined callee where the call
964 statement destination is a formal parameter of the caller with index
965 PARAM_INDEX. CLONING_P should be set if properties that are copied from an
966 original edge should not be calculated and indirect_info structure should
967 not be calculated. */
968
969 cgraph_edge *
970 cgraph_node::create_indirect_edge (gcall *call_stmt, int ecf_flags,
971 profile_count count,
972 bool cloning_p)
973 {
974 cgraph_edge *edge = symtab->create_edge (this, NULL, call_stmt, count, true,
975 cloning_p);
976 tree target;
977
978 if (!cloning_p)
979 initialize_inline_failed (edge);
980
981 edge->indirect_info = cgraph_allocate_init_indirect_info ();
982 edge->indirect_info->ecf_flags = ecf_flags;
983 edge->indirect_info->vptr_changed = true;
984
985 /* Record polymorphic call info. */
986 if (!cloning_p
987 && call_stmt
988 && (target = gimple_call_fn (call_stmt))
989 && virtual_method_call_p (target))
990 {
991 ipa_polymorphic_call_context context (decl, target, call_stmt);
992
993 /* Only record types can have virtual calls. */
994 edge->indirect_info->polymorphic = true;
995 edge->indirect_info->param_index = -1;
996 edge->indirect_info->otr_token
997 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
998 edge->indirect_info->otr_type = obj_type_ref_class (target);
999 gcc_assert (TREE_CODE (edge->indirect_info->otr_type) == RECORD_TYPE);
1000 edge->indirect_info->context = context;
1001 }
1002
1003 edge->next_callee = indirect_calls;
1004 if (indirect_calls)
1005 indirect_calls->prev_callee = edge;
1006 indirect_calls = edge;
1007
1008 return edge;
1009 }
1010
1011 /* Remove the edge from the list of the callees of the caller. */
1012
1013 void
1014 cgraph_edge::remove_caller (void)
1015 {
1016 if (prev_callee)
1017 prev_callee->next_callee = next_callee;
1018 if (next_callee)
1019 next_callee->prev_callee = prev_callee;
1020 if (!prev_callee)
1021 {
1022 if (indirect_unknown_callee)
1023 caller->indirect_calls = next_callee;
1024 else
1025 caller->callees = next_callee;
1026 }
1027 if (caller->call_site_hash
1028 && this == caller->get_edge (call_stmt))
1029 caller->call_site_hash->remove_elt_with_hash
1030 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
1031 }
1032
1033 /* Put the edge onto the free list. */
1034
1035 void
1036 symbol_table::free_edge (cgraph_edge *e)
1037 {
1038 edges_count--;
1039 if (e->m_summary_id != -1)
1040 edge_released_summary_ids.safe_push (e->m_summary_id);
1041
1042 if (e->indirect_info)
1043 ggc_free (e->indirect_info);
1044 ggc_free (e);
1045 }
1046
1047 /* Remove the edge in the cgraph. */
1048
1049 void
1050 cgraph_edge::remove (cgraph_edge *edge)
1051 {
1052 /* Call all edge removal hooks. */
1053 symtab->call_edge_removal_hooks (edge);
1054
1055 if (!edge->indirect_unknown_callee)
1056 /* Remove from callers list of the callee. */
1057 edge->remove_callee ();
1058
1059 /* Remove from callees list of the callers. */
1060 edge->remove_caller ();
1061
1062 /* Put the edge onto the free list. */
1063 symtab->free_edge (edge);
1064 }
1065
1066 /* Turn the edge into a speculative call to N2. Update
1067 the profile so that the direct call is taken DIRECT_COUNT
1068 times.
1069
1070 At clone materialization time, the indirect call E will
1071 be expanded as:
1072
1073 if (call_dest == N2)
1074 n2 ();
1075 else
1076 call call_dest
1077
1078 At this time the function just creates the direct call,
1079 the reference representing the if conditional and attaches
1080 them all to the original indirect call statement.
1081
1082 speculative_id is used to link direct calls with their corresponding
1083 IPA_REF_ADDR references when representing speculative calls.
1084
1085 Return direct edge created. */
1086
1087 cgraph_edge *
1088 cgraph_edge::make_speculative (cgraph_node *n2, profile_count direct_count,
1089 unsigned int speculative_id)
1090 {
1091 cgraph_node *n = caller;
1092 ipa_ref *ref = NULL;
1093 cgraph_edge *e2;
1094
1095 if (dump_file)
1096 fprintf (dump_file, "Indirect call -> speculative call %s => %s\n",
1097 n->dump_name (), n2->dump_name ());
1098 speculative = true;
1099 e2 = n->create_edge (n2, call_stmt, direct_count);
1100 initialize_inline_failed (e2);
1101 e2->speculative = true;
1102 if (TREE_NOTHROW (n2->decl))
1103 e2->can_throw_external = false;
1104 else
1105 e2->can_throw_external = can_throw_external;
1106 e2->lto_stmt_uid = lto_stmt_uid;
1107 e2->speculative_id = speculative_id;
1108 e2->in_polymorphic_cdtor = in_polymorphic_cdtor;
1109 indirect_info->num_speculative_call_targets++;
1110 count -= e2->count;
1111 symtab->call_edge_duplication_hooks (this, e2);
1112 ref = n->create_reference (n2, IPA_REF_ADDR, call_stmt);
1113 ref->lto_stmt_uid = lto_stmt_uid;
1114 ref->speculative_id = speculative_id;
1115 ref->speculative = speculative;
1116 n2->mark_address_taken ();
1117 return e2;
1118 }
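/* Illustrative sketch: roughly how indirect-call profiling turns an indirect
   edge into a speculative one once a likely target is known.  The variables
   are hypothetical and the 80% scale is just an example.

     cgraph_node *likely_target = ...;
     cgraph_edge *direct
       = indirect_edge->make_speculative (likely_target,
					  indirect_edge->count.apply_scale (8, 10),
					  0);
*/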
1119
1120 /* Speculative call consists of an indirect edge and one or more
1121 direct edge+ref pairs.
1122
1123 Given an edge which is part of speculative call, return the first
1124 direct call edge in the speculative call sequence. */
1125
1126 cgraph_edge *
1127 cgraph_edge::first_speculative_call_target ()
1128 {
1129 cgraph_edge *e = this;
1130
1131 gcc_checking_assert (e->speculative);
1132 if (e->callee)
1133 {
1134 while (e->prev_callee && e->prev_callee->speculative
1135 && e->prev_callee->call_stmt == e->call_stmt
1136 && e->prev_callee->lto_stmt_uid == e->lto_stmt_uid)
1137 e = e->prev_callee;
1138 return e;
1139 }
1140 /* The call site hash always points to the first target of the
1141 speculative call sequence. */
1142 if (e->call_stmt)
1143 return e->caller->get_edge (e->call_stmt);
1144 for (cgraph_edge *e2 = e->caller->callees; true; e2 = e2->next_callee)
1145 if (e2->speculative
1146 && e->call_stmt == e2->call_stmt
1147 && e->lto_stmt_uid == e2->lto_stmt_uid)
1148 return e2;
1149 }
1150
1151 /* We always maintain the first direct edge in the call site hash, if one
1152 exists. E is going to be removed. See if it is the first one and update
1153 the hash accordingly. INDIRECT is the indirect edge of the speculative call.
1154 We assume that INDIRECT->num_speculative_call_targets_p () is already
1155 updated for removal of E. */
1156 static void
1157 update_call_stmt_hash_for_removing_direct_edge (cgraph_edge *e,
1158 cgraph_edge *indirect)
1159 {
1160 if (e->caller->call_site_hash)
1161 {
1162 if (e->caller->get_edge (e->call_stmt) != e)
1163 ;
1164 else if (!indirect->num_speculative_call_targets_p ())
1165 cgraph_update_edge_in_call_site_hash (indirect);
1166 else
1167 {
1168 gcc_checking_assert (e->next_callee && e->next_callee->speculative
1169 && e->next_callee->call_stmt == e->call_stmt);
1170 cgraph_update_edge_in_call_site_hash (e->next_callee);
1171 }
1172 }
1173 }
1174
1175 /* Speculative call EDGE turned out to be direct call to CALLEE_DECL. Remove
1176 the speculative call sequence and return edge representing the call, the
1177 original EDGE can be removed and deallocated. Return the edge that now
1178 represents the call.
1179
1180 For "speculative" indirect call that contains multiple "speculative"
1181 targets (i.e. edge->indirect_info->num_speculative_call_targets > 1),
1182 decrease the count and only remove current direct edge.
1183
1184 If no speculative direct call left to the speculative indirect call, remove
1185 the speculative of both the indirect call and corresponding direct edge.
1186
1187 It is up to caller to iteratively resolve each "speculative" direct call and
1188 redirect the call as appropriate. */
1189
1190 cgraph_edge *
1191 cgraph_edge::resolve_speculation (cgraph_edge *edge, tree callee_decl)
1192 {
1193 cgraph_edge *e2;
1194 ipa_ref *ref;
1195
1196 gcc_assert (edge->speculative && (!callee_decl || edge->callee));
1197 if (!edge->callee)
1198 e2 = edge->first_speculative_call_target ();
1199 else
1200 e2 = edge;
1201 ref = e2->speculative_call_target_ref ();
1202 edge = edge->speculative_call_indirect_edge ();
1203 if (!callee_decl
1204 || !ref->referred->semantically_equivalent_p
1205 (symtab_node::get (callee_decl)))
1206 {
1207 if (dump_file)
1208 {
1209 if (callee_decl)
1210 {
1211 fprintf (dump_file, "Speculative indirect call %s => %s has "
1212 "turned out to have contradicting known target ",
1213 edge->caller->dump_name (),
1214 e2->callee->dump_name ());
1215 print_generic_expr (dump_file, callee_decl);
1216 fprintf (dump_file, "\n");
1217 }
1218 else
1219 {
1220 fprintf (dump_file, "Removing speculative call %s => %s\n",
1221 edge->caller->dump_name (),
1222 e2->callee->dump_name ());
1223 }
1224 }
1225 }
1226 else
1227 {
1228 cgraph_edge *tmp = edge;
1229 if (dump_file)
1230 fprintf (dump_file, "Speculative call turned into direct call.\n");
1231 edge = e2;
1232 e2 = tmp;
1233 /* FIXME: If EDGE is inlined, we should scale up the frequencies
1234 and counts in the functions inlined through it. */
1235 }
1236 edge->count += e2->count;
1237 if (edge->num_speculative_call_targets_p ())
1238 {
1239 /* The indirect edge has multiple speculative targets, don't remove
1240 speculative until all related direct edges are resolved. */
1241 edge->indirect_info->num_speculative_call_targets--;
1242 if (!edge->indirect_info->num_speculative_call_targets)
1243 edge->speculative = false;
1244 }
1245 else
1246 edge->speculative = false;
1247 e2->speculative = false;
1248 update_call_stmt_hash_for_removing_direct_edge (e2, edge);
1249 ref->remove_reference ();
1250 if (e2->indirect_unknown_callee || e2->inline_failed)
1251 remove (e2);
1252 else
1253 e2->callee->remove_symbol_and_inline_clones ();
1254 return edge;
1255 }
1256
1257 /* Return edge corresponding to speculative call to a given target.
1258 NULL if speculative call does not have one. */
1259
1260 cgraph_edge *
1261 cgraph_edge::speculative_call_for_target (cgraph_node *target)
1262 {
1263 for (cgraph_edge *direct = first_speculative_call_target ();
1264 direct;
1265 direct = direct->next_speculative_call_target ())
1266 if (direct->speculative_call_target_ref ()
1267 ->referred->semantically_equivalent_p (target))
1268 return direct;
1269 return NULL;
1270 }
1271
1272 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1273 CALLEE. Speculations can be resolved in the process and EDGE can be removed
1274 and deallocated. Return the edge that now represents the call. */
1275
1276 cgraph_edge *
1277 cgraph_edge::make_direct (cgraph_edge *edge, cgraph_node *callee)
1278 {
1279 gcc_assert (edge->indirect_unknown_callee);
1280
1281 /* If we are redirecting speculative call, make it non-speculative. */
1282 if (edge->speculative)
1283 {
1284 cgraph_edge *found = NULL;
1285 cgraph_edge *direct, *next;
1286
1287 edge = edge->speculative_call_indirect_edge ();
1288
1289 /* Look all speculative targets and remove all but one corresponding
1290 to callee (if it exists). */
1291 for (direct = edge->first_speculative_call_target ();
1292 direct;
1293 direct = next)
1294 {
1295 next = direct->next_speculative_call_target ();
1296
1297 /* Compare ref not direct->callee. Direct edge is possibly
1298 inlined or redirected. */
1299 if (!direct->speculative_call_target_ref ()
1300 ->referred->semantically_equivalent_p (callee))
1301 edge = direct->resolve_speculation (direct, NULL);
1302 else
1303 {
1304 gcc_checking_assert (!found);
1305 found = direct;
1306 }
1307 }
1308
1309 /* On successful speculation just remove the indirect edge and
1310 return the pre-existing direct edge.
1311 It is important to not remove it and redirect because the direct
1312 edge may be inlined or redirected. */
1313 if (found)
1314 {
1315 cgraph_edge *e2 = resolve_speculation (found, callee->decl);
1316 gcc_checking_assert (!found->speculative && e2 == found);
1317 return found;
1318 }
1319 gcc_checking_assert (!edge->speculative);
1320 }
1321
1322 edge->indirect_unknown_callee = 0;
1323 ggc_free (edge->indirect_info);
1324 edge->indirect_info = NULL;
1325
1326 /* Get the edge out of the indirect edge list. */
1327 if (edge->prev_callee)
1328 edge->prev_callee->next_callee = edge->next_callee;
1329 if (edge->next_callee)
1330 edge->next_callee->prev_callee = edge->prev_callee;
1331 if (!edge->prev_callee)
1332 edge->caller->indirect_calls = edge->next_callee;
1333
1334 /* Put it into the normal callee list */
1335 edge->prev_callee = NULL;
1336 edge->next_callee = edge->caller->callees;
1337 if (edge->caller->callees)
1338 edge->caller->callees->prev_callee = edge;
1339 edge->caller->callees = edge;
1340
1341 /* Insert to callers list of the new callee. */
1342 edge->set_callee (callee);
1343
1344 /* We need to re-determine the inlining status of the edge. */
1345 initialize_inline_failed (edge);
1346 return edge;
1347 }
1348
1349 /* Redirect callee of the edge to N. The function does not update underlying
1350 call expression. */
1351
1352 void
1353 cgraph_edge::redirect_callee (cgraph_node *n)
1354 {
1355 bool loc = callee->comdat_local_p ();
1356 /* Remove from callers list of the current callee. */
1357 remove_callee ();
1358
1359 /* Insert to callers list of the new callee. */
1360 set_callee (n);
1361
1362 if (!inline_failed)
1363 return;
1364 if (!loc && n->comdat_local_p ())
1365 {
1366 cgraph_node *to = caller->inlined_to ? caller->inlined_to : caller;
1367 to->calls_comdat_local = true;
1368 }
1369 else if (loc && !n->comdat_local_p ())
1370 {
1371 cgraph_node *to = caller->inlined_to ? caller->inlined_to : caller;
1372 gcc_checking_assert (to->calls_comdat_local);
1373 to->calls_comdat_local = to->check_calls_comdat_local_p ();
1374 }
1375 }
1376
1377 /* If necessary, change the function declaration in the call statement
1378 associated with E so that it corresponds to the edge callee. Speculations
1379 can be resolved in the process and EDGE can be removed and deallocated.
1380
1381 The edge could be one of the speculative direct calls generated from a
1382 speculative indirect call. In this circumstance, decrease the speculative
1383 targets count (i.e. num_speculative_call_targets) and redirect the call
1384 stmt to the corresponding i-th target. If no speculative direct calls are
1385 left for the speculative indirect call, remove the "speculative" flag of the
1386 indirect call and also redirect the stmt to its final direct target.
1387
1388 It is up to caller to iteratively transform each "speculative"
1389 direct call as appropriate. */
1390
1391 gimple *
1392 cgraph_edge::redirect_call_stmt_to_callee (cgraph_edge *e)
1393 {
1394 tree decl = gimple_call_fndecl (e->call_stmt);
1395 gcall *new_stmt;
1396 gimple_stmt_iterator gsi;
1397
1398 if (e->speculative)
1399 {
1400 /* If there already is a direct call (i.e. as a result of the inliner's
1401 substitution), forget about speculating. */
1402 if (decl)
1403 e = make_direct (e->speculative_call_indirect_edge (),
1404 cgraph_node::get (decl));
1405 else
1406 {
1407 /* Be sure we redirect all speculative targets before poking
1408 about the indirect edge. */
1409 gcc_checking_assert (e->callee);
1410 cgraph_edge *indirect = e->speculative_call_indirect_edge ();
1411 gcall *new_stmt;
1412 ipa_ref *ref;
1413
1414 /* Expand speculation into GIMPLE code. */
1415 if (dump_file)
1416 {
1417 fprintf (dump_file,
1418 "Expanding speculative call of %s -> %s count: ",
1419 e->caller->dump_name (),
1420 e->callee->dump_name ());
1421 e->count.dump (dump_file);
1422 fprintf (dump_file, "\n");
1423 }
1424 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
1425
1426 profile_count all = indirect->count;
1427 for (cgraph_edge *e2 = e->first_speculative_call_target ();
1428 e2;
1429 e2 = e2->next_speculative_call_target ())
1430 all = all + e2->count;
1431 profile_probability prob = e->count.probability_in (all);
1432 if (!prob.initialized_p ())
1433 prob = profile_probability::even ();
1434 ref = e->speculative_call_target_ref ();
1435 new_stmt = gimple_ic (e->call_stmt,
1436 dyn_cast<cgraph_node *> (ref->referred),
1437 prob);
1438 e->speculative = false;
1439 if (indirect->num_speculative_call_targets_p ())
1440 {
1441 /* The indirect edge has multiple speculative targets, don't
1442 remove speculative until all related direct edges are
1443 redirected. */
1444 indirect->indirect_info->num_speculative_call_targets--;
1445 if (!indirect->indirect_info->num_speculative_call_targets)
1446 indirect->speculative = false;
1447 }
1448 else
1449 indirect->speculative = false;
1450 /* The direct and indirect edges are never both in the call site hash;
1451 get it updated. */
1452 update_call_stmt_hash_for_removing_direct_edge (e, indirect);
1453 cgraph_edge::set_call_stmt (e, new_stmt, false);
1454 e->count = gimple_bb (e->call_stmt)->count;
1455
1456 /* Once we are done with expanding the sequence, also update the indirect
1457 call probability. Until then the basic block accounts for the
1458 sum of the indirect edge and all non-expanded speculations. */
1459 if (!indirect->speculative)
1460 indirect->count = gimple_bb (indirect->call_stmt)->count;
1461 ref->speculative = false;
1462 ref->stmt = NULL;
1463 pop_cfun ();
1464 /* Continue redirecting E to proper target. */
1465 }
1466 }
1467
1468
1469 if (e->indirect_unknown_callee
1470 || decl == e->callee->decl)
1471 return e->call_stmt;
1472
1473 if (flag_checking && decl)
1474 {
1475 cgraph_node *node = cgraph_node::get (decl);
1476 gcc_assert (!node || !node->clone.param_adjustments);
1477 }
1478
1479 if (symtab->dump_file)
1480 {
1481 fprintf (symtab->dump_file, "updating call of %s -> %s: ",
1482 e->caller->dump_name (), e->callee->dump_name ());
1483 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1484 if (e->callee->clone.param_adjustments)
1485 e->callee->clone.param_adjustments->dump (symtab->dump_file);
1486 unsigned performed_len
1487 = vec_safe_length (e->caller->clone.performed_splits);
1488 if (performed_len > 0)
1489 fprintf (symtab->dump_file, "Performed splits records:\n");
1490 for (unsigned i = 0; i < performed_len; i++)
1491 {
1492 ipa_param_performed_split *sm
1493 = &(*e->caller->clone.performed_splits)[i];
1494 print_node_brief (symtab->dump_file, " dummy_decl: ", sm->dummy_decl,
1495 TDF_UID);
1496 fprintf (symtab->dump_file, ", unit_offset: %u\n", sm->unit_offset);
1497 }
1498 }
1499
1500 if (ipa_param_adjustments *padjs = e->callee->clone.param_adjustments)
1501 {
1502 /* We need to defer cleaning EH info on the new statement to
1503 fixup-cfg. We may not have dominator information at this point
1504 and thus would end up with unreachable blocks and have no way
1505 to communicate that we need to run CFG cleanup then. */
1506 int lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1507 if (lp_nr != 0)
1508 remove_stmt_from_eh_lp (e->call_stmt);
1509
1510 tree old_fntype = gimple_call_fntype (e->call_stmt);
1511 new_stmt = padjs->modify_call (e->call_stmt,
1512 e->caller->clone.performed_splits,
1513 e->callee->decl, false);
1514 cgraph_node *origin = e->callee;
1515 while (origin->clone_of)
1516 origin = origin->clone_of;
1517
1518 if ((origin->former_clone_of
1519 && old_fntype == TREE_TYPE (origin->former_clone_of))
1520 || old_fntype == TREE_TYPE (origin->decl))
1521 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1522 else
1523 {
1524 tree new_fntype = padjs->build_new_function_type (old_fntype, true);
1525 gimple_call_set_fntype (new_stmt, new_fntype);
1526 }
1527
1528 if (lp_nr != 0)
1529 add_stmt_to_eh_lp (new_stmt, lp_nr);
1530 }
1531 else
1532 {
1533 new_stmt = e->call_stmt;
1534 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1535 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1536 }
1537
1538 /* If changing the call to __cxa_pure_virtual or similar noreturn function,
1539 adjust gimple_call_fntype too. */
1540 if (gimple_call_noreturn_p (new_stmt)
1541 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (e->callee->decl)))
1542 && TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl))
1543 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl)))
1544 == void_type_node))
1545 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1546
1547 /* If the call becomes noreturn, remove the LHS if possible. */
1548 tree lhs = gimple_call_lhs (new_stmt);
1549 if (lhs
1550 && gimple_call_noreturn_p (new_stmt)
1551 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (new_stmt)))
1552 || should_remove_lhs_p (lhs)))
1553 {
1554 if (TREE_CODE (lhs) == SSA_NAME)
1555 {
1556 tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl),
1557 TREE_TYPE (lhs), NULL);
1558 var = get_or_create_ssa_default_def
1559 (DECL_STRUCT_FUNCTION (e->caller->decl), var);
1560 gimple *set_stmt = gimple_build_assign (lhs, var);
1561 gsi = gsi_for_stmt (new_stmt);
1562 gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
1563 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
1564 }
1565 gimple_call_set_lhs (new_stmt, NULL_TREE);
1566 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1567 }
1568
1569 /* If new callee has no static chain, remove it. */
1570 if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl))
1571 {
1572 gimple_call_set_chain (new_stmt, NULL);
1573 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1574 }
1575
1576 maybe_remove_unused_call_args (DECL_STRUCT_FUNCTION (e->caller->decl),
1577 new_stmt);
1578
1579 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt, false);
1580
1581 if (symtab->dump_file)
1582 {
1583 fprintf (symtab->dump_file, " updated to:");
1584 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1585 }
1586 return new_stmt;
1587 }
1588
1589 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1590 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1591 of OLD_STMT if it was previously call statement.
1592 If NEW_STMT is NULL, the call has been dropped without any
1593 replacement. */
1594
1595 static void
1596 cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
1597 gimple *old_stmt, tree old_call,
1598 gimple *new_stmt)
1599 {
1600 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1601 ? gimple_call_fndecl (new_stmt) : 0;
1602
1603 /* If we are seeing indirect calls, there is nothing to update. */
1604 if (!new_call && !old_call)
1605 return;
1606 /* See if we turned an indirect call into a direct call or folded a call to one
1607 builtin into a call to a different builtin. */
1608 if (old_call != new_call)
1609 {
1610 cgraph_edge *e = node->get_edge (old_stmt);
1611 cgraph_edge *ne = NULL;
1612 profile_count count;
1613
1614 if (e)
1615 {
1616 /* Keep calls marked as dead dead. */
1617 if (new_stmt && is_gimple_call (new_stmt) && e->callee
1618 && fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
1619 {
1620 cgraph_edge::set_call_stmt (node->get_edge (old_stmt),
1621 as_a <gcall *> (new_stmt));
1622 return;
1623 }
1624 /* See if the edge is already there and has the correct callee. It
1625 might be so because indirect inlining has already updated
1626 it. We also might have cloned and redirected the edge. */
1627 if (new_call && e->callee)
1628 {
1629 cgraph_node *callee = e->callee;
1630 while (callee)
1631 {
1632 if (callee->decl == new_call
1633 || callee->former_clone_of == new_call)
1634 {
1635 cgraph_edge::set_call_stmt (e, as_a <gcall *> (new_stmt));
1636 return;
1637 }
1638 callee = callee->clone_of;
1639 }
1640 }
1641
1642 /* Otherwise remove the edge and create a new one; we can't simply redirect
1643 since the function has changed, so the inline plan and other information
1644 attached to the edge are invalid. */
1645 count = e->count;
1646 if (e->indirect_unknown_callee || e->inline_failed)
1647 cgraph_edge::remove (e);
1648 else
1649 e->callee->remove_symbol_and_inline_clones ();
1650 }
1651 else if (new_call)
1652 {
1653 /* We are seeing a new direct call; compute profile info based on the BB. */
1654 basic_block bb = gimple_bb (new_stmt);
1655 count = bb->count;
1656 }
1657
1658 if (new_call)
1659 {
1660 ne = node->create_edge (cgraph_node::get_create (new_call),
1661 as_a <gcall *> (new_stmt), count);
1662 gcc_assert (ne->inline_failed);
1663 }
1664 }
1665 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1666 else if (old_stmt != new_stmt)
1667 cgraph_edge::set_call_stmt (node->get_edge (old_stmt),
1668 as_a <gcall *> (new_stmt));
1669 }
1670
1671 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1672 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1673 of OLD_STMT before it was updated (updating can happen in place). */
1674
1675 void
1676 cgraph_update_edges_for_call_stmt (gimple *old_stmt, tree old_decl,
1677 gimple *new_stmt)
1678 {
1679 cgraph_node *orig = cgraph_node::get (cfun->decl);
1680 cgraph_node *node;
1681
1682 gcc_checking_assert (orig);
1683 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1684 if (orig->clones)
1685 for (node = orig->clones; node != orig;)
1686 {
1687 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1688 if (node->clones)
1689 node = node->clones;
1690 else if (node->next_sibling_clone)
1691 node = node->next_sibling_clone;
1692 else
1693 {
1694 while (node != orig && !node->next_sibling_clone)
1695 node = node->clone_of;
1696 if (node != orig)
1697 node = node->next_sibling_clone;
1698 }
1699 }
1700 }
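/* Illustrative sketch: roughly how a caller that folds a call statement in
   place keeps the call graph consistent.  STMT is a hypothetical local.

     tree old_decl = is_gimple_call (stmt) ? gimple_call_fndecl (stmt) : NULL_TREE;
     // ... fold or rewrite STMT in place ...
     cgraph_update_edges_for_call_stmt (stmt, old_decl, stmt);
*/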
1701
1702
1703 /* Remove all callees from the node. */
1704
1705 void
1706 cgraph_node::remove_callees (void)
1707 {
1708 cgraph_edge *e, *f;
1709
1710 calls_comdat_local = false;
1711
1712 /* It is sufficient to remove the edges from the lists of callers of
1713 the callees. The callee list of the node can be zapped with one
1714 assignment. */
1715 for (e = callees; e; e = f)
1716 {
1717 f = e->next_callee;
1718 symtab->call_edge_removal_hooks (e);
1719 if (!e->indirect_unknown_callee)
1720 e->remove_callee ();
1721 symtab->free_edge (e);
1722 }
1723 for (e = indirect_calls; e; e = f)
1724 {
1725 f = e->next_callee;
1726 symtab->call_edge_removal_hooks (e);
1727 if (!e->indirect_unknown_callee)
1728 e->remove_callee ();
1729 symtab->free_edge (e);
1730 }
1731 indirect_calls = NULL;
1732 callees = NULL;
1733 if (call_site_hash)
1734 {
1735 call_site_hash->empty ();
1736 call_site_hash = NULL;
1737 }
1738 }
1739
1740 /* Remove all callers from the node. */
1741
1742 void
1743 cgraph_node::remove_callers (void)
1744 {
1745 cgraph_edge *e, *f;
1746
1747 /* It is sufficient to remove the edges from the lists of callees of
1748 the callers. The caller list of the node can be zapped with one
1749 assignment. */
1750 for (e = callers; e; e = f)
1751 {
1752 f = e->next_caller;
1753 symtab->call_edge_removal_hooks (e);
1754 e->remove_caller ();
1755 symtab->free_edge (e);
1756 }
1757 callers = NULL;
1758 }
1759
1760 /* Helper function for cgraph_release_function_body and free_lang_data.
1761 It releases body from function DECL without having to inspect its
1762 possibly non-existent symtab node. */
1763
1764 void
1765 release_function_body (tree decl)
1766 {
1767 function *fn = DECL_STRUCT_FUNCTION (decl);
1768 if (fn)
1769 {
1770 if (fn->cfg
1771 && loops_for_fn (fn))
1772 {
1773 fn->curr_properties &= ~PROP_loops;
1774 loop_optimizer_finalize (fn);
1775 }
1776 if (fn->gimple_df)
1777 {
1778 delete_tree_ssa (fn);
1779 fn->eh = NULL;
1780 }
1781 if (fn->cfg)
1782 {
1783 gcc_assert (!dom_info_available_p (fn, CDI_DOMINATORS));
1784 gcc_assert (!dom_info_available_p (fn, CDI_POST_DOMINATORS));
1785 delete_tree_cfg_annotations (fn);
1786 clear_edges (fn);
1787 fn->cfg = NULL;
1788 }
1789 if (fn->value_histograms)
1790 free_histograms (fn);
1791 gimple_set_body (decl, NULL);
1792 /* The struct function hangs onto a lot of data that would leak if we didn't
1793 remove all pointers to it. */
1794 ggc_free (fn);
1795 DECL_STRUCT_FUNCTION (decl) = NULL;
1796 }
1797 DECL_SAVED_TREE (decl) = NULL;
1798 }
1799
1800 /* Release memory used to represent the body of a function.
1801 Use this only for functions that are released before being translated to
1802 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1803 are freed in final.c via free_after_compilation ().
1804 KEEP_ARGUMENTS is useful only if you want to rebuild the body as a thunk. */
1805
1806 void
1807 cgraph_node::release_body (bool keep_arguments)
1808 {
1809 ipa_transforms_to_apply.release ();
1810 if (!used_as_abstract_origin && symtab->state != PARSING)
1811 {
1812 DECL_RESULT (decl) = NULL;
1813
1814 if (!keep_arguments)
1815 DECL_ARGUMENTS (decl) = NULL;
1816 }
1817 /* If the node is abstract and needed, then do not clear
1818 DECL_INITIAL of its associated function declaration because it's
1819 needed to emit debug info later. */
1820 if (!used_as_abstract_origin && DECL_INITIAL (decl))
1821 DECL_INITIAL (decl) = error_mark_node;
1822 release_function_body (decl);
1823 if (lto_file_data)
1824 {
1825 lto_free_function_in_decl_state_for_node (this);
1826 lto_file_data = NULL;
1827 }
1828 }
1829
1830 /* Remove function from symbol table. */
1831
1832 void
1833 cgraph_node::remove (void)
1834 {
1835 if (symtab->ipa_clones_dump_file && symtab->cloned_nodes.contains (this))
1836 fprintf (symtab->ipa_clones_dump_file,
1837 "Callgraph removal;%s;%d;%s;%d;%d\n", asm_name (), order,
1838 DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl),
1839 DECL_SOURCE_COLUMN (decl));
1840
1841 symtab->call_cgraph_removal_hooks (this);
1842 remove_callers ();
1843 remove_callees ();
1844 ipa_transforms_to_apply.release ();
1845 delete_function_version (function_version ());
1846
1847 /* Incremental inlining accesses removed nodes stored in the postorder
1848 list. */
1849 force_output = false;
1850 forced_by_abi = false;
1851 cgraph_node *next;
1852 for (cgraph_node *n = nested; n; n = next)
1853 {
1854 next = n->next_nested;
1855 n->origin = NULL;
1856 n->next_nested = NULL;
1857 }
1858 nested = NULL;
1859 if (origin)
1860 {
1861 cgraph_node **node2 = &origin->nested;
1862
1863 while (*node2 != this)
1864 node2 = &(*node2)->next_nested;
1865 *node2 = next_nested;
1866 }
1867 unregister ();
1868 if (prev_sibling_clone)
1869 prev_sibling_clone->next_sibling_clone = next_sibling_clone;
1870 else if (clone_of)
1871 clone_of->clones = next_sibling_clone;
1872 if (next_sibling_clone)
1873 next_sibling_clone->prev_sibling_clone = prev_sibling_clone;
1874 if (clones)
1875 {
1876 cgraph_node *n, *next;
1877
1878 if (clone_of)
1879 {
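	  /* Reparent our clones: make each of them a clone of our own
	     clone_of and splice the whole list into clone_of's clone list,
	     so the clone tree stays connected after this node is gone.  */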
1880 for (n = clones; n->next_sibling_clone; n = n->next_sibling_clone)
1881 n->clone_of = clone_of;
1882 n->clone_of = clone_of;
1883 n->next_sibling_clone = clone_of->clones;
1884 if (clone_of->clones)
1885 clone_of->clones->prev_sibling_clone = n;
1886 clone_of->clones = clones;
1887 }
1888 else
1889 {
1890 /* We are removing node with clones. This makes clones inconsistent,
1891 but assume they will be removed subsequently and just keep clone
1892 tree intact. This can happen in unreachable function removal since
1893 we remove unreachable functions in random order, not by bottom-up
1894 walk of clone trees. */
1895 for (n = clones; n; n = next)
1896 {
1897 next = n->next_sibling_clone;
1898 n->next_sibling_clone = NULL;
1899 n->prev_sibling_clone = NULL;
1900 n->clone_of = NULL;
1901 }
1902 }
1903 }
1904
1905 /* While all the clones are removed after being processed, the function
1906 itself is kept in the cgraph even after it is compiled. Check whether
1907 we are done with this body and reclaim it proactively if this is the
1908 case. */
1909 if (symtab->state != LTO_STREAMING)
1910 {
1911 cgraph_node *n = cgraph_node::get (decl);
1912 if (!n
1913 || (!n->clones && !n->clone_of && !n->inlined_to
1914 && ((symtab->global_info_ready || in_lto_p)
1915 && (TREE_ASM_WRITTEN (n->decl)
1916 || DECL_EXTERNAL (n->decl)
1917 || !n->analyzed
1918 || (!flag_wpa && n->in_other_partition)))))
1919 release_body ();
1920 }
1921 else
1922 {
1923 lto_free_function_in_decl_state_for_node (this);
1924 lto_file_data = NULL;
1925 }
1926
1927 decl = NULL;
1928 if (call_site_hash)
1929 {
1930 call_site_hash->empty ();
1931 call_site_hash = NULL;
1932 }
1933
1934 symtab->release_symbol (this);
1935 }
1936
1937 /* Likewise indicate that a node has its address taken. */
1938
1939 void
1940 cgraph_node::mark_address_taken (void)
1941 {
1942 /* Indirect inlining can figure out that all uses of the address are
1943 inlined. */
1944 if (inlined_to)
1945 {
1946 gcc_assert (cfun->after_inlining);
1947 gcc_assert (callers->indirect_inlining_edge);
1948 return;
1949 }
1950 /* FIXME: address_taken flag is used both as a shortcut for testing whether
1951 IPA_REF_ADDR reference exists (and thus it should be set on node
1952 representing alias we take address of) and as a test whether address
1953 of the object was taken (and thus it should be set on node alias is
1954 referring to). We should remove the first use and then remove the
1955 following set. */
1956 address_taken = 1;
1957 cgraph_node *node = ultimate_alias_target ();
1958 node->address_taken = 1;
1959 }
1960
1961 /* Return local info node for the compiled function. */
1962
1963 cgraph_node *
1964 cgraph_node::local_info_node (tree decl)
1965 {
1966 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1967 cgraph_node *node = get (decl);
1968 if (!node)
1969 return NULL;
1970 return node->ultimate_alias_target ();
1971 }
1972
1973 /* Return RTL info for the compiled function. */
1974
1975 cgraph_rtl_info *
1976 cgraph_node::rtl_info (const_tree decl)
1977 {
1978 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1979 cgraph_node *node = get (decl);
1980 if (!node)
1981 return NULL;
1982 enum availability avail;
1983 node = node->ultimate_alias_target (&avail);
1984 if (decl != current_function_decl
1985 && (avail < AVAIL_AVAILABLE
1986 || (node->decl != current_function_decl
1987 && !TREE_ASM_WRITTEN (node->decl))))
1988 return NULL;
1989 /* Allocate if it doesn't exist. */
1990 if (node->rtl == NULL)
1991 {
1992 node->rtl = ggc_cleared_alloc<cgraph_rtl_info> ();
1993 SET_HARD_REG_SET (node->rtl->function_used_regs);
1994 }
1995 return node->rtl;
1996 }
1997
1998 /* Return a string describing the failure REASON. */
1999
2000 const char*
2001 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
2002 {
2003 #undef DEFCIFCODE
2004 #define DEFCIFCODE(code, type, string) string,
2005
2006 static const char *cif_string_table[CIF_N_REASONS] = {
2007 #include "cif-code.def"
2008 };
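  /* The table above is built by an X-macro: each DEFCIFCODE (code, type,
     string) entry in cif-code.def expands to just its STRING here, so the
     array is indexed by the cgraph_inline_failed_t enumerators in their
     definition order.  As a purely illustrative (hypothetical) entry,
	DEFCIFCODE (EXAMPLE, CIF_FINAL_NORMAL, N_("example message"))
     would contribute its message at index CIF_EXAMPLE.  */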
2009
2010 /* Signedness of an enum type is implementation defined, so cast it
2011 to unsigned before testing. */
2012 gcc_assert ((unsigned) reason < CIF_N_REASONS);
2013 return cif_string_table[reason];
2014 }
2015
2016 /* Return a type describing the failure REASON. */
2017
2018 cgraph_inline_failed_type_t
2019 cgraph_inline_failed_type (cgraph_inline_failed_t reason)
2020 {
2021 #undef DEFCIFCODE
2022 #define DEFCIFCODE(code, type, string) type,
2023
2024 static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
2025 #include "cif-code.def"
2026 };
2027
2028 /* Signedness of an enum type is implementation defined, so cast it
2029 to unsigned before testing. */
2030 gcc_assert ((unsigned) reason < CIF_N_REASONS);
2031 return cif_type_table[reason];
2032 }
2033
2034 /* Names used to print out the availability enum. */
2035 const char * const cgraph_availability_names[] =
2036 {"unset", "not_available", "overwritable", "available", "local"};
2037
2038 /* Output flags of edge to a file F. */
2039
2040 void
2041 cgraph_edge::dump_edge_flags (FILE *f)
2042 {
2043 if (speculative)
2044 fprintf (f, "(speculative) ");
2045 if (!inline_failed)
2046 fprintf (f, "(inlined) ");
2047 if (call_stmt_cannot_inline_p)
2048 fprintf (f, "(call_stmt_cannot_inline_p) ");
2049 if (indirect_inlining_edge)
2050 fprintf (f, "(indirect_inlining) ");
2051 if (count.initialized_p ())
2052 {
2053 fprintf (f, "(");
2054 count.dump (f);
2055 fprintf (f, ",");
2056 fprintf (f, "%.2f per call) ", sreal_frequency ().to_double ());
2057 }
2058 if (can_throw_external)
2059 fprintf (f, "(can throw external) ");
2060 }
2061
2062 /* Dump call graph node to file F. */
2063
2064 void
2065 cgraph_node::dump (FILE *f)
2066 {
2067 cgraph_edge *edge;
2068
2069 dump_base (f);
2070
2071 if (inlined_to)
2072 fprintf (f, " Function %s is inline copy in %s\n",
2073 dump_name (),
2074 inlined_to->dump_name ());
2075 if (clone_of)
2076 fprintf (f, " Clone of %s\n", clone_of->dump_asm_name ());
2077 if (symtab->function_flags_ready)
2078 fprintf (f, " Availability: %s\n",
2079 cgraph_availability_names [get_availability ()]);
2080
2081 if (profile_id)
2082 fprintf (f, " Profile id: %i\n",
2083 profile_id);
2084 if (unit_id)
2085 fprintf (f, " Unit id: %i\n",
2086 unit_id);
2087 cgraph_function_version_info *vi = function_version ();
2088 if (vi != NULL)
2089 {
2090 fprintf (f, " Version info: ");
2091 if (vi->prev != NULL)
2092 {
2093 fprintf (f, "prev: ");
2094 fprintf (f, "%s ", vi->prev->this_node->dump_asm_name ());
2095 }
2096 if (vi->next != NULL)
2097 {
2098 fprintf (f, "next: ");
2099 fprintf (f, "%s ", vi->next->this_node->dump_asm_name ());
2100 }
2101 if (vi->dispatcher_resolver != NULL_TREE)
2102 fprintf (f, "dispatcher: %s",
2103 lang_hooks.decl_printable_name (vi->dispatcher_resolver, 2));
2104
2105 fprintf (f, "\n");
2106 }
2107 fprintf (f, " Function flags:");
2108 if (count.initialized_p ())
2109 {
2110 fprintf (f, " count:");
2111 count.dump (f);
2112 }
2113 if (tp_first_run > 0)
2114 fprintf (f, " first_run:%" PRId64, (int64_t) tp_first_run);
2115 if (origin)
2116 fprintf (f, " nested in:%s", origin->dump_asm_name ());
2117 if (gimple_has_body_p (decl))
2118 fprintf (f, " body");
2119 if (process)
2120 fprintf (f, " process");
2121 if (local)
2122 fprintf (f, " local");
2123 if (redefined_extern_inline)
2124 fprintf (f, " redefined_extern_inline");
2125 if (only_called_at_startup)
2126 fprintf (f, " only_called_at_startup");
2127 if (only_called_at_exit)
2128 fprintf (f, " only_called_at_exit");
2129 if (tm_clone)
2130 fprintf (f, " tm_clone");
2131 if (calls_comdat_local)
2132 fprintf (f, " calls_comdat_local");
2133 if (icf_merged)
2134 fprintf (f, " icf_merged");
2135 if (merged_comdat)
2136 fprintf (f, " merged_comdat");
2137 if (merged_extern_inline)
2138 fprintf (f, " merged_extern_inline");
2139 if (split_part)
2140 fprintf (f, " split_part");
2141 if (indirect_call_target)
2142 fprintf (f, " indirect_call_target");
2143 if (nonfreeing_fn)
2144 fprintf (f, " nonfreeing_fn");
2145 if (DECL_STATIC_CONSTRUCTOR (decl))
2146 fprintf (f," static_constructor (priority:%i)", get_init_priority ());
2147 if (DECL_STATIC_DESTRUCTOR (decl))
2148 fprintf (f," static_destructor (priority:%i)", get_fini_priority ());
2149 if (frequency == NODE_FREQUENCY_HOT)
2150 fprintf (f, " hot");
2151 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2152 fprintf (f, " unlikely_executed");
2153 if (frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2154 fprintf (f, " executed_once");
2155 if (opt_for_fn (decl, optimize_size))
2156 fprintf (f, " optimize_size");
2157 if (parallelized_function)
2158 fprintf (f, " parallelized_function");
2159 if (DECL_IS_OPERATOR_NEW_P (decl))
2160 fprintf (f, " operator_new");
2161 if (DECL_IS_OPERATOR_DELETE_P (decl))
2162 fprintf (f, " operator_delete");
2163
2164
2165 fprintf (f, "\n");
2166
2167 if (thunk.thunk_p)
2168 {
2169 fprintf (f, " Thunk");
2170 if (thunk.alias)
2171 fprintf (f, " of %s (asm:%s)",
2172 lang_hooks.decl_printable_name (thunk.alias, 2),
2173 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2174 fprintf (f, " fixed offset %i virtual value %i indirect_offset %i "
2175 "has virtual offset %i\n",
2176 (int)thunk.fixed_offset,
2177 (int)thunk.virtual_value,
2178 (int)thunk.indirect_offset,
2179 (int)thunk.virtual_offset_p);
2180 }
2181 else if (former_thunk_p ())
2182 fprintf (f, " Former thunk fixed offset %i virtual value %i "
2183 "indirect_offset %i has virtual offset %i\n",
2184 (int)thunk.fixed_offset,
2185 (int)thunk.virtual_value,
2186 (int)thunk.indirect_offset,
2187 (int)thunk.virtual_offset_p);
2188 if (alias && thunk.alias
2189 && DECL_P (thunk.alias))
2190 {
2191 fprintf (f, " Alias of %s",
2192 lang_hooks.decl_printable_name (thunk.alias, 2));
2193 if (DECL_ASSEMBLER_NAME_SET_P (thunk.alias))
2194 fprintf (f, " (asm:%s)",
2195 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2196 fprintf (f, "\n");
2197 }
2198
2199 fprintf (f, " Called by: ");
2200
2201 profile_count sum = profile_count::zero ();
2202 for (edge = callers; edge; edge = edge->next_caller)
2203 {
2204 fprintf (f, "%s ", edge->caller->dump_asm_name ());
2205 edge->dump_edge_flags (f);
2206 if (edge->count.initialized_p ())
2207 sum += edge->count.ipa ();
2208 }
2209
2210 fprintf (f, "\n Calls: ");
2211 for (edge = callees; edge; edge = edge->next_callee)
2212 {
2213 fprintf (f, "%s ", edge->callee->dump_asm_name ());
2214 edge->dump_edge_flags (f);
2215 }
2216 fprintf (f, "\n");
2217
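  /* Profile sanity check: the sum of the IPA counts of all callers (plus
     counts coming in through aliases, added just below) should match this
     node's own IPA count.  For inline copies and for functions that can
     only be called directly the match must be exact; otherwise we only
     complain when the callers account for noticeably more than the function
     itself (the "should be at most" case).  */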
2218 if (count.ipa ().initialized_p ())
2219 {
2220 bool ok = true;
2221 bool min = false;
2222 ipa_ref *ref;
2223
2224 FOR_EACH_ALIAS (this, ref)
2225 if (dyn_cast <cgraph_node *> (ref->referring)->count.initialized_p ())
2226 sum += dyn_cast <cgraph_node *> (ref->referring)->count.ipa ();
2227
2228 if (inlined_to
2229 || (symtab->state < EXPANSION
2230 && ultimate_alias_target () == this && only_called_directly_p ()))
2231 ok = !count.ipa ().differs_from_p (sum);
2232 else if (count.ipa () > profile_count::from_gcov_type (100)
2233 && count.ipa () < sum.apply_scale (99, 100))
2234 ok = false, min = true;
2235 if (!ok)
2236 {
2237 fprintf (f, " Invalid sum of caller counts ");
2238 sum.dump (f);
2239 if (min)
2240 fprintf (f, ", should be at most ");
2241 else
2242 fprintf (f, ", should be ");
2243 count.ipa ().dump (f);
2244 fprintf (f, "\n");
2245 }
2246 }
2247
2248 for (edge = indirect_calls; edge; edge = edge->next_callee)
2249 {
2250 if (edge->indirect_info->polymorphic)
2251 {
2252 fprintf (f, " Polymorphic indirect call of type ");
2253 print_generic_expr (f, edge->indirect_info->otr_type, TDF_SLIM);
2254 fprintf (f, " token:%i", (int) edge->indirect_info->otr_token);
2255 }
2256 else
2257 fprintf (f, " Indirect call");
2258 edge->dump_edge_flags (f);
2259 if (edge->indirect_info->param_index != -1)
2260 {
2261 fprintf (f, " of param:%i", edge->indirect_info->param_index);
2262 if (edge->indirect_info->agg_contents)
2263 fprintf (f, " loaded from %s %s at offset %i",
2264 edge->indirect_info->member_ptr ? "member ptr" : "aggregate",
2265 edge->indirect_info->by_ref ? "passed by reference":"",
2266 (int)edge->indirect_info->offset);
2267 if (edge->indirect_info->vptr_changed)
2268 fprintf (f, " (vptr maybe changed)");
2269 }
2270 fprintf (f, " Num speculative call targets: %i",
2271 edge->indirect_info->num_speculative_call_targets);
2272 fprintf (f, "\n");
2273 if (edge->indirect_info->polymorphic)
2274 edge->indirect_info->context.dump (f);
2275 }
2276 }
2277
2278 /* Dump call graph node to file F in graphviz format. */
2279
2280 void
2281 cgraph_node::dump_graphviz (FILE *f)
2282 {
2283 cgraph_edge *edge;
2284
2285 for (edge = callees; edge; edge = edge->next_callee)
2286 {
2287 cgraph_node *callee = edge->callee;
2288
2289 fprintf (f, "\t\"%s\" -> \"%s\"\n", dump_name (), callee->dump_name ());
2290 }
2291 }
2292
2293
2294 /* Dump call graph node NODE to stderr. */
2295
2296 DEBUG_FUNCTION void
2297 cgraph_node::debug (void)
2298 {
2299 dump (stderr);
2300 }
2301
2302 /* Dump the callgraph to file F. */
2303
2304 void
2305 cgraph_node::dump_cgraph (FILE *f)
2306 {
2307 cgraph_node *node;
2308
2309 fprintf (f, "callgraph:\n\n");
2310 FOR_EACH_FUNCTION (node)
2311 node->dump (f);
2312 }
2313
2314 /* Return true when the DECL can possibly be inlined. */
2315
2316 bool
2317 cgraph_function_possibly_inlined_p (tree decl)
2318 {
2319 if (!symtab->global_info_ready)
2320 return !DECL_UNINLINABLE (decl);
2321 return DECL_POSSIBLY_INLINED (decl);
2322 }
2323
2324 /* cgraph_node is no longer a nested function; update cgraph accordingly. */
2325 void
2326 cgraph_node::unnest (void)
2327 {
2328 gcc_assert (origin);
2329 cgraph_node **node2 = &origin->nested;
2330
2331 while (*node2 != this)
2332 node2 = &(*node2)->next_nested;
2333 *node2 = next_nested;
2334 origin = NULL;
2335 }
2336
2337 /* Return function availability. See cgraph.h for description of individual
2338 return values. */
2339 enum availability
2340 cgraph_node::get_availability (symtab_node *ref)
2341 {
2342 if (ref)
2343 {
2344 cgraph_node *cref = dyn_cast <cgraph_node *> (ref);
2345 if (cref)
2346 ref = cref->inlined_to;
2347 }
2348 enum availability avail;
2349 if (!analyzed)
2350 avail = AVAIL_NOT_AVAILABLE;
2351 else if (local)
2352 avail = AVAIL_LOCAL;
2353 else if (inlined_to)
2354 avail = AVAIL_AVAILABLE;
2355 else if (transparent_alias)
2356 ultimate_alias_target (&avail, ref);
2357 else if (ifunc_resolver
2358 || lookup_attribute ("noipa", DECL_ATTRIBUTES (decl)))
2359 avail = AVAIL_INTERPOSABLE;
2360 else if (!externally_visible)
2361 avail = AVAIL_AVAILABLE;
2362 /* If this is a reference from symbol itself and there are no aliases, we
2363 may be sure that the symbol was not interposed by something else because
2364 the symbol itself would be unreachable otherwise.
2365
2366 Also, comdat groups are always resolved as a unit. */
2367 else if ((this == ref && !has_aliases_p ())
2368 || (ref && get_comdat_group ()
2369 && get_comdat_group () == ref->get_comdat_group ()))
2370 avail = AVAIL_AVAILABLE;
2371 /* Inline functions are safe to analyze even if their symbol can
2372 be overwritten at runtime. It is not meaningful to enforce any sane
2373 behavior on replacing an inline function by a different body. */
2374 else if (DECL_DECLARED_INLINE_P (decl))
2375 avail = AVAIL_AVAILABLE;
2376
2377 /* If the function can be overwritten, return OVERWRITABLE. Take
2378 care at least of two notable extensions - the COMDAT functions
2379 used to share template instantiations in C++ (this is symmetric
2380 to the code in cp_cannot_inline_tree_fn and probably should be shared,
2381 with the inlinability hooks completely eliminated). */
2382
2383 else if (decl_replaceable_p (decl) && !DECL_EXTERNAL (decl))
2384 avail = AVAIL_INTERPOSABLE;
2385 else avail = AVAIL_AVAILABLE;
2386
2387 return avail;
2388 }
2389
2390 /* Worker for cgraph_node_can_be_local_p. */
2391 static bool
2392 cgraph_node_cannot_be_local_p_1 (cgraph_node *node, void *)
2393 {
2394 return !(!node->force_output
2395 && !node->ifunc_resolver
2396 /* Limitation of gas requires us to output targets of symver aliases
2397 as global symbols. This is binutils PR 25295. */
2398 && !node->symver
2399 && ((DECL_COMDAT (node->decl)
2400 && !node->forced_by_abi
2401 && !node->used_from_object_file_p ()
2402 && !node->same_comdat_group)
2403 || !node->externally_visible));
2404 }
2405
2406 /* Return true if cgraph_node can be made local for API change.
2407 Extern inline functions and C++ COMDAT functions can be made local
2408 at the expense of possible code size growth if the function is used in
2409 multiple compilation units. */
2410 bool
2411 cgraph_node::can_be_local_p (void)
2412 {
2413 return (!address_taken
2414 && !call_for_symbol_thunks_and_aliases (cgraph_node_cannot_be_local_p_1,
2415 NULL, true));
2416 }
2417
2418 /* Call callback on cgraph_node, thunks and aliases associated to cgraph_node.
2419 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
2420 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
2421 skipped. */
2422 bool
2423 cgraph_node::call_for_symbol_thunks_and_aliases (bool (*callback)
2424 (cgraph_node *, void *),
2425 void *data,
2426 bool include_overwritable,
2427 bool exclude_virtual_thunks)
2428 {
2429 cgraph_edge *e;
2430 ipa_ref *ref;
2431 enum availability avail = AVAIL_AVAILABLE;
2432
2433 if (include_overwritable
2434 || (avail = get_availability ()) > AVAIL_INTERPOSABLE)
2435 {
2436 if (callback (this, data))
2437 return true;
2438 }
2439 FOR_EACH_ALIAS (this, ref)
2440 {
2441 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2442 if (include_overwritable
2443 || alias->get_availability () > AVAIL_INTERPOSABLE)
2444 if (alias->call_for_symbol_thunks_and_aliases (callback, data,
2445 include_overwritable,
2446 exclude_virtual_thunks))
2447 return true;
2448 }
2449 if (avail <= AVAIL_INTERPOSABLE)
2450 return false;
2451 for (e = callers; e; e = e->next_caller)
2452 if (e->caller->thunk.thunk_p
2453 && (include_overwritable
2454 || e->caller->get_availability () > AVAIL_INTERPOSABLE)
2455 && !(exclude_virtual_thunks
2456 && e->caller->thunk.virtual_offset_p))
2457 if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
2458 include_overwritable,
2459 exclude_virtual_thunks))
2460 return true;
2461
2462 return false;
2463 }
2464
2465 /* Worker to make NODE local. */
2466
2467 bool
2468 cgraph_node::make_local (cgraph_node *node, void *)
2469 {
2470 gcc_checking_assert (node->can_be_local_p ());
2471 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2472 {
2473 node->make_decl_local ();
2474 node->set_section (NULL);
2475 node->set_comdat_group (NULL);
2476 node->externally_visible = false;
2477 node->forced_by_abi = false;
2478 node->local = true;
2479 node->set_section (NULL);
2480 node->unique_name = ((node->resolution == LDPR_PREVAILING_DEF_IRONLY
2481 || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
2482 && !flag_incremental_link);
2483 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2484 gcc_assert (node->get_availability () == AVAIL_LOCAL);
2485 }
2486 return false;
2487 }
2488
2489 /* Make cgraph node local. */
2490
2491 void
2492 cgraph_node::make_local (void)
2493 {
2494 call_for_symbol_thunks_and_aliases (cgraph_node::make_local, NULL, true);
2495 }
2496
2497 /* Worker to set nothrow flag. */
2498
2499 static void
2500 set_nothrow_flag_1 (cgraph_node *node, bool nothrow, bool non_call,
2501 bool *changed)
2502 {
2503 cgraph_edge *e;
2504
2505 if (nothrow && !TREE_NOTHROW (node->decl))
2506 {
2507 /* With non-call exceptions we can't say for sure if other function body
2508 was not possibly optimized to still throw. */
2509 if (!non_call || node->binds_to_current_def_p ())
2510 {
2511 TREE_NOTHROW (node->decl) = true;
2512 *changed = true;
2513 for (e = node->callers; e; e = e->next_caller)
2514 e->can_throw_external = false;
2515 }
2516 }
2517 else if (!nothrow && TREE_NOTHROW (node->decl))
2518 {
2519 TREE_NOTHROW (node->decl) = false;
2520 *changed = true;
2521 }
2522 ipa_ref *ref;
2523 FOR_EACH_ALIAS (node, ref)
2524 {
2525 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2526 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2527 set_nothrow_flag_1 (alias, nothrow, non_call, changed);
2528 }
2529 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2530 if (e->caller->thunk.thunk_p
2531 && (!nothrow || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2532 set_nothrow_flag_1 (e->caller, nothrow, non_call, changed);
2533 }
2534
2535 /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node,
2536 if any, to NOTHROW. Return true if any change was done. */
2537
2538 bool
2539 cgraph_node::set_nothrow_flag (bool nothrow)
2540 {
2541 bool changed = false;
2542 bool non_call = opt_for_fn (decl, flag_non_call_exceptions);
2543
2544 if (!nothrow || get_availability () > AVAIL_INTERPOSABLE)
2545 set_nothrow_flag_1 (this, nothrow, non_call, &changed);
2546 else
2547 {
2548 ipa_ref *ref;
2549
2550 FOR_EACH_ALIAS (this, ref)
2551 {
2552 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2553 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2554 set_nothrow_flag_1 (alias, nothrow, non_call, &changed);
2555 }
2556 }
2557 return changed;
2558 }
2559
2560 /* Worker to set malloc flag. */
2561 static void
2562 set_malloc_flag_1 (cgraph_node *node, bool malloc_p, bool *changed)
2563 {
2564 if (malloc_p && !DECL_IS_MALLOC (node->decl))
2565 {
2566 DECL_IS_MALLOC (node->decl) = true;
2567 *changed = true;
2568 }
2569
2570 ipa_ref *ref;
2571 FOR_EACH_ALIAS (node, ref)
2572 {
2573 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2574 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2575 set_malloc_flag_1 (alias, malloc_p, changed);
2576 }
2577
2578 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2579 if (e->caller->thunk.thunk_p
2580 && (!malloc_p || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2581 set_malloc_flag_1 (e->caller, malloc_p, changed);
2582 }
2583
2584 /* Set DECL_IS_MALLOC on NODE's decl and on NODE's aliases if any. */
2585
2586 bool
2587 cgraph_node::set_malloc_flag (bool malloc_p)
2588 {
2589 bool changed = false;
2590
2591 if (!malloc_p || get_availability () > AVAIL_INTERPOSABLE)
2592 set_malloc_flag_1 (this, malloc_p, &changed);
2593 else
2594 {
2595 ipa_ref *ref;
2596
2597 FOR_EACH_ALIAS (this, ref)
2598 {
2599 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2600 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2601 set_malloc_flag_1 (alias, malloc_p, &changed);
2602 }
2603 }
2604 return changed;
2605 }
2606
2607 /* Worker for set_const_flag. */
2608
2609 static void
2610 set_const_flag_1 (cgraph_node *node, bool set_const, bool looping,
2611 bool *changed)
2612 {
2613 /* Static constructors and destructors without a side effect can be
2614 optimized out. */
2615 if (set_const && !looping)
2616 {
2617 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2618 {
2619 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2620 *changed = true;
2621 }
2622 if (DECL_STATIC_DESTRUCTOR (node->decl))
2623 {
2624 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2625 *changed = true;
2626 }
2627 }
2628 if (!set_const)
2629 {
2630 if (TREE_READONLY (node->decl))
2631 {
2632 TREE_READONLY (node->decl) = 0;
2633 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2634 *changed = true;
2635 }
2636 }
2637 else
2638 {
2639 /* Consider function:
2640
2641 bool a(int *p)
2642 {
2643 return *p==*p;
2644 }
2645
2646 During early optimization we will turn this into:
2647
2648 bool a(int *p)
2649 {
2650 return true;
2651 }
2652
2653 Now this function may be detected as CONST, yet when interposed it
2654 may end up being just pure. We must always assume the worst
2655 scenario here. */
2656 if (TREE_READONLY (node->decl))
2657 {
2658 if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2659 {
2660 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2661 *changed = true;
2662 }
2663 }
2664 else if (node->binds_to_current_def_p ())
2665 {
2666 TREE_READONLY (node->decl) = true;
2667 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2668 DECL_PURE_P (node->decl) = false;
2669 *changed = true;
2670 }
2671 else
2672 {
2673 if (dump_file && (dump_flags & TDF_DETAILS))
2674 fprintf (dump_file, "Dropping state to PURE because function does "
2675 "not bind to current def.\n");
2676 if (!DECL_PURE_P (node->decl))
2677 {
2678 DECL_PURE_P (node->decl) = true;
2679 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2680 *changed = true;
2681 }
2682 else if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2683 {
2684 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2685 *changed = true;
2686 }
2687 }
2688 }
2689
2690 ipa_ref *ref;
2691 FOR_EACH_ALIAS (node, ref)
2692 {
2693 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2694 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2695 set_const_flag_1 (alias, set_const, looping, changed);
2696 }
2697 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2698 if (e->caller->thunk.thunk_p
2699 && (!set_const || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2700 {
2701 /* Virtual thunks access virtual offset in the vtable, so they can
2702 only be pure, never const. */
2703 if (set_const
2704 && (e->caller->thunk.virtual_offset_p
2705 || !node->binds_to_current_def_p (e->caller)))
2706 *changed |= e->caller->set_pure_flag (true, looping);
2707 else
2708 set_const_flag_1 (e->caller, set_const, looping, changed);
2709 }
2710 }
2711
2712 /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
2713 If SET_CONST is false, clear the flag.
2714
2715 When setting the flag, be careful about possible interposition and
2716 do not set the flag for functions that can be interposed; set only the
2717 pure flag for functions that may bind to a different definition.
2718
2719 Return true if any change was done. */
2720
2721 bool
2722 cgraph_node::set_const_flag (bool set_const, bool looping)
2723 {
2724 bool changed = false;
2725 if (!set_const || get_availability () > AVAIL_INTERPOSABLE)
2726 set_const_flag_1 (this, set_const, looping, &changed);
2727 else
2728 {
2729 ipa_ref *ref;
2730
2731 FOR_EACH_ALIAS (this, ref)
2732 {
2733 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2734 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2735 set_const_flag_1 (alias, set_const, looping, &changed);
2736 }
2737 }
2738 return changed;
2739 }
2740
2741 /* Info used by set_pure_flag_1. */
2742
2743 struct set_pure_flag_info
2744 {
2745 bool pure;
2746 bool looping;
2747 bool changed;
2748 };
2749
2750 /* Worker for set_pure_flag. */
2751
2752 static bool
2753 set_pure_flag_1 (cgraph_node *node, void *data)
2754 {
2755 struct set_pure_flag_info *info = (struct set_pure_flag_info *)data;
2756 /* Static constructors and destructors without a side effect can be
2757 optimized out. */
2758 if (info->pure && !info->looping)
2759 {
2760 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2761 {
2762 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2763 info->changed = true;
2764 }
2765 if (DECL_STATIC_DESTRUCTOR (node->decl))
2766 {
2767 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2768 info->changed = true;
2769 }
2770 }
2771 if (info->pure)
2772 {
2773 if (!DECL_PURE_P (node->decl) && !TREE_READONLY (node->decl))
2774 {
2775 DECL_PURE_P (node->decl) = true;
2776 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = info->looping;
2777 info->changed = true;
2778 }
2779 else if (DECL_LOOPING_CONST_OR_PURE_P (node->decl)
2780 && !info->looping)
2781 {
2782 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2783 info->changed = true;
2784 }
2785 }
2786 else
2787 {
2788 if (DECL_PURE_P (node->decl))
2789 {
2790 DECL_PURE_P (node->decl) = false;
2791 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2792 info->changed = true;
2793 }
2794 }
2795 return false;
2796 }
2797
2798 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
2799 if any to PURE.
2800
2801 When setting the flag, be careful about possible interposition.
2802 Return true if any change was done. */
2803
2804 bool
2805 cgraph_node::set_pure_flag (bool pure, bool looping)
2806 {
2807 struct set_pure_flag_info info = {pure, looping, false};
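  /* Pass !pure as INCLUDE_OVERWRITABLE: clearing the flag must visit even
     interposable symbols, while setting it is only safe on symbols whose
     definition cannot be interposed.  Virtual thunks are skipped via the
     final TRUE (EXCLUDE_VIRTUAL_THUNKS).  */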
2808 call_for_symbol_thunks_and_aliases (set_pure_flag_1, &info, !pure, true);
2809 return info.changed;
2810 }
2811
2812 /* Return true when cgraph_node cannot return or throw and thus
2813 it is safe to ignore its side effects for IPA analysis. */
2814
2815 bool
2816 cgraph_node::cannot_return_p (void)
2817 {
2818 int flags = flags_from_decl_or_type (decl);
2819 if (!opt_for_fn (decl, flag_exceptions))
2820 return (flags & ECF_NORETURN) != 0;
2821 else
2822 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2823 == (ECF_NORETURN | ECF_NOTHROW));
2824 }
2825
2826 /* Return true when a call of this edge cannot lead to return from the
2827 caller and thus it is safe to ignore its side effects for IPA analysis
2828 when computing side effects of the caller.
2829 FIXME: We could actually mark all edges that have no reaching
2830 path to the exit block or throw to get better results. */
2831 bool
2832 cgraph_edge::cannot_lead_to_return_p (void)
2833 {
2834 if (caller->cannot_return_p ())
2835 return true;
2836 if (indirect_unknown_callee)
2837 {
2838 int flags = indirect_info->ecf_flags;
2839 if (!opt_for_fn (caller->decl, flag_exceptions))
2840 return (flags & ECF_NORETURN) != 0;
2841 else
2842 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2843 == (ECF_NORETURN | ECF_NOTHROW));
2844 }
2845 else
2846 return callee->cannot_return_p ();
2847 }
2848
2849 /* Return true if the edge may be considered hot. */
2850
2851 bool
2852 cgraph_edge::maybe_hot_p (void)
2853 {
2854 if (!maybe_hot_count_p (NULL, count.ipa ()))
2855 return false;
2856 if (caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
2857 || (callee
2858 && callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
2859 return false;
2860 if (caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
2861 && (callee
2862 && callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE))
2863 return false;
2864 if (opt_for_fn (caller->decl, optimize_size))
2865 return false;
2866 if (caller->frequency == NODE_FREQUENCY_HOT)
2867 return true;
2868 if (!count.initialized_p ())
2869 return true;
2870 cgraph_node *where = caller->inlined_to ? caller->inlined_to : caller;
2871 if (!where->count.initialized_p ())
2872 return false;
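  /* Finally compare the edge count with the count of the function the call
     ultimately resides in (the inline-to root).  A call in a function
     executed once is considered hot only when it runs at least roughly 1.5
     times per invocation (2 * count >= 3 * where->count); otherwise it has
     to account for at least 1/param_hot_bb_frequency_fraction of the
     function's count.  */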
2873 if (caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2874 {
2875 if (count.apply_scale (2, 1) < where->count.apply_scale (3, 1))
2876 return false;
2877 }
2878 else if (count.apply_scale (param_hot_bb_frequency_fraction , 1)
2879 < where->count)
2880 return false;
2881 return true;
2882 }
2883
2884 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2885
2886 static bool
2887 nonremovable_p (cgraph_node *node, void *)
2888 {
2889 return !node->can_remove_if_no_direct_calls_and_refs_p ();
2890 }
2891
2892 /* Return true if whole comdat group can be removed if there are no direct
2893 calls to THIS. */
2894
2895 bool
2896 cgraph_node::can_remove_if_no_direct_calls_p (bool will_inline)
2897 {
2898 struct ipa_ref *ref;
2899
2900 /* For local symbols or symbols not in a comdat group, this is the same as
2901 can_remove_if_no_direct_calls_and_refs_p on the node and its aliases. */
2902 if (!externally_visible || !same_comdat_group)
2903 {
2904 if (DECL_EXTERNAL (decl))
2905 return true;
2906 if (address_taken)
2907 return false;
2908 return !call_for_symbol_and_aliases (nonremovable_p, NULL, true);
2909 }
2910
2911 if (will_inline && address_taken)
2912 return false;
2913
2914 /* Otherwise check if we can remove the symbol itself and then verify
2915 that the only uses of the comdat group are direct calls to THIS
2916 or its aliases. */
2917 if (!can_remove_if_no_direct_calls_and_refs_p ())
2918 return false;
2919
2920 /* Check that all refs come from within the comdat group. */
2921 for (int i = 0; iterate_referring (i, ref); i++)
2922 if (ref->referring->get_comdat_group () != get_comdat_group ())
2923 return false;
2924
2925 struct cgraph_node *target = ultimate_alias_target ();
2926 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2927 next != this; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2928 {
2929 if (!externally_visible)
2930 continue;
2931 if (!next->alias
2932 && !next->can_remove_if_no_direct_calls_and_refs_p ())
2933 return false;
2934
2935 /* If we see a different symbol than THIS, be sure to check calls. */
2936 if (next->ultimate_alias_target () != target)
2937 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2938 if (e->caller->get_comdat_group () != get_comdat_group ()
2939 || will_inline)
2940 return false;
2941
2942 /* If function is not being inlined, we care only about
2943 references outside of the comdat group. */
2944 if (!will_inline)
2945 for (int i = 0; next->iterate_referring (i, ref); i++)
2946 if (ref->referring->get_comdat_group () != get_comdat_group ())
2947 return false;
2948 }
2949 return true;
2950 }
2951
2952 /* Return true when function cgraph_node can be expected to be removed
2953 from program when direct calls in this compilation unit are removed.
2954
2955 As a special case, COMDAT functions are
2956 cgraph_can_remove_if_no_direct_calls_p while they are not
2957 cgraph_only_called_directly_p (it is possible they are called from another
2958 unit).
2959
2960 This function behaves as cgraph_only_called_directly_p because eliminating
2961 all uses of a COMDAT function does not necessarily make it disappear from
2962 the program unless we are compiling the whole program or we do LTO. In
2963 that case we know we win since dynamic linking will not really discard the
2964 linkonce section. */
2965
2966 bool
2967 cgraph_node::will_be_removed_from_program_if_no_direct_calls_p
2968 (bool will_inline)
2969 {
2970 gcc_assert (!inlined_to);
2971 if (DECL_EXTERNAL (decl))
2972 return true;
2973
2974 if (!in_lto_p && !flag_whole_program)
2975 {
2976 /* If the symbol is in comdat group, we need to verify that whole comdat
2977 group becomes unreachable. Technically we could skip references from
2978 within the group, too. */
2979 if (!only_called_directly_p ())
2980 return false;
2981 if (same_comdat_group && externally_visible)
2982 {
2983 struct cgraph_node *target = ultimate_alias_target ();
2984
2985 if (will_inline && address_taken)
2986 return true;
2987 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2988 next != this;
2989 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2990 {
2991 if (!externally_visible)
2992 continue;
2993 if (!next->alias
2994 && !next->only_called_directly_p ())
2995 return false;
2996
2997 /* If we see a different symbol than THIS,
2998 be sure to check calls. */
2999 if (next->ultimate_alias_target () != target)
3000 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
3001 if (e->caller->get_comdat_group () != get_comdat_group ()
3002 || will_inline)
3003 return false;
3004 }
3005 }
3006 return true;
3007 }
3008 else
3009 return can_remove_if_no_direct_calls_p (will_inline);
3010 }
3011
3012
3013 /* Worker for cgraph_only_called_directly_p. */
3014
3015 static bool
3016 cgraph_not_only_called_directly_p_1 (cgraph_node *node, void *)
3017 {
3018 return !node->only_called_directly_or_aliased_p ();
3019 }
3020
3021 /* Return true when function cgraph_node and all its aliases are only called
3022 directly;
3023 i.e. it is not externally visible, its address was not taken and
3024 it is not used in any other non-standard way. */
3025
3026 bool
3027 cgraph_node::only_called_directly_p (void)
3028 {
3029 gcc_assert (ultimate_alias_target () == this);
3030 return !call_for_symbol_and_aliases (cgraph_not_only_called_directly_p_1,
3031 NULL, true);
3032 }
3033
3034
3035 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
3036
3037 static bool
3038 collect_callers_of_node_1 (cgraph_node *node, void *data)
3039 {
3040 vec<cgraph_edge *> *redirect_callers = (vec<cgraph_edge *> *)data;
3041 cgraph_edge *cs;
3042 enum availability avail;
3043 node->ultimate_alias_target (&avail);
3044
3045 if (avail > AVAIL_INTERPOSABLE)
3046 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
3047 if (!cs->indirect_inlining_edge
3048 && !cs->caller->thunk.thunk_p)
3049 redirect_callers->safe_push (cs);
3050 return false;
3051 }
3052
3053 /* Collect all callers of cgraph_node and its aliases that are known to lead to
3054 cgraph_node (i.e. are not overwritable). */
3055
3056 vec<cgraph_edge *>
3057 cgraph_node::collect_callers (void)
3058 {
3059 vec<cgraph_edge *> redirect_callers = vNULL;
3060 call_for_symbol_thunks_and_aliases (collect_callers_of_node_1,
3061 &redirect_callers, false);
3062 return redirect_callers;
3063 }
3064
3065
3066 /* Return TRUE if NODE2 is a clone of NODE or is equivalent to it. Return
3067 optimistically true if this cannot be determined. */
3068
3069 static bool
3070 clone_of_p (cgraph_node *node, cgraph_node *node2)
3071 {
3072 node = node->ultimate_alias_target ();
3073 node2 = node2->ultimate_alias_target ();
3074
3075 if (node2->clone_of == node
3076 || node2->former_clone_of == node->decl)
3077 return true;
3078
3079 if (!node->thunk.thunk_p && !node->former_thunk_p ())
3080 {
3081 while (node2 && node->decl != node2->decl)
3082 node2 = node2->clone_of;
3083 return node2 != NULL;
3084 }
3085
3086 /* There are no virtual clones of thunks, so check former_clone_of, or
3087 whether we might have skipped thunks because their this-adjustments are
3088 no longer necessary. */
3089 while (node->thunk.thunk_p || node->former_thunk_p ())
3090 {
3091 if (!node->thunk.this_adjusting)
3092 return false;
3093 /* In case of instrumented expanded thunks, which can have multiple calls
3094 in them, we do not know how to continue and just have to be
3095 optimistic. */
3096 if (node->callees->next_callee)
3097 return true;
3098 node = node->callees->callee->ultimate_alias_target ();
3099
3100 if (!node2->clone.param_adjustments
3101 || node2->clone.param_adjustments->first_param_intact_p ())
3102 return false;
3103 if (node2->former_clone_of == node->decl)
3104 return true;
3105
3106 cgraph_node *n2 = node2;
3107 while (n2 && node->decl != n2->decl)
3108 n2 = n2->clone_of;
3109 if (n2)
3110 return true;
3111 }
3112
3113 return false;
3114 }
3115
3116 /* Verify edge count and frequency. */
3117
3118 bool
3119 cgraph_edge::verify_count ()
3120 {
3121 bool error_found = false;
3122 if (!count.verify ())
3123 {
3124 error ("caller edge count invalid");
3125 error_found = true;
3126 }
3127 return error_found;
3128 }
3129
3130 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
3131 static void
3132 cgraph_debug_gimple_stmt (function *this_cfun, gimple *stmt)
3133 {
3134 bool fndecl_was_null = false;
3135 /* debug_gimple_stmt needs correct cfun */
3136 if (cfun != this_cfun)
3137 set_cfun (this_cfun);
3138 /* ...and an actual current_function_decl */
3139 if (!current_function_decl)
3140 {
3141 current_function_decl = this_cfun->decl;
3142 fndecl_was_null = true;
3143 }
3144 debug_gimple_stmt (stmt);
3145 if (fndecl_was_null)
3146 current_function_decl = NULL;
3147 }
3148
3149 /* Verify that call graph edge corresponds to DECL from the associated
3150 statement. Return true if the verification should fail. */
3151
3152 bool
3153 cgraph_edge::verify_corresponds_to_fndecl (tree decl)
3154 {
3155 cgraph_node *node;
3156
3157 if (!decl || callee->inlined_to)
3158 return false;
3159 if (symtab->state == LTO_STREAMING)
3160 return false;
3161 node = cgraph_node::get (decl);
3162
3163 /* We do not know if a node from a different partition is an alias or what it
3164 aliases and therefore cannot do the former_clone_of check reliably. When
3165 body_removed is set, we have lost all information about what it was an
3166 alias or thunk of and also cannot proceed. */
3167 if (!node
3168 || node->body_removed
3169 || node->in_other_partition
3170 || callee->icf_merged
3171 || callee->in_other_partition)
3172 return false;
3173
3174 node = node->ultimate_alias_target ();
3175
3176 /* Optimizers can redirect unreachable calls or calls triggering undefined
3177 behavior to builtin_unreachable. */
3178
3179 if (fndecl_built_in_p (callee->decl, BUILT_IN_UNREACHABLE))
3180 return false;
3181
3182 if (callee->former_clone_of != node->decl
3183 && (node != callee->ultimate_alias_target ())
3184 && !clone_of_p (node, callee))
3185 return true;
3186 else
3187 return false;
3188 }
3189
3190 /* Disable warnings about missing quoting in GCC diagnostics for
3191 the verification errors. Their format strings don't follow GCC
3192 diagnostic conventions and the calls are ultimately followed by
3193 one to internal_error. */
3194 #if __GNUC__ >= 10
3195 # pragma GCC diagnostic push
3196 # pragma GCC diagnostic ignored "-Wformat-diag"
3197 #endif
3198
3199 /* Verify consistency of speculative call in NODE corresponding to STMT
3200 and LTO_STMT_UID. If INDIRECT is set, assume that it is the indirect
3201 edge of the call sequence. Return true if an error is found.
3202
3203 This function is called for every component of the indirect call (direct
3204 edges, indirect edge and refs). To save duplicated work, do full testing
3205 only when INDIRECT is set. */
3206 static bool
3207 verify_speculative_call (struct cgraph_node *node, gimple *stmt,
3208 unsigned int lto_stmt_uid,
3209 struct cgraph_edge *indirect)
3210 {
3211 if (indirect == NULL)
3212 {
3213 for (indirect = node->indirect_calls; indirect;
3214 indirect = indirect->next_callee)
3215 if (indirect->call_stmt == stmt
3216 && indirect->lto_stmt_uid == lto_stmt_uid)
3217 break;
3218 if (!indirect)
3219 {
3220 error ("missing indirect call in speculative call sequence");
3221 return true;
3222 }
3223 if (!indirect->speculative)
3224 {
3225 error ("indirect call in speculative call sequence has no "
3226 "speculative flag");
3227 return true;
3228 }
3229 return false;
3230 }
3231
3232 /* Maximal number of targets. We probably will never want to have more than
3233 this. */
3234 const unsigned int num = 256;
3235 cgraph_edge *direct_calls[num];
3236 ipa_ref *refs[num];
3237
3238 for (unsigned int i = 0; i < num; i++)
3239 {
3240 direct_calls[i] = NULL;
3241 refs[i] = NULL;
3242 }
3243
3244 cgraph_edge *first_call = NULL;
3245 cgraph_edge *prev_call = NULL;
3246
3247 for (cgraph_edge *direct = node->callees; direct;
3248 direct = direct->next_callee)
3249 if (direct->call_stmt == stmt && direct->lto_stmt_uid == lto_stmt_uid)
3250 {
3251 if (!first_call)
3252 first_call = direct;
3253 if (prev_call && direct != prev_call->next_callee)
3254 {
3255 error ("speculative edges are not adjacent");
3256 return true;
3257 }
3258 prev_call = direct;
3259 if (!direct->speculative)
3260 {
3261 error ("direct call to %s in speculative call sequence has no "
3262 "speculative flag", direct->callee->dump_name ());
3263 return true;
3264 }
3265 if (direct->speculative_id >= num)
3266 {
3267 error ("direct call to %s in speculative call sequence has "
3268 "speculative_id %i out of range",
3269 direct->callee->dump_name (), direct->speculative_id);
3270 return true;
3271 }
3272 if (direct_calls[direct->speculative_id])
3273 {
3274 error ("duplicate direct call to %s in speculative call sequence "
3275 "with speculative_id %i",
3276 direct->callee->dump_name (), direct->speculative_id);
3277 return true;
3278 }
3279 direct_calls[direct->speculative_id] = direct;
3280 }
3281
3282 if (first_call->call_stmt
3283 && first_call != node->get_edge (first_call->call_stmt))
3284 {
3285 error ("call stmt hash does not point to first direct edge of "
3286 "speculative call sequence");
3287 return true;
3288 }
3289
3290 ipa_ref *ref;
3291 for (int i = 0; node->iterate_reference (i, ref); i++)
3292 if (ref->speculative
3293 && ref->stmt == stmt && ref->lto_stmt_uid == lto_stmt_uid)
3294 {
3295 if (ref->speculative_id >= num)
3296 {
3297 error ("direct call to %s in speculative call sequence has "
3298 "speculative_id %i out of range",
3299 ref->referred->dump_name (), ref->speculative_id);
3300 return true;
3301 }
3302 if (refs[ref->speculative_id])
3303 {
3304 error ("duplicate reference %s in speculative call sequence "
3305 "with speculative_id %i",
3306 ref->referred->dump_name (), ref->speculative_id);
3307 return true;
3308 }
3309 refs[ref->speculative_id] = ref;
3310 }
3311
3312 int num_targets = 0;
3313 for (unsigned int i = 0 ; i < num ; i++)
3314 {
3315 if (refs[i] && !direct_calls[i])
3316 {
3317 error ("missing direct call for speculation %i", i);
3318 return true;
3319 }
3320 if (!refs[i] && direct_calls[i])
3321 {
3322 error ("missing ref for speculation %i", i);
3323 return true;
3324 }
3325 if (refs[i] != NULL)
3326 num_targets++;
3327 }
3328
3329 if (num_targets != indirect->num_speculative_call_targets_p ())
3330 {
3331 error ("number of speculative targets %i mismatched with "
3332 "num_speculative_call_targets %i",
3333 num_targets,
3334 indirect->num_speculative_call_targets_p ());
3335 return true;
3336 }
3337 return false;
3338 }
3339
3340 /* Verify consistency of the given cgraph node. */
3341 DEBUG_FUNCTION void
3342 cgraph_node::verify_node (void)
3343 {
3344 cgraph_edge *e;
3345 function *this_cfun = DECL_STRUCT_FUNCTION (decl);
3346 basic_block this_block;
3347 gimple_stmt_iterator gsi;
3348 bool error_found = false;
3349 int i;
3350 ipa_ref *ref = NULL;
3351
3352 if (seen_error ())
3353 return;
3354
3355 timevar_push (TV_CGRAPH_VERIFY);
3356 error_found |= verify_base ();
3357 for (e = callees; e; e = e->next_callee)
3358 if (e->aux)
3359 {
3360 error ("aux field set for edge %s->%s",
3361 identifier_to_locale (e->caller->name ()),
3362 identifier_to_locale (e->callee->name ()));
3363 error_found = true;
3364 }
3365 if (!count.verify ())
3366 {
3367 error ("cgraph count invalid");
3368 error_found = true;
3369 }
3370 if (inlined_to && same_comdat_group)
3371 {
3372 error ("inline clone in same comdat group list");
3373 error_found = true;
3374 }
3375 if (inlined_to && !count.compatible_p (inlined_to->count))
3376 {
3377 error ("inline clone count is not compatible");
3378 count.debug ();
3379 inlined_to->count.debug ();
3380 error_found = true;
3381 }
3382 if (tp_first_run < 0)
3383 {
3384 error ("tp_first_run must be non-negative");
3385 error_found = true;
3386 }
3387 if (!definition && !in_other_partition && local)
3388 {
3389 error ("local symbols must be defined");
3390 error_found = true;
3391 }
3392 if (inlined_to && externally_visible)
3393 {
3394 error ("externally visible inline clone");
3395 error_found = true;
3396 }
3397 if (inlined_to && address_taken)
3398 {
3399 error ("inline clone with address taken");
3400 error_found = true;
3401 }
3402 if (inlined_to && force_output)
3403 {
3404 error ("inline clone is forced to output");
3405 error_found = true;
3406 }
3407 if (symtab->state != LTO_STREAMING)
3408 {
3409 if (calls_comdat_local && !same_comdat_group)
3410 {
3411 error ("calls_comdat_local is set outside of a comdat group");
3412 error_found = true;
3413 }
3414 if (!inlined_to && calls_comdat_local != check_calls_comdat_local_p ())
3415 {
3416 error ("invalid calls_comdat_local flag");
3417 error_found = true;
3418 }
3419 }
3420 if (DECL_IS_MALLOC (decl)
3421 && !POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
3422 {
3423 error ("malloc attribute should be used for a function that "
3424 "returns a pointer");
3425 error_found = true;
3426 }
3427 for (e = indirect_calls; e; e = e->next_callee)
3428 {
3429 if (e->aux)
3430 {
3431 error ("aux field set for indirect edge from %s",
3432 identifier_to_locale (e->caller->name ()));
3433 error_found = true;
3434 }
3435 if (!e->count.compatible_p (count))
3436 {
3437 error ("edge count is not compatible with function count");
3438 e->count.debug ();
3439 count.debug ();
3440 error_found = true;
3441 }
3442 if (!e->indirect_unknown_callee
3443 || !e->indirect_info)
3444 {
3445 error ("An indirect edge from %s is not marked as indirect or has "
3446 "associated indirect_info, the corresponding statement is: ",
3447 identifier_to_locale (e->caller->name ()));
3448 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3449 error_found = true;
3450 }
3451 if (e->call_stmt && e->lto_stmt_uid)
3452 {
3453 error ("edge has both call_stmt and lto_stmt_uid set");
3454 error_found = true;
3455 }
3456 }
3457 bool check_comdat = comdat_local_p ();
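  /* A comdat-local function may only be called from within its own comdat
     group; verify below that every caller honors that restriction.  */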
3458 for (e = callers; e; e = e->next_caller)
3459 {
3460 if (e->verify_count ())
3461 error_found = true;
3462 if (check_comdat
3463 && !in_same_comdat_group_p (e->caller))
3464 {
3465 error ("comdat-local function called by %s outside its comdat",
3466 identifier_to_locale (e->caller->name ()));
3467 error_found = true;
3468 }
3469 if (!e->inline_failed)
3470 {
3471 if (inlined_to
3472 != (e->caller->inlined_to
3473 ? e->caller->inlined_to : e->caller))
3474 {
3475 error ("inlined_to pointer is wrong");
3476 error_found = true;
3477 }
3478 if (callers->next_caller)
3479 {
3480 error ("multiple inline callers");
3481 error_found = true;
3482 }
3483 }
3484 else
3485 if (inlined_to)
3486 {
3487 error ("inlined_to pointer set for noninline callers");
3488 error_found = true;
3489 }
3490 }
3491 for (e = callees; e; e = e->next_callee)
3492 {
3493 if (e->verify_count ())
3494 error_found = true;
3495 if (!e->count.compatible_p (count))
3496 {
3497 error ("edge count is not compatible with function count");
3498 e->count.debug ();
3499 count.debug ();
3500 error_found = true;
3501 }
3502 if (gimple_has_body_p (e->caller->decl)
3503 && !e->caller->inlined_to
3504 && !e->speculative
3505 /* Optimized out calls are redirected to __builtin_unreachable. */
3506 && (e->count.nonzero_p ()
3507 || ! e->callee->decl
3508 || !fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
3509 && count
3510 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3511 && (!e->count.ipa_p ()
3512 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3513 {
3514 error ("caller edge count does not match BB count");
3515 fprintf (stderr, "edge count: ");
3516 e->count.dump (stderr);
3517 fprintf (stderr, "\n bb count: ");
3518 gimple_bb (e->call_stmt)->count.dump (stderr);
3519 fprintf (stderr, "\n");
3520 error_found = true;
3521 }
3522 if (e->call_stmt && e->lto_stmt_uid)
3523 {
3524 error ("edge has both call_stmt and lto_stmt_uid set");
3525 error_found = true;
3526 }
3527 if (e->speculative
3528 && verify_speculative_call (e->caller, e->call_stmt, e->lto_stmt_uid,
3529 NULL))
3530 error_found = true;
3531 }
3532 for (e = indirect_calls; e; e = e->next_callee)
3533 {
3534 if (e->verify_count ())
3535 error_found = true;
3536 if (gimple_has_body_p (e->caller->decl)
3537 && !e->caller->inlined_to
3538 && !e->speculative
3539 && e->count.ipa_p ()
3540 && count
3541 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3542 && (!e->count.ipa_p ()
3543 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3544 {
3545 error ("indirect call count does not match BB count");
3546 fprintf (stderr, "edge count: ");
3547 e->count.dump (stderr);
3548 fprintf (stderr, "\n bb count: ");
3549 gimple_bb (e->call_stmt)->count.dump (stderr);
3550 fprintf (stderr, "\n");
3551 error_found = true;
3552 }
3553 if (e->speculative
3554 && verify_speculative_call (e->caller, e->call_stmt, e->lto_stmt_uid,
3555 e))
3556 error_found = true;
3557 }
3558 for (i = 0; iterate_reference (i, ref); i++)
3559 {
3560 if (ref->stmt && ref->lto_stmt_uid)
3561 {
3562 error ("reference has both stmt and lto_stmt_uid set");
3563 error_found = true;
3564 }
3565 if (ref->speculative
3566 && verify_speculative_call (this, ref->stmt,
3567 ref->lto_stmt_uid, NULL))
3568 error_found = true;
3569 }
3570
3571 if (!callers && inlined_to)
3572 {
3573 error ("inlined_to pointer is set but no predecessors found");
3574 error_found = true;
3575 }
3576 if (inlined_to == this)
3577 {
3578 error ("inlined_to pointer refers to itself");
3579 error_found = true;
3580 }
3581
3582 if (clone_of)
3583 {
3584 cgraph_node *first_clone = clone_of->clones;
3585 if (first_clone != this)
3586 {
3587 if (prev_sibling_clone->clone_of != clone_of)
3588 {
3589 error ("cgraph_node has wrong clone_of");
3590 error_found = true;
3591 }
3592 }
3593 }
3594 if (clones)
3595 {
3596 cgraph_node *n;
3597 for (n = clones; n; n = n->next_sibling_clone)
3598 if (n->clone_of != this)
3599 break;
3600 if (n)
3601 {
3602 error ("cgraph_node has wrong clone list");
3603 error_found = true;
3604 }
3605 }
3606 if ((prev_sibling_clone || next_sibling_clone) && !clone_of)
3607 {
3608 error ("cgraph_node is in clone list but it is not clone");
3609 error_found = true;
3610 }
3611 if (!prev_sibling_clone && clone_of && clone_of->clones != this)
3612 {
3613 error ("cgraph_node has wrong prev_clone pointer");
3614 error_found = true;
3615 }
3616 if (prev_sibling_clone && prev_sibling_clone->next_sibling_clone != this)
3617 {
3618 error ("double linked list of clones corrupted");
3619 error_found = true;
3620 }
3621
3622 if (analyzed && alias)
3623 {
3624 bool ref_found = false;
3625 int i;
3626 ipa_ref *ref = NULL;
3627
3628 if (callees)
3629 {
3630 error ("Alias has call edges");
3631 error_found = true;
3632 }
3633 for (i = 0; iterate_reference (i, ref); i++)
3634 if (ref->use != IPA_REF_ALIAS)
3635 {
3636 error ("Alias has non-alias reference");
3637 error_found = true;
3638 }
3639 else if (ref_found)
3640 {
3641 error ("Alias has more than one alias reference");
3642 error_found = true;
3643 }
3644 else
3645 ref_found = true;
3646 if (!ref_found)
3647 {
3648 error ("Analyzed alias has no reference");
3649 error_found = true;
3650 }
3651 }
3652
3653 if (analyzed && thunk.thunk_p)
3654 {
3655 if (!callees)
3656 {
3657 error ("No edge out of thunk node");
3658 error_found = true;
3659 }
3660 else if (callees->next_callee)
3661 {
3662 error ("More than one edge out of thunk node");
3663 error_found = true;
3664 }
3665 if (gimple_has_body_p (decl) && !inlined_to)
3666 {
3667 error ("Thunk is not supposed to have body");
3668 error_found = true;
3669 }
3670 }
3671 else if (analyzed && gimple_has_body_p (decl)
3672 && !TREE_ASM_WRITTEN (decl)
3673 && (!DECL_EXTERNAL (decl) || inlined_to)
3674 && !flag_wpa)
3675 {
3676 if (this_cfun->cfg)
3677 {
3678 hash_set<gimple *> stmts;
3679
3680 /* Reach the statements by walking over the CFG, and note the
3681 enclosing basic blocks in the call edges. */
3682 FOR_EACH_BB_FN (this_block, this_cfun)
3683 {
3684 for (gsi = gsi_start_phis (this_block);
3685 !gsi_end_p (gsi); gsi_next (&gsi))
3686 stmts.add (gsi_stmt (gsi));
3687 for (gsi = gsi_start_bb (this_block);
3688 !gsi_end_p (gsi);
3689 gsi_next (&gsi))
3690 {
3691 gimple *stmt = gsi_stmt (gsi);
3692 stmts.add (stmt);
3693 if (is_gimple_call (stmt))
3694 {
3695 cgraph_edge *e = get_edge (stmt);
3696 tree decl = gimple_call_fndecl (stmt);
3697 if (e)
3698 {
3699 if (e->aux)
3700 {
3701 error ("shared call_stmt:");
3702 cgraph_debug_gimple_stmt (this_cfun, stmt);
3703 error_found = true;
3704 }
3705 if (!e->indirect_unknown_callee)
3706 {
3707 if (e->verify_corresponds_to_fndecl (decl))
3708 {
3709 error ("edge points to wrong declaration:");
3710 debug_tree (e->callee->decl);
3711 fprintf (stderr," Instead of:");
3712 debug_tree (decl);
3713 error_found = true;
3714 }
3715 }
3716 else if (decl)
3717 {
3718 error ("an indirect edge with unknown callee "
3719 "corresponding to a call_stmt with "
3720 "a known declaration:");
3721 error_found = true;
3722 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3723 }
3724 e->aux = (void *)1;
3725 }
3726 else if (decl)
3727 {
3728 error ("missing callgraph edge for call stmt:");
3729 cgraph_debug_gimple_stmt (this_cfun, stmt);
3730 error_found = true;
3731 }
3732 }
3733 }
3734 }
3735 for (i = 0; iterate_reference (i, ref); i++)
3736 if (ref->stmt && !stmts.contains (ref->stmt))
3737 {
3738 error ("reference to dead statement");
3739 cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
3740 error_found = true;
3741 }
3742 }
3743 else
3744 /* No CFG available?! */
3745 gcc_unreachable ();
3746
3747 for (e = callees; e; e = e->next_callee)
3748 {
3749 if (!e->aux && !e->speculative)
3750 {
3751 error ("edge %s->%s has no corresponding call_stmt",
3752 identifier_to_locale (e->caller->name ()),
3753 identifier_to_locale (e->callee->name ()));
3754 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3755 error_found = true;
3756 }
3757 e->aux = 0;
3758 }
3759 for (e = indirect_calls; e; e = e->next_callee)
3760 {
3761 if (!e->aux && !e->speculative)
3762 {
3763 error ("an indirect edge from %s has no corresponding call_stmt",
3764 identifier_to_locale (e->caller->name ()));
3765 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3766 error_found = true;
3767 }
3768 e->aux = 0;
3769 }
3770 }
3771
3772 if (nested != NULL)
3773 {
3774 for (cgraph_node *n = nested; n != NULL; n = n->next_nested)
3775 {
3776 if (n->origin == NULL)
3777 {
3778 error ("missing origin for a node in a nested list");
3779 error_found = true;
3780 }
3781 else if (n->origin != this)
3782 {
3783 error ("origin points to a different parent");
3784 error_found = true;
3785 break;
3786 }
3787 }
3788 }
3789 if (next_nested != NULL && origin == NULL)
3790 {
3791 error ("missing origin for a node in a nested list");
3792 error_found = true;
3793 }
3794
3795 if (error_found)
3796 {
3797 dump (stderr);
3798 internal_error ("verify_cgraph_node failed");
3799 }
3800 timevar_pop (TV_CGRAPH_VERIFY);
3801 }
3802
3803 /* Verify the whole cgraph structure. */
3804 DEBUG_FUNCTION void
3805 cgraph_node::verify_cgraph_nodes (void)
3806 {
3807 cgraph_node *node;
3808
3809 if (seen_error ())
3810 return;
3811
3812 FOR_EACH_FUNCTION (node)
3813 node->verify ();
3814 }
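
/* verify_cgraph_nodes walks every function together with its body, so it
   is meant for checking builds; callers typically guard it with
   flag_checking.  */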
3815
3816 #if __GNUC__ >= 10
3817 # pragma GCC diagnostic pop
3818 #endif
3819
3820 /* Walk the alias chain to return the function this cgraph_node is an alias
3821 of, walking through thunks as well.
3822 When AVAILABILITY is non-NULL, get the minimal availability in the chain.
3823 When REF is non-NULL, assume that the reference happens in symbol REF
3824 when determining the availability. */
3825
3826 cgraph_node *
3827 cgraph_node::function_symbol (enum availability *availability,
3828 struct symtab_node *ref)
3829 {
3830 cgraph_node *node = ultimate_alias_target (availability, ref);
3831
3832 while (node->thunk.thunk_p)
3833 {
3834 enum availability a;
3835
3836 ref = node;
3837 node = node->callees->callee;
3838 node = node->ultimate_alias_target (availability ? &a : NULL, ref);
3839 if (availability && a < *availability)
3840 *availability = a;
3841 }
3842 return node;
3843 }
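
/* For example (illustrative only), a pass that wants the real function
   behind a call edge E, skipping aliases and thunks, might do:

     enum availability avail;
     cgraph_node *target = e->callee->function_symbol (&avail, e->caller);
     if (avail >= AVAIL_AVAILABLE)
       ...

   where E, TARGET and AVAIL are hypothetical names and AVAIL receives the
   minimal availability along the walked chain.  */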
3844
3845 /* Walk the alias chain to return the function this cgraph_node is an alias
3846 of, walking through non-virtual thunks as well. Thus we return either a
3847 function or a virtual thunk node.
3848 When AVAILABILITY is non-NULL, get the minimal availability in the chain.
3849 When REF is non-NULL, assume that the reference happens in symbol REF
3850 when determining the availability. */
3851
3852 cgraph_node *
3853 cgraph_node::function_or_virtual_thunk_symbol
3854 (enum availability *availability,
3855 struct symtab_node *ref)
3856 {
3857 cgraph_node *node = ultimate_alias_target (availability, ref);
3858
3859 while (node->thunk.thunk_p && !node->thunk.virtual_offset_p)
3860 {
3861 enum availability a;
3862
3863 ref = node;
3864 node = node->callees->callee;
3865 node = node->ultimate_alias_target (availability ? &a : NULL, ref);
3866 if (availability && a < *availability)
3867 *availability = a;
3868 }
3869 return node;
3870 }
3871
3872 /* When doing LTO, read cgraph_node's body from disk if it is not already
3873 present. */
3874
3875 bool
3876 cgraph_node::get_untransformed_body (void)
3877 {
3878 lto_file_decl_data *file_data;
3879 const char *data, *name;
3880 size_t len;
3881 tree decl = this->decl;
3882
3883 /* Check if the body is already there. Either we have a gimple body or
3884 the function is a thunk, in which case DECL_ARGUMENTS is already set. */
3885 if (DECL_ARGUMENTS (decl) || gimple_has_body_p (decl))
3886 return false;
3887
3888 gcc_assert (in_lto_p && !DECL_RESULT (decl));
3889
3890 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3891
3892 file_data = lto_file_data;
3893 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3894
3895 /* We may have renamed the declaration, e.g., a static function. */
3896 name = lto_get_decl_name_mapping (file_data, name);
3897 struct lto_in_decl_state *decl_state
3898 = lto_get_function_in_decl_state (file_data, decl);
3899
3900 cgraph_node *origin = this;
3901 while (origin->clone_of)
3902 origin = origin->clone_of;
3903
3904 int stream_order = origin->order - file_data->order_base;
3905 data = lto_get_section_data (file_data, LTO_section_function_body,
3906 name, stream_order, &len,
3907 decl_state->compressed);
3908 if (!data)
3909 fatal_error (input_location, "%s: section %s.%d is missing",
3910 file_data->file_name, name, stream_order);
3911
3912 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
3913
3914 if (!quiet_flag)
3915 fprintf (stderr, " in:%s", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
3916 lto_input_function_body (file_data, this, data);
3917 lto_stats.num_function_bodies++;
3918 lto_free_section_data (file_data, LTO_section_function_body, name,
3919 data, len, decl_state->compressed);
3920 lto_free_function_in_decl_state_for_node (this);
3921 /* Keep the LTO file data so that ipa-inline-analysis knows about
3922 cross-module inlining. */
3923
3924 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3925
3926 return true;
3927 }
3928
3929 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
3930 if it is not already present. When some IPA transformations are scheduled,
3931 apply them. */
3932
3933 bool
3934 cgraph_node::get_body (void)
3935 {
3936 bool updated;
3937
3938 updated = get_untransformed_body ();
3939
3940 /* Getting a transformed body makes no sense for inline clones;
3941 we should never use this on real clones either, because they are
3942 materialized early.
3943 TODO: Materializing clones here would likely lead to a smaller LTRANS
3944 footprint. */
3945 gcc_assert (!inlined_to && !clone_of);
3946 if (ipa_transforms_to_apply.exists ())
3947 {
3948 opt_pass *saved_current_pass = current_pass;
3949 FILE *saved_dump_file = dump_file;
3950 const char *saved_dump_file_name = dump_file_name;
3951 dump_flags_t saved_dump_flags = dump_flags;
3952 dump_file_name = NULL;
3953 set_dump_file (NULL);
3954
3955 push_cfun (DECL_STRUCT_FUNCTION (decl));
3956
3957 update_ssa (TODO_update_ssa_only_virtuals);
3958 execute_all_ipa_transforms (true);
3959 cgraph_edge::rebuild_edges ();
3960 free_dominance_info (CDI_DOMINATORS);
3961 free_dominance_info (CDI_POST_DOMINATORS);
3962 pop_cfun ();
3963 updated = true;
3964
3965 current_pass = saved_current_pass;
3966 set_dump_file (saved_dump_file);
3967 dump_file_name = saved_dump_file_name;
3968 dump_flags = saved_dump_flags;
3969 }
3970 return updated;
3971 }
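
/* Illustrative use only: a pass that needs the statements of NODE can call

     if (node->get_body ())
       ...

   where the return value is true when the body had to be read from the
   LTO stream or was changed by the pending IPA transforms.  As asserted
   above, this must not be called on clones; those are materialized
   separately.  */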
3972
3973 /* Return the DECL_STRUCT_FUNCTION of the function, looking through the clone_of chain if this node has none of its own. */
3974
3975 struct function *
3976 cgraph_node::get_fun () const
3977 {
3978 const cgraph_node *node = this;
3979 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
3980
3981 while (!fun && node->clone_of)
3982 {
3983 node = node->clone_of;
3984 fun = DECL_STRUCT_FUNCTION (node->decl);
3985 }
3986
3987 return fun;
3988 }
3989
3990 /* Reset all state within cgraph.c so that we can rerun the compiler
3991 within the same process. For use by toplev::finalize. */
3992
3993 void
3994 cgraph_c_finalize (void)
3995 {
3996 symtab = NULL;
3997
3998 x_cgraph_nodes_queue = NULL;
3999
4000 cgraph_fnver_htab = NULL;
4001 version_info_node = NULL;
4002 }
4003
4004 /* A worker for call_for_symbol_and_aliases. */
4005
4006 bool
4007 cgraph_node::call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
4008 void *),
4009 void *data,
4010 bool include_overwritable)
4011 {
4012 ipa_ref *ref;
4013 FOR_EACH_ALIAS (this, ref)
4014 {
4015 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
4016 if (include_overwritable
4017 || alias->get_availability () > AVAIL_INTERPOSABLE)
4018 if (alias->call_for_symbol_and_aliases (callback, data,
4019 include_overwritable))
4020 return true;
4021 }
4022 return false;
4023 }
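
/* Illustrative only: a callback passed to call_for_symbol_and_aliases has
   the form

     static bool
     note_node (cgraph_node *node, void *data)
     {
       ...
       return false;
     }

   (NOTE_NODE is a hypothetical name).  Returning false keeps walking;
   returning true stops the walk and is propagated back to the caller.  */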
4024
4025 /* Return true if NODE is called by at least one thunk. */
4026
4027 bool
4028 cgraph_node::has_thunk_p (cgraph_node *node, void *)
4029 {
4030 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
4031 if (e->caller->thunk.thunk_p)
4032 return true;
4033 return false;
4034 }
4035
4036 /* Return the expected frequency of executions of this call, relative to the entry count of the function it ultimately resides in. */
4037
4038 sreal
4039 cgraph_edge::sreal_frequency ()
4040 {
4041 return count.to_sreal_scale (caller->inlined_to
4042 ? caller->inlined_to->count
4043 : caller->count);
4044 }
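
/* For instance (illustrative numbers), an edge whose count is half of its
   caller's entry count has sreal_frequency () == 0.5, i.e. the call is
   expected to execute once per two invocations of the caller.  For edges
   in inline clones the count of the function they were inlined into is
   used as the base.  */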
4045
4046
4047 /* During LTO stream-in this can be used to check whether the call can
4048 possibly be internal to the current translation unit. */
4049
4050 bool
4051 cgraph_edge::possibly_call_in_translation_unit_p (void)
4052 {
4053 gcc_checking_assert (in_lto_p && caller->prevailing_p ());
4054
4055 /* While incremental linking we may end up getting function body later. */
4056 if (flag_incremental_link == INCREMENTAL_LINK_LTO)
4057 return true;
4058
4059 /* We could be smarter here and avoid streaming in indirect calls we
4060 cannot track, but that would require arranging to stream the indirect
4061 call summary first. */
4062 if (!callee)
4063 return true;
4064
4065 /* If callee is local to the original translation unit, it will be
4066 defined. */
4067 if (!TREE_PUBLIC (callee->decl) && !DECL_EXTERNAL (callee->decl))
4068 return true;
4069
4070 /* Otherwise we need to look up the prevailing symbol (the symbol table is
4071 not merged yet) and see whether it is a definition. We could also resolve
4072 aliases, but that is probably not too important. */
4073 symtab_node *node = callee;
4074 for (int n = 10; node->previous_sharing_asm_name && n ; n--)
4075 node = node->previous_sharing_asm_name;
4076 if (node->previous_sharing_asm_name)
4077 node = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (callee->decl));
4078 gcc_assert (TREE_PUBLIC (node->decl));
4079 return node->get_availability () >= AVAIL_INTERPOSABLE;
4080 }
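
/* Note: the loop above follows previous_sharing_asm_name at most ten times
   to keep the walk cheap; when the chain is longer than that, the
   prevailing node is looked up by assembler name instead.  */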
4081
4082 /* Return the number of speculative call targets of this edge. */
4083
4084 int
4085 cgraph_edge::num_speculative_call_targets_p (void)
4086 {
4087 return indirect_info ? indirect_info->num_speculative_call_targets : 0;
4088 }
4089
4090 /* Check whether the function calls a comdat-local symbol. This is used to
4091 recompute the calls_comdat_local flag after function transformations. */
4092 bool
4093 cgraph_node::check_calls_comdat_local_p ()
4094 {
4095 for (cgraph_edge *e = callees; e; e = e->next_callee)
4096 if (e->inline_failed
4097 ? e->callee->comdat_local_p ()
4098 : e->callee->check_calls_comdat_local_p ())
4099 return true;
4100 return false;
4101 }
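
/* Note that for edges that were already inlined (inline_failed is NULL)
   the walk above recurses into the inlined callee, so the result also
   accounts for calls made from bodies inlined into this function.  */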
4102
4103 /* A stashed copy of "symtab" for use by selftest::symbol_table_test.
4104 This needs to be a global so that it can be a GC root, and thus
4105 prevent the stashed copy from being garbage-collected if the GC runs
4106 during a symbol_table_test. */
4107
4108 symbol_table *saved_symtab;
4109
4110 #if CHECKING_P
4111
4112 namespace selftest {
4113
4114 /* class selftest::symbol_table_test. */
4115
4116 /* Constructor. Store the old value of symtab, and create a new one. */
4117
4118 symbol_table_test::symbol_table_test ()
4119 {
4120 gcc_assert (saved_symtab == NULL);
4121 saved_symtab = symtab;
4122 symtab = new (ggc_alloc<symbol_table> ()) symbol_table ();
4123 }
4124
4125 /* Destructor. Restore the old value of symtab. */
4126
4127 symbol_table_test::~symbol_table_test ()
4128 {
4129 gcc_assert (saved_symtab != NULL);
4130 symtab = saved_symtab;
4131 saved_symtab = NULL;
4132 }
4133
4134 /* Verify that symbol_table_test works. */
4135
4136 static void
4137 test_symbol_table_test ()
4138 {
4139 /* Simulate running two selftests involving symbol tables. */
4140 for (int i = 0; i < 2; i++)
4141 {
4142 symbol_table_test stt;
4143 tree test_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
4144 get_identifier ("test_decl"),
4145 build_function_type_list (void_type_node,
4146 NULL_TREE));
4147 cgraph_node *node = cgraph_node::get_create (test_decl);
4148 gcc_assert (node);
4149
4150 /* Verify that the node has order 0 on both iterations,
4151 and thus that nodes have predictable dump names in selftests. */
4152 ASSERT_EQ (node->order, 0);
4153 ASSERT_STREQ (node->dump_name (), "test_decl/0");
4154 }
4155 }
4156
4157 /* Run all of the selftests within this file. */
4158
4159 void
4160 cgraph_c_tests ()
4161 {
4162 test_symbol_table_test ();
4163 }
4164
4165 } // namespace selftest
4166
4167 #endif /* CHECKING_P */
4168
4169 #include "gt-cgraph.h"