]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/cgraph.c
[Ada] Avoid "others => <>" association in resolved record aggregates
[thirdparty/gcc.git] / gcc / cgraph.c
1 /* Callgraph handling code.
2 Copyright (C) 2003-2020 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file contains basic routines manipulating call graph
22
23 The call-graph is a data structure designed for inter-procedural
24 optimization. It represents a multi-graph where nodes are functions
25 (symbols within symbol table) and edges are call sites. */
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "backend.h"
31 #include "target.h"
32 #include "rtl.h"
33 #include "tree.h"
34 #include "gimple.h"
35 #include "predict.h"
36 #include "alloc-pool.h"
37 #include "gimple-ssa.h"
38 #include "cgraph.h"
39 #include "lto-streamer.h"
40 #include "fold-const.h"
41 #include "varasm.h"
42 #include "calls.h"
43 #include "print-tree.h"
44 #include "langhooks.h"
45 #include "intl.h"
46 #include "tree-eh.h"
47 #include "gimple-iterator.h"
48 #include "tree-cfg.h"
49 #include "tree-ssa.h"
50 #include "value-prof.h"
51 #include "ipa-utils.h"
52 #include "symbol-summary.h"
53 #include "tree-vrp.h"
54 #include "ipa-prop.h"
55 #include "ipa-fnsummary.h"
56 #include "cfgloop.h"
57 #include "gimple-pretty-print.h"
58 #include "tree-dfa.h"
59 #include "profile.h"
60 #include "context.h"
61 #include "gimplify.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "selftest.h"
65 #include "tree-into-ssa.h"
66 #include "ipa-inline.h"
67
68 /* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
69 #include "tree-pass.h"
70
71 /* Queue of cgraph nodes scheduled to be lowered. */
72 symtab_node *x_cgraph_nodes_queue;
73 #define cgraph_nodes_queue ((cgraph_node *)x_cgraph_nodes_queue)
74
75 /* Symbol table global context. */
76 symbol_table *symtab;
77
78 /* List of hooks triggered on cgraph_edge events. */
79 struct cgraph_edge_hook_list {
80 cgraph_edge_hook hook;
81 void *data;
82 struct cgraph_edge_hook_list *next;
83 };
84
85 /* List of hooks triggered on cgraph_node events. */
86 struct cgraph_node_hook_list {
87 cgraph_node_hook hook;
88 void *data;
89 struct cgraph_node_hook_list *next;
90 };
91
92 /* List of hooks triggered on events involving two cgraph_edges. */
93 struct cgraph_2edge_hook_list {
94 cgraph_2edge_hook hook;
95 void *data;
96 struct cgraph_2edge_hook_list *next;
97 };
98
99 /* List of hooks triggered on events involving two cgraph_nodes. */
100 struct cgraph_2node_hook_list {
101 cgraph_2node_hook hook;
102 void *data;
103 struct cgraph_2node_hook_list *next;
104 };
105
106 /* Hash descriptor for cgraph_function_version_info. */
107
108 struct function_version_hasher : ggc_ptr_hash<cgraph_function_version_info>
109 {
110 static hashval_t hash (cgraph_function_version_info *);
111 static bool equal (cgraph_function_version_info *,
112 cgraph_function_version_info *);
113 };
114
115 /* Map a cgraph_node to cgraph_function_version_info using this htab.
116 The cgraph_function_version_info has a THIS_NODE field that is the
117 corresponding cgraph_node.. */
118
119 static GTY(()) hash_table<function_version_hasher> *cgraph_fnver_htab = NULL;
120
121 /* Hash function for cgraph_fnver_htab. */
122 hashval_t
123 function_version_hasher::hash (cgraph_function_version_info *ptr)
124 {
125 int uid = ptr->this_node->get_uid ();
126 return (hashval_t)(uid);
127 }
128
129 /* eq function for cgraph_fnver_htab. */
130 bool
131 function_version_hasher::equal (cgraph_function_version_info *n1,
132 cgraph_function_version_info *n2)
133 {
134 return n1->this_node->get_uid () == n2->this_node->get_uid ();
135 }
136
137 /* Mark as GC root all allocated nodes. */
138 static GTY(()) struct cgraph_function_version_info *
139 version_info_node = NULL;
140
141 /* Return true if NODE's address can be compared. */
142
143 bool
144 symtab_node::address_can_be_compared_p ()
145 {
146 /* Address of virtual tables and functions is never compared. */
147 if (DECL_VIRTUAL_P (decl))
148 return false;
149 /* Address of C++ cdtors is never compared. */
150 if (is_a <cgraph_node *> (this)
151 && (DECL_CXX_CONSTRUCTOR_P (decl)
152 || DECL_CXX_DESTRUCTOR_P (decl)))
153 return false;
154 /* Constant pool symbols addresses are never compared.
155 flag_merge_constants permits us to assume the same on readonly vars. */
156 if (is_a <varpool_node *> (this)
157 && (DECL_IN_CONSTANT_POOL (decl)
158 || (flag_merge_constants >= 2
159 && TREE_READONLY (decl) && !TREE_THIS_VOLATILE (decl))))
160 return false;
161 return true;
162 }
163
164 /* Get the cgraph_function_version_info node corresponding to node. */
165 cgraph_function_version_info *
166 cgraph_node::function_version (void)
167 {
168 cgraph_function_version_info key;
169 key.this_node = this;
170
171 if (cgraph_fnver_htab == NULL)
172 return NULL;
173
174 return cgraph_fnver_htab->find (&key);
175 }
176
177 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
178 corresponding to cgraph_node NODE. */
179 cgraph_function_version_info *
180 cgraph_node::insert_new_function_version (void)
181 {
182 version_info_node = NULL;
183 version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
184 version_info_node->this_node = this;
185
186 if (cgraph_fnver_htab == NULL)
187 cgraph_fnver_htab = hash_table<function_version_hasher>::create_ggc (2);
188
189 *cgraph_fnver_htab->find_slot (version_info_node, INSERT)
190 = version_info_node;
191 return version_info_node;
192 }
193
194 /* Remove the cgraph_function_version_info node given by DECL_V. */
195 static void
196 delete_function_version (cgraph_function_version_info *decl_v)
197 {
198 if (decl_v == NULL)
199 return;
200
201 if (version_info_node == decl_v)
202 version_info_node = NULL;
203
204 if (decl_v->prev != NULL)
205 decl_v->prev->next = decl_v->next;
206
207 if (decl_v->next != NULL)
208 decl_v->next->prev = decl_v->prev;
209
210 if (cgraph_fnver_htab != NULL)
211 cgraph_fnver_htab->remove_elt (decl_v);
212 }
213
214 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
215 DECL is a duplicate declaration. */
216 void
217 cgraph_node::delete_function_version_by_decl (tree decl)
218 {
219 cgraph_node *decl_node = cgraph_node::get (decl);
220
221 if (decl_node == NULL)
222 return;
223
224 delete_function_version (decl_node->function_version ());
225
226 decl_node->remove ();
227 }
228
229 /* Record that DECL1 and DECL2 are semantically identical function
230 versions. */
231 void
232 cgraph_node::record_function_versions (tree decl1, tree decl2)
233 {
234 cgraph_node *decl1_node = cgraph_node::get_create (decl1);
235 cgraph_node *decl2_node = cgraph_node::get_create (decl2);
236 cgraph_function_version_info *decl1_v = NULL;
237 cgraph_function_version_info *decl2_v = NULL;
238 cgraph_function_version_info *before;
239 cgraph_function_version_info *after;
240
241 gcc_assert (decl1_node != NULL && decl2_node != NULL);
242 decl1_v = decl1_node->function_version ();
243 decl2_v = decl2_node->function_version ();
244
245 if (decl1_v != NULL && decl2_v != NULL)
246 return;
247
248 if (decl1_v == NULL)
249 decl1_v = decl1_node->insert_new_function_version ();
250
251 if (decl2_v == NULL)
252 decl2_v = decl2_node->insert_new_function_version ();
253
254 /* Chain decl2_v and decl1_v. All semantically identical versions
255 will be chained together. */
256
257 before = decl1_v;
258 after = decl2_v;
259
260 while (before->next != NULL)
261 before = before->next;
262
263 while (after->prev != NULL)
264 after= after->prev;
265
266 before->next = after;
267 after->prev = before;
268 }
269
270 /* Initialize callgraph dump file. */
271
272 void
273 symbol_table::initialize (void)
274 {
275 if (!dump_file)
276 dump_file = dump_begin (TDI_cgraph, NULL);
277
278 if (!ipa_clones_dump_file)
279 ipa_clones_dump_file = dump_begin (TDI_clones, NULL);
280 }
281
282 /* Allocate new callgraph node and insert it into basic data structures. */
283
284 cgraph_node *
285 symbol_table::create_empty (void)
286 {
287 cgraph_count++;
288 return new (ggc_alloc<cgraph_node> ()) cgraph_node (cgraph_max_uid++);
289 }
290
291 /* Register HOOK to be called with DATA on each removed edge. */
292 cgraph_edge_hook_list *
293 symbol_table::add_edge_removal_hook (cgraph_edge_hook hook, void *data)
294 {
295 cgraph_edge_hook_list *entry;
296 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
297
298 entry = (cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
299 entry->hook = hook;
300 entry->data = data;
301 entry->next = NULL;
302 while (*ptr)
303 ptr = &(*ptr)->next;
304 *ptr = entry;
305 return entry;
306 }
307
308 /* Remove ENTRY from the list of hooks called on removing edges. */
309 void
310 symbol_table::remove_edge_removal_hook (cgraph_edge_hook_list *entry)
311 {
312 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
313
314 while (*ptr != entry)
315 ptr = &(*ptr)->next;
316 *ptr = entry->next;
317 free (entry);
318 }
319
320 /* Call all edge removal hooks. */
321 void
322 symbol_table::call_edge_removal_hooks (cgraph_edge *e)
323 {
324 cgraph_edge_hook_list *entry = m_first_edge_removal_hook;
325 while (entry)
326 {
327 entry->hook (e, entry->data);
328 entry = entry->next;
329 }
330 }
331
332 /* Register HOOK to be called with DATA on each removed node. */
333 cgraph_node_hook_list *
334 symbol_table::add_cgraph_removal_hook (cgraph_node_hook hook, void *data)
335 {
336 cgraph_node_hook_list *entry;
337 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
338
339 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
340 entry->hook = hook;
341 entry->data = data;
342 entry->next = NULL;
343 while (*ptr)
344 ptr = &(*ptr)->next;
345 *ptr = entry;
346 return entry;
347 }
348
349 /* Remove ENTRY from the list of hooks called on removing nodes. */
350 void
351 symbol_table::remove_cgraph_removal_hook (cgraph_node_hook_list *entry)
352 {
353 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
354
355 while (*ptr != entry)
356 ptr = &(*ptr)->next;
357 *ptr = entry->next;
358 free (entry);
359 }
360
361 /* Call all node removal hooks. */
362 void
363 symbol_table::call_cgraph_removal_hooks (cgraph_node *node)
364 {
365 cgraph_node_hook_list *entry = m_first_cgraph_removal_hook;
366 while (entry)
367 {
368 entry->hook (node, entry->data);
369 entry = entry->next;
370 }
371 }
372
373 /* Call all node removal hooks. */
374 void
375 symbol_table::call_cgraph_insertion_hooks (cgraph_node *node)
376 {
377 cgraph_node_hook_list *entry = m_first_cgraph_insertion_hook;
378 while (entry)
379 {
380 entry->hook (node, entry->data);
381 entry = entry->next;
382 }
383 }
384
385
386 /* Register HOOK to be called with DATA on each inserted node. */
387 cgraph_node_hook_list *
388 symbol_table::add_cgraph_insertion_hook (cgraph_node_hook hook, void *data)
389 {
390 cgraph_node_hook_list *entry;
391 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
392
393 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
394 entry->hook = hook;
395 entry->data = data;
396 entry->next = NULL;
397 while (*ptr)
398 ptr = &(*ptr)->next;
399 *ptr = entry;
400 return entry;
401 }
402
403 /* Remove ENTRY from the list of hooks called on inserted nodes. */
404 void
405 symbol_table::remove_cgraph_insertion_hook (cgraph_node_hook_list *entry)
406 {
407 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
408
409 while (*ptr != entry)
410 ptr = &(*ptr)->next;
411 *ptr = entry->next;
412 free (entry);
413 }
414
415 /* Register HOOK to be called with DATA on each duplicated edge. */
416 cgraph_2edge_hook_list *
417 symbol_table::add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
418 {
419 cgraph_2edge_hook_list *entry;
420 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
421
422 entry = (cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
423 entry->hook = hook;
424 entry->data = data;
425 entry->next = NULL;
426 while (*ptr)
427 ptr = &(*ptr)->next;
428 *ptr = entry;
429 return entry;
430 }
431
432 /* Remove ENTRY from the list of hooks called on duplicating edges. */
433 void
434 symbol_table::remove_edge_duplication_hook (cgraph_2edge_hook_list *entry)
435 {
436 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
437
438 while (*ptr != entry)
439 ptr = &(*ptr)->next;
440 *ptr = entry->next;
441 free (entry);
442 }
443
444 /* Call all edge duplication hooks. */
445 void
446 symbol_table::call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2)
447 {
448 cgraph_2edge_hook_list *entry = m_first_edge_duplicated_hook;
449 while (entry)
450 {
451 entry->hook (cs1, cs2, entry->data);
452 entry = entry->next;
453 }
454 }
455
456 /* Register HOOK to be called with DATA on each duplicated node. */
457 cgraph_2node_hook_list *
458 symbol_table::add_cgraph_duplication_hook (cgraph_2node_hook hook, void *data)
459 {
460 cgraph_2node_hook_list *entry;
461 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
462
463 entry = (cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
464 entry->hook = hook;
465 entry->data = data;
466 entry->next = NULL;
467 while (*ptr)
468 ptr = &(*ptr)->next;
469 *ptr = entry;
470 return entry;
471 }
472
473 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
474 void
475 symbol_table::remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry)
476 {
477 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
478
479 while (*ptr != entry)
480 ptr = &(*ptr)->next;
481 *ptr = entry->next;
482 free (entry);
483 }
484
485 /* Call all node duplication hooks. */
486 void
487 symbol_table::call_cgraph_duplication_hooks (cgraph_node *node,
488 cgraph_node *node2)
489 {
490 cgraph_2node_hook_list *entry = m_first_cgraph_duplicated_hook;
491 while (entry)
492 {
493 entry->hook (node, node2, entry->data);
494 entry = entry->next;
495 }
496 }
497
498 /* Return cgraph node assigned to DECL. Create new one when needed. */
499
500 cgraph_node *
501 cgraph_node::create (tree decl)
502 {
503 cgraph_node *node = symtab->create_empty ();
504 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
505
506 node->decl = decl;
507
508 if ((flag_openacc || flag_openmp)
509 && lookup_attribute ("omp declare target", DECL_ATTRIBUTES (decl)))
510 {
511 node->offloadable = 1;
512 if (ENABLE_OFFLOADING)
513 g->have_offload = true;
514 }
515
516 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
517 node->ifunc_resolver = true;
518
519 node->register_symbol ();
520
521 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
522 {
523 node->origin = cgraph_node::get_create (DECL_CONTEXT (decl));
524 node->next_nested = node->origin->nested;
525 node->origin->nested = node;
526 }
527 return node;
528 }
529
530 /* Try to find a call graph node for declaration DECL and if it does not exist
531 or if it corresponds to an inline clone, create a new one. */
532
533 cgraph_node *
534 cgraph_node::get_create (tree decl)
535 {
536 cgraph_node *first_clone = cgraph_node::get (decl);
537
538 if (first_clone && !first_clone->inlined_to)
539 return first_clone;
540
541 cgraph_node *node = cgraph_node::create (decl);
542 if (first_clone)
543 {
544 first_clone->clone_of = node;
545 node->clones = first_clone;
546 node->order = first_clone->order;
547 symtab->symtab_prevail_in_asm_name_hash (node);
548 node->decl->decl_with_vis.symtab_node = node;
549 if (dump_file)
550 fprintf (dump_file, "Introduced new external node "
551 "(%s) and turned into root of the clone tree.\n",
552 node->dump_name ());
553 }
554 else if (dump_file)
555 fprintf (dump_file, "Introduced new external node "
556 "(%s).\n", node->dump_name ());
557 return node;
558 }
559
560 /* Mark ALIAS as an alias to DECL. DECL_NODE is cgraph node representing
561 the function body is associated with
562 (not necessarily cgraph_node (DECL)). */
563
564 cgraph_node *
565 cgraph_node::create_alias (tree alias, tree target)
566 {
567 cgraph_node *alias_node;
568
569 gcc_assert (TREE_CODE (target) == FUNCTION_DECL
570 || TREE_CODE (target) == IDENTIFIER_NODE);
571 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
572 alias_node = cgraph_node::get_create (alias);
573 gcc_assert (!alias_node->definition);
574 alias_node->alias_target = target;
575 alias_node->definition = true;
576 alias_node->alias = true;
577 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
578 alias_node->transparent_alias = alias_node->weakref = true;
579 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (alias)))
580 alias_node->ifunc_resolver = true;
581 return alias_node;
582 }
583
584 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
585 and NULL otherwise.
586 Same body aliases are output whenever the body of DECL is output,
587 and cgraph_node::get (ALIAS) transparently returns
588 cgraph_node::get (DECL). */
589
590 cgraph_node *
591 cgraph_node::create_same_body_alias (tree alias, tree decl)
592 {
593 cgraph_node *n;
594
595 /* If aliases aren't supported by the assembler, fail. */
596 if (!TARGET_SUPPORTS_ALIASES)
597 return NULL;
598
599 /* Langhooks can create same body aliases of symbols not defined.
600 Those are useless. Drop them on the floor. */
601 if (symtab->global_info_ready)
602 return NULL;
603
604 n = cgraph_node::create_alias (alias, decl);
605 n->cpp_implicit_alias = true;
606 if (symtab->cpp_implicit_aliases_done)
607 n->resolve_alias (cgraph_node::get (decl));
608 return n;
609 }
610
611 /* Add thunk alias into callgraph. The alias declaration is ALIAS and it
612 aliases DECL with an adjustments made into the first parameter.
613 See comments in struct cgraph_thunk_info for detail on the parameters. */
614
615 cgraph_node *
616 cgraph_node::create_thunk (tree alias, tree, bool this_adjusting,
617 HOST_WIDE_INT fixed_offset,
618 HOST_WIDE_INT virtual_value,
619 HOST_WIDE_INT indirect_offset,
620 tree virtual_offset,
621 tree real_alias)
622 {
623 cgraph_node *node;
624
625 node = cgraph_node::get (alias);
626 if (node)
627 node->reset ();
628 else
629 node = cgraph_node::create (alias);
630
631 /* Make sure that if VIRTUAL_OFFSET is in sync with VIRTUAL_VALUE. */
632 gcc_checking_assert (virtual_offset
633 ? virtual_value == wi::to_wide (virtual_offset)
634 : virtual_value == 0);
635
636 node->thunk.fixed_offset = fixed_offset;
637 node->thunk.virtual_value = virtual_value;
638 node->thunk.indirect_offset = indirect_offset;
639 node->thunk.alias = real_alias;
640 node->thunk.this_adjusting = this_adjusting;
641 node->thunk.virtual_offset_p = virtual_offset != NULL;
642 node->thunk.thunk_p = true;
643 node->definition = true;
644
645 return node;
646 }
647
648 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
649 Return NULL if there's no such node. */
650
651 cgraph_node *
652 cgraph_node::get_for_asmname (tree asmname)
653 {
654 /* We do not want to look at inline clones. */
655 for (symtab_node *node = symtab_node::get_for_asmname (asmname);
656 node;
657 node = node->next_sharing_asm_name)
658 {
659 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
660 if (cn && !cn->inlined_to)
661 return cn;
662 }
663 return NULL;
664 }
665
666 /* Returns a hash value for X (which really is a cgraph_edge). */
667
668 hashval_t
669 cgraph_edge_hasher::hash (cgraph_edge *e)
670 {
671 /* This is a really poor hash function, but it is what htab_hash_pointer
672 uses. */
673 return (hashval_t) ((intptr_t)e->call_stmt >> 3);
674 }
675
676 /* Returns a hash value for X (which really is a cgraph_edge). */
677
678 hashval_t
679 cgraph_edge_hasher::hash (gimple *call_stmt)
680 {
681 /* This is a really poor hash function, but it is what htab_hash_pointer
682 uses. */
683 return (hashval_t) ((intptr_t)call_stmt >> 3);
684 }
685
686 /* Return nonzero if the call_stmt of cgraph_edge X is stmt *Y. */
687
688 inline bool
689 cgraph_edge_hasher::equal (cgraph_edge *x, gimple *y)
690 {
691 return x->call_stmt == y;
692 }
693
694 /* Add call graph edge E to call site hash of its caller. */
695
696 static inline void
697 cgraph_update_edge_in_call_site_hash (cgraph_edge *e)
698 {
699 gimple *call = e->call_stmt;
700 *e->caller->call_site_hash->find_slot_with_hash
701 (call, cgraph_edge_hasher::hash (call), INSERT) = e;
702 }
703
704 /* Add call graph edge E to call site hash of its caller. */
705
706 static inline void
707 cgraph_add_edge_to_call_site_hash (cgraph_edge *e)
708 {
709 /* There are two speculative edges for every statement (one direct,
710 one indirect); always hash the direct one. */
711 if (e->speculative && e->indirect_unknown_callee)
712 return;
713 cgraph_edge **slot = e->caller->call_site_hash->find_slot_with_hash
714 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt), INSERT);
715 if (*slot)
716 {
717 gcc_assert (((cgraph_edge *)*slot)->speculative);
718 if (e->callee && (!e->prev_callee
719 || !e->prev_callee->speculative
720 || e->prev_callee->call_stmt != e->call_stmt))
721 *slot = e;
722 return;
723 }
724 gcc_assert (!*slot || e->speculative);
725 *slot = e;
726 }
727
728 /* Return the callgraph edge representing the GIMPLE_CALL statement
729 CALL_STMT. */
730
731 cgraph_edge *
732 cgraph_node::get_edge (gimple *call_stmt)
733 {
734 cgraph_edge *e, *e2;
735 int n = 0;
736
737 if (call_site_hash)
738 return call_site_hash->find_with_hash
739 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
740
741 /* This loop may turn out to be performance problem. In such case adding
742 hashtables into call nodes with very many edges is probably best
743 solution. It is not good idea to add pointer into CALL_EXPR itself
744 because we want to make possible having multiple cgraph nodes representing
745 different clones of the same body before the body is actually cloned. */
746 for (e = callees; e; e = e->next_callee)
747 {
748 if (e->call_stmt == call_stmt)
749 break;
750 n++;
751 }
752
753 if (!e)
754 for (e = indirect_calls; e; e = e->next_callee)
755 {
756 if (e->call_stmt == call_stmt)
757 break;
758 n++;
759 }
760
761 if (n > 100)
762 {
763 call_site_hash = hash_table<cgraph_edge_hasher>::create_ggc (120);
764 for (e2 = callees; e2; e2 = e2->next_callee)
765 cgraph_add_edge_to_call_site_hash (e2);
766 for (e2 = indirect_calls; e2; e2 = e2->next_callee)
767 cgraph_add_edge_to_call_site_hash (e2);
768 }
769
770 return e;
771 }
772
773
774 /* Change field call_stmt of edge E to NEW_STMT. If UPDATE_SPECULATIVE and E
775 is any component of speculative edge, then update all components.
776 Speculations can be resolved in the process and EDGE can be removed and
777 deallocated. Return the edge that now represents the call. */
778
779 cgraph_edge *
780 cgraph_edge::set_call_stmt (cgraph_edge *e, gcall *new_stmt,
781 bool update_speculative)
782 {
783 tree decl;
784
785 /* Speculative edges has three component, update all of them
786 when asked to. */
787 if (update_speculative && e->speculative)
788 {
789 cgraph_edge *direct, *indirect, *next;
790 ipa_ref *ref;
791 bool e_indirect = e->indirect_unknown_callee;
792 int n = 0;
793
794 direct = e->first_speculative_call_target ();
795 indirect = e->speculative_call_indirect_edge ();
796
797 gcall *old_stmt = direct->call_stmt;
798 for (cgraph_edge *d = direct; d; d = next)
799 {
800 next = d->next_speculative_call_target ();
801 cgraph_edge *d2 = set_call_stmt (d, new_stmt, false);
802 gcc_assert (d2 == d);
803 n++;
804 }
805 gcc_checking_assert (indirect->num_speculative_call_targets_p () == n);
806 for (unsigned int i = 0; e->caller->iterate_reference (i, ref); i++)
807 if (ref->speculative && ref->stmt == old_stmt)
808 {
809 ref->stmt = new_stmt;
810 n--;
811 }
812
813 indirect = set_call_stmt (indirect, new_stmt, false);
814 return e_indirect ? indirect : direct;
815 }
816
817 /* Only direct speculative edges go to call_site_hash. */
818 if (e->caller->call_site_hash
819 && (!e->speculative || !e->indirect_unknown_callee)
820 /* It is possible that edge was previously speculative. In this case
821 we have different value in call stmt hash which needs preserving. */
822 && e->caller->get_edge (e->call_stmt) == e)
823 e->caller->call_site_hash->remove_elt_with_hash
824 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt));
825
826 e->call_stmt = new_stmt;
827 if (e->indirect_unknown_callee
828 && (decl = gimple_call_fndecl (new_stmt)))
829 {
830 /* Constant propagation (and possibly also inlining?) can turn an
831 indirect call into a direct one. */
832 cgraph_node *new_callee = cgraph_node::get (decl);
833
834 gcc_checking_assert (new_callee);
835 e = make_direct (e, new_callee);
836 }
837
838 function *fun = DECL_STRUCT_FUNCTION (e->caller->decl);
839 e->can_throw_external = stmt_can_throw_external (fun, new_stmt);
840 /* Update call stite hash. For speculative calls we only record the first
841 direct edge. */
842 if (e->caller->call_site_hash
843 && (!e->speculative
844 || (e->callee
845 && (!e->prev_callee || !e->prev_callee->speculative
846 || e->prev_callee->call_stmt != e->call_stmt))
847 || (e->speculative && !e->callee)))
848 cgraph_add_edge_to_call_site_hash (e);
849 return e;
850 }
851
852 /* Allocate a cgraph_edge structure and fill it with data according to the
853 parameters of which only CALLEE can be NULL (when creating an indirect call
854 edge). CLONING_P should be set if properties that are copied from an
855 original edge should not be calculated. */
856
857 cgraph_edge *
858 symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
859 gcall *call_stmt, profile_count count,
860 bool indir_unknown_callee, bool cloning_p)
861 {
862 cgraph_edge *edge;
863
864 /* LTO does not actually have access to the call_stmt since these
865 have not been loaded yet. */
866 if (call_stmt)
867 {
868 /* This is a rather expensive check possibly triggering
869 construction of call stmt hashtable. */
870 cgraph_edge *e;
871 gcc_checking_assert (!(e = caller->get_edge (call_stmt))
872 || e->speculative);
873
874 gcc_assert (is_gimple_call (call_stmt));
875 }
876
877 edge = ggc_alloc<cgraph_edge> ();
878 edge->m_summary_id = -1;
879 edges_count++;
880
881 gcc_assert (++edges_max_uid != 0);
882 edge->m_uid = edges_max_uid;
883 edge->aux = NULL;
884 edge->caller = caller;
885 edge->callee = callee;
886 edge->prev_caller = NULL;
887 edge->next_caller = NULL;
888 edge->prev_callee = NULL;
889 edge->next_callee = NULL;
890 edge->lto_stmt_uid = 0;
891 edge->speculative_id = 0;
892
893 edge->count = count;
894 edge->call_stmt = call_stmt;
895 edge->indirect_info = NULL;
896 edge->indirect_inlining_edge = 0;
897 edge->speculative = false;
898 edge->indirect_unknown_callee = indir_unknown_callee;
899 if (call_stmt && caller->call_site_hash)
900 cgraph_add_edge_to_call_site_hash (edge);
901
902 if (cloning_p)
903 return edge;
904
905 edge->can_throw_external
906 = call_stmt ? stmt_can_throw_external (DECL_STRUCT_FUNCTION (caller->decl),
907 call_stmt) : false;
908 edge->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
909 edge->call_stmt_cannot_inline_p = false;
910
911 if (opt_for_fn (edge->caller->decl, flag_devirtualize)
912 && call_stmt && DECL_STRUCT_FUNCTION (caller->decl))
913 edge->in_polymorphic_cdtor
914 = decl_maybe_in_construction_p (NULL, NULL, call_stmt,
915 caller->decl);
916 else
917 edge->in_polymorphic_cdtor = caller->thunk.thunk_p;
918 if (callee)
919 caller->calls_declare_variant_alt |= callee->declare_variant_alt;
920
921 if (callee && symtab->state != LTO_STREAMING
922 && edge->callee->comdat_local_p ())
923 edge->caller->calls_comdat_local = true;
924
925 return edge;
926 }
927
928 /* Create edge from a given function to CALLEE in the cgraph. CLONING_P should
929 be set if properties that are copied from an original edge should not be
930 calculated. */
931
932 cgraph_edge *
933 cgraph_node::create_edge (cgraph_node *callee,
934 gcall *call_stmt, profile_count count, bool cloning_p)
935 {
936 cgraph_edge *edge = symtab->create_edge (this, callee, call_stmt, count,
937 false, cloning_p);
938
939 if (!cloning_p)
940 initialize_inline_failed (edge);
941
942 edge->next_caller = callee->callers;
943 if (callee->callers)
944 callee->callers->prev_caller = edge;
945 edge->next_callee = callees;
946 if (callees)
947 callees->prev_callee = edge;
948 callees = edge;
949 callee->callers = edge;
950
951 return edge;
952 }
953
954 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
955
956 cgraph_indirect_call_info *
957 cgraph_allocate_init_indirect_info (void)
958 {
959 cgraph_indirect_call_info *ii;
960
961 ii = ggc_cleared_alloc<cgraph_indirect_call_info> ();
962 ii->param_index = -1;
963 return ii;
964 }
965
966 /* Create an indirect edge with a yet-undetermined callee where the call
967 statement destination is a formal parameter of the caller with index
968 PARAM_INDEX. CLONING_P should be set if properties that are copied from an
969 original edge should not be calculated and indirect_info structure should
970 not be calculated. */
971
972 cgraph_edge *
973 cgraph_node::create_indirect_edge (gcall *call_stmt, int ecf_flags,
974 profile_count count,
975 bool cloning_p)
976 {
977 cgraph_edge *edge = symtab->create_edge (this, NULL, call_stmt, count, true,
978 cloning_p);
979 tree target;
980
981 if (!cloning_p)
982 initialize_inline_failed (edge);
983
984 edge->indirect_info = cgraph_allocate_init_indirect_info ();
985 edge->indirect_info->ecf_flags = ecf_flags;
986 edge->indirect_info->vptr_changed = true;
987
988 /* Record polymorphic call info. */
989 if (!cloning_p
990 && call_stmt
991 && (target = gimple_call_fn (call_stmt))
992 && virtual_method_call_p (target))
993 {
994 ipa_polymorphic_call_context context (decl, target, call_stmt);
995
996 /* Only record types can have virtual calls. */
997 edge->indirect_info->polymorphic = true;
998 edge->indirect_info->param_index = -1;
999 edge->indirect_info->otr_token
1000 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
1001 edge->indirect_info->otr_type = obj_type_ref_class (target);
1002 gcc_assert (TREE_CODE (edge->indirect_info->otr_type) == RECORD_TYPE);
1003 edge->indirect_info->context = context;
1004 }
1005
1006 edge->next_callee = indirect_calls;
1007 if (indirect_calls)
1008 indirect_calls->prev_callee = edge;
1009 indirect_calls = edge;
1010
1011 return edge;
1012 }
1013
1014 /* Remove the edge from the list of the callees of the caller. */
1015
1016 void
1017 cgraph_edge::remove_caller (void)
1018 {
1019 if (prev_callee)
1020 prev_callee->next_callee = next_callee;
1021 if (next_callee)
1022 next_callee->prev_callee = prev_callee;
1023 if (!prev_callee)
1024 {
1025 if (indirect_unknown_callee)
1026 caller->indirect_calls = next_callee;
1027 else
1028 caller->callees = next_callee;
1029 }
1030 if (caller->call_site_hash
1031 && this == caller->get_edge (call_stmt))
1032 caller->call_site_hash->remove_elt_with_hash
1033 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
1034 }
1035
1036 /* Put the edge onto the free list. */
1037
1038 void
1039 symbol_table::free_edge (cgraph_edge *e)
1040 {
1041 edges_count--;
1042 if (e->m_summary_id != -1)
1043 edge_released_summary_ids.safe_push (e->m_summary_id);
1044
1045 if (e->indirect_info)
1046 ggc_free (e->indirect_info);
1047 ggc_free (e);
1048 }
1049
1050 /* Remove the edge in the cgraph. */
1051
1052 void
1053 cgraph_edge::remove (cgraph_edge *edge)
1054 {
1055 /* Call all edge removal hooks. */
1056 symtab->call_edge_removal_hooks (edge);
1057
1058 if (!edge->indirect_unknown_callee)
1059 /* Remove from callers list of the callee. */
1060 edge->remove_callee ();
1061
1062 /* Remove from callees list of the callers. */
1063 edge->remove_caller ();
1064
1065 /* Put the edge onto the free list. */
1066 symtab->free_edge (edge);
1067 }
1068
1069 /* Turn edge into speculative call calling N2. Update
1070 the profile so the direct call is taken COUNT times
1071 with FREQUENCY.
1072
1073 At clone materialization time, the indirect call E will
1074 be expanded as:
1075
1076 if (call_dest == N2)
1077 n2 ();
1078 else
1079 call call_dest
1080
1081 At this time the function just creates the direct call,
1082 the reference representing the if conditional and attaches
1083 them all to the original indirect call statement.
1084
1085 speculative_id is used to link direct calls with their corresponding
1086 IPA_REF_ADDR references when representing speculative calls.
1087
1088 Return direct edge created. */
1089
1090 cgraph_edge *
1091 cgraph_edge::make_speculative (cgraph_node *n2, profile_count direct_count,
1092 unsigned int speculative_id)
1093 {
1094 cgraph_node *n = caller;
1095 ipa_ref *ref = NULL;
1096 cgraph_edge *e2;
1097
1098 if (dump_file)
1099 fprintf (dump_file, "Indirect call -> speculative call %s => %s\n",
1100 n->dump_name (), n2->dump_name ());
1101 speculative = true;
1102 e2 = n->create_edge (n2, call_stmt, direct_count);
1103 initialize_inline_failed (e2);
1104 e2->speculative = true;
1105 if (TREE_NOTHROW (n2->decl))
1106 e2->can_throw_external = false;
1107 else
1108 e2->can_throw_external = can_throw_external;
1109 e2->lto_stmt_uid = lto_stmt_uid;
1110 e2->speculative_id = speculative_id;
1111 e2->in_polymorphic_cdtor = in_polymorphic_cdtor;
1112 indirect_info->num_speculative_call_targets++;
1113 count -= e2->count;
1114 symtab->call_edge_duplication_hooks (this, e2);
1115 ref = n->create_reference (n2, IPA_REF_ADDR, call_stmt);
1116 ref->lto_stmt_uid = lto_stmt_uid;
1117 ref->speculative_id = speculative_id;
1118 ref->speculative = speculative;
1119 n2->mark_address_taken ();
1120 return e2;
1121 }
1122
1123 /* Speculative call consists of an indirect edge and one or more
1124 direct edge+ref pairs.
1125
1126 Given an edge which is part of speculative call, return the first
1127 direct call edge in the speculative call sequence. */
1128
1129 cgraph_edge *
1130 cgraph_edge::first_speculative_call_target ()
1131 {
1132 cgraph_edge *e = this;
1133
1134 gcc_checking_assert (e->speculative);
1135 if (e->callee)
1136 {
1137 while (e->prev_callee && e->prev_callee->speculative
1138 && e->prev_callee->call_stmt == e->call_stmt
1139 && e->prev_callee->lto_stmt_uid == e->lto_stmt_uid)
1140 e = e->prev_callee;
1141 return e;
1142 }
1143 /* Call stmt site hash always points to the first target of the
1144 speculative call sequence. */
1145 if (e->call_stmt)
1146 return e->caller->get_edge (e->call_stmt);
1147 for (cgraph_edge *e2 = e->caller->callees; true; e2 = e2->next_callee)
1148 if (e2->speculative
1149 && e->call_stmt == e2->call_stmt
1150 && e->lto_stmt_uid == e2->lto_stmt_uid)
1151 return e2;
1152 }
1153
1154 /* We always maintain first direct edge in the call site hash, if one
1155 exists. E is going to be removed. See if it is first one and update
1156 hash accordingly. INDIRECT is the indirect edge of speculative call.
1157 We assume that INDIRECT->num_speculative_call_targets_p () is already
1158 updated for removal of E. */
1159 static void
1160 update_call_stmt_hash_for_removing_direct_edge (cgraph_edge *e,
1161 cgraph_edge *indirect)
1162 {
1163 if (e->caller->call_site_hash)
1164 {
1165 if (e->caller->get_edge (e->call_stmt) != e)
1166 ;
1167 else if (!indirect->num_speculative_call_targets_p ())
1168 cgraph_update_edge_in_call_site_hash (indirect);
1169 else
1170 {
1171 gcc_checking_assert (e->next_callee && e->next_callee->speculative
1172 && e->next_callee->call_stmt == e->call_stmt);
1173 cgraph_update_edge_in_call_site_hash (e->next_callee);
1174 }
1175 }
1176 }
1177
1178 /* Speculative call EDGE turned out to be direct call to CALLEE_DECL. Remove
1179 the speculative call sequence and return edge representing the call, the
1180 original EDGE can be removed and deallocated. Return the edge that now
1181 represents the call.
1182
1183 For "speculative" indirect call that contains multiple "speculative"
1184 targets (i.e. edge->indirect_info->num_speculative_call_targets > 1),
1185 decrease the count and only remove current direct edge.
1186
1187 If no speculative direct call left to the speculative indirect call, remove
1188 the speculative of both the indirect call and corresponding direct edge.
1189
1190 It is up to caller to iteratively resolve each "speculative" direct call and
1191 redirect the call as appropriate. */
1192
1193 cgraph_edge *
1194 cgraph_edge::resolve_speculation (cgraph_edge *edge, tree callee_decl)
1195 {
1196 cgraph_edge *e2;
1197 ipa_ref *ref;
1198
1199 gcc_assert (edge->speculative && (!callee_decl || edge->callee));
1200 if (!edge->callee)
1201 e2 = edge->first_speculative_call_target ();
1202 else
1203 e2 = edge;
1204 ref = e2->speculative_call_target_ref ();
1205 edge = edge->speculative_call_indirect_edge ();
1206 if (!callee_decl
1207 || !ref->referred->semantically_equivalent_p
1208 (symtab_node::get (callee_decl)))
1209 {
1210 if (dump_file)
1211 {
1212 if (callee_decl)
1213 {
1214 fprintf (dump_file, "Speculative indirect call %s => %s has "
1215 "turned out to have contradicting known target ",
1216 edge->caller->dump_name (),
1217 e2->callee->dump_name ());
1218 print_generic_expr (dump_file, callee_decl);
1219 fprintf (dump_file, "\n");
1220 }
1221 else
1222 {
1223 fprintf (dump_file, "Removing speculative call %s => %s\n",
1224 edge->caller->dump_name (),
1225 e2->callee->dump_name ());
1226 }
1227 }
1228 }
1229 else
1230 {
1231 cgraph_edge *tmp = edge;
1232 if (dump_file)
1233 fprintf (dump_file, "Speculative call turned into direct call.\n");
1234 edge = e2;
1235 e2 = tmp;
1236 /* FIXME: If EDGE is inlined, we should scale up the frequencies
1237 and counts in the functions inlined through it. */
1238 }
1239 edge->count += e2->count;
1240 if (edge->num_speculative_call_targets_p ())
1241 {
1242 /* The indirect edge has multiple speculative targets, don't remove
1243 speculative until all related direct edges are resolved. */
1244 edge->indirect_info->num_speculative_call_targets--;
1245 if (!edge->indirect_info->num_speculative_call_targets)
1246 edge->speculative = false;
1247 }
1248 else
1249 edge->speculative = false;
1250 e2->speculative = false;
1251 update_call_stmt_hash_for_removing_direct_edge (e2, edge);
1252 ref->remove_reference ();
1253 if (e2->indirect_unknown_callee || e2->inline_failed)
1254 remove (e2);
1255 else
1256 e2->callee->remove_symbol_and_inline_clones ();
1257 return edge;
1258 }
1259
1260 /* Return edge corresponding to speculative call to a given target.
1261 NULL if speculative call does not have one. */
1262
1263 cgraph_edge *
1264 cgraph_edge::speculative_call_for_target (cgraph_node *target)
1265 {
1266 for (cgraph_edge *direct = first_speculative_call_target ();
1267 direct;
1268 direct = direct->next_speculative_call_target ())
1269 if (direct->speculative_call_target_ref ()
1270 ->referred->semantically_equivalent_p (target))
1271 return direct;
1272 return NULL;
1273 }
1274
1275 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1276 CALLEE. Speculations can be resolved in the process and EDGE can be removed
1277 and deallocated. Return the edge that now represents the call. */
1278
1279 cgraph_edge *
1280 cgraph_edge::make_direct (cgraph_edge *edge, cgraph_node *callee)
1281 {
1282 gcc_assert (edge->indirect_unknown_callee);
1283
1284 /* If we are redirecting speculative call, make it non-speculative. */
1285 if (edge->speculative)
1286 {
1287 cgraph_edge *found = NULL;
1288 cgraph_edge *direct, *next;
1289
1290 edge = edge->speculative_call_indirect_edge ();
1291
1292 /* Look all speculative targets and remove all but one corresponding
1293 to callee (if it exists). */
1294 for (direct = edge->first_speculative_call_target ();
1295 direct;
1296 direct = next)
1297 {
1298 next = direct->next_speculative_call_target ();
1299
1300 /* Compare ref not direct->callee. Direct edge is possibly
1301 inlined or redirected. */
1302 if (!direct->speculative_call_target_ref ()
1303 ->referred->semantically_equivalent_p (callee))
1304 edge = direct->resolve_speculation (direct, NULL);
1305 else
1306 {
1307 gcc_checking_assert (!found);
1308 found = direct;
1309 }
1310 }
1311
1312 /* On successful speculation just remove the indirect edge and
1313 return the pre existing direct edge.
1314 It is important to not remove it and redirect because the direct
1315 edge may be inlined or redirected. */
1316 if (found)
1317 {
1318 cgraph_edge *e2 = resolve_speculation (found, callee->decl);
1319 gcc_checking_assert (!found->speculative && e2 == found);
1320 return found;
1321 }
1322 gcc_checking_assert (!edge->speculative);
1323 }
1324
1325 edge->indirect_unknown_callee = 0;
1326 ggc_free (edge->indirect_info);
1327 edge->indirect_info = NULL;
1328
1329 /* Get the edge out of the indirect edge list. */
1330 if (edge->prev_callee)
1331 edge->prev_callee->next_callee = edge->next_callee;
1332 if (edge->next_callee)
1333 edge->next_callee->prev_callee = edge->prev_callee;
1334 if (!edge->prev_callee)
1335 edge->caller->indirect_calls = edge->next_callee;
1336
1337 /* Put it into the normal callee list */
1338 edge->prev_callee = NULL;
1339 edge->next_callee = edge->caller->callees;
1340 if (edge->caller->callees)
1341 edge->caller->callees->prev_callee = edge;
1342 edge->caller->callees = edge;
1343
1344 /* Insert to callers list of the new callee. */
1345 edge->set_callee (callee);
1346
1347 /* We need to re-determine the inlining status of the edge. */
1348 initialize_inline_failed (edge);
1349 return edge;
1350 }
1351
1352 /* Redirect callee of the edge to N. The function does not update underlying
1353 call expression. */
1354
1355 void
1356 cgraph_edge::redirect_callee (cgraph_node *n)
1357 {
1358 bool loc = callee->comdat_local_p ();
1359 /* Remove from callers list of the current callee. */
1360 remove_callee ();
1361
1362 /* Insert to callers list of the new callee. */
1363 set_callee (n);
1364
1365 if (!inline_failed)
1366 return;
1367 if (!loc && n->comdat_local_p ())
1368 {
1369 cgraph_node *to = caller->inlined_to ? caller->inlined_to : caller;
1370 to->calls_comdat_local = true;
1371 }
1372 else if (loc && !n->comdat_local_p ())
1373 {
1374 cgraph_node *to = caller->inlined_to ? caller->inlined_to : caller;
1375 gcc_checking_assert (to->calls_comdat_local);
1376 to->calls_comdat_local = to->check_calls_comdat_local_p ();
1377 }
1378 }
1379
1380 /* If necessary, change the function declaration in the call statement
1381 associated with E so that it corresponds to the edge callee. Speculations
1382 can be resolved in the process and EDGE can be removed and deallocated.
1383
1384 The edge could be one of speculative direct call generated from speculative
1385 indirect call. In this circumstance, decrease the speculative targets
1386 count (i.e. num_speculative_call_targets) and redirect call stmt to the
1387 corresponding i-th target. If no speculative direct call left to the
1388 speculative indirect call, remove "speculative" of the indirect call and
1389 also redirect stmt to it's final direct target.
1390
1391 It is up to caller to iteratively transform each "speculative"
1392 direct call as appropriate. */
1393
1394 gimple *
1395 cgraph_edge::redirect_call_stmt_to_callee (cgraph_edge *e)
1396 {
1397 tree decl = gimple_call_fndecl (e->call_stmt);
1398 gcall *new_stmt;
1399 gimple_stmt_iterator gsi;
1400
1401 if (e->speculative)
1402 {
1403 /* If there already is an direct call (i.e. as a result of inliner's
1404 substitution), forget about speculating. */
1405 if (decl)
1406 e = make_direct (e->speculative_call_indirect_edge (),
1407 cgraph_node::get (decl));
1408 else
1409 {
1410 /* Be sure we redirect all speculative targets before poking
1411 abou tindirect edge. */
1412 gcc_checking_assert (e->callee);
1413 cgraph_edge *indirect = e->speculative_call_indirect_edge ();
1414 gcall *new_stmt;
1415 ipa_ref *ref;
1416
1417 /* Expand speculation into GIMPLE code. */
1418 if (dump_file)
1419 {
1420 fprintf (dump_file,
1421 "Expanding speculative call of %s -> %s count: ",
1422 e->caller->dump_name (),
1423 e->callee->dump_name ());
1424 e->count.dump (dump_file);
1425 fprintf (dump_file, "\n");
1426 }
1427 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
1428
1429 profile_count all = indirect->count;
1430 for (cgraph_edge *e2 = e->first_speculative_call_target ();
1431 e2;
1432 e2 = e2->next_speculative_call_target ())
1433 all = all + e2->count;
1434 profile_probability prob = e->count.probability_in (all);
1435 if (!prob.initialized_p ())
1436 prob = profile_probability::even ();
1437 ref = e->speculative_call_target_ref ();
1438 new_stmt = gimple_ic (e->call_stmt,
1439 dyn_cast<cgraph_node *> (ref->referred),
1440 prob);
1441 e->speculative = false;
1442 if (indirect->num_speculative_call_targets_p ())
1443 {
1444 /* The indirect edge has multiple speculative targets, don't
1445 remove speculative until all related direct edges are
1446 redirected. */
1447 indirect->indirect_info->num_speculative_call_targets--;
1448 if (!indirect->indirect_info->num_speculative_call_targets)
1449 indirect->speculative = false;
1450 }
1451 else
1452 indirect->speculative = false;
1453 /* Indirect edges are not both in the call site hash.
1454 get it updated. */
1455 update_call_stmt_hash_for_removing_direct_edge (e, indirect);
1456 cgraph_edge::set_call_stmt (e, new_stmt, false);
1457 e->count = gimple_bb (e->call_stmt)->count;
1458
1459 /* Once we are done with expanding the sequence, update also indirect
1460 call probability. Until then the basic block accounts for the
1461 sum of indirect edge and all non-expanded speculations. */
1462 if (!indirect->speculative)
1463 indirect->count = gimple_bb (indirect->call_stmt)->count;
1464 ref->speculative = false;
1465 ref->stmt = NULL;
1466 pop_cfun ();
1467 /* Continue redirecting E to proper target. */
1468 }
1469 }
1470
1471
1472 if (e->indirect_unknown_callee
1473 || decl == e->callee->decl)
1474 return e->call_stmt;
1475
1476 if (decl && ipa_saved_clone_sources)
1477 {
1478 tree *p = ipa_saved_clone_sources->get (e->callee);
1479 if (p && decl == *p)
1480 {
1481 gimple_call_set_fndecl (e->call_stmt, e->callee->decl);
1482 return e->call_stmt;
1483 }
1484 }
1485
1486 if (flag_checking && decl)
1487 {
1488 cgraph_node *node = cgraph_node::get (decl);
1489 gcc_assert (!node || !node->clone.param_adjustments);
1490 }
1491
1492 if (symtab->dump_file)
1493 {
1494 fprintf (symtab->dump_file, "updating call of %s -> %s: ",
1495 e->caller->dump_name (), e->callee->dump_name ());
1496 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1497 if (e->callee->clone.param_adjustments)
1498 e->callee->clone.param_adjustments->dump (symtab->dump_file);
1499 unsigned performed_len
1500 = vec_safe_length (e->caller->clone.performed_splits);
1501 if (performed_len > 0)
1502 fprintf (symtab->dump_file, "Performed splits records:\n");
1503 for (unsigned i = 0; i < performed_len; i++)
1504 {
1505 ipa_param_performed_split *sm
1506 = &(*e->caller->clone.performed_splits)[i];
1507 print_node_brief (symtab->dump_file, " dummy_decl: ", sm->dummy_decl,
1508 TDF_UID);
1509 fprintf (symtab->dump_file, ", unit_offset: %u\n", sm->unit_offset);
1510 }
1511 }
1512
1513 if (ipa_param_adjustments *padjs = e->callee->clone.param_adjustments)
1514 {
1515 /* We need to defer cleaning EH info on the new statement to
1516 fixup-cfg. We may not have dominator information at this point
1517 and thus would end up with unreachable blocks and have no way
1518 to communicate that we need to run CFG cleanup then. */
1519 int lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1520 if (lp_nr != 0)
1521 remove_stmt_from_eh_lp (e->call_stmt);
1522
1523 tree old_fntype = gimple_call_fntype (e->call_stmt);
1524 new_stmt = padjs->modify_call (e->call_stmt,
1525 e->caller->clone.performed_splits,
1526 e->callee->decl, false);
1527 cgraph_node *origin = e->callee;
1528 while (origin->clone_of)
1529 origin = origin->clone_of;
1530
1531 if ((origin->former_clone_of
1532 && old_fntype == TREE_TYPE (origin->former_clone_of))
1533 || old_fntype == TREE_TYPE (origin->decl))
1534 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1535 else
1536 {
1537 tree new_fntype = padjs->build_new_function_type (old_fntype, true);
1538 gimple_call_set_fntype (new_stmt, new_fntype);
1539 }
1540
1541 if (lp_nr != 0)
1542 add_stmt_to_eh_lp (new_stmt, lp_nr);
1543 }
1544 else
1545 {
1546 new_stmt = e->call_stmt;
1547 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1548 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1549 }
1550
1551 /* If changing the call to __cxa_pure_virtual or similar noreturn function,
1552 adjust gimple_call_fntype too. */
1553 if (gimple_call_noreturn_p (new_stmt)
1554 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (e->callee->decl)))
1555 && TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl))
1556 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl)))
1557 == void_type_node))
1558 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1559
1560 /* If the call becomes noreturn, remove the LHS if possible. */
1561 tree lhs = gimple_call_lhs (new_stmt);
1562 if (lhs
1563 && gimple_call_noreturn_p (new_stmt)
1564 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (new_stmt)))
1565 || should_remove_lhs_p (lhs)))
1566 {
1567 if (TREE_CODE (lhs) == SSA_NAME)
1568 {
1569 tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl),
1570 TREE_TYPE (lhs), NULL);
1571 var = get_or_create_ssa_default_def
1572 (DECL_STRUCT_FUNCTION (e->caller->decl), var);
1573 gimple *set_stmt = gimple_build_assign (lhs, var);
1574 gsi = gsi_for_stmt (new_stmt);
1575 gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
1576 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
1577 }
1578 gimple_call_set_lhs (new_stmt, NULL_TREE);
1579 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1580 }
1581
1582 /* If new callee has no static chain, remove it. */
1583 if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl))
1584 {
1585 gimple_call_set_chain (new_stmt, NULL);
1586 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1587 }
1588
1589 maybe_remove_unused_call_args (DECL_STRUCT_FUNCTION (e->caller->decl),
1590 new_stmt);
1591
1592 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt, false);
1593
1594 if (symtab->dump_file)
1595 {
1596 fprintf (symtab->dump_file, " updated to:");
1597 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1598 }
1599 return new_stmt;
1600 }
1601
1602 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1603 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1604 of OLD_STMT if it was previously call statement.
1605 If NEW_STMT is NULL, the call has been dropped without any
1606 replacement. */
1607
1608 static void
1609 cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
1610 gimple *old_stmt, tree old_call,
1611 gimple *new_stmt)
1612 {
1613 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1614 ? gimple_call_fndecl (new_stmt) : 0;
1615
1616 /* We are seeing indirect calls, then there is nothing to update. */
1617 if (!new_call && !old_call)
1618 return;
1619 /* See if we turned indirect call into direct call or folded call to one builtin
1620 into different builtin. */
1621 if (old_call != new_call)
1622 {
1623 cgraph_edge *e = node->get_edge (old_stmt);
1624 cgraph_edge *ne = NULL;
1625 profile_count count;
1626
1627 if (e)
1628 {
1629 /* Keep calls marked as dead dead. */
1630 if (new_stmt && is_gimple_call (new_stmt) && e->callee
1631 && fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
1632 {
1633 cgraph_edge::set_call_stmt (node->get_edge (old_stmt),
1634 as_a <gcall *> (new_stmt));
1635 return;
1636 }
1637 /* See if the edge is already there and has the correct callee. It
1638 might be so because of indirect inlining has already updated
1639 it. We also might've cloned and redirected the edge. */
1640 if (new_call && e->callee)
1641 {
1642 cgraph_node *callee = e->callee;
1643 while (callee)
1644 {
1645 if (callee->decl == new_call
1646 || callee->former_clone_of == new_call)
1647 {
1648 cgraph_edge::set_call_stmt (e, as_a <gcall *> (new_stmt));
1649 return;
1650 }
1651 callee = callee->clone_of;
1652 }
1653 }
1654
1655 /* Otherwise remove edge and create new one; we can't simply redirect
1656 since function has changed, so inline plan and other information
1657 attached to edge is invalid. */
1658 count = e->count;
1659 if (e->indirect_unknown_callee || e->inline_failed)
1660 cgraph_edge::remove (e);
1661 else
1662 e->callee->remove_symbol_and_inline_clones ();
1663 }
1664 else if (new_call)
1665 {
1666 /* We are seeing new direct call; compute profile info based on BB. */
1667 basic_block bb = gimple_bb (new_stmt);
1668 count = bb->count;
1669 }
1670
1671 if (new_call)
1672 {
1673 ne = node->create_edge (cgraph_node::get_create (new_call),
1674 as_a <gcall *> (new_stmt), count);
1675 gcc_assert (ne->inline_failed);
1676 }
1677 }
1678 /* We only updated the call stmt; update pointer in cgraph edge.. */
1679 else if (old_stmt != new_stmt)
1680 cgraph_edge::set_call_stmt (node->get_edge (old_stmt),
1681 as_a <gcall *> (new_stmt));
1682 }
1683
1684 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1685 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1686 of OLD_STMT before it was updated (updating can happen inplace). */
1687
1688 void
1689 cgraph_update_edges_for_call_stmt (gimple *old_stmt, tree old_decl,
1690 gimple *new_stmt)
1691 {
1692 cgraph_node *orig = cgraph_node::get (cfun->decl);
1693 cgraph_node *node;
1694
1695 gcc_checking_assert (orig);
1696 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1697 if (orig->clones)
1698 for (node = orig->clones; node != orig;)
1699 {
1700 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1701 if (node->clones)
1702 node = node->clones;
1703 else if (node->next_sibling_clone)
1704 node = node->next_sibling_clone;
1705 else
1706 {
1707 while (node != orig && !node->next_sibling_clone)
1708 node = node->clone_of;
1709 if (node != orig)
1710 node = node->next_sibling_clone;
1711 }
1712 }
1713 }
1714
1715
1716 /* Remove all callees from the node. */
1717
1718 void
1719 cgraph_node::remove_callees (void)
1720 {
1721 cgraph_edge *e, *f;
1722
1723 calls_comdat_local = false;
1724
1725 /* It is sufficient to remove the edges from the lists of callers of
1726 the callees. The callee list of the node can be zapped with one
1727 assignment. */
1728 for (e = callees; e; e = f)
1729 {
1730 f = e->next_callee;
1731 symtab->call_edge_removal_hooks (e);
1732 if (!e->indirect_unknown_callee)
1733 e->remove_callee ();
1734 symtab->free_edge (e);
1735 }
1736 for (e = indirect_calls; e; e = f)
1737 {
1738 f = e->next_callee;
1739 symtab->call_edge_removal_hooks (e);
1740 if (!e->indirect_unknown_callee)
1741 e->remove_callee ();
1742 symtab->free_edge (e);
1743 }
1744 indirect_calls = NULL;
1745 callees = NULL;
1746 if (call_site_hash)
1747 {
1748 call_site_hash->empty ();
1749 call_site_hash = NULL;
1750 }
1751 }
1752
1753 /* Remove all callers from the node. */
1754
1755 void
1756 cgraph_node::remove_callers (void)
1757 {
1758 cgraph_edge *e, *f;
1759
1760 /* It is sufficient to remove the edges from the lists of callees of
1761 the callers. The caller list of the node can be zapped with one
1762 assignment. */
1763 for (e = callers; e; e = f)
1764 {
1765 f = e->next_caller;
1766 symtab->call_edge_removal_hooks (e);
1767 e->remove_caller ();
1768 symtab->free_edge (e);
1769 }
1770 callers = NULL;
1771 }
1772
1773 /* Helper function for cgraph_release_function_body and free_lang_data.
1774 It releases body from function DECL without having to inspect its
1775 possibly non-existent symtab node. */
1776
1777 void
1778 release_function_body (tree decl)
1779 {
1780 function *fn = DECL_STRUCT_FUNCTION (decl);
1781 if (fn)
1782 {
1783 if (fn->cfg
1784 && loops_for_fn (fn))
1785 {
1786 fn->curr_properties &= ~PROP_loops;
1787 loop_optimizer_finalize (fn);
1788 }
1789 if (fn->gimple_df)
1790 {
1791 delete_tree_ssa (fn);
1792 fn->eh = NULL;
1793 }
1794 if (fn->cfg)
1795 {
1796 gcc_assert (!dom_info_available_p (fn, CDI_DOMINATORS));
1797 gcc_assert (!dom_info_available_p (fn, CDI_POST_DOMINATORS));
1798 delete_tree_cfg_annotations (fn);
1799 clear_edges (fn);
1800 fn->cfg = NULL;
1801 }
1802 if (fn->value_histograms)
1803 free_histograms (fn);
1804 gimple_set_body (decl, NULL);
1805 /* The struct function hangs a lot of data off it that would leak if we
1806 didn't remove all pointers to it. */
1807 ggc_free (fn);
1808 DECL_STRUCT_FUNCTION (decl) = NULL;
1809 }
1810 DECL_SAVED_TREE (decl) = NULL;
1811 }
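
/* A usage sketch (FNDECL is a hypothetical FUNCTION_DECL; this is not taken
   from any particular pass): code that knows a body will never be needed
   again, and that may have no symtab node to go through, can release the
   body directly from the decl:

     if (DECL_STRUCT_FUNCTION (fndecl) || DECL_SAVED_TREE (fndecl))
       release_function_body (fndecl);  */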
1812
1813 /* Release memory used to represent the body of a function.
1814 Use this only for functions that are released before being translated to
1815 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1816 are freed in final.c via free_after_compilation ().
1817 KEEP_ARGUMENTS is useful only if you want to rebuild the body as a thunk. */
1818
1819 void
1820 cgraph_node::release_body (bool keep_arguments)
1821 {
1822 ipa_transforms_to_apply.release ();
1823 if (!used_as_abstract_origin && symtab->state != PARSING)
1824 {
1825 DECL_RESULT (decl) = NULL;
1826
1827 if (!keep_arguments)
1828 DECL_ARGUMENTS (decl) = NULL;
1829 }
1830 /* If the node is abstract and needed, then do not clear
1831 DECL_INITIAL of its associated function declaration because it's
1832 needed to emit debug info later. */
1833 if (!used_as_abstract_origin && DECL_INITIAL (decl))
1834 DECL_INITIAL (decl) = error_mark_node;
1835 release_function_body (decl);
1836 if (lto_file_data)
1837 {
1838 lto_free_function_in_decl_state_for_node (this);
1839 lto_file_data = NULL;
1840 }
1841 }
1842
1843 /* Remove function from symbol table. */
1844
1845 void
1846 cgraph_node::remove (void)
1847 {
1848 if (symtab->ipa_clones_dump_file && symtab->cloned_nodes.contains (this))
1849 fprintf (symtab->ipa_clones_dump_file,
1850 "Callgraph removal;%s;%d;%s;%d;%d\n", asm_name (), order,
1851 DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl),
1852 DECL_SOURCE_COLUMN (decl));
1853
1854 symtab->call_cgraph_removal_hooks (this);
1855 remove_callers ();
1856 remove_callees ();
1857 ipa_transforms_to_apply.release ();
1858 delete_function_version (function_version ());
1859
1860 /* Incremental inlining accesses removed nodes stored in the postorder
1861 list. */
1862 force_output = false;
1863 forced_by_abi = false;
1864 cgraph_node *next;
1865 for (cgraph_node *n = nested; n; n = next)
1866 {
1867 next = n->next_nested;
1868 n->origin = NULL;
1869 n->next_nested = NULL;
1870 }
1871 nested = NULL;
1872 if (origin)
1873 {
1874 cgraph_node **node2 = &origin->nested;
1875
1876 while (*node2 != this)
1877 node2 = &(*node2)->next_nested;
1878 *node2 = next_nested;
1879 }
1880 unregister ();
1881 if (prev_sibling_clone)
1882 prev_sibling_clone->next_sibling_clone = next_sibling_clone;
1883 else if (clone_of)
1884 clone_of->clones = next_sibling_clone;
1885 if (next_sibling_clone)
1886 next_sibling_clone->prev_sibling_clone = prev_sibling_clone;
1887 if (clones)
1888 {
1889 cgraph_node *n, *next;
1890
1891 if (clone_of)
1892 {
1893 for (n = clones; n->next_sibling_clone; n = n->next_sibling_clone)
1894 n->clone_of = clone_of;
1895 n->clone_of = clone_of;
1896 n->next_sibling_clone = clone_of->clones;
1897 if (clone_of->clones)
1898 clone_of->clones->prev_sibling_clone = n;
1899 clone_of->clones = clones;
1900 }
1901 else
1902 {
1903 /* We are removing a node with clones. This makes the clones inconsistent,
1904 but assume they will be removed subsequently and just keep the clone
1905 tree intact. This can happen during unreachable function removal, since
1906 we remove unreachable functions in random order rather than by a
1907 bottom-up walk of the clone trees. */
1908 for (n = clones; n; n = next)
1909 {
1910 next = n->next_sibling_clone;
1911 n->next_sibling_clone = NULL;
1912 n->prev_sibling_clone = NULL;
1913 n->clone_of = NULL;
1914 }
1915 }
1916 }
1917
1918 /* While all the clones are removed after being processed, the function
1919 itself is kept in the cgraph even after it is compiled. Check whether
1920 we are done with this body and reclaim it proactively if this is the
1921 case. */
1922 if (symtab->state != LTO_STREAMING)
1923 {
1924 cgraph_node *n = cgraph_node::get (decl);
1925 if (!n
1926 || (!n->clones && !n->clone_of && !n->inlined_to
1927 && ((symtab->global_info_ready || in_lto_p)
1928 && (TREE_ASM_WRITTEN (n->decl)
1929 || DECL_EXTERNAL (n->decl)
1930 || !n->analyzed
1931 || (!flag_wpa && n->in_other_partition)))))
1932 release_body ();
1933 }
1934 else
1935 {
1936 lto_free_function_in_decl_state_for_node (this);
1937 lto_file_data = NULL;
1938 }
1939
1940 decl = NULL;
1941 if (call_site_hash)
1942 {
1943 call_site_hash->empty ();
1944 call_site_hash = NULL;
1945 }
1946
1947 symtab->release_symbol (this);
1948 }
1949
1950 /* Likewise indicate that a node has its address taken. */
1951
1952 void
1953 cgraph_node::mark_address_taken (void)
1954 {
1955 /* Indirect inlining can figure out that all uses of the address are
1956 inlined. */
1957 if (inlined_to)
1958 {
1959 gcc_assert (cfun->after_inlining);
1960 gcc_assert (callers->indirect_inlining_edge);
1961 return;
1962 }
1963 /* FIXME: the address_taken flag is used both as a shortcut for testing
1964 whether an IPA_REF_ADDR reference exists (and thus it should be set on the
1965 node representing the alias we take the address of) and as a test whether
1966 the address of the object was taken (and thus it should be set on the node
1967 the alias is referring to). We should remove the first use and then remove
1968 the following set. */
1969 address_taken = 1;
1970 cgraph_node *node = ultimate_alias_target ();
1971 node->address_taken = 1;
1972 }
1973
1974 /* Return local info node for the compiled function. */
1975
1976 cgraph_node *
1977 cgraph_node::local_info_node (tree decl)
1978 {
1979 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1980 cgraph_node *node = get (decl);
1981 if (!node)
1982 return NULL;
1983 return node->ultimate_alias_target ();
1984 }
1985
1986 /* Return RTL info for the compiled function. */
1987
1988 cgraph_rtl_info *
1989 cgraph_node::rtl_info (const_tree decl)
1990 {
1991 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1992 cgraph_node *node = get (decl);
1993 if (!node)
1994 return NULL;
1995 enum availability avail;
1996 node = node->ultimate_alias_target (&avail);
1997 if (decl != current_function_decl
1998 && (avail < AVAIL_AVAILABLE
1999 || (node->decl != current_function_decl
2000 && !TREE_ASM_WRITTEN (node->decl))))
2001 return NULL;
2002 /* Allocate if it doesn't exist. */
2003 if (node->rtl == NULL)
2004 {
2005 node->rtl = ggc_cleared_alloc<cgraph_rtl_info> ();
2006 SET_HARD_REG_SET (node->rtl->function_used_regs);
2007 }
2008 return node->rtl;
2009 }
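
/* A consumer sketch (CALLEE_DECL is a hypothetical FUNCTION_DECL): the
   result may be NULL when the callee's body is unavailable or has not been
   emitted yet, so callers check it and assume the worst in that case:

     cgraph_rtl_info *info = cgraph_node::rtl_info (callee_decl);
     if (info == NULL)
       return false;

   Only when INFO is non-NULL can its function_used_regs set be consulted.  */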
2010
2011 /* Return a string describing the failure REASON. */
2012
2013 const char*
2014 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
2015 {
2016 #undef DEFCIFCODE
2017 #define DEFCIFCODE(code, type, string) string,
2018
2019 static const char *cif_string_table[CIF_N_REASONS] = {
2020 #include "cif-code.def"
2021 };
2022
2023 /* Signedness of an enum type is implementation defined, so cast it
2024 to unsigned before testing. */
2025 gcc_assert ((unsigned) reason < CIF_N_REASONS);
2026 return cif_string_table[reason];
2027 }
2028
2029 /* Return a type describing the failure REASON. */
2030
2031 cgraph_inline_failed_type_t
2032 cgraph_inline_failed_type (cgraph_inline_failed_t reason)
2033 {
2034 #undef DEFCIFCODE
2035 #define DEFCIFCODE(code, type, string) type,
2036
2037 static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
2038 #include "cif-code.def"
2039 };
2040
2041 /* Signedness of an enum type is implementation defined, so cast it
2042 to unsigned before testing. */
2043 gcc_assert ((unsigned) reason < CIF_N_REASONS);
2044 return cif_type_table[reason];
2045 }
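
/* A reporting sketch (assuming DUMP_FILE is open and NODE is the caller of
   interest): inline diagnostics translate the recorded reason back to text
   via the tables above:

     for (cgraph_edge *e = node->callees; e; e = e->next_callee)
       if (e->inline_failed)
         fprintf (dump_file, "  %s not inlined into %s: %s\n",
                  e->callee->dump_name (), e->caller->dump_name (),
                  cgraph_inline_failed_string (e->inline_failed));  */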
2046
2047 /* Names used to print out the availability enum. */
2048 const char * const cgraph_availability_names[] =
2049 {"unset", "not_available", "overwritable", "available", "local"};
2050
2051 /* Output flags of edge to a file F. */
2052
2053 void
2054 cgraph_edge::dump_edge_flags (FILE *f)
2055 {
2056 if (speculative)
2057 fprintf (f, "(speculative) ");
2058 if (!inline_failed)
2059 fprintf (f, "(inlined) ");
2060 if (call_stmt_cannot_inline_p)
2061 fprintf (f, "(call_stmt_cannot_inline_p) ");
2062 if (indirect_inlining_edge)
2063 fprintf (f, "(indirect_inlining) ");
2064 if (count.initialized_p ())
2065 {
2066 fprintf (f, "(");
2067 count.dump (f);
2068 fprintf (f, ",");
2069 fprintf (f, "%.2f per call) ", sreal_frequency ().to_double ());
2070 }
2071 if (can_throw_external)
2072 fprintf (f, "(can throw external) ");
2073 }
2074
2075 /* Dump call graph node to file F. */
2076
2077 void
2078 cgraph_node::dump (FILE *f)
2079 {
2080 cgraph_edge *edge;
2081
2082 dump_base (f);
2083
2084 if (inlined_to)
2085 fprintf (f, " Function %s is inline copy in %s\n",
2086 dump_name (),
2087 inlined_to->dump_name ());
2088 if (clone_of)
2089 fprintf (f, " Clone of %s\n", clone_of->dump_asm_name ());
2090 if (symtab->function_flags_ready)
2091 fprintf (f, " Availability: %s\n",
2092 cgraph_availability_names [get_availability ()]);
2093
2094 if (profile_id)
2095 fprintf (f, " Profile id: %i\n",
2096 profile_id);
2097 if (unit_id)
2098 fprintf (f, " Unit id: %i\n",
2099 unit_id);
2100 cgraph_function_version_info *vi = function_version ();
2101 if (vi != NULL)
2102 {
2103 fprintf (f, " Version info: ");
2104 if (vi->prev != NULL)
2105 {
2106 fprintf (f, "prev: ");
2107 fprintf (f, "%s ", vi->prev->this_node->dump_asm_name ());
2108 }
2109 if (vi->next != NULL)
2110 {
2111 fprintf (f, "next: ");
2112 fprintf (f, "%s ", vi->next->this_node->dump_asm_name ());
2113 }
2114 if (vi->dispatcher_resolver != NULL_TREE)
2115 fprintf (f, "dispatcher: %s",
2116 lang_hooks.decl_printable_name (vi->dispatcher_resolver, 2));
2117
2118 fprintf (f, "\n");
2119 }
2120 fprintf (f, " Function flags:");
2121 if (count.initialized_p ())
2122 {
2123 fprintf (f, " count:");
2124 count.dump (f);
2125 }
2126 if (tp_first_run > 0)
2127 fprintf (f, " first_run:%" PRId64, (int64_t) tp_first_run);
2128 if (origin)
2129 fprintf (f, " nested in:%s", origin->dump_asm_name ());
2130 if (gimple_has_body_p (decl))
2131 fprintf (f, " body");
2132 if (process)
2133 fprintf (f, " process");
2134 if (local)
2135 fprintf (f, " local");
2136 if (redefined_extern_inline)
2137 fprintf (f, " redefined_extern_inline");
2138 if (only_called_at_startup)
2139 fprintf (f, " only_called_at_startup");
2140 if (only_called_at_exit)
2141 fprintf (f, " only_called_at_exit");
2142 if (tm_clone)
2143 fprintf (f, " tm_clone");
2144 if (calls_comdat_local)
2145 fprintf (f, " calls_comdat_local");
2146 if (icf_merged)
2147 fprintf (f, " icf_merged");
2148 if (merged_comdat)
2149 fprintf (f, " merged_comdat");
2150 if (merged_extern_inline)
2151 fprintf (f, " merged_extern_inline");
2152 if (split_part)
2153 fprintf (f, " split_part");
2154 if (indirect_call_target)
2155 fprintf (f, " indirect_call_target");
2156 if (nonfreeing_fn)
2157 fprintf (f, " nonfreeing_fn");
2158 if (DECL_STATIC_CONSTRUCTOR (decl))
2159 fprintf (f," static_constructor (priority:%i)", get_init_priority ());
2160 if (DECL_STATIC_DESTRUCTOR (decl))
2161 fprintf (f," static_destructor (priority:%i)", get_fini_priority ());
2162 if (frequency == NODE_FREQUENCY_HOT)
2163 fprintf (f, " hot");
2164 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2165 fprintf (f, " unlikely_executed");
2166 if (frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2167 fprintf (f, " executed_once");
2168 if (opt_for_fn (decl, optimize_size))
2169 fprintf (f, " optimize_size");
2170 if (parallelized_function)
2171 fprintf (f, " parallelized_function");
2172 if (DECL_IS_OPERATOR_NEW_P (decl))
2173 fprintf (f, " %soperator_new",
2174 DECL_IS_REPLACEABLE_OPERATOR (decl) ? "replaceable_" : "");
2175 if (DECL_IS_OPERATOR_DELETE_P (decl))
2176 fprintf (f, " %soperator_delete",
2177 DECL_IS_REPLACEABLE_OPERATOR (decl) ? "replaceable_" : "");
2178
2179 fprintf (f, "\n");
2180
2181 if (thunk.thunk_p)
2182 {
2183 fprintf (f, " Thunk");
2184 if (thunk.alias)
2185 fprintf (f, " of %s (asm:%s)",
2186 lang_hooks.decl_printable_name (thunk.alias, 2),
2187 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2188 fprintf (f, " fixed offset %i virtual value %i indirect_offset %i "
2189 "has virtual offset %i\n",
2190 (int)thunk.fixed_offset,
2191 (int)thunk.virtual_value,
2192 (int)thunk.indirect_offset,
2193 (int)thunk.virtual_offset_p);
2194 }
2195 else if (former_thunk_p ())
2196 fprintf (f, " Former thunk fixed offset %i virtual value %i "
2197 "indirect_offset %i has virtual offset %i\n",
2198 (int)thunk.fixed_offset,
2199 (int)thunk.virtual_value,
2200 (int)thunk.indirect_offset,
2201 (int)thunk.virtual_offset_p);
2202 if (alias && thunk.alias
2203 && DECL_P (thunk.alias))
2204 {
2205 fprintf (f, " Alias of %s",
2206 lang_hooks.decl_printable_name (thunk.alias, 2));
2207 if (DECL_ASSEMBLER_NAME_SET_P (thunk.alias))
2208 fprintf (f, " (asm:%s)",
2209 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2210 fprintf (f, "\n");
2211 }
2212
2213 fprintf (f, " Called by: ");
2214
2215 profile_count sum = profile_count::zero ();
2216 for (edge = callers; edge; edge = edge->next_caller)
2217 {
2218 fprintf (f, "%s ", edge->caller->dump_asm_name ());
2219 edge->dump_edge_flags (f);
2220 if (edge->count.initialized_p ())
2221 sum += edge->count.ipa ();
2222 }
2223
2224 fprintf (f, "\n Calls: ");
2225 for (edge = callees; edge; edge = edge->next_callee)
2226 {
2227 fprintf (f, "%s ", edge->callee->dump_asm_name ());
2228 edge->dump_edge_flags (f);
2229 }
2230 fprintf (f, "\n");
2231
2232 if (count.ipa ().initialized_p ())
2233 {
2234 bool ok = true;
2235 bool min = false;
2236 ipa_ref *ref;
2237
2238 FOR_EACH_ALIAS (this, ref)
2239 if (dyn_cast <cgraph_node *> (ref->referring)->count.initialized_p ())
2240 sum += dyn_cast <cgraph_node *> (ref->referring)->count.ipa ();
2241
2242 if (inlined_to
2243 || (symtab->state < EXPANSION
2244 && ultimate_alias_target () == this && only_called_directly_p ()))
2245 ok = !count.ipa ().differs_from_p (sum);
2246 else if (count.ipa () > profile_count::from_gcov_type (100)
2247 && count.ipa () < sum.apply_scale (99, 100))
2248 ok = false, min = true;
2249 if (!ok)
2250 {
2251 fprintf (f, " Invalid sum of caller counts ");
2252 sum.dump (f);
2253 if (min)
2254 fprintf (f, ", should be at most ");
2255 else
2256 fprintf (f, ", should be ");
2257 count.ipa ().dump (f);
2258 fprintf (f, "\n");
2259 }
2260 }
2261
2262 for (edge = indirect_calls; edge; edge = edge->next_callee)
2263 {
2264 if (edge->indirect_info->polymorphic)
2265 {
2266 fprintf (f, " Polymorphic indirect call of type ");
2267 print_generic_expr (f, edge->indirect_info->otr_type, TDF_SLIM);
2268 fprintf (f, " token:%i", (int) edge->indirect_info->otr_token);
2269 }
2270 else
2271 fprintf (f, " Indirect call");
2272 edge->dump_edge_flags (f);
2273 if (edge->indirect_info->param_index != -1)
2274 {
2275 fprintf (f, " of param:%i", edge->indirect_info->param_index);
2276 if (edge->indirect_info->agg_contents)
2277 fprintf (f, " loaded from %s %s at offset %i",
2278 edge->indirect_info->member_ptr ? "member ptr" : "aggregate",
2279 edge->indirect_info->by_ref ? "passed by reference":"",
2280 (int)edge->indirect_info->offset);
2281 if (edge->indirect_info->vptr_changed)
2282 fprintf (f, " (vptr maybe changed)");
2283 }
2284 fprintf (f, " Num speculative call targets: %i",
2285 edge->indirect_info->num_speculative_call_targets);
2286 fprintf (f, "\n");
2287 if (edge->indirect_info->polymorphic)
2288 edge->indirect_info->context.dump (f);
2289 }
2290 }
2291
2292 /* Dump call graph node to file F in graphviz format. */
2293
2294 void
2295 cgraph_node::dump_graphviz (FILE *f)
2296 {
2297 cgraph_edge *edge;
2298
2299 for (edge = callees; edge; edge = edge->next_callee)
2300 {
2301 cgraph_node *callee = edge->callee;
2302
2303 fprintf (f, "\t\"%s\" -> \"%s\"\n", dump_name (), callee->dump_name ());
2304 }
2305 }
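
/* A whole-callgraph sketch (F is an assumed open FILE *): a complete dot
   graph can be produced from the per-node dumps above:

     cgraph_node *node;
     fprintf (f, "digraph callgraph {\n");
     FOR_EACH_FUNCTION (node)
       node->dump_graphviz (f);
     fprintf (f, "}\n");  */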
2306
2307
2308 /* Dump call graph node NODE to stderr. */
2309
2310 DEBUG_FUNCTION void
2311 cgraph_node::debug (void)
2312 {
2313 dump (stderr);
2314 }
2315
2316 /* Dump the callgraph to file F. */
2317
2318 void
2319 cgraph_node::dump_cgraph (FILE *f)
2320 {
2321 cgraph_node *node;
2322
2323 fprintf (f, "callgraph:\n\n");
2324 FOR_EACH_FUNCTION (node)
2325 node->dump (f);
2326 }
2327
2328 /* Return true when the DECL can possibly be inlined. */
2329
2330 bool
2331 cgraph_function_possibly_inlined_p (tree decl)
2332 {
2333 if (!symtab->global_info_ready)
2334 return !DECL_UNINLINABLE (decl);
2335 return DECL_POSSIBLY_INLINED (decl);
2336 }
2337
2338 /* cgraph_node is no longer a nested function; update the cgraph accordingly. */
2339 void
2340 cgraph_node::unnest (void)
2341 {
2342 gcc_assert (origin);
2343 cgraph_node **node2 = &origin->nested;
2344
2345 while (*node2 != this)
2346 node2 = &(*node2)->next_nested;
2347 *node2 = next_nested;
2348 origin = NULL;
2349 }
2350
2351 /* Return function availability. See cgraph.h for description of individual
2352 return values. */
2353 enum availability
2354 cgraph_node::get_availability (symtab_node *ref)
2355 {
2356 if (ref)
2357 {
2358 cgraph_node *cref = dyn_cast <cgraph_node *> (ref);
2359 if (cref)
2360 ref = cref->inlined_to;
2361 }
2362 enum availability avail;
2363 if (!analyzed)
2364 avail = AVAIL_NOT_AVAILABLE;
2365 else if (local)
2366 avail = AVAIL_LOCAL;
2367 else if (inlined_to)
2368 avail = AVAIL_AVAILABLE;
2369 else if (transparent_alias)
2370 ultimate_alias_target (&avail, ref);
2371 else if (ifunc_resolver
2372 || lookup_attribute ("noipa", DECL_ATTRIBUTES (decl)))
2373 avail = AVAIL_INTERPOSABLE;
2374 else if (!externally_visible)
2375 avail = AVAIL_AVAILABLE;
2376 /* If this is a reference from symbol itself and there are no aliases, we
2377 may be sure that the symbol was not interposed by something else because
2378 the symbol itself would be unreachable otherwise.
2379
2380 Also comdat groups are always resolved in groups. */
2381 else if ((this == ref && !has_aliases_p ())
2382 || (ref && get_comdat_group ()
2383 && get_comdat_group () == ref->get_comdat_group ()))
2384 avail = AVAIL_AVAILABLE;
2385 /* Inline functions are safe to be analyzed even if their symbol can
2386 be overwritten at runtime. It is not meaningful to enforce any sane
2387 behavior on replacing an inline function by a different body. */
2388 else if (DECL_DECLARED_INLINE_P (decl))
2389 avail = AVAIL_AVAILABLE;
2390
2391 /* If the function can be overwritten, return OVERWRITABLE. Take
2392 care at least of the notable extension of COMDAT functions
2393 used to share template instantiations in C++ (this is symmetric
2394 to the code in cp_cannot_inline_tree_fn and should probably be shared,
2395 with the inlinability hooks eliminated completely). */
2396
2397 else if (decl_replaceable_p (decl) && !DECL_EXTERNAL (decl))
2398 avail = AVAIL_INTERPOSABLE;
2399 else avail = AVAIL_AVAILABLE;
2400
2401 return avail;
2402 }
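
/* A consumer sketch (CALLEE and REF are hypothetical nodes): IPA analyses
   generally refuse to rely on the body of a callee whose definition may be
   interposed at link or run time:

     enum availability avail;
     cgraph_node *target = callee->ultimate_alias_target (&avail, ref);
     if (avail >= AVAIL_AVAILABLE)
       analyze_body (target);

   where analyze_body stands for whatever the analysis does with a body it
   can trust to match what is executed.  */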
2403
2404 /* Worker for cgraph_node_can_be_local_p. */
2405 static bool
2406 cgraph_node_cannot_be_local_p_1 (cgraph_node *node, void *)
2407 {
2408 return !(!node->force_output
2409 && !node->ifunc_resolver
2410 /* Limitation of gas requires us to output targets of symver aliases
2411 as global symbols. This is binutils PR 25295. */
2412 && !node->symver
2413 && ((DECL_COMDAT (node->decl)
2414 && !node->forced_by_abi
2415 && !node->used_from_object_file_p ()
2416 && !node->same_comdat_group)
2417 || !node->externally_visible));
2418 }
2419
2420 /* Return true if the cgraph_node can be made local so that its API may
2421 change. Extern inline functions and C++ COMDAT functions can be made
2422 local at the expense of possible code size growth if the function is
2423 used in multiple compilation units. */
2424 bool
2425 cgraph_node::can_be_local_p (void)
2426 {
2427 return (!address_taken
2428 && !call_for_symbol_thunks_and_aliases (cgraph_node_cannot_be_local_p_1,
2429 NULL, true));
2430 }
2431
2432 /* Call callback on cgraph_node, thunks and aliases associated to cgraph_node.
2433 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
2434 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
2435 skipped. */
2436 bool
2437 cgraph_node::call_for_symbol_thunks_and_aliases (bool (*callback)
2438 (cgraph_node *, void *),
2439 void *data,
2440 bool include_overwritable,
2441 bool exclude_virtual_thunks)
2442 {
2443 cgraph_edge *e;
2444 ipa_ref *ref;
2445 enum availability avail = AVAIL_AVAILABLE;
2446
2447 if (include_overwritable
2448 || (avail = get_availability ()) > AVAIL_INTERPOSABLE)
2449 {
2450 if (callback (this, data))
2451 return true;
2452 }
2453 FOR_EACH_ALIAS (this, ref)
2454 {
2455 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2456 if (include_overwritable
2457 || alias->get_availability () > AVAIL_INTERPOSABLE)
2458 if (alias->call_for_symbol_thunks_and_aliases (callback, data,
2459 include_overwritable,
2460 exclude_virtual_thunks))
2461 return true;
2462 }
2463 if (avail <= AVAIL_INTERPOSABLE)
2464 return false;
2465 for (e = callers; e; e = e->next_caller)
2466 if (e->caller->thunk.thunk_p
2467 && (include_overwritable
2468 || e->caller->get_availability () > AVAIL_INTERPOSABLE)
2469 && !(exclude_virtual_thunks
2470 && e->caller->thunk.virtual_offset_p))
2471 if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
2472 include_overwritable,
2473 exclude_virtual_thunks))
2474 return true;
2475
2476 return false;
2477 }
2478
2479 /* Worker to bring NODE local. */
2480
2481 bool
2482 cgraph_node::make_local (cgraph_node *node, void *)
2483 {
2484 gcc_checking_assert (node->can_be_local_p ());
2485 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2486 {
2487 node->make_decl_local ();
2488 node->set_section (NULL);
2489 node->set_comdat_group (NULL);
2490 node->externally_visible = false;
2491 node->forced_by_abi = false;
2492 node->local = true;
2493 node->set_section (NULL);
2494 node->unique_name = ((node->resolution == LDPR_PREVAILING_DEF_IRONLY
2495 || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
2496 && !flag_incremental_link);
2497 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2498 gcc_assert (node->get_availability () == AVAIL_LOCAL);
2499 }
2500 return false;
2501 }
2502
2503 /* Bring cgraph node local. */
2504
2505 void
2506 cgraph_node::make_local (void)
2507 {
2508 call_for_symbol_thunks_and_aliases (cgraph_node::make_local, NULL, true);
2509 }
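
/* A usage sketch from a whole-program pass (NODE is a hypothetical
   candidate symbol): the worker above asserts can_be_local_p, so the check
   is done by the caller first:

     if (node->can_be_local_p ())
       node->make_local ();  */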
2510
2511 /* Worker to set nothrow flag. */
2512
2513 static void
2514 set_nothrow_flag_1 (cgraph_node *node, bool nothrow, bool non_call,
2515 bool *changed)
2516 {
2517 cgraph_edge *e;
2518
2519 if (nothrow && !TREE_NOTHROW (node->decl))
2520 {
2521 /* With non-call exceptions we can't say for sure whether another function
2522 body might have been optimized in a way that still throws. */
2523 if (!non_call || node->binds_to_current_def_p ())
2524 {
2525 TREE_NOTHROW (node->decl) = true;
2526 *changed = true;
2527 for (e = node->callers; e; e = e->next_caller)
2528 e->can_throw_external = false;
2529 }
2530 }
2531 else if (!nothrow && TREE_NOTHROW (node->decl))
2532 {
2533 TREE_NOTHROW (node->decl) = false;
2534 *changed = true;
2535 }
2536 ipa_ref *ref;
2537 FOR_EACH_ALIAS (node, ref)
2538 {
2539 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2540 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2541 set_nothrow_flag_1 (alias, nothrow, non_call, changed);
2542 }
2543 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2544 if (e->caller->thunk.thunk_p
2545 && (!nothrow || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2546 set_nothrow_flag_1 (e->caller, nothrow, non_call, changed);
2547 }
2548
2549 /* Set TREE_NOTHROW on the node's decl and on any aliases of the node
2550 to NOTHROW. Return true if any change was made. */
2551
2552 bool
2553 cgraph_node::set_nothrow_flag (bool nothrow)
2554 {
2555 bool changed = false;
2556 bool non_call = opt_for_fn (decl, flag_non_call_exceptions);
2557
2558 if (!nothrow || get_availability () > AVAIL_INTERPOSABLE)
2559 set_nothrow_flag_1 (this, nothrow, non_call, &changed);
2560 else
2561 {
2562 ipa_ref *ref;
2563
2564 FOR_EACH_ALIAS (this, ref)
2565 {
2566 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2567 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2568 set_nothrow_flag_1 (alias, nothrow, non_call, &changed);
2569 }
2570 }
2571 return changed;
2572 }
2573
2574 /* Worker to set malloc flag. */
2575 static void
2576 set_malloc_flag_1 (cgraph_node *node, bool malloc_p, bool *changed)
2577 {
2578 if (malloc_p && !DECL_IS_MALLOC (node->decl))
2579 {
2580 DECL_IS_MALLOC (node->decl) = true;
2581 *changed = true;
2582 }
2583
2584 ipa_ref *ref;
2585 FOR_EACH_ALIAS (node, ref)
2586 {
2587 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2588 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2589 set_malloc_flag_1 (alias, malloc_p, changed);
2590 }
2591
2592 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2593 if (e->caller->thunk.thunk_p
2594 && (!malloc_p || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2595 set_malloc_flag_1 (e->caller, malloc_p, changed);
2596 }
2597
2598 /* Set DECL_IS_MALLOC on NODE's decl and on NODE's aliases if any. */
2599
2600 bool
2601 cgraph_node::set_malloc_flag (bool malloc_p)
2602 {
2603 bool changed = false;
2604
2605 if (!malloc_p || get_availability () > AVAIL_INTERPOSABLE)
2606 set_malloc_flag_1 (this, malloc_p, &changed);
2607 else
2608 {
2609 ipa_ref *ref;
2610
2611 FOR_EACH_ALIAS (this, ref)
2612 {
2613 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2614 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2615 set_malloc_flag_1 (alias, malloc_p, &changed);
2616 }
2617 }
2618 return changed;
2619 }
2620
2621 /* Worker to set_const_flag. */
2622
2623 static void
2624 set_const_flag_1 (cgraph_node *node, bool set_const, bool looping,
2625 bool *changed)
2626 {
2627 /* Static constructors and destructors without a side effect can be
2628 optimized out. */
2629 if (set_const && !looping)
2630 {
2631 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2632 {
2633 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2634 *changed = true;
2635 }
2636 if (DECL_STATIC_DESTRUCTOR (node->decl))
2637 {
2638 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2639 *changed = true;
2640 }
2641 }
2642 if (!set_const)
2643 {
2644 if (TREE_READONLY (node->decl))
2645 {
2646 TREE_READONLY (node->decl) = 0;
2647 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2648 *changed = true;
2649 }
2650 }
2651 else
2652 {
2653 /* Consider function:
2654
2655 bool a(int *p)
2656 {
2657 return *p==*p;
2658 }
2659
2660 During early optimization we will turn this into:
2661
2662 bool a(int *p)
2663 {
2664 return true;
2665 }
2666
2667 Now this function will be detected as CONST; however, when interposed
2668 it may end up being just pure. We must always assume the worst
2669 scenario here. */
2670 if (TREE_READONLY (node->decl))
2671 {
2672 if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2673 {
2674 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2675 *changed = true;
2676 }
2677 }
2678 else if (node->binds_to_current_def_p ())
2679 {
2680 TREE_READONLY (node->decl) = true;
2681 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2682 DECL_PURE_P (node->decl) = false;
2683 *changed = true;
2684 }
2685 else
2686 {
2687 if (dump_file && (dump_flags & TDF_DETAILS))
2688 fprintf (dump_file, "Dropping state to PURE because function does "
2689 "not bind to current def.\n");
2690 if (!DECL_PURE_P (node->decl))
2691 {
2692 DECL_PURE_P (node->decl) = true;
2693 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2694 *changed = true;
2695 }
2696 else if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2697 {
2698 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2699 *changed = true;
2700 }
2701 }
2702 }
2703
2704 ipa_ref *ref;
2705 FOR_EACH_ALIAS (node, ref)
2706 {
2707 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2708 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2709 set_const_flag_1 (alias, set_const, looping, changed);
2710 }
2711 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2712 if (e->caller->thunk.thunk_p
2713 && (!set_const || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2714 {
2715 /* Virtual thunks access virtual offset in the vtable, so they can
2716 only be pure, never const. */
2717 if (set_const
2718 && (e->caller->thunk.virtual_offset_p
2719 || !node->binds_to_current_def_p (e->caller)))
2720 *changed |= e->caller->set_pure_flag (true, looping);
2721 else
2722 set_const_flag_1 (e->caller, set_const, looping, changed);
2723 }
2724 }
2725
2726 /* If SET_CONST is true, mark the function, its aliases and thunks as ECF_CONST.
2727 If SET_CONST is false, clear the flag.
2728
2729 When setting the flag, be careful about possible interposition: do not
2730 set the flag for functions that can be interposed, and set the pure
2731 flag instead for functions that may bind to another definition.
2732
2733 Return true if any change was made. */
2734
2735 bool
2736 cgraph_node::set_const_flag (bool set_const, bool looping)
2737 {
2738 bool changed = false;
2739 if (!set_const || get_availability () > AVAIL_INTERPOSABLE)
2740 set_const_flag_1 (this, set_const, looping, &changed);
2741 else
2742 {
2743 ipa_ref *ref;
2744
2745 FOR_EACH_ALIAS (this, ref)
2746 {
2747 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2748 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2749 set_const_flag_1 (alias, set_const, looping, &changed);
2750 }
2751 }
2752 return changed;
2753 }
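
/* A propagation sketch (a simplified stand-in for an ipa-pure-const style
   analysis, assuming it has proved NODE free of side effects and of
   infinite loops):

     bool looping = false;
     bool changed = node->set_const_flag (true, looping);

   CHANGED being true indicates that some decl was updated and dependent
   summaries may need to be recomputed.  */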
2754
2755 /* Info used by set_pure_flag_1. */
2756
2757 struct set_pure_flag_info
2758 {
2759 bool pure;
2760 bool looping;
2761 bool changed;
2762 };
2763
2764 /* Worker to set_pure_flag. */
2765
2766 static bool
2767 set_pure_flag_1 (cgraph_node *node, void *data)
2768 {
2769 struct set_pure_flag_info *info = (struct set_pure_flag_info *)data;
2770 /* Static constructors and destructors without a side effect can be
2771 optimized out. */
2772 if (info->pure && !info->looping)
2773 {
2774 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2775 {
2776 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2777 info->changed = true;
2778 }
2779 if (DECL_STATIC_DESTRUCTOR (node->decl))
2780 {
2781 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2782 info->changed = true;
2783 }
2784 }
2785 if (info->pure)
2786 {
2787 if (!DECL_PURE_P (node->decl) && !TREE_READONLY (node->decl))
2788 {
2789 DECL_PURE_P (node->decl) = true;
2790 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = info->looping;
2791 info->changed = true;
2792 }
2793 else if (DECL_LOOPING_CONST_OR_PURE_P (node->decl)
2794 && !info->looping)
2795 {
2796 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2797 info->changed = true;
2798 }
2799 }
2800 else
2801 {
2802 if (DECL_PURE_P (node->decl))
2803 {
2804 DECL_PURE_P (node->decl) = false;
2805 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2806 info->changed = true;
2807 }
2808 }
2809 return false;
2810 }
2811
2812 /* Set DECL_PURE_P on the cgraph_node's decl and on any aliases of the
2813 node to PURE.
2814
2815 When setting the flag, be careful about possible interposition.
2816 Return true if any change was made. */
2817
2818 bool
2819 cgraph_node::set_pure_flag (bool pure, bool looping)
2820 {
2821 struct set_pure_flag_info info = {pure, looping, false};
2822 call_for_symbol_thunks_and_aliases (set_pure_flag_1, &info, !pure, true);
2823 return info.changed;
2824 }
2825
2826 /* Return true when cgraph_node cannot return or throw and thus
2827 it is safe to ignore its side effects for IPA analysis. */
2828
2829 bool
2830 cgraph_node::cannot_return_p (void)
2831 {
2832 int flags = flags_from_decl_or_type (decl);
2833 if (!opt_for_fn (decl, flag_exceptions))
2834 return (flags & ECF_NORETURN) != 0;
2835 else
2836 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2837 == (ECF_NORETURN | ECF_NOTHROW));
2838 }
2839
2840 /* Return true when a call along the edge cannot lead to a return from the
2841 caller and thus it is safe to ignore its side effects for IPA analysis
2842 when computing the side effects of the caller.
2843 FIXME: We could actually mark all edges that have no reaching
2844 path to the exit block or throw to get better results. */
2845 bool
2846 cgraph_edge::cannot_lead_to_return_p (void)
2847 {
2848 if (caller->cannot_return_p ())
2849 return true;
2850 if (indirect_unknown_callee)
2851 {
2852 int flags = indirect_info->ecf_flags;
2853 if (!opt_for_fn (caller->decl, flag_exceptions))
2854 return (flags & ECF_NORETURN) != 0;
2855 else
2856 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2857 == (ECF_NORETURN | ECF_NOTHROW));
2858 }
2859 else
2860 return callee->cannot_return_p ();
2861 }
2862
2863 /* Return true if the edge may be considered hot. */
2864
2865 bool
2866 cgraph_edge::maybe_hot_p (void)
2867 {
2868 if (!maybe_hot_count_p (NULL, count.ipa ()))
2869 return false;
2870 if (caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
2871 || (callee
2872 && callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
2873 return false;
2874 if (caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
2875 && (callee
2876 && callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE))
2877 return false;
2878 if (opt_for_fn (caller->decl, optimize_size))
2879 return false;
2880 if (caller->frequency == NODE_FREQUENCY_HOT)
2881 return true;
2882 if (!count.initialized_p ())
2883 return true;
2884 cgraph_node *where = caller->inlined_to ? caller->inlined_to : caller;
2885 if (!where->count.initialized_p ())
2886 return false;
2887 if (caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2888 {
2889 if (count.apply_scale (2, 1) < where->count.apply_scale (3, 1))
2890 return false;
2891 }
2892 else if (count.apply_scale (param_hot_bb_frequency_fraction , 1)
2893 < where->count)
2894 return false;
2895 return true;
2896 }
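
/* A heuristic sketch (simplified; real inliner heuristics weigh many more
   factors): candidate selection typically combines this predicate with the
   recorded inline_failed reason:

     for (cgraph_edge *e = node->callees; e; e = e->next_callee)
       if (e->inline_failed && e->maybe_hot_p ())
         consider_candidate (e);

   where consider_candidate stands for a hypothetical queueing step.  */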
2897
2898 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2899
2900 static bool
2901 nonremovable_p (cgraph_node *node, void *)
2902 {
2903 return !node->can_remove_if_no_direct_calls_and_refs_p ();
2904 }
2905
2906 /* Return true if the whole comdat group can be removed if there are no
2907 direct calls to THIS. */
2908
2909 bool
2910 cgraph_node::can_remove_if_no_direct_calls_p (bool will_inline)
2911 {
2912 struct ipa_ref *ref;
2913
2914 /* For local symbols or non-comdat group it is the same as
2915 can_remove_if_no_direct_calls_p. */
2916 if (!externally_visible || !same_comdat_group)
2917 {
2918 if (DECL_EXTERNAL (decl))
2919 return true;
2920 if (address_taken)
2921 return false;
2922 return !call_for_symbol_and_aliases (nonremovable_p, NULL, true);
2923 }
2924
2925 if (will_inline && address_taken)
2926 return false;
2927
2928 /* Otherwise check if we can remove the symbol itself and then verify
2929 that the only uses of the comdat group are direct calls to THIS
2930 or its aliases. */
2931 if (!can_remove_if_no_direct_calls_and_refs_p ())
2932 return false;
2933
2934 /* Check that all refs come from within the comdat group. */
2935 for (int i = 0; iterate_referring (i, ref); i++)
2936 if (ref->referring->get_comdat_group () != get_comdat_group ())
2937 return false;
2938
2939 struct cgraph_node *target = ultimate_alias_target ();
2940 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2941 next != this; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2942 {
2943 if (!externally_visible)
2944 continue;
2945 if (!next->alias
2946 && !next->can_remove_if_no_direct_calls_and_refs_p ())
2947 return false;
2948
2949 /* If we see a different symbol than THIS, be sure to check calls. */
2950 if (next->ultimate_alias_target () != target)
2951 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2952 if (e->caller->get_comdat_group () != get_comdat_group ()
2953 || will_inline)
2954 return false;
2955
2956 /* If function is not being inlined, we care only about
2957 references outside of the comdat group. */
2958 if (!will_inline)
2959 for (int i = 0; next->iterate_referring (i, ref); i++)
2960 if (ref->referring->get_comdat_group () != get_comdat_group ())
2961 return false;
2962 }
2963 return true;
2964 }
2965
2966 /* Return true when the function cgraph_node can be expected to be removed
2967 from the program when direct calls in this compilation unit are removed.
2968
2969 As a special case, COMDAT functions are
2970 cgraph_can_remove_if_no_direct_calls_p while they are not
2971 cgraph_only_called_directly_p (it is possible they are called from another
2972 unit).
2973
2974 This function behaves as cgraph_only_called_directly_p because eliminating
2975 all uses of a COMDAT function does not necessarily make it disappear from
2976 the program unless we are compiling the whole program or doing LTO. In
2977 that case we know we win, since dynamic linking will not really discard
2978 the linkonce section. */
2979
2980 bool
2981 cgraph_node::will_be_removed_from_program_if_no_direct_calls_p
2982 (bool will_inline)
2983 {
2984 gcc_assert (!inlined_to);
2985 if (DECL_EXTERNAL (decl))
2986 return true;
2987
2988 if (!in_lto_p && !flag_whole_program)
2989 {
2990 /* If the symbol is in a comdat group, we need to verify that the whole
2991 comdat group becomes unreachable. Technically we could skip references
2992 from within the group, too. */
2993 if (!only_called_directly_p ())
2994 return false;
2995 if (same_comdat_group && externally_visible)
2996 {
2997 struct cgraph_node *target = ultimate_alias_target ();
2998
2999 if (will_inline && address_taken)
3000 return true;
3001 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
3002 next != this;
3003 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
3004 {
3005 if (!externally_visible)
3006 continue;
3007 if (!next->alias
3008 && !next->only_called_directly_p ())
3009 return false;
3010
3011 /* If we see a different symbol than THIS,
3012 be sure to check calls. */
3013 if (next->ultimate_alias_target () != target)
3014 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
3015 if (e->caller->get_comdat_group () != get_comdat_group ()
3016 || will_inline)
3017 return false;
3018 }
3019 }
3020 return true;
3021 }
3022 else
3023 return can_remove_if_no_direct_calls_p (will_inline);
3024 }
3025
3026
3027 /* Worker for cgraph_only_called_directly_p. */
3028
3029 static bool
3030 cgraph_not_only_called_directly_p_1 (cgraph_node *node, void *)
3031 {
3032 return !node->only_called_directly_or_aliased_p ();
3033 }
3034
3035 /* Return true when the function cgraph_node and all its aliases are only
3036 called directly,
3037 i.e. it is not externally visible, its address has not been taken, and
3038 it is not used in any other non-standard way. */
3039
3040 bool
3041 cgraph_node::only_called_directly_p (void)
3042 {
3043 gcc_assert (ultimate_alias_target () == this);
3044 return !call_for_symbol_and_aliases (cgraph_not_only_called_directly_p_1,
3045 NULL, true);
3046 }
3047
3048
3049 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
3050
3051 static bool
3052 collect_callers_of_node_1 (cgraph_node *node, void *data)
3053 {
3054 vec<cgraph_edge *> *redirect_callers = (vec<cgraph_edge *> *)data;
3055 cgraph_edge *cs;
3056 enum availability avail;
3057 node->ultimate_alias_target (&avail);
3058
3059 if (avail > AVAIL_INTERPOSABLE)
3060 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
3061 if (!cs->indirect_inlining_edge
3062 && !cs->caller->thunk.thunk_p)
3063 redirect_callers->safe_push (cs);
3064 return false;
3065 }
3066
3067 /* Collect all callers of cgraph_node and its aliases that are known to lead to
3068 cgraph_node (i.e. are not overwritable). */
3069
3070 vec<cgraph_edge *>
3071 cgraph_node::collect_callers (void)
3072 {
3073 vec<cgraph_edge *> redirect_callers = vNULL;
3074 call_for_symbol_thunks_and_aliases (collect_callers_of_node_1,
3075 &redirect_callers, false);
3076 return redirect_callers;
3077 }
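
/* A redirection sketch (NEW_NODE is a hypothetical replacement, e.g. a
   specialized clone): the returned vector is owned by the caller and must
   be released:

     vec<cgraph_edge *> callers = node->collect_callers ();
     for (unsigned i = 0; i < callers.length (); i++)
       callers[i]->redirect_callee (new_node);
     callers.release ();  */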
3078
3079
3080 /* Return TRUE if NODE2 is a clone of NODE or is equivalent to it. Return
3081 optimistically true if this cannot be determined. */
3082
3083 static bool
3084 clone_of_p (cgraph_node *node, cgraph_node *node2)
3085 {
3086 node = node->ultimate_alias_target ();
3087 node2 = node2->ultimate_alias_target ();
3088
3089 if (node2->clone_of == node
3090 || node2->former_clone_of == node->decl)
3091 return true;
3092
3093 if (!node->thunk.thunk_p && !node->former_thunk_p ())
3094 {
3095 while (node2 && node->decl != node2->decl)
3096 node2 = node2->clone_of;
3097 return node2 != NULL;
3098 }
3099
3100 /* There are no virtual clones of thunks, so check former_clone_of, or whether
3101 we might have skipped thunks because these adjustments are no longer
3102 necessary. */
3103 while (node->thunk.thunk_p || node->former_thunk_p ())
3104 {
3105 if (!node->thunk.this_adjusting)
3106 return false;
3107 /* In case of instrumented expanded thunks, which can have multiple calls
3108 in them, we do not know how to continue and just have to be
3109 optimistic. The same applies if all calls have already been inlined
3110 into the thunk. */
3111 if (!node->callees || node->callees->next_callee)
3112 return true;
3113 node = node->callees->callee->ultimate_alias_target ();
3114
3115 if (!node2->clone.param_adjustments
3116 || node2->clone.param_adjustments->first_param_intact_p ())
3117 return false;
3118 if (node2->former_clone_of == node->decl
3119 || node2->former_clone_of == node->former_clone_of)
3120 return true;
3121
3122 cgraph_node *n2 = node2;
3123 while (n2 && node->decl != n2->decl)
3124 n2 = n2->clone_of;
3125 if (n2)
3126 return true;
3127 }
3128
3129 return false;
3130 }
3131
3132 /* Verify edge count and frequency. */
3133
3134 bool
3135 cgraph_edge::verify_count ()
3136 {
3137 bool error_found = false;
3138 if (!count.verify ())
3139 {
3140 error ("caller edge count invalid");
3141 error_found = true;
3142 }
3143 return error_found;
3144 }
3145
3146 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
3147 static void
3148 cgraph_debug_gimple_stmt (function *this_cfun, gimple *stmt)
3149 {
3150 bool fndecl_was_null = false;
3151 /* debug_gimple_stmt needs the correct cfun. */
3152 if (cfun != this_cfun)
3153 set_cfun (this_cfun);
3154 /* ...and an actual current_function_decl. */
3155 if (!current_function_decl)
3156 {
3157 current_function_decl = this_cfun->decl;
3158 fndecl_was_null = true;
3159 }
3160 debug_gimple_stmt (stmt);
3161 if (fndecl_was_null)
3162 current_function_decl = NULL;
3163 }
3164
3165 /* Verify that call graph edge corresponds to DECL from the associated
3166 statement. Return true if the verification should fail. */
3167
3168 bool
3169 cgraph_edge::verify_corresponds_to_fndecl (tree decl)
3170 {
3171 cgraph_node *node;
3172
3173 if (!decl || callee->inlined_to)
3174 return false;
3175 if (symtab->state == LTO_STREAMING)
3176 return false;
3177 node = cgraph_node::get (decl);
3178
3179 /* We do not know if a node from a different partition is an alias or what it
3180 aliases and therefore cannot do the former_clone_of check reliably. When
3181 body_removed is set, we have lost all information about what was alias or
3182 thunk of and also cannot proceed. */
3183 if (!node
3184 || node->body_removed
3185 || node->in_other_partition
3186 || callee->icf_merged
3187 || callee->in_other_partition)
3188 return false;
3189
3190 node = node->ultimate_alias_target ();
3191
3192 /* Optimizers can redirect unreachable calls or calls triggering undefined
3193 behavior to builtin_unreachable. */
3194
3195 if (fndecl_built_in_p (callee->decl, BUILT_IN_UNREACHABLE))
3196 return false;
3197
3198 if (callee->former_clone_of != node->decl
3199 && (node != callee->ultimate_alias_target ())
3200 && !clone_of_p (node, callee))
3201 return true;
3202 else
3203 return false;
3204 }
3205
3206 /* Disable warnings about missing quoting in GCC diagnostics for
3207 the verification errors. Their format strings don't follow GCC
3208 diagnostic conventions and the calls are ultimately followed by
3209 one to internal_error. */
3210 #if __GNUC__ >= 10
3211 # pragma GCC diagnostic push
3212 # pragma GCC diagnostic ignored "-Wformat-diag"
3213 #endif
3214
3215 /* Verify consistency of the speculative call in NODE corresponding to STMT
3216 and LTO_STMT_UID. If INDIRECT is set, assume that it is the indirect
3217 edge of the call sequence. Return true if an error is found.
3218
3219 This function is called for every component of an indirect call (direct
3220 edges, the indirect edge and refs). To save duplicated work, full testing
3221 is done only in the call for the indirect edge. */
3222 static bool
3223 verify_speculative_call (struct cgraph_node *node, gimple *stmt,
3224 unsigned int lto_stmt_uid,
3225 struct cgraph_edge *indirect)
3226 {
3227 if (indirect == NULL)
3228 {
3229 for (indirect = node->indirect_calls; indirect;
3230 indirect = indirect->next_callee)
3231 if (indirect->call_stmt == stmt
3232 && indirect->lto_stmt_uid == lto_stmt_uid)
3233 break;
3234 if (!indirect)
3235 {
3236 error ("missing indirect call in speculative call sequence");
3237 return true;
3238 }
3239 if (!indirect->speculative)
3240 {
3241 error ("indirect call in speculative call sequence has no "
3242 "speculative flag");
3243 return true;
3244 }
3245 return false;
3246 }
3247
3248 /* Maximal number of targets. We probably will never want to have more than
3249 this. */
3250 const unsigned int num = 256;
3251 cgraph_edge *direct_calls[num];
3252 ipa_ref *refs[num];
3253
3254 for (unsigned int i = 0; i < num; i++)
3255 {
3256 direct_calls[i] = NULL;
3257 refs[i] = NULL;
3258 }
3259
3260 cgraph_edge *first_call = NULL;
3261 cgraph_edge *prev_call = NULL;
3262
3263 for (cgraph_edge *direct = node->callees; direct;
3264 direct = direct->next_callee)
3265 if (direct->call_stmt == stmt && direct->lto_stmt_uid == lto_stmt_uid)
3266 {
3267 if (!first_call)
3268 first_call = direct;
3269 if (prev_call && direct != prev_call->next_callee)
3270 {
3271 error ("speculative edges are not adjacent");
3272 return true;
3273 }
3274 prev_call = direct;
3275 if (!direct->speculative)
3276 {
3277 error ("direct call to %s in speculative call sequence has no "
3278 "speculative flag", direct->callee->dump_name ());
3279 return true;
3280 }
3281 if (direct->speculative_id >= num)
3282 {
3283 error ("direct call to %s in speculative call sequence has "
3284 "speculative_id %i out of range",
3285 direct->callee->dump_name (), direct->speculative_id);
3286 return true;
3287 }
3288 if (direct_calls[direct->speculative_id])
3289 {
3290 error ("duplicate direct call to %s in speculative call sequence "
3291 "with speculative_id %i",
3292 direct->callee->dump_name (), direct->speculative_id);
3293 return true;
3294 }
3295 direct_calls[direct->speculative_id] = direct;
3296 }
3297
3298 if (first_call->call_stmt
3299 && first_call != node->get_edge (first_call->call_stmt))
3300 {
3301 error ("call stmt hash does not point to first direct edge of "
3302 "speculative call sequence");
3303 return true;
3304 }
3305
3306 ipa_ref *ref;
3307 for (int i = 0; node->iterate_reference (i, ref); i++)
3308 if (ref->speculative
3309 && ref->stmt == stmt && ref->lto_stmt_uid == lto_stmt_uid)
3310 {
3311 if (ref->speculative_id >= num)
3312 {
3313 error ("direct call to %s in speculative call sequence has "
3314 "speculative_id %i out of range",
3315 ref->referred->dump_name (), ref->speculative_id);
3316 return true;
3317 }
3318 if (refs[ref->speculative_id])
3319 {
3320 error ("duplicate reference %s in speculative call sequence "
3321 "with speculative_id %i",
3322 ref->referred->dump_name (), ref->speculative_id);
3323 return true;
3324 }
3325 refs[ref->speculative_id] = ref;
3326 }
3327
3328 int num_targets = 0;
3329 for (unsigned int i = 0 ; i < num ; i++)
3330 {
3331 if (refs[i] && !direct_calls[i])
3332 {
3333 error ("missing direct call for speculation %i", i);
3334 return true;
3335 }
3336 if (!refs[i] && direct_calls[i])
3337 {
3338 error ("missing ref for speculation %i", i);
3339 return true;
3340 }
3341 if (refs[i] != NULL)
3342 num_targets++;
3343 }
3344
3345 if (num_targets != indirect->num_speculative_call_targets_p ())
3346 {
3347 error ("number of speculative targets %i mismatched with "
3348 "num_speculative_call_targets %i",
3349 num_targets,
3350 indirect->num_speculative_call_targets_p ());
3351 return true;
3352 }
3353 return false;
3354 }
3355
3356 /* Verify consistency of the given cgraph node. */
3357 DEBUG_FUNCTION void
3358 cgraph_node::verify_node (void)
3359 {
3360 cgraph_edge *e;
3361 function *this_cfun = DECL_STRUCT_FUNCTION (decl);
3362 basic_block this_block;
3363 gimple_stmt_iterator gsi;
3364 bool error_found = false;
3365 int i;
3366 ipa_ref *ref = NULL;
3367
3368 if (seen_error ())
3369 return;
3370
3371 timevar_push (TV_CGRAPH_VERIFY);
3372 error_found |= verify_base ();
3373 for (e = callees; e; e = e->next_callee)
3374 if (e->aux)
3375 {
3376 error ("aux field set for edge %s->%s",
3377 identifier_to_locale (e->caller->name ()),
3378 identifier_to_locale (e->callee->name ()));
3379 error_found = true;
3380 }
3381 if (!count.verify ())
3382 {
3383 error ("cgraph count invalid");
3384 error_found = true;
3385 }
3386 if (inlined_to && same_comdat_group)
3387 {
3388 error ("inline clone in same comdat group list");
3389 error_found = true;
3390 }
3391 if (inlined_to && !count.compatible_p (inlined_to->count))
3392 {
3393 error ("inline clone count is not compatible");
3394 count.debug ();
3395 inlined_to->count.debug ();
3396 error_found = true;
3397 }
3398 if (tp_first_run < 0)
3399 {
3400 error ("tp_first_run must be non-negative");
3401 error_found = true;
3402 }
3403 if (!definition && !in_other_partition && local)
3404 {
3405 error ("local symbols must be defined");
3406 error_found = true;
3407 }
3408 if (inlined_to && externally_visible)
3409 {
3410 error ("externally visible inline clone");
3411 error_found = true;
3412 }
3413 if (inlined_to && address_taken)
3414 {
3415 error ("inline clone with address taken");
3416 error_found = true;
3417 }
3418 if (inlined_to && force_output)
3419 {
3420 error ("inline clone is forced to output");
3421 error_found = true;
3422 }
3423 if (symtab->state != LTO_STREAMING)
3424 {
3425 if (calls_comdat_local && !same_comdat_group)
3426 {
3427 error ("calls_comdat_local is set outside of a comdat group");
3428 error_found = true;
3429 }
3430 if (!inlined_to && calls_comdat_local != check_calls_comdat_local_p ())
3431 {
3432 error ("invalid calls_comdat_local flag");
3433 error_found = true;
3434 }
3435 }
3436 if (DECL_IS_MALLOC (decl)
3437 && !POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
3438 {
3439 error ("malloc attribute should be used for a function that "
3440 "returns a pointer");
3441 error_found = true;
3442 }
3443 for (e = indirect_calls; e; e = e->next_callee)
3444 {
3445 if (e->aux)
3446 {
3447 error ("aux field set for indirect edge from %s",
3448 identifier_to_locale (e->caller->name ()));
3449 error_found = true;
3450 }
3451 if (!e->count.compatible_p (count))
3452 {
3453 error ("edge count is not compatible with function count");
3454 e->count.debug ();
3455 count.debug ();
3456 error_found = true;
3457 }
3458 if (!e->indirect_unknown_callee
3459 || !e->indirect_info)
3460 {
3461 error ("An indirect edge from %s is not marked as indirect or has "
3462 "associated indirect_info, the corresponding statement is: ",
3463 identifier_to_locale (e->caller->name ()));
3464 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3465 error_found = true;
3466 }
3467 if (e->call_stmt && e->lto_stmt_uid)
3468 {
3469 error ("edge has both call_stmt and lto_stmt_uid set");
3470 error_found = true;
3471 }
3472 }
3473 bool check_comdat = comdat_local_p ();
3474 for (e = callers; e; e = e->next_caller)
3475 {
3476 if (e->verify_count ())
3477 error_found = true;
3478 if (check_comdat
3479 && !in_same_comdat_group_p (e->caller))
3480 {
3481 error ("comdat-local function called by %s outside its comdat",
3482 identifier_to_locale (e->caller->name ()));
3483 error_found = true;
3484 }
3485 if (!e->inline_failed)
3486 {
3487 if (inlined_to
3488 != (e->caller->inlined_to
3489 ? e->caller->inlined_to : e->caller))
3490 {
3491 error ("inlined_to pointer is wrong");
3492 error_found = true;
3493 }
3494 if (callers->next_caller)
3495 {
3496 error ("multiple inline callers");
3497 error_found = true;
3498 }
3499 }
3500 else
3501 if (inlined_to)
3502 {
3503 error ("inlined_to pointer set for noninline callers");
3504 error_found = true;
3505 }
3506 }
3507 for (e = callees; e; e = e->next_callee)
3508 {
3509 if (e->verify_count ())
3510 error_found = true;
3511 if (!e->count.compatible_p (count))
3512 {
3513 error ("edge count is not compatible with function count");
3514 e->count.debug ();
3515 count.debug ();
3516 error_found = true;
3517 }
3518 if (gimple_has_body_p (e->caller->decl)
3519 && !e->caller->inlined_to
3520 && !e->speculative
3521 /* Optimized out calls are redirected to __builtin_unreachable. */
3522 && (e->count.nonzero_p ()
3523 || ! e->callee->decl
3524 || !fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
3525 && count
3526 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3527 && (!e->count.ipa_p ()
3528 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3529 {
3530 error ("caller edge count does not match BB count");
3531 fprintf (stderr, "edge count: ");
3532 e->count.dump (stderr);
3533 fprintf (stderr, "\n bb count: ");
3534 gimple_bb (e->call_stmt)->count.dump (stderr);
3535 fprintf (stderr, "\n");
3536 error_found = true;
3537 }
3538 if (e->call_stmt && e->lto_stmt_uid)
3539 {
3540 error ("edge has both call_stmt and lto_stmt_uid set");
3541 error_found = true;
3542 }
3543 if (e->speculative
3544 && verify_speculative_call (e->caller, e->call_stmt, e->lto_stmt_uid,
3545 NULL))
3546 error_found = true;
3547 }
3548 for (e = indirect_calls; e; e = e->next_callee)
3549 {
3550 if (e->verify_count ())
3551 error_found = true;
3552 if (gimple_has_body_p (e->caller->decl)
3553 && !e->caller->inlined_to
3554 && !e->speculative
3555 && e->count.ipa_p ()
3556 && count
3557 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3558 && (!e->count.ipa_p ()
3559 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3560 {
3561 error ("indirect call count does not match BB count");
3562 fprintf (stderr, "edge count: ");
3563 e->count.dump (stderr);
3564 fprintf (stderr, "\n bb count: ");
3565 gimple_bb (e->call_stmt)->count.dump (stderr);
3566 fprintf (stderr, "\n");
3567 error_found = true;
3568 }
3569 if (e->speculative
3570 && verify_speculative_call (e->caller, e->call_stmt, e->lto_stmt_uid,
3571 e))
3572 error_found = true;
3573 }
3574 for (i = 0; iterate_reference (i, ref); i++)
3575 {
3576 if (ref->stmt && ref->lto_stmt_uid)
3577 {
3578 error ("reference has both stmt and lto_stmt_uid set");
3579 error_found = true;
3580 }
3581 if (ref->speculative
3582 && verify_speculative_call (this, ref->stmt,
3583 ref->lto_stmt_uid, NULL))
3584 error_found = true;
3585 }
3586
3587 if (!callers && inlined_to)
3588 {
3589 error ("inlined_to pointer is set but no predecessors found");
3590 error_found = true;
3591 }
3592 if (inlined_to == this)
3593 {
3594 error ("inlined_to pointer refers to itself");
3595 error_found = true;
3596 }
3597
3598 if (clone_of)
3599 {
3600 cgraph_node *first_clone = clone_of->clones;
3601 if (first_clone != this)
3602 {
3603 if (prev_sibling_clone->clone_of != clone_of)
3604 {
3605 error ("cgraph_node has wrong clone_of");
3606 error_found = true;
3607 }
3608 }
3609 }
3610 if (clones)
3611 {
3612 cgraph_node *n;
3613 for (n = clones; n; n = n->next_sibling_clone)
3614 if (n->clone_of != this)
3615 break;
3616 if (n)
3617 {
3618 error ("cgraph_node has wrong clone list");
3619 error_found = true;
3620 }
3621 }
3622 if ((prev_sibling_clone || next_sibling_clone) && !clone_of)
3623 {
3624 error ("cgraph_node is in clone list but it is not clone");
3625 error_found = true;
3626 }
3627 if (!prev_sibling_clone && clone_of && clone_of->clones != this)
3628 {
3629 error ("cgraph_node has wrong prev_clone pointer");
3630 error_found = true;
3631 }
3632 if (prev_sibling_clone && prev_sibling_clone->next_sibling_clone != this)
3633 {
3634 error ("double linked list of clones corrupted");
3635 error_found = true;
3636 }
3637
3638 if (analyzed && alias)
3639 {
3640 bool ref_found = false;
3641 int i;
3642 ipa_ref *ref = NULL;
3643
3644 if (callees)
3645 {
3646 error ("Alias has call edges");
3647 error_found = true;
3648 }
3649 for (i = 0; iterate_reference (i, ref); i++)
3650 if (ref->use != IPA_REF_ALIAS)
3651 {
3652 error ("Alias has non-alias reference");
3653 error_found = true;
3654 }
3655 else if (ref_found)
3656 {
3657 error ("Alias has more than one alias reference");
3658 error_found = true;
3659 }
3660 else
3661 ref_found = true;
3662 if (!ref_found)
3663 {
3664 error ("Analyzed alias has no reference");
3665 error_found = true;
3666 }
3667 }
3668
3669 if (analyzed && thunk.thunk_p)
3670 {
3671 if (!callees)
3672 {
3673 error ("No edge out of thunk node");
3674 error_found = true;
3675 }
3676 else if (callees->next_callee)
3677 {
3678 error ("More than one edge out of thunk node");
3679 error_found = true;
3680 }
3681 if (gimple_has_body_p (decl) && !inlined_to)
3682 {
3683 error ("Thunk is not supposed to have body");
3684 error_found = true;
3685 }
3686 }
3687 else if (analyzed && gimple_has_body_p (decl)
3688 && !TREE_ASM_WRITTEN (decl)
3689 && (!DECL_EXTERNAL (decl) || inlined_to)
3690 && !flag_wpa)
3691 {
3692 if (this_cfun->cfg)
3693 {
3694 hash_set<gimple *> stmts;
3695
3696 /* Reach the trees by walking over the CFG, and note the
3697 enclosing basic-blocks in the call edges. */
3698 FOR_EACH_BB_FN (this_block, this_cfun)
3699 {
3700 for (gsi = gsi_start_phis (this_block);
3701 !gsi_end_p (gsi); gsi_next (&gsi))
3702 stmts.add (gsi_stmt (gsi));
3703 for (gsi = gsi_start_bb (this_block);
3704 !gsi_end_p (gsi);
3705 gsi_next (&gsi))
3706 {
3707 gimple *stmt = gsi_stmt (gsi);
3708 stmts.add (stmt);
3709 if (is_gimple_call (stmt))
3710 {
3711 cgraph_edge *e = get_edge (stmt);
3712 tree decl = gimple_call_fndecl (stmt);
3713 if (e)
3714 {
3715 if (e->aux)
3716 {
3717 error ("shared call_stmt:");
3718 cgraph_debug_gimple_stmt (this_cfun, stmt);
3719 error_found = true;
3720 }
3721 if (!e->indirect_unknown_callee)
3722 {
3723 if (e->verify_corresponds_to_fndecl (decl))
3724 {
3725 error ("edge points to wrong declaration:");
3726 debug_tree (e->callee->decl);
3727 fprintf (stderr," Instead of:");
3728 debug_tree (decl);
3729 error_found = true;
3730 }
3731 }
3732 else if (decl)
3733 {
3734 error ("an indirect edge with unknown callee "
3735 "corresponding to a call_stmt with "
3736 "a known declaration:");
3737 error_found = true;
3738 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3739 }
3740 e->aux = (void *)1;
3741 }
3742 else if (decl)
3743 {
3744 error ("missing callgraph edge for call stmt:");
3745 cgraph_debug_gimple_stmt (this_cfun, stmt);
3746 error_found = true;
3747 }
3748 }
3749 }
3750 }
3751 for (i = 0; iterate_reference (i, ref); i++)
3752 if (ref->stmt && !stmts.contains (ref->stmt))
3753 {
3754 error ("reference to dead statement");
3755 cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
3756 error_found = true;
3757 }
3758 }
3759 else
3760 /* No CFG available?! */
3761 gcc_unreachable ();
3762
3763 for (e = callees; e; e = e->next_callee)
3764 {
3765 if (!e->aux && !e->speculative)
3766 {
3767 error ("edge %s->%s has no corresponding call_stmt",
3768 identifier_to_locale (e->caller->name ()),
3769 identifier_to_locale (e->callee->name ()));
3770 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3771 error_found = true;
3772 }
3773 e->aux = 0;
3774 }
3775 for (e = indirect_calls; e; e = e->next_callee)
3776 {
3777 if (!e->aux && !e->speculative)
3778 {
3779 error ("an indirect edge from %s has no corresponding call_stmt",
3780 identifier_to_locale (e->caller->name ()));
3781 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3782 error_found = true;
3783 }
3784 e->aux = 0;
3785 }
3786 }
3787
3788 if (nested != NULL)
3789 {
3790 for (cgraph_node *n = nested; n != NULL; n = n->next_nested)
3791 {
3792 if (n->origin == NULL)
3793 {
3794 error ("missing origin for a node in a nested list");
3795 error_found = true;
3796 }
3797 else if (n->origin != this)
3798 {
3799 error ("origin points to a different parent");
3800 error_found = true;
3801 break;
3802 }
3803 }
3804 }
3805 if (next_nested != NULL && origin == NULL)
3806 {
3807 error ("missing origin for a node in a nested list");
3808 error_found = true;
3809 }
3810
3811 if (error_found)
3812 {
3813 dump (stderr);
3814 internal_error ("verify_cgraph_node failed");
3815 }
3816 timevar_pop (TV_CGRAPH_VERIFY);
3817 }
3818
3819 /* Verify whole cgraph structure. */
3820 DEBUG_FUNCTION void
3821 cgraph_node::verify_cgraph_nodes (void)
3822 {
3823 cgraph_node *node;
3824
3825 if (seen_error ())
3826 return;
3827
3828 FOR_EACH_FUNCTION (node)
3829 node->verify ();
3830 }
3831
3832 #if __GNUC__ >= 10
3833 # pragma GCC diagnostic pop
3834 #endif
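
/* Illustrative sketch (not part of GCC): a pass that mutates the call graph
   can re-run the verifier defined above when internal checking is enabled.
   The helper name example_checked_update is hypothetical; flag_checking and
   cgraph_node::verify are the entry points actually used.  */

static void
example_checked_update (cgraph_node *node)
{
  /* ... transform NODE and its edges here ...  */

  /* Verify just the node that was touched; verify_cgraph_nodes walks
     every function and is much more expensive.  */
  if (flag_checking)
    node->verify ();
}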
3835
3836 /* Walk the alias chain to return the function this cgraph_node is an alias
3837 of. Walk through thunks, too.
3838 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3839 When REF is non-NULL, assume that reference happens in symbol REF
3840 when determining the availability. */
3841
3842 cgraph_node *
3843 cgraph_node::function_symbol (enum availability *availability,
3844 struct symtab_node *ref)
3845 {
3846 cgraph_node *node = ultimate_alias_target (availability, ref);
3847
3848 while (node->thunk.thunk_p)
3849 {
3850 enum availability a;
3851
3852 ref = node;
3853 node = node->callees->callee;
3854 node = node->ultimate_alias_target (availability ? &a : NULL, ref);
3855 if (availability && a < *availability)
3856 *availability = a;
3857 }
3858 return node;
3859 }
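
/* Illustrative sketch (not part of GCC): a common use of function_symbol is
   to find the real function behind a callee that may be an alias or a thunk
   and to give up when the chain can be interposed at link time.  The helper
   name example_known_call_target is hypothetical.  */

static cgraph_node *
example_known_call_target (cgraph_edge *e)
{
  /* Assumes a direct edge, i.e. E->callee is non-NULL.  */
  enum availability avail;
  cgraph_node *target = e->callee->function_symbol (&avail, e->caller);

  /* AVAIL_INTERPOSABLE targets may be replaced at link time, so only trust
     the body when it is at least AVAIL_AVAILABLE.  */
  return avail >= AVAIL_AVAILABLE ? target : NULL;
}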
3860
3861 /* Walk the alias chain to return the function this cgraph_node is an alias
3862 of. Walk through non-virtual thunks, too. Thus we return either a function
3863 or a virtual thunk node.
3864 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3865 When REF is non-NULL, assume that reference happens in symbol REF
3866 when determining the availability. */
3867
3868 cgraph_node *
3869 cgraph_node::function_or_virtual_thunk_symbol
3870 (enum availability *availability,
3871 struct symtab_node *ref)
3872 {
3873 cgraph_node *node = ultimate_alias_target (availability, ref);
3874
3875 while (node->thunk.thunk_p && !node->thunk.virtual_offset_p)
3876 {
3877 enum availability a;
3878
3879 ref = node;
3880 node = node->callees->callee;
3881 node = node->ultimate_alias_target (availability ? &a : NULL, ref);
3882 if (availability && a < *availability)
3883 *availability = a;
3884 }
3885 return node;
3886 }
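
/* Illustrative sketch (not part of GCC): when redirecting a call it can be
   wrong to look through a virtual (this-adjusting) thunk, because the thunk
   changes the ABI of the call.  function_or_virtual_thunk_symbol stops at
   such a thunk, whereas function_symbol above walks through it.  The helper
   name example_redirect_target is hypothetical.  */

static cgraph_node *
example_redirect_target (cgraph_edge *e)
{
  /* Assumes a direct edge, i.e. E->callee is non-NULL.  */
  enum availability avail;
  cgraph_node *target
    = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
  return avail > AVAIL_INTERPOSABLE ? target : NULL;
}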
3887
3888 /* When doing LTO, read cgraph_node's body from disk if it is not already
3889 present. */
3890
3891 bool
3892 cgraph_node::get_untransformed_body (void)
3893 {
3894 lto_file_decl_data *file_data;
3895 const char *data, *name;
3896 size_t len;
3897 tree decl = this->decl;
3898
3899 /* Check if the body is already there. Either we have a gimple body, or the
3900 function is a thunk, in which case DECL_ARGUMENTS is already set. */
3901 if (DECL_ARGUMENTS (decl) || gimple_has_body_p (decl))
3902 return false;
3903
3904 gcc_assert (in_lto_p && !DECL_RESULT (decl));
3905
3906 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3907
3908 file_data = lto_file_data;
3909 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3910
3911 /* We may have renamed the declaration, e.g., a static function. */
3912 name = lto_get_decl_name_mapping (file_data, name);
3913 struct lto_in_decl_state *decl_state
3914 = lto_get_function_in_decl_state (file_data, decl);
3915
3916 cgraph_node *origin = this;
3917 while (origin->clone_of)
3918 origin = origin->clone_of;
3919
3920 int stream_order = origin->order - file_data->order_base;
3921 data = lto_get_section_data (file_data, LTO_section_function_body,
3922 name, stream_order, &len,
3923 decl_state->compressed);
3924 if (!data)
3925 fatal_error (input_location, "%s: section %s.%d is missing",
3926 file_data->file_name, name, stream_order);
3927
3928 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
3929
3930 if (!quiet_flag)
3931 fprintf (stderr, " in:%s", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
3932 lto_input_function_body (file_data, this, data);
3933 lto_stats.num_function_bodies++;
3934 lto_free_section_data (file_data, LTO_section_function_body, name,
3935 data, len, decl_state->compressed);
3936 lto_free_function_in_decl_state_for_node (this);
3937 /* Keep the LTO file data so ipa-inline-analysis knows about cross-module
3938 inlining. */
3939
3940 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3941
3942 return true;
3943 }
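
/* Illustrative sketch (not part of GCC): during LTO, analysis code may need
   the gimple body of a function that has not been read yet, and the usual
   pattern is to stream it in lazily before looking at it.  The helper name
   example_body_stmt_count is hypothetical.  */

static int
example_body_stmt_count (cgraph_node *node)
{
  if (in_lto_p)
    node->get_untransformed_body ();

  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
  if (!fn || !fn->cfg)
    return 0;

  int count = 0;
  basic_block bb;
  FOR_EACH_BB_FN (bb, fn)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	 gsi_next (&gsi))
      count++;
  return count;
}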
3944
3945 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
3946 if it is not already present. When some IPA transformations are scheduled,
3947 apply them. */
3948
3949 bool
3950 cgraph_node::get_body (void)
3951 {
3952 bool updated;
3953
3954 updated = get_untransformed_body ();
3955
3956 /* Getting a transformed body makes no sense for inline clones;
3957 we should never use this on real clones either, because they are
3958 materialized early.
3959 TODO: Materializing clones here will likely lead to a smaller LTRANS
3960 footprint. */
3961 gcc_assert (!inlined_to && !clone_of);
3962 if (ipa_transforms_to_apply.exists ())
3963 {
3964 opt_pass *saved_current_pass = current_pass;
3965 FILE *saved_dump_file = dump_file;
3966 const char *saved_dump_file_name = dump_file_name;
3967 dump_flags_t saved_dump_flags = dump_flags;
3968 dump_file_name = NULL;
3969 set_dump_file (NULL);
3970
3971 push_cfun (DECL_STRUCT_FUNCTION (decl));
3972
3973 update_ssa (TODO_update_ssa_only_virtuals);
3974 execute_all_ipa_transforms (true);
3975 cgraph_edge::rebuild_edges ();
3976 free_dominance_info (CDI_DOMINATORS);
3977 free_dominance_info (CDI_POST_DOMINATORS);
3978 pop_cfun ();
3979 updated = true;
3980
3981 current_pass = saved_current_pass;
3982 set_dump_file (saved_dump_file);
3983 dump_file_name = saved_dump_file_name;
3984 dump_flags = saved_dump_flags;
3985 }
3986 return updated;
3987 }
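
/* Illustrative sketch (not part of GCC): late IPA code that wants the body
   with all pending IPA transforms applied calls get_body and then works
   inside that function's cfun.  The helper name example_walk_body is
   hypothetical.  */

static void
example_walk_body (cgraph_node *node)
{
  /* Only valid for real, non-inlined bodies; see the assert in get_body.  */
  node->get_body ();

  push_cfun (DECL_STRUCT_FUNCTION (node->decl));
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* ... inspect or rewrite the statements of BB here ...  */
    }
  pop_cfun ();
}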
3988
3989 /* Return the DECL_STRUCT_FUNCTION of the function, falling back to the node it is a clone of. */
3990
3991 struct function *
3992 cgraph_node::get_fun () const
3993 {
3994 const cgraph_node *node = this;
3995 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
3996
3997 while (!fun && node->clone_of)
3998 {
3999 node = node->clone_of;
4000 fun = DECL_STRUCT_FUNCTION (node->decl);
4001 }
4002
4003 return fun;
4004 }
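
/* Illustrative sketch (not part of GCC): clones created during IPA need not
   have their own DECL_STRUCT_FUNCTION until they are materialized, so code
   that only reads properties of the body can go through get_fun, which falls
   back to the node the clone was created from.  The helper name
   example_block_count is hypothetical.  */

static int
example_block_count (const cgraph_node *node)
{
  struct function *fn = node->get_fun ();
  return (fn && fn->cfg) ? n_basic_blocks_for_fn (fn) : 0;
}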
4005
4006 /* Reset all state within cgraph.c so that we can rerun the compiler
4007 within the same process. For use by toplev::finalize. */
4008
4009 void
4010 cgraph_c_finalize (void)
4011 {
4012 symtab = NULL;
4013
4014 x_cgraph_nodes_queue = NULL;
4015
4016 cgraph_fnver_htab = NULL;
4017 version_info_node = NULL;
4018 }
4019
4020 /* A worker for call_for_symbol_and_aliases. */
4021
4022 bool
4023 cgraph_node::call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
4024 void *),
4025 void *data,
4026 bool include_overwritable)
4027 {
4028 ipa_ref *ref;
4029 FOR_EACH_ALIAS (this, ref)
4030 {
4031 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
4032 if (include_overwritable
4033 || alias->get_availability () > AVAIL_INTERPOSABLE)
4034 if (alias->call_for_symbol_and_aliases (callback, data,
4035 include_overwritable))
4036 return true;
4037 }
4038 return false;
4039 }
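
/* Illustrative sketch (not part of GCC): a worker for
   call_for_symbol_and_aliases returns true to stop the walk early.  This
   hypothetical worker asks whether the function or any of its aliases is
   externally visible.  */

static bool
example_externally_visible_p (cgraph_node *node, void *)
{
  return node->externally_visible;
}

/* It would be used as
     node->call_for_symbol_and_aliases (example_externally_visible_p,
					 NULL, true);  */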
4040
4041 /* Return true if NODE is called by a thunk. */
4042
4043 bool
4044 cgraph_node::has_thunk_p (cgraph_node *node, void *)
4045 {
4046 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
4047 if (e->caller->thunk.thunk_p)
4048 return true;
4049 return false;
4050 }
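
/* Illustrative sketch (not part of GCC): has_thunk_p is written as a
   call_for_symbol_and_aliases worker, so asking whether a function or any of
   its aliases is reached through a thunk looks like this (the helper name
   example_reached_through_thunk_p is hypothetical).  */

static bool
example_reached_through_thunk_p (cgraph_node *node)
{
  return node->call_for_symbol_and_aliases (cgraph_node::has_thunk_p,
					    NULL, true);
}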
4051
4052 /* Return the expected number of executions of the call per execution of the function it resides in. */
4053
4054 sreal
4055 cgraph_edge::sreal_frequency ()
4056 {
4057 return count.to_sreal_scale (caller->inlined_to
4058 ? caller->inlined_to->count
4059 : caller->count);
4060 }
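
/* Illustrative sketch (not part of GCC): sreal_frequency is the edge count
   scaled by the entry count of the function the call site finally lands in,
   so a value of 1 means roughly one execution per invocation; an edge count
   of 50 in a function entered 100 times yields 0.5.  The helper name and the
   0.9 threshold below are hypothetical.  */

static bool
example_edge_is_hot_p (cgraph_edge *e)
{
  return e->sreal_frequency ().to_double () >= 0.9;
}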
4061
4062
4063 /* During LTO stream-in, this can be used to check whether the call can
4064 possibly be internal to the current translation unit. */
4065
4066 bool
4067 cgraph_edge::possibly_call_in_translation_unit_p (void)
4068 {
4069 gcc_checking_assert (in_lto_p && caller->prevailing_p ());
4070
4071 /* With incremental linking we may end up getting the function body later. */
4072 if (flag_incremental_link == INCREMENTAL_LINK_LTO)
4073 return true;
4074
4075 /* We could be smarter here and avoid streaming in indirect calls we cannot
4076 track, but that would require arranging to stream the indirect call
4077 summary first. */
4078 if (!callee)
4079 return true;
4080
4081 /* If callee is local to the original translation unit, it will be
4082 defined. */
4083 if (!TREE_PUBLIC (callee->decl) && !DECL_EXTERNAL (callee->decl))
4084 return true;
4085
4086 /* Otherwise we need to look up the prevailing symbol (the symbol table is
4087 not merged yet) and see whether it is a definition. In fact we may also
4088 resolve aliases, but that is probably not too important. */
4089 symtab_node *node = callee;
4090 for (int n = 10; node->previous_sharing_asm_name && n ; n--)
4091 node = node->previous_sharing_asm_name;
4092 if (node->previous_sharing_asm_name)
4093 node = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (callee->decl));
4094 gcc_assert (TREE_PUBLIC (node->decl));
4095 return node->get_availability () >= AVAIL_INTERPOSABLE;
4096 }
4097
4098 /* Return the number of speculative call targets of this edge. */
4099
4100 int
4101 cgraph_edge::num_speculative_call_targets_p (void)
4102 {
4103 return indirect_info ? indirect_info->num_speculative_call_targets : 0;
4104 }
4105
4106 /* Check whether the function calls a comdat-local function. This is used to
4107 recompute the calls_comdat_local flag after function transformations. */
4108 bool
4109 cgraph_node::check_calls_comdat_local_p ()
4110 {
4111 for (cgraph_edge *e = callees; e; e = e->next_callee)
4112 if (e->inline_failed
4113 ? e->callee->comdat_local_p ()
4114 : e->callee->check_calls_comdat_local_p ())
4115 return true;
4116 return false;
4117 }
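
/* Illustrative sketch (not part of GCC): after inlining or edge redirection
   changes which callees are comdat-local, the cached flag mentioned above
   can be refreshed from the recursive check (the helper name
   example_refresh_comdat_flag is hypothetical).  */

static void
example_refresh_comdat_flag (cgraph_node *node)
{
  node->calls_comdat_local = node->check_calls_comdat_local_p ();
}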
4118
4119 /* A stashed copy of "symtab" for use by selftest::symbol_table_test.
4120 This needs to be a global so that it can be a GC root, and thus
4121 prevent the stashed copy from being garbage-collected if the GC runs
4122 during a symbol_table_test. */
4123
4124 symbol_table *saved_symtab;
4125
4126 #if CHECKING_P
4127
4128 namespace selftest {
4129
4130 /* class selftest::symbol_table_test. */
4131
4132 /* Constructor. Store the old value of symtab, and create a new one. */
4133
4134 symbol_table_test::symbol_table_test ()
4135 {
4136 gcc_assert (saved_symtab == NULL);
4137 saved_symtab = symtab;
4138 symtab = new (ggc_alloc<symbol_table> ()) symbol_table ();
4139 }
4140
4141 /* Destructor. Restore the old value of symtab. */
4142
4143 symbol_table_test::~symbol_table_test ()
4144 {
4145 gcc_assert (saved_symtab != NULL);
4146 symtab = saved_symtab;
4147 saved_symtab = NULL;
4148 }
4149
4150 /* Verify that symbol_table_test works. */
4151
4152 static void
4153 test_symbol_table_test ()
4154 {
4155 /* Simulate running two selftests involving symbol tables. */
4156 for (int i = 0; i < 2; i++)
4157 {
4158 symbol_table_test stt;
4159 tree test_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
4160 get_identifier ("test_decl"),
4161 build_function_type_list (void_type_node,
4162 NULL_TREE));
4163 cgraph_node *node = cgraph_node::get_create (test_decl);
4164 gcc_assert (node);
4165
4166 /* Verify that the node has order 0 on both iterations,
4167 and thus that nodes have predictable dump names in selftests. */
4168 ASSERT_EQ (node->order, 0);
4169 ASSERT_STREQ (node->dump_name (), "test_decl/0");
4170 }
4171 }
4172
4173 /* Run all of the selftests within this file. */
4174
4175 void
4176 cgraph_c_tests ()
4177 {
4178 test_symbol_table_test ();
4179 }
4180
4181 } // namespace selftest
4182
4183 #endif /* CHECKING_P */
4184
4185 #include "gt-cgraph.h"