gcc/cgraph.c
1 /* Callgraph handling code.
2 Copyright (C) 2003-2019 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file contains basic routines for manipulating the call graph.
22
23 The call-graph is a data structure designed for inter-procedural
24 optimization. It represents a multi-graph where nodes are functions
25 (symbols within symbol table) and edges are call sites. */
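
/* Illustrative example (editorial sketch, not part of the original sources):
   for a translation unit such as

     static int helper (int x) { return x + 1; }
     int caller (int y) { return helper (y) + helper (2 * y); }

   the call graph contains one cgraph_node for HELPER, one for CALLER, and
   two cgraph_edge objects, one per call site, each having the node for
   CALLER as caller and the node for HELPER as callee.  */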
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "backend.h"
31 #include "target.h"
32 #include "rtl.h"
33 #include "tree.h"
34 #include "gimple.h"
35 #include "predict.h"
36 #include "alloc-pool.h"
37 #include "gimple-ssa.h"
38 #include "cgraph.h"
39 #include "lto-streamer.h"
40 #include "fold-const.h"
41 #include "varasm.h"
42 #include "calls.h"
43 #include "print-tree.h"
44 #include "langhooks.h"
45 #include "intl.h"
46 #include "tree-eh.h"
47 #include "gimple-iterator.h"
48 #include "tree-cfg.h"
49 #include "tree-ssa.h"
50 #include "value-prof.h"
51 #include "ipa-utils.h"
52 #include "symbol-summary.h"
53 #include "tree-vrp.h"
54 #include "ipa-prop.h"
55 #include "ipa-fnsummary.h"
56 #include "cfgloop.h"
57 #include "gimple-pretty-print.h"
58 #include "tree-dfa.h"
59 #include "profile.h"
60 #include "params.h"
61 #include "context.h"
62 #include "gimplify.h"
63 #include "stringpool.h"
64 #include "attribs.h"
65 #include "selftest.h"
66
67 /* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
68 #include "tree-pass.h"
69
70 /* Queue of cgraph nodes scheduled to be lowered. */
71 symtab_node *x_cgraph_nodes_queue;
72 #define cgraph_nodes_queue ((cgraph_node *)x_cgraph_nodes_queue)
73
74 /* Symbol table global context. */
75 symbol_table *symtab;
76
77 /* List of hooks triggered on cgraph_edge events. */
78 struct cgraph_edge_hook_list {
79 cgraph_edge_hook hook;
80 void *data;
81 struct cgraph_edge_hook_list *next;
82 };
83
84 /* List of hooks triggered on cgraph_node events. */
85 struct cgraph_node_hook_list {
86 cgraph_node_hook hook;
87 void *data;
88 struct cgraph_node_hook_list *next;
89 };
90
91 /* List of hooks triggered on events involving two cgraph_edges. */
92 struct cgraph_2edge_hook_list {
93 cgraph_2edge_hook hook;
94 void *data;
95 struct cgraph_2edge_hook_list *next;
96 };
97
98 /* List of hooks triggered on events involving two cgraph_nodes. */
99 struct cgraph_2node_hook_list {
100 cgraph_2node_hook hook;
101 void *data;
102 struct cgraph_2node_hook_list *next;
103 };
104
105 /* Hash descriptor for cgraph_function_version_info. */
106
107 struct function_version_hasher : ggc_ptr_hash<cgraph_function_version_info>
108 {
109 static hashval_t hash (cgraph_function_version_info *);
110 static bool equal (cgraph_function_version_info *,
111 cgraph_function_version_info *);
112 };
113
114 /* Map a cgraph_node to cgraph_function_version_info using this htab.
115 The cgraph_function_version_info has a THIS_NODE field that is the
116    corresponding cgraph_node.  */
117
118 static GTY(()) hash_table<function_version_hasher> *cgraph_fnver_htab = NULL;
119
120 /* Hash function for cgraph_fnver_htab. */
121 hashval_t
122 function_version_hasher::hash (cgraph_function_version_info *ptr)
123 {
124 int uid = ptr->this_node->get_uid ();
125 return (hashval_t)(uid);
126 }
127
128 /* eq function for cgraph_fnver_htab. */
129 bool
130 function_version_hasher::equal (cgraph_function_version_info *n1,
131 cgraph_function_version_info *n2)
132 {
133 return n1->this_node->get_uid () == n2->this_node->get_uid ();
134 }
135
136 /* Mark all allocated nodes as GC roots.  */
137 static GTY(()) struct cgraph_function_version_info *
138 version_info_node = NULL;
139
140 /* Return true if NODE's address can be compared. */
141
142 bool
143 symtab_node::address_can_be_compared_p ()
144 {
145 /* Address of virtual tables and functions is never compared. */
146 if (DECL_VIRTUAL_P (decl))
147 return false;
148 /* Address of C++ cdtors is never compared. */
149 if (is_a <cgraph_node *> (this)
150 && (DECL_CXX_CONSTRUCTOR_P (decl)
151 || DECL_CXX_DESTRUCTOR_P (decl)))
152 return false;
153   /* Addresses of constant pool symbols are never compared.
154      flag_merge_constants permits us to assume the same for readonly vars.  */
155 if (is_a <varpool_node *> (this)
156 && (DECL_IN_CONSTANT_POOL (decl)
157 || (flag_merge_constants >= 2
158 && TREE_READONLY (decl) && !TREE_THIS_VOLATILE (decl))))
159 return false;
160 return true;
161 }
162
163 /* Get the cgraph_function_version_info node corresponding to node. */
164 cgraph_function_version_info *
165 cgraph_node::function_version (void)
166 {
167 cgraph_function_version_info key;
168 key.this_node = this;
169
170 if (cgraph_fnver_htab == NULL)
171 return NULL;
172
173 return cgraph_fnver_htab->find (&key);
174 }
175
176 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
177 corresponding to cgraph_node NODE. */
178 cgraph_function_version_info *
179 cgraph_node::insert_new_function_version (void)
180 {
181 version_info_node = NULL;
182 version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
183 version_info_node->this_node = this;
184
185 if (cgraph_fnver_htab == NULL)
186 cgraph_fnver_htab = hash_table<function_version_hasher>::create_ggc (2);
187
188 *cgraph_fnver_htab->find_slot (version_info_node, INSERT)
189 = version_info_node;
190 return version_info_node;
191 }
192
193 /* Remove the cgraph_function_version_info node given by DECL_V. */
194 static void
195 delete_function_version (cgraph_function_version_info *decl_v)
196 {
197 if (decl_v == NULL)
198 return;
199
200 if (decl_v->prev != NULL)
201 decl_v->prev->next = decl_v->next;
202
203 if (decl_v->next != NULL)
204 decl_v->next->prev = decl_v->prev;
205
206 if (cgraph_fnver_htab != NULL)
207 cgraph_fnver_htab->remove_elt (decl_v);
208 }
209
210 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
211 DECL is a duplicate declaration. */
212 void
213 cgraph_node::delete_function_version_by_decl (tree decl)
214 {
215 cgraph_node *decl_node = cgraph_node::get (decl);
216
217 if (decl_node == NULL)
218 return;
219
220 delete_function_version (decl_node->function_version ());
221
222 decl_node->remove ();
223 }
224
225 /* Record that DECL1 and DECL2 are semantically identical function
226 versions. */
227 void
228 cgraph_node::record_function_versions (tree decl1, tree decl2)
229 {
230 cgraph_node *decl1_node = cgraph_node::get_create (decl1);
231 cgraph_node *decl2_node = cgraph_node::get_create (decl2);
232 cgraph_function_version_info *decl1_v = NULL;
233 cgraph_function_version_info *decl2_v = NULL;
234 cgraph_function_version_info *before;
235 cgraph_function_version_info *after;
236
237 gcc_assert (decl1_node != NULL && decl2_node != NULL);
238 decl1_v = decl1_node->function_version ();
239 decl2_v = decl2_node->function_version ();
240
241 if (decl1_v != NULL && decl2_v != NULL)
242 return;
243
244 if (decl1_v == NULL)
245 decl1_v = decl1_node->insert_new_function_version ();
246
247 if (decl2_v == NULL)
248 decl2_v = decl2_node->insert_new_function_version ();
249
250 /* Chain decl2_v and decl1_v. All semantically identical versions
251 will be chained together. */
252
253 before = decl1_v;
254 after = decl2_v;
255
256 while (before->next != NULL)
257 before = before->next;
258
259 while (after->prev != NULL)
260     after = after->prev;
261
262 before->next = after;
263 after->prev = before;
264 }
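
/* Illustrative sketch (editorial addition; the F1/F2/F3 decls are
   hypothetical): after

     cgraph_node::record_function_versions (f1_decl, f2_decl);
     cgraph_node::record_function_versions (f1_decl, f3_decl);

   the three cgraph_function_version_info records are chained through their
   NEXT/PREV fields as f1 <-> f2 <-> f3, so walking NEXT from the first
   version visits every semantically identical version.  */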
265
266 /* Initialize the callgraph dump files.  */
267
268 void
269 symbol_table::initialize (void)
270 {
271 if (!dump_file)
272 dump_file = dump_begin (TDI_cgraph, NULL);
273
274 if (!ipa_clones_dump_file)
275 ipa_clones_dump_file = dump_begin (TDI_clones, NULL);
276 }
277
278 /* Allocate new callgraph node and insert it into basic data structures. */
279
280 cgraph_node *
281 symbol_table::create_empty (void)
282 {
283 cgraph_node *node = allocate_cgraph_symbol ();
284
285 node->type = SYMTAB_FUNCTION;
286 node->frequency = NODE_FREQUENCY_NORMAL;
287 node->count_materialization_scale = REG_BR_PROB_BASE;
288 cgraph_count++;
289
290 return node;
291 }
292
293 /* Register HOOK to be called with DATA on each removed edge. */
294 cgraph_edge_hook_list *
295 symbol_table::add_edge_removal_hook (cgraph_edge_hook hook, void *data)
296 {
297 cgraph_edge_hook_list *entry;
298 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
299
300 entry = (cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
301 entry->hook = hook;
302 entry->data = data;
303 entry->next = NULL;
304 while (*ptr)
305 ptr = &(*ptr)->next;
306 *ptr = entry;
307 return entry;
308 }
309
310 /* Remove ENTRY from the list of hooks called on removing edges. */
311 void
312 symbol_table::remove_edge_removal_hook (cgraph_edge_hook_list *entry)
313 {
314 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
315
316 while (*ptr != entry)
317 ptr = &(*ptr)->next;
318 *ptr = entry->next;
319 free (entry);
320 }
321
322 /* Call all edge removal hooks. */
323 void
324 symbol_table::call_edge_removal_hooks (cgraph_edge *e)
325 {
326 cgraph_edge_hook_list *entry = m_first_edge_removal_hook;
327 while (entry)
328 {
329 entry->hook (e, entry->data);
330 entry = entry->next;
331 }
332 }
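
/* Usage sketch (editorial addition; the callback and HOLDER names are
   hypothetical): an IPA pass that caches per-edge data can keep its cache
   consistent by registering a removal hook and unregistering it when done:

     static void
     my_edge_removal_hook (cgraph_edge *e, void *data)
     {
       ... drop whatever was cached for E ...
     }

     cgraph_edge_hook_list *holder
       = symtab->add_edge_removal_hook (my_edge_removal_hook, NULL);
     ...
     symtab->remove_edge_removal_hook (holder);

   The node removal/insertion and duplication hooks below follow the same
   pattern.  */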
333
334 /* Register HOOK to be called with DATA on each removed node. */
335 cgraph_node_hook_list *
336 symbol_table::add_cgraph_removal_hook (cgraph_node_hook hook, void *data)
337 {
338 cgraph_node_hook_list *entry;
339 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
340
341 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
342 entry->hook = hook;
343 entry->data = data;
344 entry->next = NULL;
345 while (*ptr)
346 ptr = &(*ptr)->next;
347 *ptr = entry;
348 return entry;
349 }
350
351 /* Remove ENTRY from the list of hooks called on removing nodes. */
352 void
353 symbol_table::remove_cgraph_removal_hook (cgraph_node_hook_list *entry)
354 {
355 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
356
357 while (*ptr != entry)
358 ptr = &(*ptr)->next;
359 *ptr = entry->next;
360 free (entry);
361 }
362
363 /* Call all node removal hooks. */
364 void
365 symbol_table::call_cgraph_removal_hooks (cgraph_node *node)
366 {
367 cgraph_node_hook_list *entry = m_first_cgraph_removal_hook;
368 while (entry)
369 {
370 entry->hook (node, entry->data);
371 entry = entry->next;
372 }
373 }
374
375 /* Call all node insertion hooks.  */
376 void
377 symbol_table::call_cgraph_insertion_hooks (cgraph_node *node)
378 {
379 cgraph_node_hook_list *entry = m_first_cgraph_insertion_hook;
380 while (entry)
381 {
382 entry->hook (node, entry->data);
383 entry = entry->next;
384 }
385 }
386
387
388 /* Register HOOK to be called with DATA on each inserted node. */
389 cgraph_node_hook_list *
390 symbol_table::add_cgraph_insertion_hook (cgraph_node_hook hook, void *data)
391 {
392 cgraph_node_hook_list *entry;
393 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
394
395 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
396 entry->hook = hook;
397 entry->data = data;
398 entry->next = NULL;
399 while (*ptr)
400 ptr = &(*ptr)->next;
401 *ptr = entry;
402 return entry;
403 }
404
405 /* Remove ENTRY from the list of hooks called on inserted nodes. */
406 void
407 symbol_table::remove_cgraph_insertion_hook (cgraph_node_hook_list *entry)
408 {
409 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
410
411 while (*ptr != entry)
412 ptr = &(*ptr)->next;
413 *ptr = entry->next;
414 free (entry);
415 }
416
417 /* Register HOOK to be called with DATA on each duplicated edge. */
418 cgraph_2edge_hook_list *
419 symbol_table::add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
420 {
421 cgraph_2edge_hook_list *entry;
422 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
423
424 entry = (cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
425 entry->hook = hook;
426 entry->data = data;
427 entry->next = NULL;
428 while (*ptr)
429 ptr = &(*ptr)->next;
430 *ptr = entry;
431 return entry;
432 }
433
434 /* Remove ENTRY from the list of hooks called on duplicating edges. */
435 void
436 symbol_table::remove_edge_duplication_hook (cgraph_2edge_hook_list *entry)
437 {
438 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
439
440 while (*ptr != entry)
441 ptr = &(*ptr)->next;
442 *ptr = entry->next;
443 free (entry);
444 }
445
446 /* Call all edge duplication hooks. */
447 void
448 symbol_table::call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2)
449 {
450 cgraph_2edge_hook_list *entry = m_first_edge_duplicated_hook;
451 while (entry)
452 {
453 entry->hook (cs1, cs2, entry->data);
454 entry = entry->next;
455 }
456 }
457
458 /* Register HOOK to be called with DATA on each duplicated node. */
459 cgraph_2node_hook_list *
460 symbol_table::add_cgraph_duplication_hook (cgraph_2node_hook hook, void *data)
461 {
462 cgraph_2node_hook_list *entry;
463 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
464
465 entry = (cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
466 entry->hook = hook;
467 entry->data = data;
468 entry->next = NULL;
469 while (*ptr)
470 ptr = &(*ptr)->next;
471 *ptr = entry;
472 return entry;
473 }
474
475 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
476 void
477 symbol_table::remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry)
478 {
479 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
480
481 while (*ptr != entry)
482 ptr = &(*ptr)->next;
483 *ptr = entry->next;
484 free (entry);
485 }
486
487 /* Call all node duplication hooks. */
488 void
489 symbol_table::call_cgraph_duplication_hooks (cgraph_node *node,
490 cgraph_node *node2)
491 {
492 cgraph_2node_hook_list *entry = m_first_cgraph_duplicated_hook;
493 while (entry)
494 {
495 entry->hook (node, node2, entry->data);
496 entry = entry->next;
497 }
498 }
499
500 /* Return cgraph node assigned to DECL. Create new one when needed. */
501
502 cgraph_node *
503 cgraph_node::create (tree decl)
504 {
505 cgraph_node *node = symtab->create_empty ();
506 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
507
508 node->decl = decl;
509
510 node->count = profile_count::uninitialized ();
511
512 if ((flag_openacc || flag_openmp)
513 && lookup_attribute ("omp declare target", DECL_ATTRIBUTES (decl)))
514 {
515 node->offloadable = 1;
516 if (ENABLE_OFFLOADING)
517 g->have_offload = true;
518 }
519
520 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
521 node->ifunc_resolver = true;
522
523 node->register_symbol ();
524
525 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
526 {
527 node->origin = cgraph_node::get_create (DECL_CONTEXT (decl));
528 node->next_nested = node->origin->nested;
529 node->origin->nested = node;
530 }
531 return node;
532 }
533
534 /* Try to find a call graph node for declaration DECL and if it does not exist
535 or if it corresponds to an inline clone, create a new one. */
536
537 cgraph_node *
538 cgraph_node::get_create (tree decl)
539 {
540 cgraph_node *first_clone = cgraph_node::get (decl);
541
542 if (first_clone && !first_clone->global.inlined_to)
543 return first_clone;
544
545 cgraph_node *node = cgraph_node::create (decl);
546 if (first_clone)
547 {
548 first_clone->clone_of = node;
549 node->clones = first_clone;
550 symtab->symtab_prevail_in_asm_name_hash (node);
551 node->decl->decl_with_vis.symtab_node = node;
552 if (dump_file)
553 fprintf (dump_file, "Introduced new external node "
554 "(%s) and turned into root of the clone tree.\n",
555 node->dump_name ());
556 }
557 else if (dump_file)
558 fprintf (dump_file, "Introduced new external node "
559 "(%s).\n", node->dump_name ());
560 return node;
561 }
562
563 /* Mark ALIAS as an alias to TARGET, which is either a FUNCTION_DECL or an
564    IDENTIFIER_NODE.  Return the cgraph node representing ALIAS.  */
565
566 cgraph_node *
567 cgraph_node::create_alias (tree alias, tree target)
568 {
569 cgraph_node *alias_node;
570
571 gcc_assert (TREE_CODE (target) == FUNCTION_DECL
572 || TREE_CODE (target) == IDENTIFIER_NODE);
573 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
574 alias_node = cgraph_node::get_create (alias);
575 gcc_assert (!alias_node->definition);
576 alias_node->alias_target = target;
577 alias_node->definition = true;
578 alias_node->alias = true;
579 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
580 alias_node->transparent_alias = alias_node->weakref = true;
581 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (alias)))
582 alias_node->ifunc_resolver = true;
583 return alias_node;
584 }
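
/* Usage sketch (editorial addition; the decl names are hypothetical
   placeholders): for a source-level alias such as

     int impl (void) { return 0; }
     int alias_fn (void) __attribute__ ((alias ("impl")));

   the alias handling code ends up calling roughly

     cgraph_node::create_alias (alias_fn_decl, impl_decl);

   so that ALIAS_FN is represented as an alias node whose alias_target is
   IMPL.  */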
585
586 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
587 and NULL otherwise.
588 Same body aliases are output whenever the body of DECL is output,
589 and cgraph_node::get (ALIAS) transparently returns
590 cgraph_node::get (DECL). */
591
592 cgraph_node *
593 cgraph_node::create_same_body_alias (tree alias, tree decl)
594 {
595 cgraph_node *n;
596
597 /* If aliases aren't supported by the assembler, fail. */
598 if (!TARGET_SUPPORTS_ALIASES)
599 return NULL;
600
601 /* Langhooks can create same body aliases of symbols not defined.
602 Those are useless. Drop them on the floor. */
603 if (symtab->global_info_ready)
604 return NULL;
605
606 n = cgraph_node::create_alias (alias, decl);
607 n->cpp_implicit_alias = true;
608 if (symtab->cpp_implicit_aliases_done)
609 n->resolve_alias (cgraph_node::get (decl));
610 return n;
611 }
612
613 /* Add a thunk alias into the callgraph.  The alias declaration is ALIAS and
614    it aliases DECL with adjustments made to the first parameter.
615    See comments in struct cgraph_thunk_info for details on the parameters.  */
616
617 cgraph_node *
618 cgraph_node::create_thunk (tree alias, tree, bool this_adjusting,
619 HOST_WIDE_INT fixed_offset,
620 HOST_WIDE_INT virtual_value,
621 HOST_WIDE_INT indirect_offset,
622 tree virtual_offset,
623 tree real_alias)
624 {
625 cgraph_node *node;
626
627 node = cgraph_node::get (alias);
628 if (node)
629 node->reset ();
630 else
631 node = cgraph_node::create (alias);
632
633   /* Make sure that VIRTUAL_OFFSET is in sync with VIRTUAL_VALUE.  */
634 gcc_checking_assert (virtual_offset
635 ? virtual_value == wi::to_wide (virtual_offset)
636 : virtual_value == 0);
637
638 node->thunk.fixed_offset = fixed_offset;
639 node->thunk.virtual_value = virtual_value;
640 node->thunk.indirect_offset = indirect_offset;
641 node->thunk.alias = real_alias;
642 node->thunk.this_adjusting = this_adjusting;
643 node->thunk.virtual_offset_p = virtual_offset != NULL;
644 node->thunk.thunk_p = true;
645 node->definition = true;
646
647 return node;
648 }
649
650 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
651 Return NULL if there's no such node. */
652
653 cgraph_node *
654 cgraph_node::get_for_asmname (tree asmname)
655 {
656 /* We do not want to look at inline clones. */
657 for (symtab_node *node = symtab_node::get_for_asmname (asmname);
658 node;
659 node = node->next_sharing_asm_name)
660 {
661 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
662 if (cn && !cn->global.inlined_to)
663 return cn;
664 }
665 return NULL;
666 }
667
668 /* Returns a hash value for X (which really is a cgraph_edge). */
669
670 hashval_t
671 cgraph_edge_hasher::hash (cgraph_edge *e)
672 {
673 /* This is a really poor hash function, but it is what htab_hash_pointer
674 uses. */
675 return (hashval_t) ((intptr_t)e->call_stmt >> 3);
676 }
677
678 /* Returns a hash value for the call statement CALL_STMT.  */
679
680 hashval_t
681 cgraph_edge_hasher::hash (gimple *call_stmt)
682 {
683 /* This is a really poor hash function, but it is what htab_hash_pointer
684 uses. */
685 return (hashval_t) ((intptr_t)call_stmt >> 3);
686 }
687
688 /* Return nonzero if the call_stmt of cgraph_edge X is stmt *Y. */
689
690 inline bool
691 cgraph_edge_hasher::equal (cgraph_edge *x, gimple *y)
692 {
693 return x->call_stmt == y;
694 }
695
696 /* Update the call graph edge E in the call site hash of its caller.  */
697
698 static inline void
699 cgraph_update_edge_in_call_site_hash (cgraph_edge *e)
700 {
701 gimple *call = e->call_stmt;
702 *e->caller->call_site_hash->find_slot_with_hash
703 (call, cgraph_edge_hasher::hash (call), INSERT) = e;
704 }
705
706 /* Add call graph edge E to call site hash of its caller. */
707
708 static inline void
709 cgraph_add_edge_to_call_site_hash (cgraph_edge *e)
710 {
711 /* There are two speculative edges for every statement (one direct,
712 one indirect); always hash the direct one. */
713 if (e->speculative && e->indirect_unknown_callee)
714 return;
715 cgraph_edge **slot = e->caller->call_site_hash->find_slot_with_hash
716 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt), INSERT);
717 if (*slot)
718 {
719 gcc_assert (((cgraph_edge *)*slot)->speculative);
720 if (e->callee)
721 *slot = e;
722 return;
723 }
724 gcc_assert (!*slot || e->speculative);
725 *slot = e;
726 }
727
728 /* Return the callgraph edge representing the GIMPLE_CALL statement
729 CALL_STMT. */
730
731 cgraph_edge *
732 cgraph_node::get_edge (gimple *call_stmt)
733 {
734 cgraph_edge *e, *e2;
735 int n = 0;
736
737 if (call_site_hash)
738 return call_site_hash->find_with_hash
739 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
740
741   /* This loop may turn out to be a performance problem.  In that case,
742      adding hash tables to call nodes with very many edges is probably the
743      best solution.  It is not a good idea to add a pointer into the CALL_EXPR
744      itself, because we want to allow multiple cgraph nodes to represent
745      different clones of the same body before the body is actually cloned.  */
746 for (e = callees; e; e = e->next_callee)
747 {
748 if (e->call_stmt == call_stmt)
749 break;
750 n++;
751 }
752
753 if (!e)
754 for (e = indirect_calls; e; e = e->next_callee)
755 {
756 if (e->call_stmt == call_stmt)
757 break;
758 n++;
759 }
760
761 if (n > 100)
762 {
763 call_site_hash = hash_table<cgraph_edge_hasher>::create_ggc (120);
764 for (e2 = callees; e2; e2 = e2->next_callee)
765 cgraph_add_edge_to_call_site_hash (e2);
766 for (e2 = indirect_calls; e2; e2 = e2->next_callee)
767 cgraph_add_edge_to_call_site_hash (e2);
768 }
769
770 return e;
771 }
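
/* Note (editorial addition): the threshold above means edge lookup is a
   linear scan for typical functions and switches to the lazily built
   call-site hash only for nodes with more than about a hundred edges;
   either way a caller simply does, e.g.

     cgraph_edge *e = node->get_edge (call_stmt);

   and gets the edge for CALL_STMT or NULL.  */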
772
773
774 /* Change field call_stmt of edge to NEW_STMT.
775 If UPDATE_SPECULATIVE and E is any component of speculative
776 edge, then update all components. */
777
778 void
779 cgraph_edge::set_call_stmt (gcall *new_stmt, bool update_speculative)
780 {
781 tree decl;
782
783   /* Speculative edges have three components; update all of them
784      when asked to.  */
785 if (update_speculative && speculative)
786 {
787 cgraph_edge *direct, *indirect;
788 ipa_ref *ref;
789
790 speculative_call_info (direct, indirect, ref);
791 direct->set_call_stmt (new_stmt, false);
792 indirect->set_call_stmt (new_stmt, false);
793 ref->stmt = new_stmt;
794 return;
795 }
796
797 /* Only direct speculative edges go to call_site_hash. */
798 if (caller->call_site_hash
799 && (!speculative || !indirect_unknown_callee))
800 {
801 caller->call_site_hash->remove_elt_with_hash
802 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
803 }
804
805 cgraph_edge *e = this;
806
807 call_stmt = new_stmt;
808 if (indirect_unknown_callee
809 && (decl = gimple_call_fndecl (new_stmt)))
810 {
811 /* Constant propagation (and possibly also inlining?) can turn an
812 indirect call into a direct one. */
813 cgraph_node *new_callee = cgraph_node::get (decl);
814
815 gcc_checking_assert (new_callee);
816 e = make_direct (new_callee);
817 }
818
819 function *fun = DECL_STRUCT_FUNCTION (e->caller->decl);
820 e->can_throw_external = stmt_can_throw_external (fun, new_stmt);
821 if (e->caller->call_site_hash)
822 cgraph_add_edge_to_call_site_hash (e);
823 }
824
825 /* Allocate a cgraph_edge structure and fill it with data according to the
826 parameters of which only CALLEE can be NULL (when creating an indirect call
827 edge). */
828
829 cgraph_edge *
830 symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
831 gcall *call_stmt, profile_count count,
832 bool indir_unknown_callee)
833 {
834 cgraph_edge *edge;
835
836 /* LTO does not actually have access to the call_stmt since these
837 have not been loaded yet. */
838 if (call_stmt)
839 {
840 /* This is a rather expensive check possibly triggering
841 construction of call stmt hashtable. */
842 cgraph_edge *e;
843 gcc_checking_assert (!(e = caller->get_edge (call_stmt))
844 || e->speculative);
845
846 gcc_assert (is_gimple_call (call_stmt));
847 }
848
849 if (free_edges)
850 {
851 edge = free_edges;
852 free_edges = NEXT_FREE_EDGE (edge);
853 }
854 else
855 {
856 edge = ggc_alloc<cgraph_edge> ();
857 edge->m_summary_id = -1;
858 }
859
860 edges_count++;
861
862 gcc_assert (++edges_max_uid != 0);
863 edge->m_uid = edges_max_uid;
864 edge->aux = NULL;
865 edge->caller = caller;
866 edge->callee = callee;
867 edge->prev_caller = NULL;
868 edge->next_caller = NULL;
869 edge->prev_callee = NULL;
870 edge->next_callee = NULL;
871 edge->lto_stmt_uid = 0;
872
873 edge->count = count;
874
875 edge->call_stmt = call_stmt;
876 edge->can_throw_external
877 = call_stmt ? stmt_can_throw_external (DECL_STRUCT_FUNCTION (caller->decl),
878 call_stmt) : false;
879 if (call_stmt
880 && callee && callee->decl
881 && !gimple_check_call_matching_types (call_stmt, callee->decl,
882 false))
883 {
884 edge->inline_failed = CIF_MISMATCHED_ARGUMENTS;
885 edge->call_stmt_cannot_inline_p = true;
886 }
887 else
888 {
889 edge->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
890 edge->call_stmt_cannot_inline_p = false;
891 }
892
893 edge->indirect_info = NULL;
894 edge->indirect_inlining_edge = 0;
895 edge->speculative = false;
896 edge->indirect_unknown_callee = indir_unknown_callee;
897 if (opt_for_fn (edge->caller->decl, flag_devirtualize)
898 && call_stmt && DECL_STRUCT_FUNCTION (caller->decl))
899 edge->in_polymorphic_cdtor
900 = decl_maybe_in_construction_p (NULL, NULL, call_stmt,
901 caller->decl);
902 else
903 edge->in_polymorphic_cdtor = caller->thunk.thunk_p;
904 if (call_stmt && caller->call_site_hash)
905 cgraph_add_edge_to_call_site_hash (edge);
906
907 return edge;
908 }
909
910 /* Create edge from a given function to CALLEE in the cgraph. */
911
912 cgraph_edge *
913 cgraph_node::create_edge (cgraph_node *callee,
914 gcall *call_stmt, profile_count count)
915 {
916 cgraph_edge *edge = symtab->create_edge (this, callee, call_stmt, count,
917 false);
918
919 initialize_inline_failed (edge);
920
921 edge->next_caller = callee->callers;
922 if (callee->callers)
923 callee->callers->prev_caller = edge;
924 edge->next_callee = callees;
925 if (callees)
926 callees->prev_callee = edge;
927 callees = edge;
928 callee->callers = edge;
929
930 return edge;
931 }
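
/* Illustrative sketch (editorial addition): when call graph edges are built
   from GIMPLE, a direct edge is typically created per call statement roughly
   as

     tree decl = gimple_call_fndecl (call_stmt);
     if (decl)
       node->create_edge (cgraph_node::get_create (decl), call_stmt,
                          bb->count);

   where NODE and BB are the enclosing function's cgraph node and basic
   block; see cgraphbuild.c for the real code.  */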
932
933 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
934
935 cgraph_indirect_call_info *
936 cgraph_allocate_init_indirect_info (void)
937 {
938 cgraph_indirect_call_info *ii;
939
940 ii = ggc_cleared_alloc<cgraph_indirect_call_info> ();
941 ii->param_index = -1;
942 return ii;
943 }
944
945 /* Create an indirect edge with a yet-undetermined callee for the call
946    statement CALL_STMT with flags ECF_FLAGS.  When COMPUTE_INDIRECT_INFO is
947    true, record polymorphic call information from CALL_STMT.  */
948
949 cgraph_edge *
950 cgraph_node::create_indirect_edge (gcall *call_stmt, int ecf_flags,
951 profile_count count,
952 bool compute_indirect_info)
953 {
954 cgraph_edge *edge = symtab->create_edge (this, NULL, call_stmt,
955 count, true);
956 tree target;
957
958 initialize_inline_failed (edge);
959
960 edge->indirect_info = cgraph_allocate_init_indirect_info ();
961 edge->indirect_info->ecf_flags = ecf_flags;
962 edge->indirect_info->vptr_changed = true;
963
964 /* Record polymorphic call info. */
965 if (compute_indirect_info
966 && call_stmt
967 && (target = gimple_call_fn (call_stmt))
968 && virtual_method_call_p (target))
969 {
970 ipa_polymorphic_call_context context (decl, target, call_stmt);
971
972 /* Only record types can have virtual calls. */
973 edge->indirect_info->polymorphic = true;
974 edge->indirect_info->param_index = -1;
975 edge->indirect_info->otr_token
976 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
977 edge->indirect_info->otr_type = obj_type_ref_class (target);
978 gcc_assert (TREE_CODE (edge->indirect_info->otr_type) == RECORD_TYPE);
979 edge->indirect_info->context = context;
980 }
981
982 edge->next_callee = indirect_calls;
983 if (indirect_calls)
984 indirect_calls->prev_callee = edge;
985 indirect_calls = edge;
986
987 return edge;
988 }
989
990 /* Remove the edge from the list of the callees of the caller. */
991
992 void
993 cgraph_edge::remove_caller (void)
994 {
995 if (prev_callee)
996 prev_callee->next_callee = next_callee;
997 if (next_callee)
998 next_callee->prev_callee = prev_callee;
999 if (!prev_callee)
1000 {
1001 if (indirect_unknown_callee)
1002 caller->indirect_calls = next_callee;
1003 else
1004 caller->callees = next_callee;
1005 }
1006 if (caller->call_site_hash)
1007 caller->call_site_hash->remove_elt_with_hash
1008 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
1009 }
1010
1011 /* Put the edge onto the free list. */
1012
1013 void
1014 symbol_table::free_edge (cgraph_edge *e)
1015 {
1016 if (e->indirect_info)
1017 ggc_free (e->indirect_info);
1018
1019 /* Clear out the edge so we do not dangle pointers. */
1020 int summary_id = e->m_summary_id;
1021 memset (e, 0, sizeof (*e));
1022 e->m_summary_id = summary_id;
1023 NEXT_FREE_EDGE (e) = free_edges;
1024 free_edges = e;
1025 edges_count--;
1026 }
1027
1028 /* Remove the edge in the cgraph. */
1029
1030 void
1031 cgraph_edge::remove (void)
1032 {
1033 /* Call all edge removal hooks. */
1034 symtab->call_edge_removal_hooks (this);
1035
1036 if (!indirect_unknown_callee)
1037 /* Remove from callers list of the callee. */
1038 remove_callee ();
1039
1040 /* Remove from callees list of the callers. */
1041 remove_caller ();
1042
1043 /* Put the edge onto the free list. */
1044 symtab->free_edge (this);
1045 }
1046
1047 /* Turn the edge into a speculative call to N2.  Update the profile so
1048    the direct call is taken DIRECT_COUNT times and the remaining count
1049    stays on the original indirect call.
1050
1051 At clone materialization time, the indirect call E will
1052 be expanded as:
1053
1054 if (call_dest == N2)
1055 n2 ();
1056 else
1057 call call_dest
1058
1059    At this time the function just creates the direct call and the
1060    reference representing the if conditional, and attaches them all
1061    to the original indirect call statement.
1062 
1063    Return the direct edge created.  */
1064
1065 cgraph_edge *
1066 cgraph_edge::make_speculative (cgraph_node *n2, profile_count direct_count)
1067 {
1068 cgraph_node *n = caller;
1069 ipa_ref *ref = NULL;
1070 cgraph_edge *e2;
1071
1072 if (dump_file)
1073 fprintf (dump_file, "Indirect call -> speculative call %s => %s\n",
1074 n->dump_name (), n2->dump_name ());
1075 speculative = true;
1076 e2 = n->create_edge (n2, call_stmt, direct_count);
1077 initialize_inline_failed (e2);
1078 e2->speculative = true;
1079 if (TREE_NOTHROW (n2->decl))
1080 e2->can_throw_external = false;
1081 else
1082 e2->can_throw_external = can_throw_external;
1083 e2->lto_stmt_uid = lto_stmt_uid;
1084 e2->in_polymorphic_cdtor = in_polymorphic_cdtor;
1085 count -= e2->count;
1086 symtab->call_edge_duplication_hooks (this, e2);
1087 ref = n->create_reference (n2, IPA_REF_ADDR, call_stmt);
1088 ref->lto_stmt_uid = lto_stmt_uid;
1089 ref->speculative = speculative;
1090 n2->mark_address_taken ();
1091 return e2;
1092 }
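
/* Usage sketch (editorial addition; the 8/10 scale is only an example):
   passes such as ipa-profile turn a promising indirect edge E into a
   speculative one roughly as

     if (e->indirect_unknown_callee && !e->speculative)
       e->make_speculative (likely_target, e->count.apply_scale (8, 10));

   where LIKELY_TARGET is the cgraph_node of the guessed callee; the
   original indirect edge is kept and the new direct edge carries the
   speculated share of the profile count.  */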
1093
1094 /* A speculative call consists of three components:
1095    1) an indirect edge representing the original call
1096    2) a direct edge representing the new call
1097    3) an ADDR_EXPR reference representing the speculative check.
1098    All three components are attached to a single statement (the indirect
1099    call) and if one of them exists, all of them must exist.
1100 
1101    Given a speculative call edge, return all three components.
1102  */
1103
1104 void
1105 cgraph_edge::speculative_call_info (cgraph_edge *&direct,
1106 cgraph_edge *&indirect,
1107 ipa_ref *&reference)
1108 {
1109 ipa_ref *ref;
1110 int i;
1111 cgraph_edge *e2;
1112 cgraph_edge *e = this;
1113
1114 if (!e->indirect_unknown_callee)
1115 for (e2 = e->caller->indirect_calls;
1116 e2->call_stmt != e->call_stmt || e2->lto_stmt_uid != e->lto_stmt_uid;
1117 e2 = e2->next_callee)
1118 ;
1119 else
1120 {
1121 e2 = e;
1122 /* We can take advantage of the call stmt hash. */
1123 if (e2->call_stmt)
1124 {
1125 e = e->caller->get_edge (e2->call_stmt);
1126 gcc_assert (e->speculative && !e->indirect_unknown_callee);
1127 }
1128 else
1129 for (e = e->caller->callees;
1130 e2->call_stmt != e->call_stmt
1131 || e2->lto_stmt_uid != e->lto_stmt_uid;
1132 e = e->next_callee)
1133 ;
1134 }
1135 gcc_assert (e->speculative && e2->speculative);
1136 direct = e;
1137 indirect = e2;
1138
1139 reference = NULL;
1140 for (i = 0; e->caller->iterate_reference (i, ref); i++)
1141 if (ref->speculative
1142 && ((ref->stmt && ref->stmt == e->call_stmt)
1143 || (!ref->stmt && ref->lto_stmt_uid == e->lto_stmt_uid)))
1144 {
1145 reference = ref;
1146 break;
1147 }
1148
1149   /* A speculative edge always consists of all three components: the direct
1150      edge, the indirect edge and the reference.  */
1151
1152 gcc_assert (e && e2 && ref);
1153 }
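
/* Usage sketch (editorial addition): given any one component of a
   speculative call, the other two can be recovered:

     cgraph_edge *direct, *indirect;
     ipa_ref *ref;
     e->speculative_call_info (direct, indirect, ref);

   as done in set_call_stmt above and resolve_speculation below.  */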
1154
1155 /* The speculative call edge turned out to be a direct call to CALLEE_DECL.
1156    Remove the speculative call sequence and return the edge representing the
1157    call.  It is up to the caller to redirect the call as appropriate.  */
1158
1159 cgraph_edge *
1160 cgraph_edge::resolve_speculation (tree callee_decl)
1161 {
1162 cgraph_edge *edge = this;
1163 cgraph_edge *e2;
1164 ipa_ref *ref;
1165
1166 gcc_assert (edge->speculative);
1167 edge->speculative_call_info (e2, edge, ref);
1168 if (!callee_decl
1169 || !ref->referred->semantically_equivalent_p
1170 (symtab_node::get (callee_decl)))
1171 {
1172 if (dump_file)
1173 {
1174 if (callee_decl)
1175 {
1176 fprintf (dump_file, "Speculative indirect call %s => %s has "
1177 "turned out to have contradicting known target ",
1178 edge->caller->dump_name (),
1179 e2->callee->dump_name ());
1180 print_generic_expr (dump_file, callee_decl);
1181 fprintf (dump_file, "\n");
1182 }
1183 else
1184 {
1185 fprintf (dump_file, "Removing speculative call %s => %s\n",
1186 edge->caller->dump_name (),
1187 e2->callee->dump_name ());
1188 }
1189 }
1190 }
1191 else
1192 {
1193 cgraph_edge *tmp = edge;
1194 if (dump_file)
1195 fprintf (dump_file, "Speculative call turned into direct call.\n");
1196 edge = e2;
1197 e2 = tmp;
1198 /* FIXME: If EDGE is inlined, we should scale up the frequencies and counts
1199 in the functions inlined through it. */
1200 }
1201 edge->count += e2->count;
1202 edge->speculative = false;
1203 e2->speculative = false;
1204 ref->remove_reference ();
1205 if (e2->indirect_unknown_callee || e2->inline_failed)
1206 e2->remove ();
1207 else
1208 e2->callee->remove_symbol_and_inline_clones ();
1209 if (edge->caller->call_site_hash)
1210 cgraph_update_edge_in_call_site_hash (edge);
1211 return edge;
1212 }
1213
1214 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1215    CALLEE.  Return the resulting direct edge, which may be a pre-existing one
1216    if the call was speculative.  */
1217
1218 cgraph_edge *
1219 cgraph_edge::make_direct (cgraph_node *callee)
1220 {
1221 cgraph_edge *edge = this;
1222 gcc_assert (indirect_unknown_callee);
1223
1224 /* If we are redirecting speculative call, make it non-speculative. */
1225 if (indirect_unknown_callee && speculative)
1226 {
1227 edge = edge->resolve_speculation (callee->decl);
1228
1229       /* On successful speculation just return the pre-existing direct edge.  */
1230 if (!indirect_unknown_callee)
1231 return edge;
1232 }
1233
1234 indirect_unknown_callee = 0;
1235 ggc_free (indirect_info);
1236 indirect_info = NULL;
1237
1238 /* Get the edge out of the indirect edge list. */
1239 if (prev_callee)
1240 prev_callee->next_callee = next_callee;
1241 if (next_callee)
1242 next_callee->prev_callee = prev_callee;
1243 if (!prev_callee)
1244 caller->indirect_calls = next_callee;
1245
1246   /* Put it into the normal callee list.  */
1247 prev_callee = NULL;
1248 next_callee = caller->callees;
1249 if (caller->callees)
1250 caller->callees->prev_callee = edge;
1251 caller->callees = edge;
1252
1253 /* Insert to callers list of the new callee. */
1254 edge->set_callee (callee);
1255
1256 if (call_stmt
1257 && !gimple_check_call_matching_types (call_stmt, callee->decl, false))
1258 {
1259 call_stmt_cannot_inline_p = true;
1260 inline_failed = CIF_MISMATCHED_ARGUMENTS;
1261 }
1262
1263 /* We need to re-determine the inlining status of the edge. */
1264 initialize_inline_failed (edge);
1265 return edge;
1266 }
1267
1268 /* If necessary, change the function declaration in the call statement
1269 associated with E so that it corresponds to the edge callee. */
1270
1271 gimple *
1272 cgraph_edge::redirect_call_stmt_to_callee (void)
1273 {
1274 cgraph_edge *e = this;
1275
1276 tree decl = gimple_call_fndecl (e->call_stmt);
1277 gcall *new_stmt;
1278 gimple_stmt_iterator gsi;
1279
1280 if (e->speculative)
1281 {
1282 cgraph_edge *e2;
1283 gcall *new_stmt;
1284 ipa_ref *ref;
1285
1286 e->speculative_call_info (e, e2, ref);
1287       /* If there already is a direct call (i.e. as a result of the inliner's
1288          substitution), forget about speculating.  */
1289 if (decl)
1290 e = e->resolve_speculation (decl);
1291 /* If types do not match, speculation was likely wrong.
1292 The direct edge was possibly redirected to the clone with a different
1293 signature. We did not update the call statement yet, so compare it
1294 with the reference that still points to the proper type. */
1295 else if (!gimple_check_call_matching_types (e->call_stmt,
1296 ref->referred->decl,
1297 true))
1298 {
1299 if (dump_file)
1300 fprintf (dump_file, "Not expanding speculative call of %s -> %s\n"
1301 "Type mismatch.\n",
1302 e->caller->dump_name (),
1303 e->callee->dump_name ());
1304 e = e->resolve_speculation ();
1305 /* We are producing the final function body and will throw away the
1306 callgraph edges really soon. Reset the counts/frequencies to
1307 keep verifier happy in the case of roundoff errors. */
1308 e->count = gimple_bb (e->call_stmt)->count;
1309 }
1310 /* Expand speculation into GIMPLE code. */
1311 else
1312 {
1313 if (dump_file)
1314 {
1315 fprintf (dump_file,
1316 "Expanding speculative call of %s -> %s count: ",
1317 e->caller->dump_name (),
1318 e->callee->dump_name ());
1319 e->count.dump (dump_file);
1320 fprintf (dump_file, "\n");
1321 }
1322 gcc_assert (e2->speculative);
1323 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
1324
1325 profile_probability prob = e->count.probability_in (e->count
1326 + e2->count);
1327 if (!prob.initialized_p ())
1328 prob = profile_probability::even ();
1329 new_stmt = gimple_ic (e->call_stmt,
1330 dyn_cast<cgraph_node *> (ref->referred),
1331 prob);
1332 e->speculative = false;
1333 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt,
1334 false);
1335 e->count = gimple_bb (e->call_stmt)->count;
1336 e2->speculative = false;
1337 e2->count = gimple_bb (e2->call_stmt)->count;
1338 ref->speculative = false;
1339 ref->stmt = NULL;
1340           /* Indirect edges are not put in the call site hash while speculative;
1341              now that the speculation is expanded, get the hash updated.  */
1342 if (e->caller->call_site_hash)
1343 cgraph_update_edge_in_call_site_hash (e2);
1344 pop_cfun ();
1345 /* Continue redirecting E to proper target. */
1346 }
1347 }
1348
1349
1350 if (e->indirect_unknown_callee
1351 || decl == e->callee->decl)
1352 return e->call_stmt;
1353
1354 if (flag_checking && decl)
1355 {
1356 cgraph_node *node = cgraph_node::get (decl);
1357 gcc_assert (!node || !node->clone.combined_args_to_skip);
1358 }
1359
1360 if (symtab->dump_file)
1361 {
1362 fprintf (symtab->dump_file, "updating call of %s -> %s: ",
1363 e->caller->dump_name (), e->callee->dump_name ());
1364 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1365 if (e->callee->clone.combined_args_to_skip)
1366 {
1367 fprintf (symtab->dump_file, " combined args to skip: ");
1368 dump_bitmap (symtab->dump_file,
1369 e->callee->clone.combined_args_to_skip);
1370 }
1371 }
1372
1373 if (e->callee->clone.combined_args_to_skip)
1374 {
1375 int lp_nr;
1376
1377 new_stmt = e->call_stmt;
1378 if (e->callee->clone.combined_args_to_skip)
1379 new_stmt
1380 = gimple_call_copy_skip_args (new_stmt,
1381 e->callee->clone.combined_args_to_skip);
1382 tree old_fntype = gimple_call_fntype (e->call_stmt);
1383 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1384 cgraph_node *origin = e->callee;
1385 while (origin->clone_of)
1386 origin = origin->clone_of;
1387
1388 if ((origin->former_clone_of
1389 && old_fntype == TREE_TYPE (origin->former_clone_of))
1390 || old_fntype == TREE_TYPE (origin->decl))
1391 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1392 else
1393 {
1394 bitmap skip = e->callee->clone.combined_args_to_skip;
1395 tree t = cgraph_build_function_type_skip_args (old_fntype, skip,
1396 false);
1397 gimple_call_set_fntype (new_stmt, t);
1398 }
1399
1400 if (gimple_vdef (new_stmt)
1401 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1402 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1403
1404 gsi = gsi_for_stmt (e->call_stmt);
1405
1406 /* For optimized away parameters, add on the caller side
1407 before the call
1408 DEBUG D#X => parm_Y(D)
1409 stmts and associate D#X with parm in decl_debug_args_lookup
1410 vector to say for debug info that if parameter parm had been passed,
1411 it would have value parm_Y(D). */
1412 if (e->callee->clone.combined_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
1413 {
1414 vec<tree, va_gc> **debug_args
1415 = decl_debug_args_lookup (e->callee->decl);
1416 tree old_decl = gimple_call_fndecl (e->call_stmt);
1417 if (debug_args && old_decl)
1418 {
1419 tree parm;
1420 unsigned i = 0, num;
1421 unsigned len = vec_safe_length (*debug_args);
1422 unsigned nargs = gimple_call_num_args (e->call_stmt);
1423 for (parm = DECL_ARGUMENTS (old_decl), num = 0;
1424 parm && num < nargs;
1425 parm = DECL_CHAIN (parm), num++)
1426 if (bitmap_bit_p (e->callee->clone.combined_args_to_skip, num)
1427 && is_gimple_reg (parm))
1428 {
1429 unsigned last = i;
1430
1431 while (i < len && (**debug_args)[i] != DECL_ORIGIN (parm))
1432 i += 2;
1433 if (i >= len)
1434 {
1435 i = 0;
1436 while (i < last
1437 && (**debug_args)[i] != DECL_ORIGIN (parm))
1438 i += 2;
1439 if (i >= last)
1440 continue;
1441 }
1442 tree ddecl = (**debug_args)[i + 1];
1443 tree arg = gimple_call_arg (e->call_stmt, num);
1444 if (!useless_type_conversion_p (TREE_TYPE (ddecl),
1445 TREE_TYPE (arg)))
1446 {
1447 tree rhs1;
1448 if (!fold_convertible_p (TREE_TYPE (ddecl), arg))
1449 continue;
1450 if (TREE_CODE (arg) == SSA_NAME
1451 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg))
1452 && (rhs1
1453 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (arg)))
1454 && useless_type_conversion_p (TREE_TYPE (ddecl),
1455 TREE_TYPE (rhs1)))
1456 arg = rhs1;
1457 else
1458 arg = fold_convert (TREE_TYPE (ddecl), arg);
1459 }
1460
1461 gimple *def_temp
1462 = gimple_build_debug_bind (ddecl, unshare_expr (arg),
1463 e->call_stmt);
1464 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
1465 }
1466 }
1467 }
1468
1469 gsi_replace (&gsi, new_stmt, false);
1470 /* We need to defer cleaning EH info on the new statement to
1471 fixup-cfg. We may not have dominator information at this point
1472 and thus would end up with unreachable blocks and have no way
1473 to communicate that we need to run CFG cleanup then. */
1474 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1475 if (lp_nr != 0)
1476 {
1477 remove_stmt_from_eh_lp (e->call_stmt);
1478 add_stmt_to_eh_lp (new_stmt, lp_nr);
1479 }
1480 }
1481 else
1482 {
1483 new_stmt = e->call_stmt;
1484 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1485 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1486 }
1487
1488 /* If changing the call to __cxa_pure_virtual or similar noreturn function,
1489 adjust gimple_call_fntype too. */
1490 if (gimple_call_noreturn_p (new_stmt)
1491 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (e->callee->decl)))
1492 && TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl))
1493 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl)))
1494 == void_type_node))
1495 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1496
1497 /* If the call becomes noreturn, remove the LHS if possible. */
1498 tree lhs = gimple_call_lhs (new_stmt);
1499 if (lhs
1500 && gimple_call_noreturn_p (new_stmt)
1501 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (new_stmt)))
1502 || should_remove_lhs_p (lhs)))
1503 {
1504 if (TREE_CODE (lhs) == SSA_NAME)
1505 {
1506 tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl),
1507 TREE_TYPE (lhs), NULL);
1508 var = get_or_create_ssa_default_def
1509 (DECL_STRUCT_FUNCTION (e->caller->decl), var);
1510 gimple *set_stmt = gimple_build_assign (lhs, var);
1511 gsi = gsi_for_stmt (new_stmt);
1512 gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
1513 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
1514 }
1515 gimple_call_set_lhs (new_stmt, NULL_TREE);
1516 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1517 }
1518
1519 /* If new callee has no static chain, remove it. */
1520 if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl))
1521 {
1522 gimple_call_set_chain (new_stmt, NULL);
1523 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1524 }
1525
1526 maybe_remove_unused_call_args (DECL_STRUCT_FUNCTION (e->caller->decl),
1527 new_stmt);
1528
1529 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt, false);
1530
1531 if (symtab->dump_file)
1532 {
1533 fprintf (symtab->dump_file, " updated to:");
1534 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1535 }
1536 return new_stmt;
1537 }
1538
1539 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1540 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1541    of OLD_STMT if it was previously a call statement.
1542 If NEW_STMT is NULL, the call has been dropped without any
1543 replacement. */
1544
1545 static void
1546 cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
1547 gimple *old_stmt, tree old_call,
1548 gimple *new_stmt)
1549 {
1550 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1551 ? gimple_call_fndecl (new_stmt) : 0;
1552
1553   /* If we are seeing indirect calls, there is nothing to update.  */
1554 if (!new_call && !old_call)
1555 return;
1556   /* See if we turned an indirect call into a direct call or folded a call
1557      to one builtin into a different builtin.  */
1558 if (old_call != new_call)
1559 {
1560 cgraph_edge *e = node->get_edge (old_stmt);
1561 cgraph_edge *ne = NULL;
1562 profile_count count;
1563
1564 if (e)
1565 {
1566 /* Keep calls marked as dead dead. */
1567 if (new_stmt && is_gimple_call (new_stmt) && e->callee
1568 && fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
1569 {
1570 node->get_edge (old_stmt)->set_call_stmt
1571 (as_a <gcall *> (new_stmt));
1572 return;
1573 }
1574           /* See if the edge is already there and has the correct callee.  It
1575              might be so because indirect inlining has already updated
1576              it.  We also might've cloned and redirected the edge.  */
1577 if (new_call && e->callee)
1578 {
1579 cgraph_node *callee = e->callee;
1580 while (callee)
1581 {
1582 if (callee->decl == new_call
1583 || callee->former_clone_of == new_call)
1584 {
1585 e->set_call_stmt (as_a <gcall *> (new_stmt));
1586 return;
1587 }
1588 callee = callee->clone_of;
1589 }
1590 }
1591
1592           /* Otherwise remove the edge and create a new one; we can't simply redirect
1593              it since the function has changed, so the inline plan and other information
1594              attached to the edge are invalid.  */
1595 count = e->count;
1596 if (e->indirect_unknown_callee || e->inline_failed)
1597 e->remove ();
1598 else
1599 e->callee->remove_symbol_and_inline_clones ();
1600 }
1601 else if (new_call)
1602 {
1603 /* We are seeing new direct call; compute profile info based on BB. */
1604 basic_block bb = gimple_bb (new_stmt);
1605 count = bb->count;
1606 }
1607
1608 if (new_call)
1609 {
1610 ne = node->create_edge (cgraph_node::get_create (new_call),
1611 as_a <gcall *> (new_stmt), count);
1612 gcc_assert (ne->inline_failed);
1613 }
1614 }
1615   /* We only updated the call stmt; update the pointer in the cgraph edge.  */
1616 else if (old_stmt != new_stmt)
1617 node->get_edge (old_stmt)->set_call_stmt (as_a <gcall *> (new_stmt));
1618 }
1619
1620 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1621 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1622    of OLD_STMT before it was updated (updating can happen in place).  */
1623
1624 void
1625 cgraph_update_edges_for_call_stmt (gimple *old_stmt, tree old_decl,
1626 gimple *new_stmt)
1627 {
1628 cgraph_node *orig = cgraph_node::get (cfun->decl);
1629 cgraph_node *node;
1630
1631 gcc_checking_assert (orig);
1632 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1633 if (orig->clones)
1634 for (node = orig->clones; node != orig;)
1635 {
1636 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1637 if (node->clones)
1638 node = node->clones;
1639 else if (node->next_sibling_clone)
1640 node = node->next_sibling_clone;
1641 else
1642 {
1643 while (node != orig && !node->next_sibling_clone)
1644 node = node->clone_of;
1645 if (node != orig)
1646 node = node->next_sibling_clone;
1647 }
1648 }
1649 }
1650
1651
1652 /* Remove all callees from the node. */
1653
1654 void
1655 cgraph_node::remove_callees (void)
1656 {
1657 cgraph_edge *e, *f;
1658
1659 /* It is sufficient to remove the edges from the lists of callers of
1660 the callees. The callee list of the node can be zapped with one
1661 assignment. */
1662 for (e = callees; e; e = f)
1663 {
1664 f = e->next_callee;
1665 symtab->call_edge_removal_hooks (e);
1666 if (!e->indirect_unknown_callee)
1667 e->remove_callee ();
1668 symtab->free_edge (e);
1669 }
1670 for (e = indirect_calls; e; e = f)
1671 {
1672 f = e->next_callee;
1673 symtab->call_edge_removal_hooks (e);
1674 if (!e->indirect_unknown_callee)
1675 e->remove_callee ();
1676 symtab->free_edge (e);
1677 }
1678 indirect_calls = NULL;
1679 callees = NULL;
1680 if (call_site_hash)
1681 {
1682 call_site_hash->empty ();
1683 call_site_hash = NULL;
1684 }
1685 }
1686
1687 /* Remove all callers from the node. */
1688
1689 void
1690 cgraph_node::remove_callers (void)
1691 {
1692 cgraph_edge *e, *f;
1693
1694 /* It is sufficient to remove the edges from the lists of callees of
1695 the callers. The caller list of the node can be zapped with one
1696 assignment. */
1697 for (e = callers; e; e = f)
1698 {
1699 f = e->next_caller;
1700 symtab->call_edge_removal_hooks (e);
1701 e->remove_caller ();
1702 symtab->free_edge (e);
1703 }
1704 callers = NULL;
1705 }
1706
1707 /* Helper function for cgraph_release_function_body and free_lang_data.
1708 It releases body from function DECL without having to inspect its
1709 possibly non-existent symtab node. */
1710
1711 void
1712 release_function_body (tree decl)
1713 {
1714 function *fn = DECL_STRUCT_FUNCTION (decl);
1715 if (fn)
1716 {
1717 if (fn->cfg
1718 && loops_for_fn (fn))
1719 {
1720 fn->curr_properties &= ~PROP_loops;
1721 loop_optimizer_finalize (fn);
1722 }
1723 if (fn->gimple_df)
1724 {
1725 delete_tree_ssa (fn);
1726 fn->eh = NULL;
1727 }
1728 if (fn->cfg)
1729 {
1730 gcc_assert (!dom_info_available_p (fn, CDI_DOMINATORS));
1731 gcc_assert (!dom_info_available_p (fn, CDI_POST_DOMINATORS));
1732 delete_tree_cfg_annotations (fn);
1733 clear_edges (fn);
1734 fn->cfg = NULL;
1735 }
1736 if (fn->value_histograms)
1737 free_histograms (fn);
1738 gimple_set_body (decl, NULL);
1739       /* The struct function has a lot of data hanging off it that would leak
1740          if we didn't remove all pointers to it.  */
1741 ggc_free (fn);
1742 DECL_STRUCT_FUNCTION (decl) = NULL;
1743 }
1744 DECL_SAVED_TREE (decl) = NULL;
1745 }
1746
1747 /* Release memory used to represent the body of a function.
1748    Use this only for functions that are released before being translated to
1749    target code (i.e. RTL).  Functions that are compiled to RTL and beyond
1750    are freed in final.c via free_after_compilation ().
1751    KEEP_ARGUMENTS is useful only if you want to rebuild the body as a thunk.  */
1752
1753 void
1754 cgraph_node::release_body (bool keep_arguments)
1755 {
1756 ipa_transforms_to_apply.release ();
1757 if (!used_as_abstract_origin && symtab->state != PARSING)
1758 {
1759 DECL_RESULT (decl) = NULL;
1760
1761 if (!keep_arguments)
1762 DECL_ARGUMENTS (decl) = NULL;
1763 }
1764 /* If the node is abstract and needed, then do not clear
1765 DECL_INITIAL of its associated function declaration because it's
1766 needed to emit debug info later. */
1767 if (!used_as_abstract_origin && DECL_INITIAL (decl))
1768 DECL_INITIAL (decl) = error_mark_node;
1769 release_function_body (decl);
1770 if (lto_file_data)
1771 {
1772 lto_free_function_in_decl_state_for_node (this);
1773 lto_file_data = NULL;
1774 }
1775 }
1776
1777 /* Remove function from symbol table. */
1778
1779 void
1780 cgraph_node::remove (void)
1781 {
1782 cgraph_node *n;
1783
1784 if (symtab->ipa_clones_dump_file && symtab->cloned_nodes.contains (this))
1785 fprintf (symtab->ipa_clones_dump_file,
1786 "Callgraph removal;%s;%d;%s;%d;%d\n", asm_name (), order,
1787 DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl),
1788 DECL_SOURCE_COLUMN (decl));
1789
1790 symtab->call_cgraph_removal_hooks (this);
1791 remove_callers ();
1792 remove_callees ();
1793 ipa_transforms_to_apply.release ();
1794 delete_function_version (function_version ());
1795
1796   /* Incremental inlining accesses removed nodes stored in the postorder
1797      list.  */
1798 force_output = false;
1799 forced_by_abi = false;
1800 for (n = nested; n; n = n->next_nested)
1801 n->origin = NULL;
1802 nested = NULL;
1803 if (origin)
1804 {
1805 cgraph_node **node2 = &origin->nested;
1806
1807 while (*node2 != this)
1808 node2 = &(*node2)->next_nested;
1809 *node2 = next_nested;
1810 }
1811 unregister ();
1812 if (prev_sibling_clone)
1813 prev_sibling_clone->next_sibling_clone = next_sibling_clone;
1814 else if (clone_of)
1815 clone_of->clones = next_sibling_clone;
1816 if (next_sibling_clone)
1817 next_sibling_clone->prev_sibling_clone = prev_sibling_clone;
1818 if (clones)
1819 {
1820 cgraph_node *n, *next;
1821
1822 if (clone_of)
1823 {
1824 for (n = clones; n->next_sibling_clone; n = n->next_sibling_clone)
1825 n->clone_of = clone_of;
1826 n->clone_of = clone_of;
1827 n->next_sibling_clone = clone_of->clones;
1828 if (clone_of->clones)
1829 clone_of->clones->prev_sibling_clone = n;
1830 clone_of->clones = clones;
1831 }
1832 else
1833 {
1834 /* We are removing node with clones. This makes clones inconsistent,
1835 but assume they will be removed subsequently and just keep clone
1836 tree intact. This can happen in unreachable function removal since
1837 we remove unreachable functions in random order, not by bottom-up
1838 walk of clone trees. */
1839 for (n = clones; n; n = next)
1840 {
1841 next = n->next_sibling_clone;
1842 n->next_sibling_clone = NULL;
1843 n->prev_sibling_clone = NULL;
1844 n->clone_of = NULL;
1845 }
1846 }
1847 }
1848
1849   /* While all the clones are removed after being processed, the function
1850      itself is kept in the cgraph even after it is compiled.  Check whether
1851      we are done with this body and reclaim it proactively if this is the
1852      case.  */
1853 if (symtab->state != LTO_STREAMING)
1854 {
1855 n = cgraph_node::get (decl);
1856 if (!n
1857 || (!n->clones && !n->clone_of && !n->global.inlined_to
1858 && ((symtab->global_info_ready || in_lto_p)
1859 && (TREE_ASM_WRITTEN (n->decl)
1860 || DECL_EXTERNAL (n->decl)
1861 || !n->analyzed
1862 || (!flag_wpa && n->in_other_partition)))))
1863 release_body ();
1864 }
1865 else
1866 {
1867 lto_free_function_in_decl_state_for_node (this);
1868 lto_file_data = NULL;
1869 }
1870
1871 decl = NULL;
1872 if (call_site_hash)
1873 {
1874 call_site_hash->empty ();
1875 call_site_hash = NULL;
1876 }
1877
1878 symtab->release_symbol (this);
1879 }
1880
1881 /* Likewise indicate that a node has its address taken. */
1882
1883 void
1884 cgraph_node::mark_address_taken (void)
1885 {
1886 /* Indirect inlining can figure out that all uses of the address are
1887 inlined. */
1888 if (global.inlined_to)
1889 {
1890 gcc_assert (cfun->after_inlining);
1891 gcc_assert (callers->indirect_inlining_edge);
1892 return;
1893 }
1894 /* FIXME: the address_taken flag is used both as a shortcut for testing whether
1895 an IPA_REF_ADDR reference exists (and thus it should be set on the node
1896 representing the alias we take the address of) and as a test whether the address
1897 of the object was taken (and thus it should be set on the node the alias is
1898 referring to). We should remove the first use and then remove the
1899 following set. */
1900 address_taken = 1;
1901 cgraph_node *node = ultimate_alias_target ();
1902 node->address_taken = 1;
1903 }
1904
1905 /* Return local info for the compiled function. */
1906
1907 cgraph_local_info *
1908 cgraph_node::local_info (tree decl)
1909 {
1910 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1911 cgraph_node *node = get (decl);
1912 if (!node)
1913 return NULL;
1914 return &node->ultimate_alias_target ()->local;
1915 }
1916
1917 /* Return RTL info for the compiled function. */
1918
1919 cgraph_rtl_info *
1920 cgraph_node::rtl_info (tree decl)
1921 {
1922 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1923 cgraph_node *node = get (decl);
1924 if (!node)
1925 return NULL;
1926 enum availability avail;
1927 node = node->ultimate_alias_target (&avail);
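/* RTL info is only meaningful for the function currently being compiled or for available targets whose assembly has already been written out.  */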
1928 if (decl != current_function_decl
1929 && (avail < AVAIL_AVAILABLE
1930 || (node->decl != current_function_decl
1931 && !TREE_ASM_WRITTEN (node->decl))))
1932 return NULL;
1933 /* Allocate if it doesn't exist. */
1934 if (node->rtl == NULL)
1935 node->rtl = ggc_cleared_alloc<cgraph_rtl_info> ();
1936 return node->rtl;
1937 }
1938
1939 /* Return a string describing the failure REASON. */
1940
1941 const char*
1942 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1943 {
1944 #undef DEFCIFCODE
1945 #define DEFCIFCODE(code, type, string) string,
1946
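/* Each DEFCIFCODE (code, type, string) entry in cif-code.def expands to just its STRING here, so the table below is indexed by the cgraph_inline_failed_t enumerators in definition order.  */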
1947 static const char *cif_string_table[CIF_N_REASONS] = {
1948 #include "cif-code.def"
1949 };
1950
1951 /* Signedness of an enum type is implementation defined, so cast it
1952 to unsigned before testing. */
1953 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1954 return cif_string_table[reason];
1955 }
1956
1957 /* Return a type describing the failure REASON. */
1958
1959 cgraph_inline_failed_type_t
1960 cgraph_inline_failed_type (cgraph_inline_failed_t reason)
1961 {
1962 #undef DEFCIFCODE
1963 #define DEFCIFCODE(code, type, string) type,
1964
1965 static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
1966 #include "cif-code.def"
1967 };
1968
1969 /* Signedness of an enum type is implementation defined, so cast it
1970 to unsigned before testing. */
1971 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1972 return cif_type_table[reason];
1973 }
1974
1975 /* Names used to print out the availability enum. */
1976 const char * const cgraph_availability_names[] =
1977 {"unset", "not_available", "overwritable", "available", "local"};
1978
1979 /* Output flags of edge to a file F. */
1980
1981 void
1982 cgraph_edge::dump_edge_flags (FILE *f)
1983 {
1984 if (speculative)
1985 fprintf (f, "(speculative) ");
1986 if (!inline_failed)
1987 fprintf (f, "(inlined) ");
1988 if (call_stmt_cannot_inline_p)
1989 fprintf (f, "(call_stmt_cannot_inline_p) ");
1990 if (indirect_inlining_edge)
1991 fprintf (f, "(indirect_inlining) ");
1992 if (count.initialized_p ())
1993 {
1994 fprintf (f, "(");
1995 count.dump (f);
1996 fprintf (f, ",");
1997 fprintf (f, "%.2f per call) ", sreal_frequency ().to_double ());
1998 }
1999 if (can_throw_external)
2000 fprintf (f, "(can throw external) ");
2001 }
2002
2003 /* Dump call graph node to file F. */
2004
2005 void
2006 cgraph_node::dump (FILE *f)
2007 {
2008 cgraph_edge *edge;
2009
2010 dump_base (f);
2011
2012 if (global.inlined_to)
2013 fprintf (f, " Function %s is inline copy in %s\n",
2014 dump_name (),
2015 global.inlined_to->dump_name ());
2016 if (clone_of)
2017 fprintf (f, " Clone of %s\n", clone_of->dump_asm_name ());
2018 if (symtab->function_flags_ready)
2019 fprintf (f, " Availability: %s\n",
2020 cgraph_availability_names [get_availability ()]);
2021
2022 if (profile_id)
2023 fprintf (f, " Profile id: %i\n",
2024 profile_id);
2025 cgraph_function_version_info *vi = function_version ();
2026 if (vi != NULL)
2027 {
2028 fprintf (f, " Version info: ");
2029 if (vi->prev != NULL)
2030 {
2031 fprintf (f, "prev: ");
2032 fprintf (f, "%s ", vi->prev->this_node->dump_asm_name ());
2033 }
2034 if (vi->next != NULL)
2035 {
2036 fprintf (f, "next: ");
2037 fprintf (f, "%s ", vi->next->this_node->dump_asm_name ());
2038 }
2039 if (vi->dispatcher_resolver != NULL_TREE)
2040 fprintf (f, "dispatcher: %s",
2041 lang_hooks.decl_printable_name (vi->dispatcher_resolver, 2));
2042
2043 fprintf (f, "\n");
2044 }
2045 fprintf (f, " Function flags:");
2046 if (count.initialized_p ())
2047 {
2048 fprintf (f, " count:");
2049 count.dump (f);
2050 }
2051 if (tp_first_run > 0)
2052 fprintf (f, " first_run:%i", tp_first_run);
2053 if (origin)
2054 fprintf (f, " nested in:%s", origin->asm_name ());
2055 if (gimple_has_body_p (decl))
2056 fprintf (f, " body");
2057 if (process)
2058 fprintf (f, " process");
2059 if (local.local)
2060 fprintf (f, " local");
2061 if (local.redefined_extern_inline)
2062 fprintf (f, " redefined_extern_inline");
2063 if (only_called_at_startup)
2064 fprintf (f, " only_called_at_startup");
2065 if (only_called_at_exit)
2066 fprintf (f, " only_called_at_exit");
2067 if (tm_clone)
2068 fprintf (f, " tm_clone");
2069 if (calls_comdat_local)
2070 fprintf (f, " calls_comdat_local");
2071 if (icf_merged)
2072 fprintf (f, " icf_merged");
2073 if (merged_comdat)
2074 fprintf (f, " merged_comdat");
2075 if (split_part)
2076 fprintf (f, " split_part");
2077 if (indirect_call_target)
2078 fprintf (f, " indirect_call_target");
2079 if (nonfreeing_fn)
2080 fprintf (f, " nonfreeing_fn");
2081 if (DECL_STATIC_CONSTRUCTOR (decl))
2082 fprintf (f," static_constructor (priority:%i)", get_init_priority ());
2083 if (DECL_STATIC_DESTRUCTOR (decl))
2084 fprintf (f," static_destructor (priority:%i)", get_fini_priority ());
2085 if (frequency == NODE_FREQUENCY_HOT)
2086 fprintf (f, " hot");
2087 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2088 fprintf (f, " unlikely_executed");
2089 if (frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2090 fprintf (f, " executed_once");
2091 if (opt_for_fn (decl, optimize_size))
2092 fprintf (f, " optimize_size");
2093 if (parallelized_function)
2094 fprintf (f, " parallelized_function");
2095
2096 fprintf (f, "\n");
2097
2098 if (thunk.thunk_p)
2099 {
2100 fprintf (f, " Thunk");
2101 if (thunk.alias)
2102 fprintf (f, " of %s (asm:%s)",
2103 lang_hooks.decl_printable_name (thunk.alias, 2),
2104 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2105 fprintf (f, " fixed offset %i virtual value %i indirect_offset %i "
2106 "has virtual offset %i\n",
2107 (int)thunk.fixed_offset,
2108 (int)thunk.virtual_value,
2109 (int)thunk.indirect_offset,
2110 (int)thunk.virtual_offset_p);
2111 }
2112 else if (former_thunk_p ())
2113 fprintf (f, " Former thunk fixed offset %i virtual value %i "
2114 "indirect_offset %i has virtual offset %i\n",
2115 (int)thunk.fixed_offset,
2116 (int)thunk.virtual_value,
2117 (int)thunk.indirect_offset,
2118 (int)thunk.virtual_offset_p);
2119 if (alias && thunk.alias
2120 && DECL_P (thunk.alias))
2121 {
2122 fprintf (f, " Alias of %s",
2123 lang_hooks.decl_printable_name (thunk.alias, 2));
2124 if (DECL_ASSEMBLER_NAME_SET_P (thunk.alias))
2125 fprintf (f, " (asm:%s)",
2126 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2127 fprintf (f, "\n");
2128 }
2129
2130 fprintf (f, " Called by: ");
2131
2132 profile_count sum = profile_count::zero ();
2133 for (edge = callers; edge; edge = edge->next_caller)
2134 {
2135 fprintf (f, "%s ", edge->caller->dump_name ());
2136 edge->dump_edge_flags (f);
2137 if (edge->count.initialized_p ())
2138 sum += edge->count.ipa ();
2139 }
2140
2141 fprintf (f, "\n Calls: ");
2142 for (edge = callees; edge; edge = edge->next_callee)
2143 {
2144 fprintf (f, "%s ", edge->callee->dump_name ());
2145 edge->dump_edge_flags (f);
2146 }
2147 fprintf (f, "\n");
2148
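/* Sanity-check the profile: for inline copies and functions only called directly, the sum of caller counts (including alias counts) must match the node count; otherwise it must not noticeably exceed it.  */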
2149 if (count.ipa ().initialized_p ())
2150 {
2151 bool ok = true;
2152 bool min = false;
2153 ipa_ref *ref;
2154
2155 FOR_EACH_ALIAS (this, ref)
2156 if (dyn_cast <cgraph_node *> (ref->referring)->count.initialized_p ())
2157 sum += dyn_cast <cgraph_node *> (ref->referring)->count.ipa ();
2158
2159 if (global.inlined_to
2160 || (symtab->state < EXPANSION
2161 && ultimate_alias_target () == this && only_called_directly_p ()))
2162 ok = !count.ipa ().differs_from_p (sum);
2163 else if (count.ipa () > profile_count::from_gcov_type (100)
2164 && count.ipa () < sum.apply_scale (99, 100))
2165 ok = false, min = true;
2166 if (!ok)
2167 {
2168 fprintf (f, " Invalid sum of caller counts ");
2169 sum.dump (f);
2170 if (min)
2171 fprintf (f, ", should be at most ");
2172 else
2173 fprintf (f, ", should be ");
2174 count.ipa ().dump (f);
2175 fprintf (f, "\n");
2176 }
2177 }
2178
2179 for (edge = indirect_calls; edge; edge = edge->next_callee)
2180 {
2181 if (edge->indirect_info->polymorphic)
2182 {
2183 fprintf (f, " Polymorphic indirect call of type ");
2184 print_generic_expr (f, edge->indirect_info->otr_type, TDF_SLIM);
2185 fprintf (f, " token:%i", (int) edge->indirect_info->otr_token);
2186 }
2187 else
2188 fprintf (f, " Indirect call");
2189 edge->dump_edge_flags (f);
2190 if (edge->indirect_info->param_index != -1)
2191 {
2192 fprintf (f, " of param:%i", edge->indirect_info->param_index);
2193 if (edge->indirect_info->agg_contents)
2194 fprintf (f, " loaded from %s %s at offset %i",
2195 edge->indirect_info->member_ptr ? "member ptr" : "aggregate",
2196 edge->indirect_info->by_ref ? "passed by reference":"",
2197 (int)edge->indirect_info->offset);
2198 if (edge->indirect_info->vptr_changed)
2199 fprintf (f, " (vptr maybe changed)");
2200 }
2201 fprintf (f, "\n");
2202 if (edge->indirect_info->polymorphic)
2203 edge->indirect_info->context.dump (f);
2204 }
2205 }
2206
2207 /* Dump call graph node NODE to stderr. */
2208
2209 DEBUG_FUNCTION void
2210 cgraph_node::debug (void)
2211 {
2212 dump (stderr);
2213 }
2214
2215 /* Dump the callgraph to file F. */
2216
2217 void
2218 cgraph_node::dump_cgraph (FILE *f)
2219 {
2220 cgraph_node *node;
2221
2222 fprintf (f, "callgraph:\n\n");
2223 FOR_EACH_FUNCTION (node)
2224 node->dump (f);
2225 }
2226
2227 /* Return true when the DECL can possibly be inlined. */
2228
2229 bool
2230 cgraph_function_possibly_inlined_p (tree decl)
2231 {
2232 if (!symtab->global_info_ready)
2233 return !DECL_UNINLINABLE (decl);
2234 return DECL_POSSIBLY_INLINED (decl);
2235 }
2236
2237 /* cgraph_node is no longer nested function; update cgraph accordingly. */
2238 void
2239 cgraph_node::unnest (void)
2240 {
2241 gcc_assert (origin);
2242 cgraph_node **node2 = &origin->nested;
2243
2244 while (*node2 != this)
2245 node2 = &(*node2)->next_nested;
2246 *node2 = next_nested;
2247 origin = NULL;
2248 }
2249
2250 /* Return function availability. See cgraph.h for description of individual
2251 return values. */
2252 enum availability
2253 cgraph_node::get_availability (symtab_node *ref)
2254 {
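/* If the reference comes from a function, use the node its body has been inlined into (if any) as the referring symbol when determining availability.  */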
2255 if (ref)
2256 {
2257 cgraph_node *cref = dyn_cast <cgraph_node *> (ref);
2258 if (cref)
2259 ref = cref->global.inlined_to;
2260 }
2261 enum availability avail;
2262 if (!analyzed)
2263 avail = AVAIL_NOT_AVAILABLE;
2264 else if (local.local)
2265 avail = AVAIL_LOCAL;
2266 else if (global.inlined_to)
2267 avail = AVAIL_AVAILABLE;
2268 else if (transparent_alias)
2269 ultimate_alias_target (&avail, ref);
2270 else if (ifunc_resolver
2271 || lookup_attribute ("noipa", DECL_ATTRIBUTES (decl)))
2272 avail = AVAIL_INTERPOSABLE;
2273 else if (!externally_visible)
2274 avail = AVAIL_AVAILABLE;
2275 /* If this is a reference from the symbol itself and there are no aliases, we
2276 may be sure that the symbol was not interposed by something else because
2277 the symbol itself would be unreachable otherwise.
2278
2279 Also, comdat groups are always resolved as a whole. */
2280 else if ((this == ref && !has_aliases_p ())
2281 || (ref && get_comdat_group ()
2282 && get_comdat_group () == ref->get_comdat_group ()))
2283 avail = AVAIL_AVAILABLE;
2284 /* Inline functions are safe to be analyzed even if their symbol can
2285 be overwritten at runtime. It is not meaningful to enforce any sane
2286 behavior on replacing an inline function by a different body. */
2287 else if (DECL_DECLARED_INLINE_P (decl))
2288 avail = AVAIL_AVAILABLE;
2289
2290 /* If the function can be overwritten, return OVERWRITABLE. Take
2291 care at least of two notable extensions - the COMDAT functions
2292 used to share template instantiations in C++ (this is symmetric
2293 to the code in cp_cannot_inline_tree_fn and probably should be shared,
2294 with the inlinability hooks completely eliminated). */
2295
2296 else if (decl_replaceable_p (decl) && !DECL_EXTERNAL (decl))
2297 avail = AVAIL_INTERPOSABLE;
2298 else avail = AVAIL_AVAILABLE;
2299
2300 return avail;
2301 }
2302
2303 /* Worker for cgraph_node_can_be_local_p. */
2304 static bool
2305 cgraph_node_cannot_be_local_p_1 (cgraph_node *node, void *)
2306 {
2307 return !(!node->force_output
2308 && ((DECL_COMDAT (node->decl)
2309 && !node->forced_by_abi
2310 && !node->used_from_object_file_p ()
2311 && !node->same_comdat_group)
2312 || !node->externally_visible));
2313 }
2314
2315 /* Return true if cgraph_node can be made local for API change.
2316 Extern inline functions and C++ COMDAT functions can be made local
2317 at the expense of possible code size growth if the function is used in multiple
2318 compilation units. */
2319 bool
2320 cgraph_node::can_be_local_p (void)
2321 {
2322 return (!address_taken
2323 && !call_for_symbol_thunks_and_aliases (cgraph_node_cannot_be_local_p_1,
2324 NULL, true));
2325 }
2326
2327 /* Call callback on cgraph_node, thunks and aliases associated with cgraph_node.
2328 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
2329 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
2330 skipped. */
2331 bool
2332 cgraph_node::call_for_symbol_thunks_and_aliases (bool (*callback)
2333 (cgraph_node *, void *),
2334 void *data,
2335 bool include_overwritable,
2336 bool exclude_virtual_thunks)
2337 {
2338 cgraph_edge *e;
2339 ipa_ref *ref;
2340 enum availability avail = AVAIL_AVAILABLE;
2341
2342 if (include_overwritable
2343 || (avail = get_availability ()) > AVAIL_INTERPOSABLE)
2344 {
2345 if (callback (this, data))
2346 return true;
2347 }
2348 FOR_EACH_ALIAS (this, ref)
2349 {
2350 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2351 if (include_overwritable
2352 || alias->get_availability () > AVAIL_INTERPOSABLE)
2353 if (alias->call_for_symbol_thunks_and_aliases (callback, data,
2354 include_overwritable,
2355 exclude_virtual_thunks))
2356 return true;
2357 }
2358 if (avail <= AVAIL_INTERPOSABLE)
2359 return false;
2360 for (e = callers; e; e = e->next_caller)
2361 if (e->caller->thunk.thunk_p
2362 && (include_overwritable
2363 || e->caller->get_availability () > AVAIL_INTERPOSABLE)
2364 && !(exclude_virtual_thunks
2365 && e->caller->thunk.virtual_offset_p))
2366 if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
2367 include_overwritable,
2368 exclude_virtual_thunks))
2369 return true;
2370
2371 return false;
2372 }
2373
2374 /* Worker to make NODE local. */
2375
2376 bool
2377 cgraph_node::make_local (cgraph_node *node, void *)
2378 {
2379 gcc_checking_assert (node->can_be_local_p ());
2380 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2381 {
2382 node->make_decl_local ();
2383 node->set_section (NULL);
2384 node->set_comdat_group (NULL);
2385 node->externally_visible = false;
2386 node->forced_by_abi = false;
2387 node->local.local = true;
2388 node->set_section (NULL);
2389 node->unique_name = ((node->resolution == LDPR_PREVAILING_DEF_IRONLY
2390 || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
2391 && !flag_incremental_link);
2392 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2393 gcc_assert (node->get_availability () == AVAIL_LOCAL);
2394 }
2395 return false;
2396 }
2397
2398 /* Make the cgraph node local. */
2399
2400 void
2401 cgraph_node::make_local (void)
2402 {
2403 call_for_symbol_thunks_and_aliases (cgraph_node::make_local, NULL, true);
2404 }
2405
2406 /* Worker to set nothrow flag. */
2407
2408 static void
2409 set_nothrow_flag_1 (cgraph_node *node, bool nothrow, bool non_call,
2410 bool *changed)
2411 {
2412 cgraph_edge *e;
2413
2414 if (nothrow && !TREE_NOTHROW (node->decl))
2415 {
2416 /* With non-call exceptions we can't say for sure whether another function body
2417 was not possibly optimized to still throw. */
2418 if (!non_call || node->binds_to_current_def_p ())
2419 {
2420 TREE_NOTHROW (node->decl) = true;
2421 *changed = true;
2422 for (e = node->callers; e; e = e->next_caller)
2423 e->can_throw_external = false;
2424 }
2425 }
2426 else if (!nothrow && TREE_NOTHROW (node->decl))
2427 {
2428 TREE_NOTHROW (node->decl) = false;
2429 *changed = true;
2430 }
2431 ipa_ref *ref;
2432 FOR_EACH_ALIAS (node, ref)
2433 {
2434 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2435 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2436 set_nothrow_flag_1 (alias, nothrow, non_call, changed);
2437 }
2438 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2439 if (e->caller->thunk.thunk_p
2440 && (!nothrow || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2441 set_nothrow_flag_1 (e->caller, nothrow, non_call, changed);
2442 }
2443
2444 /* Set TREE_NOTHROW on cgraph_node's decl and on the decls of its aliases
2445 and thunks, if any, to NOTHROW. Return true if any change was done. */
2446
2447 bool
2448 cgraph_node::set_nothrow_flag (bool nothrow)
2449 {
2450 bool changed = false;
2451 bool non_call = opt_for_fn (decl, flag_non_call_exceptions);
2452
2453 if (!nothrow || get_availability () > AVAIL_INTERPOSABLE)
2454 set_nothrow_flag_1 (this, nothrow, non_call, &changed);
2455 else
2456 {
2457 ipa_ref *ref;
2458
2459 FOR_EACH_ALIAS (this, ref)
2460 {
2461 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2462 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2463 set_nothrow_flag_1 (alias, nothrow, non_call, &changed);
2464 }
2465 }
2466 return changed;
2467 }
2468
2469 /* Worker to set malloc flag. */
2470 static void
2471 set_malloc_flag_1 (cgraph_node *node, bool malloc_p, bool *changed)
2472 {
2473 if (malloc_p && !DECL_IS_MALLOC (node->decl))
2474 {
2475 DECL_IS_MALLOC (node->decl) = true;
2476 *changed = true;
2477 }
2478
2479 ipa_ref *ref;
2480 FOR_EACH_ALIAS (node, ref)
2481 {
2482 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2483 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2484 set_malloc_flag_1 (alias, malloc_p, changed);
2485 }
2486
2487 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2488 if (e->caller->thunk.thunk_p
2489 && (!malloc_p || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2490 set_malloc_flag_1 (e->caller, malloc_p, changed);
2491 }
2492
2493 /* Set DECL_IS_MALLOC on NODE's decl and on NODE's aliases if any. */
2494
2495 bool
2496 cgraph_node::set_malloc_flag (bool malloc_p)
2497 {
2498 bool changed = false;
2499
2500 if (!malloc_p || get_availability () > AVAIL_INTERPOSABLE)
2501 set_malloc_flag_1 (this, malloc_p, &changed);
2502 else
2503 {
2504 ipa_ref *ref;
2505
2506 FOR_EACH_ALIAS (this, ref)
2507 {
2508 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2509 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2510 set_malloc_flag_1 (alias, malloc_p, &changed);
2511 }
2512 }
2513 return changed;
2514 }
2515
2516 /* Worker to set_const_flag. */
2517
2518 static void
2519 set_const_flag_1 (cgraph_node *node, bool set_const, bool looping,
2520 bool *changed)
2521 {
2522 /* Static constructors and destructors without a side effect can be
2523 optimized out. */
2524 if (set_const && !looping)
2525 {
2526 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2527 {
2528 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2529 *changed = true;
2530 }
2531 if (DECL_STATIC_DESTRUCTOR (node->decl))
2532 {
2533 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2534 *changed = true;
2535 }
2536 }
2537 if (!set_const)
2538 {
2539 if (TREE_READONLY (node->decl))
2540 {
2541 TREE_READONLY (node->decl) = 0;
2542 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2543 *changed = true;
2544 }
2545 }
2546 else
2547 {
2548 /* Consider function:
2549
2550 bool a(int *p)
2551 {
2552 return *p==*p;
2553 }
2554
2555 During early optimization we will turn this into:
2556
2557 bool a(int *p)
2558 {
2559 return true;
2560 }
2561
2562 Now this function will be detected as CONST; however, when interposed
2563 it may end up being just PURE. We must always assume the worst
2564 scenario here. */
2565 if (TREE_READONLY (node->decl))
2566 {
2567 if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2568 {
2569 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2570 *changed = true;
2571 }
2572 }
2573 else if (node->binds_to_current_def_p ())
2574 {
2575 TREE_READONLY (node->decl) = true;
2576 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2577 DECL_PURE_P (node->decl) = false;
2578 *changed = true;
2579 }
2580 else
2581 {
2582 if (dump_file && (dump_flags & TDF_DETAILS))
2583 fprintf (dump_file, "Dropping state to PURE because function does "
2584 "not bind to current def.\n");
2585 if (!DECL_PURE_P (node->decl))
2586 {
2587 DECL_PURE_P (node->decl) = true;
2588 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2589 *changed = true;
2590 }
2591 else if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2592 {
2593 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2594 *changed = true;
2595 }
2596 }
2597 }
2598
2599 ipa_ref *ref;
2600 FOR_EACH_ALIAS (node, ref)
2601 {
2602 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2603 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2604 set_const_flag_1 (alias, set_const, looping, changed);
2605 }
2606 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2607 if (e->caller->thunk.thunk_p
2608 && (!set_const || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2609 {
2610 /* Virtual thunks access virtual offset in the vtable, so they can
2611 only be pure, never const. */
2612 if (set_const
2613 && (e->caller->thunk.virtual_offset_p
2614 || !node->binds_to_current_def_p (e->caller)))
2615 *changed |= e->caller->set_pure_flag (true, looping);
2616 else
2617 set_const_flag_1 (e->caller, set_const, looping, changed);
2618 }
2619 }
2620
2621 /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
2622 If SET_CONST is false, clear the flag.
2623
2624 When setting the flag be careful about possible interposition:
2625 do not set the flag for functions that can be interposed, and set only the
2626 pure flag for functions that may bind to a different definition.
2627
2628 Return true if any change was done. */
2629
2630 bool
2631 cgraph_node::set_const_flag (bool set_const, bool looping)
2632 {
2633 bool changed = false;
2634 if (!set_const || get_availability () > AVAIL_INTERPOSABLE)
2635 set_const_flag_1 (this, set_const, looping, &changed);
2636 else
2637 {
2638 ipa_ref *ref;
2639
2640 FOR_EACH_ALIAS (this, ref)
2641 {
2642 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2643 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2644 set_const_flag_1 (alias, set_const, looping, &changed);
2645 }
2646 }
2647 return changed;
2648 }
2649
2650 /* Info used by set_pure_flag_1. */
2651
2652 struct set_pure_flag_info
2653 {
2654 bool pure;
2655 bool looping;
2656 bool changed;
2657 };
2658
2659 /* Worker to set_pure_flag. */
2660
2661 static bool
2662 set_pure_flag_1 (cgraph_node *node, void *data)
2663 {
2664 struct set_pure_flag_info *info = (struct set_pure_flag_info *)data;
2665 /* Static constructors and destructors without a side effect can be
2666 optimized out. */
2667 if (info->pure && !info->looping)
2668 {
2669 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2670 {
2671 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2672 info->changed = true;
2673 }
2674 if (DECL_STATIC_DESTRUCTOR (node->decl))
2675 {
2676 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2677 info->changed = true;
2678 }
2679 }
2680 if (info->pure)
2681 {
2682 if (!DECL_PURE_P (node->decl) && !TREE_READONLY (node->decl))
2683 {
2684 DECL_PURE_P (node->decl) = true;
2685 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = info->looping;
2686 info->changed = true;
2687 }
2688 else if (DECL_LOOPING_CONST_OR_PURE_P (node->decl)
2689 && !info->looping)
2690 {
2691 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2692 info->changed = true;
2693 }
2694 }
2695 else
2696 {
2697 if (DECL_PURE_P (node->decl))
2698 {
2699 DECL_PURE_P (node->decl) = false;
2700 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2701 info->changed = true;
2702 }
2703 }
2704 return false;
2705 }
2706
2707 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
2708 if any to PURE.
2709
2710 When setting the flag, be careful about possible interposition.
2711 Return true if any change was done. */
2712
2713 bool
2714 cgraph_node::set_pure_flag (bool pure, bool looping)
2715 {
2716 struct set_pure_flag_info info = {pure, looping, false};
2717 if (!pure)
2718 looping = false;
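/* When clearing the flag we must also visit interposable aliases and thunks, hence INCLUDE_OVERWRITABLE is !PURE; virtual thunks are always excluded.  */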
2719 call_for_symbol_thunks_and_aliases (set_pure_flag_1, &info, !pure, true);
2720 return info.changed;
2721 }
2722
2723 /* Return true when cgraph_node cannot return or throw and thus
2724 it is safe to ignore its side effects for IPA analysis. */
2725
2726 bool
2727 cgraph_node::cannot_return_p (void)
2728 {
2729 int flags = flags_from_decl_or_type (decl);
2730 if (!opt_for_fn (decl, flag_exceptions))
2731 return (flags & ECF_NORETURN) != 0;
2732 else
2733 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2734 == (ECF_NORETURN | ECF_NOTHROW));
2735 }
2736
2737 /* Return true when call of edge cannot lead to return from caller
2738 and thus it is safe to ignore its side effects for IPA analysis
2739 when computing side effects of the caller.
2740 FIXME: We could actually mark all edges that have no reaching
2741 path to the exit block or throw to get better results. */
2742 bool
2743 cgraph_edge::cannot_lead_to_return_p (void)
2744 {
2745 if (caller->cannot_return_p ())
2746 return true;
2747 if (indirect_unknown_callee)
2748 {
2749 int flags = indirect_info->ecf_flags;
2750 if (!opt_for_fn (caller->decl, flag_exceptions))
2751 return (flags & ECF_NORETURN) != 0;
2752 else
2753 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2754 == (ECF_NORETURN | ECF_NOTHROW));
2755 }
2756 else
2757 return callee->cannot_return_p ();
2758 }
2759
2760 /* Return true if the call can be hot. */
2761
2762 bool
2763 cgraph_edge::maybe_hot_p (void)
2764 {
2765 if (!maybe_hot_count_p (NULL, count.ipa ()))
2766 return false;
2767 if (caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
2768 || (callee
2769 && callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
2770 return false;
2771 if (caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
2772 && (callee
2773 && callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE))
2774 return false;
2775 if (opt_for_fn (caller->decl, optimize_size))
2776 return false;
2777 if (caller->frequency == NODE_FREQUENCY_HOT)
2778 return true;
2779 /* If the profile is not known yet, be conservative.
2780 FIXME: this predicate is used by the early inliner and could do better there. */
2781 if (symtab->state < IPA_SSA)
2782 return true;
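/* For functions executed once, consider the call hot only if it is expected to run at least 1.5 times per invocation of the caller.  */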
2783 if (caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE
2784 && sreal_frequency () * 2 < 3)
2785 return false;
2786 if (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) == 0
2787 || sreal_frequency () * PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) <= 1)
2788 return false;
2789 return true;
2790 }
2791
2792 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2793
2794 static bool
2795 nonremovable_p (cgraph_node *node, void *)
2796 {
2797 return !node->can_remove_if_no_direct_calls_and_refs_p ();
2798 }
2799
2800 /* Return true if whole comdat group can be removed if there are no direct
2801 calls to THIS. */
2802
2803 bool
2804 cgraph_node::can_remove_if_no_direct_calls_p (bool will_inline)
2805 {
2806 struct ipa_ref *ref;
2807
2808 /* For local symbols or symbols not in a comdat group, this is the same as
2809 can_remove_if_no_direct_calls_p. */
2810 if (!externally_visible || !same_comdat_group)
2811 {
2812 if (DECL_EXTERNAL (decl))
2813 return true;
2814 if (address_taken)
2815 return false;
2816 return !call_for_symbol_and_aliases (nonremovable_p, NULL, true);
2817 }
2818
2819 if (will_inline && address_taken)
2820 return false;
2821
2822 /* Otherwise check if we can remove the symbol itself and then verify
2823 that the only uses of the comdat group are direct calls to THIS
2824 or its aliases. */
2825 if (!can_remove_if_no_direct_calls_and_refs_p ())
2826 return false;
2827
2828 /* Check that all refs come from within the comdat group. */
2829 for (int i = 0; iterate_referring (i, ref); i++)
2830 if (ref->referring->get_comdat_group () != get_comdat_group ())
2831 return false;
2832
2833 struct cgraph_node *target = ultimate_alias_target ();
2834 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2835 next != this; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2836 {
2837 if (!externally_visible)
2838 continue;
2839 if (!next->alias
2840 && !next->can_remove_if_no_direct_calls_and_refs_p ())
2841 return false;
2842
2843 /* If we see a different symbol than THIS, be sure to check calls. */
2844 if (next->ultimate_alias_target () != target)
2845 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2846 if (e->caller->get_comdat_group () != get_comdat_group ()
2847 || will_inline)
2848 return false;
2849
2850 /* If function is not being inlined, we care only about
2851 references outside of the comdat group. */
2852 if (!will_inline)
2853 for (int i = 0; next->iterate_referring (i, ref); i++)
2854 if (ref->referring->get_comdat_group () != get_comdat_group ())
2855 return false;
2856 }
2857 return true;
2858 }
2859
2860 /* Return true when function cgraph_node can be expected to be removed
2861 from program when direct calls in this compilation unit are removed.
2862
2863 As a special case COMDAT functions are
2864 cgraph_can_remove_if_no_direct_calls_p while they are not
2865 cgraph_only_called_directly_p (it is possible they are called from another
2866 unit).
2867 
2868 This function behaves as cgraph_only_called_directly_p because eliminating
2869 all uses of a COMDAT function does not necessarily make it disappear from
2870 the program unless we are compiling the whole program or we do LTO. In this
2871 case we know we win since dynamic linking will not really discard the
2872 linkonce section. */
2873
2874 bool
2875 cgraph_node::will_be_removed_from_program_if_no_direct_calls_p
2876 (bool will_inline)
2877 {
2878 gcc_assert (!global.inlined_to);
2879 if (DECL_EXTERNAL (decl))
2880 return true;
2881
2882 if (!in_lto_p && !flag_whole_program)
2883 {
2884 /* If the symbol is in a comdat group, we need to verify that the whole comdat
2885 group becomes unreachable. Technically we could skip references from
2886 within the group, too. */
2887 if (!only_called_directly_p ())
2888 return false;
2889 if (same_comdat_group && externally_visible)
2890 {
2891 struct cgraph_node *target = ultimate_alias_target ();
2892
2893 if (will_inline && address_taken)
2894 return true;
2895 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2896 next != this;
2897 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2898 {
2899 if (!externally_visible)
2900 continue;
2901 if (!next->alias
2902 && !next->only_called_directly_p ())
2903 return false;
2904
2905 /* If we see a different symbol than THIS,
2906 be sure to check calls. */
2907 if (next->ultimate_alias_target () != target)
2908 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2909 if (e->caller->get_comdat_group () != get_comdat_group ()
2910 || will_inline)
2911 return false;
2912 }
2913 }
2914 return true;
2915 }
2916 else
2917 return can_remove_if_no_direct_calls_p (will_inline);
2918 }
2919
2920
2921 /* Worker for cgraph_only_called_directly_p. */
2922
2923 static bool
2924 cgraph_not_only_called_directly_p_1 (cgraph_node *node, void *)
2925 {
2926 return !node->only_called_directly_or_aliased_p ();
2927 }
2928
2929 /* Return true when function cgraph_node and all its aliases are only called
2930 directly,
2931 i.e. it is not externally visible, its address was not taken, and
2932 it is not used in any other non-standard way. */
2933
2934 bool
2935 cgraph_node::only_called_directly_p (void)
2936 {
2937 gcc_assert (ultimate_alias_target () == this);
2938 return !call_for_symbol_and_aliases (cgraph_not_only_called_directly_p_1,
2939 NULL, true);
2940 }
2941
2942
2943 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
2944
2945 static bool
2946 collect_callers_of_node_1 (cgraph_node *node, void *data)
2947 {
2948 vec<cgraph_edge *> *redirect_callers = (vec<cgraph_edge *> *)data;
2949 cgraph_edge *cs;
2950 enum availability avail;
2951 node->ultimate_alias_target (&avail);
2952
2953 if (avail > AVAIL_INTERPOSABLE)
2954 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
2955 if (!cs->indirect_inlining_edge
2956 && !cs->caller->thunk.thunk_p)
2957 redirect_callers->safe_push (cs);
2958 return false;
2959 }
2960
2961 /* Collect all callers of cgraph_node and its aliases that are known to lead to
2962 cgraph_node (i.e. are not overwritable). */
2963
2964 vec<cgraph_edge *>
2965 cgraph_node::collect_callers (void)
2966 {
2967 vec<cgraph_edge *> redirect_callers = vNULL;
2968 call_for_symbol_thunks_and_aliases (collect_callers_of_node_1,
2969 &redirect_callers, false);
2970 return redirect_callers;
2971 }
2972
2973
2974 /* Return TRUE if NODE2 is a clone of NODE or is equivalent to it. Return
2975 optimistically true if this cannot be determined. */
2976
2977 static bool
2978 clone_of_p (cgraph_node *node, cgraph_node *node2)
2979 {
2980 bool skipped_thunk = false;
2981 node = node->ultimate_alias_target ();
2982 node2 = node2->ultimate_alias_target ();
2983
2984 /* There are no virtual clones of thunks so check former_clone_of or if we
2985 might have skipped thunks because these adjustments are no longer
2986 necessary. */
2987 while (node->thunk.thunk_p || node->former_thunk_p ())
2988 {
2989 if (node2->former_clone_of == node->decl)
2990 return true;
2991 if (!node->thunk.this_adjusting)
2992 return false;
2993 /* In case of instrumented expanded thunks, which can have multiple calls
2994 in them, we do not know how to continue and just have to be
2995 optimistic. */
2996 if (node->callees->next_callee)
2997 return true;
2998 node = node->callees->callee->ultimate_alias_target ();
2999 skipped_thunk = true;
3000 }
3001
3002 if (skipped_thunk)
3003 {
3004 if (!node2->clone.args_to_skip
3005 || !bitmap_bit_p (node2->clone.args_to_skip, 0))
3006 return false;
3007 if (node2->former_clone_of == node->decl)
3008 return true;
3009 else if (!node2->clone_of)
3010 return false;
3011 }
3012
3013 while (node2 && node->decl != node2->decl)
3014 node2 = node2->clone_of;
3015 return node2 != NULL;
3016 }
3017
3018 /* Verify edge count and frequency. */
3019
3020 bool
3021 cgraph_edge::verify_count ()
3022 {
3023 bool error_found = false;
3024 if (!count.verify ())
3025 {
3026 error ("caller edge count invalid");
3027 error_found = true;
3028 }
3029 return error_found;
3030 }
3031
3032 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
3033 static void
3034 cgraph_debug_gimple_stmt (function *this_cfun, gimple *stmt)
3035 {
3036 bool fndecl_was_null = false;
3037 /* debug_gimple_stmt needs correct cfun */
3038 if (cfun != this_cfun)
3039 set_cfun (this_cfun);
3040 /* ...and an actual current_function_decl */
3041 if (!current_function_decl)
3042 {
3043 current_function_decl = this_cfun->decl;
3044 fndecl_was_null = true;
3045 }
3046 debug_gimple_stmt (stmt);
3047 if (fndecl_was_null)
3048 current_function_decl = NULL;
3049 }
3050
3051 /* Verify that call graph edge corresponds to DECL from the associated
3052 statement. Return true if the verification should fail. */
3053
3054 bool
3055 cgraph_edge::verify_corresponds_to_fndecl (tree decl)
3056 {
3057 cgraph_node *node;
3058
3059 if (!decl || callee->global.inlined_to)
3060 return false;
3061 if (symtab->state == LTO_STREAMING)
3062 return false;
3063 node = cgraph_node::get (decl);
3064
3065 /* We do not know if a node from a different partition is an alias or what it
3066 aliases and therefore cannot do the former_clone_of check reliably. When
3067 body_removed is set, we have lost all information about what it was an alias
3068 or thunk of and also cannot proceed. */
3069 if (!node
3070 || node->body_removed
3071 || node->in_other_partition
3072 || callee->icf_merged
3073 || callee->in_other_partition)
3074 return false;
3075
3076 node = node->ultimate_alias_target ();
3077
3078 /* Optimizers can redirect unreachable calls or calls triggering undefined
3079 behavior to builtin_unreachable. */
3080
3081 if (fndecl_built_in_p (callee->decl, BUILT_IN_UNREACHABLE))
3082 return false;
3083
3084 if (callee->former_clone_of != node->decl
3085 && (node != callee->ultimate_alias_target ())
3086 && !clone_of_p (node, callee))
3087 return true;
3088 else
3089 return false;
3090 }
3091
3092 /* Verify consistency of a given cgraph node. */
3093 DEBUG_FUNCTION void
3094 cgraph_node::verify_node (void)
3095 {
3096 cgraph_edge *e;
3097 function *this_cfun = DECL_STRUCT_FUNCTION (decl);
3098 basic_block this_block;
3099 gimple_stmt_iterator gsi;
3100 bool error_found = false;
3101
3102 if (seen_error ())
3103 return;
3104
3105 timevar_push (TV_CGRAPH_VERIFY);
3106 error_found |= verify_base ();
3107 for (e = callees; e; e = e->next_callee)
3108 if (e->aux)
3109 {
3110 error ("aux field set for edge %s->%s",
3111 identifier_to_locale (e->caller->name ()),
3112 identifier_to_locale (e->callee->name ()));
3113 error_found = true;
3114 }
3115 if (!count.verify ())
3116 {
3117 error ("cgraph count invalid");
3118 error_found = true;
3119 }
3120 if (global.inlined_to && same_comdat_group)
3121 {
3122 error ("inline clone in same comdat group list");
3123 error_found = true;
3124 }
3125 if (!definition && !in_other_partition && local.local)
3126 {
3127 error ("local symbols must be defined");
3128 error_found = true;
3129 }
3130 if (global.inlined_to && externally_visible)
3131 {
3132 error ("externally visible inline clone");
3133 error_found = true;
3134 }
3135 if (global.inlined_to && address_taken)
3136 {
3137 error ("inline clone with address taken");
3138 error_found = true;
3139 }
3140 if (global.inlined_to && force_output)
3141 {
3142 error ("inline clone is forced to output");
3143 error_found = true;
3144 }
3145 for (e = indirect_calls; e; e = e->next_callee)
3146 {
3147 if (e->aux)
3148 {
3149 error ("aux field set for indirect edge from %s",
3150 identifier_to_locale (e->caller->name ()));
3151 error_found = true;
3152 }
3153 if (!e->indirect_unknown_callee
3154 || !e->indirect_info)
3155 {
3156 error ("An indirect edge from %s is not marked as indirect or has "
3157 "associated indirect_info, the corresponding statement is: ",
3158 identifier_to_locale (e->caller->name ()));
3159 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3160 error_found = true;
3161 }
3162 }
3163 bool check_comdat = comdat_local_p ();
3164 for (e = callers; e; e = e->next_caller)
3165 {
3166 if (e->verify_count ())
3167 error_found = true;
3168 if (check_comdat
3169 && !in_same_comdat_group_p (e->caller))
3170 {
3171 error ("comdat-local function called by %s outside its comdat",
3172 identifier_to_locale (e->caller->name ()));
3173 error_found = true;
3174 }
3175 if (!e->inline_failed)
3176 {
3177 if (global.inlined_to
3178 != (e->caller->global.inlined_to
3179 ? e->caller->global.inlined_to : e->caller))
3180 {
3181 error ("inlined_to pointer is wrong");
3182 error_found = true;
3183 }
3184 if (callers->next_caller)
3185 {
3186 error ("multiple inline callers");
3187 error_found = true;
3188 }
3189 }
3190 else
3191 if (global.inlined_to)
3192 {
3193 error ("inlined_to pointer set for noninline callers");
3194 error_found = true;
3195 }
3196 }
3197 for (e = callees; e; e = e->next_callee)
3198 {
3199 if (e->verify_count ())
3200 error_found = true;
3201 if (gimple_has_body_p (e->caller->decl)
3202 && !e->caller->global.inlined_to
3203 && !e->speculative
3204 /* Optimized out calls are redirected to __builtin_unreachable. */
3205 && (e->count.nonzero_p ()
3206 || ! e->callee->decl
3207 || !fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
3208 && count
3209 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3210 && (!e->count.ipa_p ()
3211 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3212 {
3213 error ("caller edge count does not match BB count");
3214 fprintf (stderr, "edge count: ");
3215 e->count.dump (stderr);
3216 fprintf (stderr, "\n bb count: ");
3217 gimple_bb (e->call_stmt)->count.dump (stderr);
3218 fprintf (stderr, "\n");
3219 error_found = true;
3220 }
3221 }
3222 for (e = indirect_calls; e; e = e->next_callee)
3223 {
3224 if (e->verify_count ())
3225 error_found = true;
3226 if (gimple_has_body_p (e->caller->decl)
3227 && !e->caller->global.inlined_to
3228 && !e->speculative
3229 && e->count.ipa_p ()
3230 && count
3231 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3232 && (!e->count.ipa_p ()
3233 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3234 {
3235 error ("indirect call count does not match BB count");
3236 fprintf (stderr, "edge count: ");
3237 e->count.dump (stderr);
3238 fprintf (stderr, "\n bb count: ");
3239 gimple_bb (e->call_stmt)->count.dump (stderr);
3240 fprintf (stderr, "\n");
3241 error_found = true;
3242 }
3243 }
3244 if (!callers && global.inlined_to)
3245 {
3246 error ("inlined_to pointer is set but no predecessors found");
3247 error_found = true;
3248 }
3249 if (global.inlined_to == this)
3250 {
3251 error ("inlined_to pointer refers to itself");
3252 error_found = true;
3253 }
3254
3255 if (clone_of)
3256 {
3257 cgraph_node *first_clone = clone_of->clones;
3258 if (first_clone != this)
3259 {
3260 if (prev_sibling_clone->clone_of != clone_of)
3261 {
3262 error ("cgraph_node has wrong clone_of");
3263 error_found = true;
3264 }
3265 }
3266 }
3267 if (clones)
3268 {
3269 cgraph_node *n;
3270 for (n = clones; n; n = n->next_sibling_clone)
3271 if (n->clone_of != this)
3272 break;
3273 if (n)
3274 {
3275 error ("cgraph_node has wrong clone list");
3276 error_found = true;
3277 }
3278 }
3279 if ((prev_sibling_clone || next_sibling_clone) && !clone_of)
3280 {
3281 error ("cgraph_node is in clone list but it is not clone");
3282 error_found = true;
3283 }
3284 if (!prev_sibling_clone && clone_of && clone_of->clones != this)
3285 {
3286 error ("cgraph_node has wrong prev_clone pointer");
3287 error_found = true;
3288 }
3289 if (prev_sibling_clone && prev_sibling_clone->next_sibling_clone != this)
3290 {
3291 error ("double linked list of clones corrupted");
3292 error_found = true;
3293 }
3294
3295 if (analyzed && alias)
3296 {
3297 bool ref_found = false;
3298 int i;
3299 ipa_ref *ref = NULL;
3300
3301 if (callees)
3302 {
3303 error ("Alias has call edges");
3304 error_found = true;
3305 }
3306 for (i = 0; iterate_reference (i, ref); i++)
3307 if (ref->use != IPA_REF_ALIAS)
3308 {
3309 error ("Alias has non-alias reference");
3310 error_found = true;
3311 }
3312 else if (ref_found)
3313 {
3314 error ("Alias has more than one alias reference");
3315 error_found = true;
3316 }
3317 else
3318 ref_found = true;
3319 if (!ref_found)
3320 {
3321 error ("Analyzed alias has no reference");
3322 error_found = true;
3323 }
3324 }
3325
3326 if (analyzed && thunk.thunk_p)
3327 {
3328 if (!callees)
3329 {
3330 error ("No edge out of thunk node");
3331 error_found = true;
3332 }
3333 else if (callees->next_callee)
3334 {
3335 error ("More than one edge out of thunk node");
3336 error_found = true;
3337 }
3338 if (gimple_has_body_p (decl) && !global.inlined_to)
3339 {
3340 error ("Thunk is not supposed to have body");
3341 error_found = true;
3342 }
3343 }
3344 else if (analyzed && gimple_has_body_p (decl)
3345 && !TREE_ASM_WRITTEN (decl)
3346 && (!DECL_EXTERNAL (decl) || global.inlined_to)
3347 && !flag_wpa)
3348 {
3349 if (this_cfun->cfg)
3350 {
3351 hash_set<gimple *> stmts;
3352 int i;
3353 ipa_ref *ref = NULL;
3354
3355 /* Reach the trees by walking over the CFG, and note the
3356 enclosing basic-blocks in the call edges. */
3357 FOR_EACH_BB_FN (this_block, this_cfun)
3358 {
3359 for (gsi = gsi_start_phis (this_block);
3360 !gsi_end_p (gsi); gsi_next (&gsi))
3361 stmts.add (gsi_stmt (gsi));
3362 for (gsi = gsi_start_bb (this_block);
3363 !gsi_end_p (gsi);
3364 gsi_next (&gsi))
3365 {
3366 gimple *stmt = gsi_stmt (gsi);
3367 stmts.add (stmt);
3368 if (is_gimple_call (stmt))
3369 {
3370 cgraph_edge *e = get_edge (stmt);
3371 tree decl = gimple_call_fndecl (stmt);
3372 if (e)
3373 {
3374 if (e->aux)
3375 {
3376 error ("shared call_stmt:");
3377 cgraph_debug_gimple_stmt (this_cfun, stmt);
3378 error_found = true;
3379 }
3380 if (!e->indirect_unknown_callee)
3381 {
3382 if (e->verify_corresponds_to_fndecl (decl))
3383 {
3384 error ("edge points to wrong declaration:");
3385 debug_tree (e->callee->decl);
3386 fprintf (stderr," Instead of:");
3387 debug_tree (decl);
3388 error_found = true;
3389 }
3390 }
3391 else if (decl)
3392 {
3393 error ("an indirect edge with unknown callee "
3394 "corresponding to a call_stmt with "
3395 "a known declaration:");
3396 error_found = true;
3397 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3398 }
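/* Mark the edge as visited; edges left unmarked are reported below as lacking a corresponding call_stmt.  */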
3399 e->aux = (void *)1;
3400 }
3401 else if (decl)
3402 {
3403 error ("missing callgraph edge for call stmt:");
3404 cgraph_debug_gimple_stmt (this_cfun, stmt);
3405 error_found = true;
3406 }
3407 }
3408 }
3409 }
3410 for (i = 0; iterate_reference (i, ref); i++)
3411 if (ref->stmt && !stmts.contains (ref->stmt))
3412 {
3413 error ("reference to dead statement");
3414 cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
3415 error_found = true;
3416 }
3417 }
3418 else
3419 /* No CFG available?! */
3420 gcc_unreachable ();
3421
3422 for (e = callees; e; e = e->next_callee)
3423 {
3424 if (!e->aux)
3425 {
3426 error ("edge %s->%s has no corresponding call_stmt",
3427 identifier_to_locale (e->caller->name ()),
3428 identifier_to_locale (e->callee->name ()));
3429 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3430 error_found = true;
3431 }
3432 e->aux = 0;
3433 }
3434 for (e = indirect_calls; e; e = e->next_callee)
3435 {
3436 if (!e->aux && !e->speculative)
3437 {
3438 error ("an indirect edge from %s has no corresponding call_stmt",
3439 identifier_to_locale (e->caller->name ()));
3440 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3441 error_found = true;
3442 }
3443 e->aux = 0;
3444 }
3445 }
3446 if (error_found)
3447 {
3448 dump (stderr);
3449 internal_error ("verify_cgraph_node failed");
3450 }
3451 timevar_pop (TV_CGRAPH_VERIFY);
3452 }
3453
3454 /* Verify whole cgraph structure. */
3455 DEBUG_FUNCTION void
3456 cgraph_node::verify_cgraph_nodes (void)
3457 {
3458 cgraph_node *node;
3459
3460 if (seen_error ())
3461 return;
3462
3463 FOR_EACH_FUNCTION (node)
3464 node->verify ();
3465 }
3466
3467 /* Walk the alias chain to return the function the cgraph_node is an alias of.
3468 Walk through thunks, too.
3469 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3470 When REF is non-NULL, assume that reference happens in symbol REF
3471 when determining the availability. */
3472
3473 cgraph_node *
3474 cgraph_node::function_symbol (enum availability *availability,
3475 struct symtab_node *ref)
3476 {
3477 cgraph_node *node = ultimate_alias_target (availability, ref);
3478
3479 while (node->thunk.thunk_p)
3480 {
3481 ref = node;
3482 node = node->callees->callee;
3483 if (availability)
3484 {
3485 enum availability a;
3486 a = node->get_availability (ref);
3487 if (a < *availability)
3488 *availability = a;
3489 }
3490 node = node->ultimate_alias_target (availability, ref);
3491 }
3492 return node;
3493 }
3494
3495 /* Walk the alias chain to return the function the cgraph_node is an alias of.
3496 Walk through non virtual thunks, too. Thus we return either a function
3497 or a virtual thunk node.
3498 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3499 When REF is non-NULL, assume that reference happens in symbol REF
3500 when determining the availability. */
3501
3502 cgraph_node *
3503 cgraph_node::function_or_virtual_thunk_symbol
3504 (enum availability *availability,
3505 struct symtab_node *ref)
3506 {
3507 cgraph_node *node = ultimate_alias_target (availability, ref);
3508
3509 while (node->thunk.thunk_p && !node->thunk.virtual_offset_p)
3510 {
3511 ref = node;
3512 node = node->callees->callee;
3513 if (availability)
3514 {
3515 enum availability a;
3516 a = node->get_availability (ref);
3517 if (a < *availability)
3518 *availability = a;
3519 }
3520 node = node->ultimate_alias_target (availability, ref);
3521 }
3522 return node;
3523 }
3524
3525 /* When doing LTO, read cgraph_node's body from disk if it is not already
3526 present. */
3527
3528 bool
3529 cgraph_node::get_untransformed_body (void)
3530 {
3531 lto_file_decl_data *file_data;
3532 const char *data, *name;
3533 size_t len;
3534 tree decl = this->decl;
3535
3536 /* Check if the body is already there. Either we have a gimple body or
3537 the function is a thunk, in which case we set DECL_ARGUMENTS. */
3538 if (DECL_ARGUMENTS (decl) || gimple_has_body_p (decl))
3539 return false;
3540
3541 gcc_assert (in_lto_p && !DECL_RESULT (decl));
3542
3543 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3544
3545 file_data = lto_file_data;
3546 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3547
3548 /* We may have renamed the declaration, e.g., a static function. */
3549 name = lto_get_decl_name_mapping (file_data, name);
3550 struct lto_in_decl_state *decl_state
3551 = lto_get_function_in_decl_state (file_data, decl);
3552
3553 data = lto_get_section_data (file_data, LTO_section_function_body,
3554 name, &len, decl_state->compressed);
3555 if (!data)
3556 fatal_error (input_location, "%s: section %s is missing",
3557 file_data->file_name,
3558 name);
3559
3560 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
3561
3562 if (!quiet_flag)
3563 fprintf (stderr, " in:%s", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
3564 lto_input_function_body (file_data, this, data);
3565 lto_stats.num_function_bodies++;
3566 lto_free_section_data (file_data, LTO_section_function_body, name,
3567 data, len, decl_state->compressed);
3568 lto_free_function_in_decl_state_for_node (this);
3569 /* Keep lto file data so ipa-inline-analysis knows about cross module
3570 inlining. */
3571
3572 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3573
3574 return true;
3575 }
3576
3577 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
3578 if it is not already present. When some IPA transformations are scheduled,
3579 apply them. */
3580
3581 bool
3582 cgraph_node::get_body (void)
3583 {
3584 bool updated;
3585
3586 updated = get_untransformed_body ();
3587
3588 /* Getting transformed body makes no sense for inline clones;
3589 we should never use this on real clones because they are materialized
3590 early.
3591 TODO: Materializing clones here will likely lead to smaller LTRANS
3592 footprint. */
3593 gcc_assert (!global.inlined_to && !clone_of);
3594 if (ipa_transforms_to_apply.exists ())
3595 {
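/* Apply the queued IPA transforms with dumping temporarily disabled; the pass and dump state are restored afterwards.  */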
3596 opt_pass *saved_current_pass = current_pass;
3597 FILE *saved_dump_file = dump_file;
3598 const char *saved_dump_file_name = dump_file_name;
3599 dump_flags_t saved_dump_flags = dump_flags;
3600 dump_file_name = NULL;
3601 set_dump_file (NULL);
3602
3603 push_cfun (DECL_STRUCT_FUNCTION (decl));
3604 execute_all_ipa_transforms ();
3605 cgraph_edge::rebuild_edges ();
3606 free_dominance_info (CDI_DOMINATORS);
3607 free_dominance_info (CDI_POST_DOMINATORS);
3608 pop_cfun ();
3609 updated = true;
3610
3611 current_pass = saved_current_pass;
3612 set_dump_file (saved_dump_file);
3613 dump_file_name = saved_dump_file_name;
3614 dump_flags = saved_dump_flags;
3615 }
3616 return updated;
3617 }
3618
3619 /* Return the DECL_STRUCT_FUNCTION of the function. */
3620
3621 struct function *
3622 cgraph_node::get_fun (void)
3623 {
3624 cgraph_node *node = this;
3625 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
3626
3627 while (!fun && node->clone_of)
3628 {
3629 node = node->clone_of;
3630 fun = DECL_STRUCT_FUNCTION (node->decl);
3631 }
3632
3633 return fun;
3634 }
3635
3636 /* Verify if the type of the argument matches that of the function
3637 declaration. If we cannot verify this or there is a mismatch,
3638 return false. */
3639
3640 static bool
3641 gimple_check_call_args (gimple *stmt, tree fndecl, bool args_count_match)
3642 {
3643 tree parms, p;
3644 unsigned int i, nargs;
3645
3646 /* Calls to internal functions always match their signature. */
3647 if (gimple_call_internal_p (stmt))
3648 return true;
3649
3650 nargs = gimple_call_num_args (stmt);
3651
3652 /* Get argument types for verification. */
3653 if (fndecl)
3654 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3655 else
3656 parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
3657
3658 /* Verify if the type of the argument matches that of the function
3659 declaration. If we cannot verify this or there is a mismatch,
3660 return false. */
3661 if (fndecl && DECL_ARGUMENTS (fndecl))
3662 {
3663 for (i = 0, p = DECL_ARGUMENTS (fndecl);
3664 i < nargs;
3665 i++, p = DECL_CHAIN (p))
3666 {
3667 tree arg;
3668 /* We cannot distinguish a varargs function from the case
3669 of excess parameters; still, deferring the inlining decision
3670 to the callee is possible. */
3671 if (!p)
3672 break;
3673 arg = gimple_call_arg (stmt, i);
3674 if (p == error_mark_node
3675 || DECL_ARG_TYPE (p) == error_mark_node
3676 || arg == error_mark_node
3677 || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
3678 && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
3679 return false;
3680 }
3681 if (args_count_match && p)
3682 return false;
3683 }
3684 else if (parms)
3685 {
3686 for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
3687 {
3688 tree arg;
3689 /* If this is a varargs function defer inlining decision
3690 to callee. */
3691 if (!p)
3692 break;
3693 arg = gimple_call_arg (stmt, i);
3694 if (TREE_VALUE (p) == error_mark_node
3695 || arg == error_mark_node
3696 || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
3697 || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
3698 && !fold_convertible_p (TREE_VALUE (p), arg)))
3699 return false;
3700 }
3701 }
3702 else
3703 {
3704 if (nargs != 0)
3705 return false;
3706 }
3707 return true;
3708 }
3709
3710 /* Verify if the type of the argument and lhs of CALL_STMT matches
3711 that of the function declaration CALLEE. If ARGS_COUNT_MATCH is
3712 true, the arg count needs to be the same.
3713 If we cannot verify this or there is a mismatch, return false. */
3714
3715 bool
3716 gimple_check_call_matching_types (gimple *call_stmt, tree callee,
3717 bool args_count_match)
3718 {
3719 tree lhs;
3720
3721 if ((DECL_RESULT (callee)
3722 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
3723 && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
3724 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
3725 TREE_TYPE (lhs))
3726 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
3727 || !gimple_check_call_args (call_stmt, callee, args_count_match))
3728 return false;
3729 return true;
3730 }
3731
3732 /* Reset all state within cgraph.c so that we can rerun the compiler
3733 within the same process. For use by toplev::finalize. */
3734
3735 void
3736 cgraph_c_finalize (void)
3737 {
3738 symtab = NULL;
3739
3740 x_cgraph_nodes_queue = NULL;
3741
3742 cgraph_fnver_htab = NULL;
3743 version_info_node = NULL;
3744 }
3745
3746 /* A worker for call_for_symbol_and_aliases. */
3747
3748 bool
3749 cgraph_node::call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
3750 void *),
3751 void *data,
3752 bool include_overwritable)
3753 {
3754 ipa_ref *ref;
3755 FOR_EACH_ALIAS (this, ref)
3756 {
3757 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
3758 if (include_overwritable
3759 || alias->get_availability () > AVAIL_INTERPOSABLE)
3760 if (alias->call_for_symbol_and_aliases (callback, data,
3761 include_overwritable))
3762 return true;
3763 }
3764 return false;
3765 }
3766
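/* Illustrative sketch (hypothetical usage, not part of the real code): the
   worker above lets a predicate be applied to a node and to every alias of
   it.  For instance, a pass could ask whether NODE or any of its aliases
   is targeted by a thunk, using has_thunk_p below as the callback:

     bool any_thunk
       = node->call_for_symbol_and_aliases (cgraph_node::has_thunk_p,
					    NULL, true);

   The walk stops as soon as one invocation of the callback returns true,
   and the returned value reflects that.  */
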
3767 /* Return true if NODE has a thunk, i.e. a caller that is a thunk. */
3768
3769 bool
3770 cgraph_node::has_thunk_p (cgraph_node *node, void *)
3771 {
3772 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
3773 if (e->caller->thunk.thunk_p)
3774 return true;
3775 return false;
3776 }
3777
3778 /* Expected frequency of executions within the function. */
3779
3780 sreal
3781 cgraph_edge::sreal_frequency ()
3782 {
3783 return count.to_sreal_scale (caller->global.inlined_to
3784 ? caller->global.inlined_to->count
3785 : caller->count);
3786 }
3787
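/* Illustrative example (hypothetical numbers): if this call site is
   executed 300 times while the caller function (or, when the caller has
   been inlined, the function it was inlined into) is entered 100 times,
   sreal_frequency () returns 300/100 = 3.0, i.e. the call is expected to
   execute three times per invocation of the enclosing function.  */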
3788
3789 /* During LTO stream-in this can be used to check whether the call can
3790 possibly be internal to the current translation unit. */
3791
3792 bool
3793 cgraph_edge::possibly_call_in_translation_unit_p (void)
3794 {
3795 gcc_checking_assert (in_lto_p && caller->prevailing_p ());
3796
3797 /* With incremental linking we may end up getting the function body later. */
3798 if (flag_incremental_link == INCREMENTAL_LINK_LTO)
3799 return true;
3800
3801 /* We could be smarter here and avoid streaming in indirect calls we can't
3802 track, but that would require arranging to stream the indirect call
3803 summary first. */
3804 if (!callee)
3805 return true;
3806
3807 /* If the callee is local to the original translation unit, it will be defined. */
3808 if (!TREE_PUBLIC (callee->decl) && !DECL_EXTERNAL (callee->decl))
3809 return true;
3810
3811 /* Otherwise we need to look up the prevailing symbol (the symbol table is
3812 not merged yet) and see if it is a definition. In fact we might also
3813 resolve aliases, but that is probably not too important. */
3814 symtab_node *node = callee;
3815 for (int n = 10; node->previous_sharing_asm_name && n; n--)
3816 node = node->previous_sharing_asm_name;
3817 if (node->previous_sharing_asm_name)
3818 node = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (callee->decl));
3819 gcc_assert (TREE_PUBLIC (node->decl));
3820 return node->get_availability () >= AVAIL_AVAILABLE;
3821 }
3822
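/* Illustrative example (hypothetical symbols; a plausible reading of the
   code above, not a normative description): assume two LTO translation
   units both provide a COMDAT function "foo" and the copy seen by this
   unit did not prevail.  For a call to "foo" from a prevailing caller,
   the loop above walks the assembler-name sharing chain towards the
   prevailing symbol, and the call is treated as possibly internal only
   when that symbol's availability is AVAIL_AVAILABLE or better, i.e. its
   body can be expected to be available here.  */
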
3823 /* A stashed copy of "symtab" for use by selftest::symbol_table_test.
3824 This needs to be a global so that it can be a GC root, and thus
3825 prevent the stashed copy from being garbage-collected if the GC runs
3826 during a symbol_table_test. */
3827
3828 symbol_table *saved_symtab;
3829
3830 #if CHECKING_P
3831
3832 namespace selftest {
3833
3834 /* class selftest::symbol_table_test. */
3835
3836 /* Constructor. Store the old value of symtab, and create a new one. */
3837
3838 symbol_table_test::symbol_table_test ()
3839 {
3840 gcc_assert (saved_symtab == NULL);
3841 saved_symtab = symtab;
3842 symtab = new (ggc_cleared_alloc <symbol_table> ()) symbol_table ();
3843 }
3844
3845 /* Destructor. Restore the old value of symtab. */
3846
3847 symbol_table_test::~symbol_table_test ()
3848 {
3849 gcc_assert (saved_symtab != NULL);
3850 symtab = saved_symtab;
3851 saved_symtab = NULL;
3852 }
3853
3854 /* Verify that symbol_table_test works. */
3855
3856 static void
3857 test_symbol_table_test ()
3858 {
3859 /* Simulate running two selftests involving symbol tables. */
3860 for (int i = 0; i < 2; i++)
3861 {
3862 symbol_table_test stt;
3863 tree test_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
3864 get_identifier ("test_decl"),
3865 build_function_type_list (void_type_node,
3866 NULL_TREE));
3867 cgraph_node *node = cgraph_node::get_create (test_decl);
3868 gcc_assert (node);
3869
3870 /* Verify that the node has order 0 on both iterations,
3871 and thus that nodes have predictable dump names in selftests. */
3872 ASSERT_EQ (node->order, 0);
3873 ASSERT_STREQ (node->dump_name (), "test_decl/0");
3874 }
3875 }
3876
3877 /* Run all of the selftests within this file. */
3878
3879 void
3880 cgraph_c_tests ()
3881 {
3882 test_symbol_table_test ();
3883 }
3884
3885 } // namespace selftest
3886
3887 #endif /* CHECKING_P */
3888
3889 #include "gt-cgraph.h"