1 /* Callgraph handling code.
2 Copyright (C) 2003-2019 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file contains basic routines for manipulating the call graph.
22
23 The call-graph is a data structure designed for inter-procedural
24 optimization. It represents a multi-graph where nodes are functions
25 (symbols within symbol table) and edges are call sites. */
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "backend.h"
31 #include "target.h"
32 #include "rtl.h"
33 #include "tree.h"
34 #include "gimple.h"
35 #include "predict.h"
36 #include "alloc-pool.h"
37 #include "gimple-ssa.h"
38 #include "cgraph.h"
39 #include "lto-streamer.h"
40 #include "fold-const.h"
41 #include "varasm.h"
42 #include "calls.h"
43 #include "print-tree.h"
44 #include "langhooks.h"
45 #include "intl.h"
46 #include "tree-eh.h"
47 #include "gimple-iterator.h"
48 #include "tree-cfg.h"
49 #include "tree-ssa.h"
50 #include "value-prof.h"
51 #include "ipa-utils.h"
52 #include "symbol-summary.h"
53 #include "tree-vrp.h"
54 #include "ipa-prop.h"
55 #include "ipa-fnsummary.h"
56 #include "cfgloop.h"
57 #include "gimple-pretty-print.h"
58 #include "tree-dfa.h"
59 #include "profile.h"
60 #include "params.h"
61 #include "context.h"
62 #include "gimplify.h"
63 #include "stringpool.h"
64 #include "attribs.h"
65 #include "selftest.h"
66
67 /* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
68 #include "tree-pass.h"
69
70 /* Queue of cgraph nodes scheduled to be lowered. */
71 symtab_node *x_cgraph_nodes_queue;
72 #define cgraph_nodes_queue ((cgraph_node *)x_cgraph_nodes_queue)
73
74 /* Symbol table global context. */
75 symbol_table *symtab;
76
77 /* List of hooks triggered on cgraph_edge events. */
78 struct cgraph_edge_hook_list {
79 cgraph_edge_hook hook;
80 void *data;
81 struct cgraph_edge_hook_list *next;
82 };
83
84 /* List of hooks triggered on cgraph_node events. */
85 struct cgraph_node_hook_list {
86 cgraph_node_hook hook;
87 void *data;
88 struct cgraph_node_hook_list *next;
89 };
90
91 /* List of hooks triggered on events involving two cgraph_edges. */
92 struct cgraph_2edge_hook_list {
93 cgraph_2edge_hook hook;
94 void *data;
95 struct cgraph_2edge_hook_list *next;
96 };
97
98 /* List of hooks triggered on events involving two cgraph_nodes. */
99 struct cgraph_2node_hook_list {
100 cgraph_2node_hook hook;
101 void *data;
102 struct cgraph_2node_hook_list *next;
103 };
104
105 /* Hash descriptor for cgraph_function_version_info. */
106
107 struct function_version_hasher : ggc_ptr_hash<cgraph_function_version_info>
108 {
109 static hashval_t hash (cgraph_function_version_info *);
110 static bool equal (cgraph_function_version_info *,
111 cgraph_function_version_info *);
112 };
113
114 /* Map a cgraph_node to cgraph_function_version_info using this htab.
115 The cgraph_function_version_info has a THIS_NODE field that is the
116 corresponding cgraph_node.  */
117
118 static GTY(()) hash_table<function_version_hasher> *cgraph_fnver_htab = NULL;
119
120 /* Hash function for cgraph_fnver_htab. */
121 hashval_t
122 function_version_hasher::hash (cgraph_function_version_info *ptr)
123 {
124 int uid = ptr->this_node->get_uid ();
125 return (hashval_t)(uid);
126 }
127
128 /* eq function for cgraph_fnver_htab. */
129 bool
130 function_version_hasher::equal (cgraph_function_version_info *n1,
131 cgraph_function_version_info *n2)
132 {
133 return n1->this_node->get_uid () == n2->this_node->get_uid ();
134 }
135
136 /* Mark all allocated nodes as GC roots. */
137 static GTY(()) struct cgraph_function_version_info *
138 version_info_node = NULL;
139
140 /* Return true if NODE's address can be compared. */
141
142 bool
143 symtab_node::address_can_be_compared_p ()
144 {
145 /* Address of virtual tables and functions is never compared. */
146 if (DECL_VIRTUAL_P (decl))
147 return false;
148 /* Address of C++ cdtors is never compared. */
149 if (is_a <cgraph_node *> (this)
150 && (DECL_CXX_CONSTRUCTOR_P (decl)
151 || DECL_CXX_DESTRUCTOR_P (decl)))
152 return false;
153 /* Addresses of constant pool symbols are never compared;
154 flag_merge_constants permits us to assume the same for read-only vars. */
155 if (is_a <varpool_node *> (this)
156 && (DECL_IN_CONSTANT_POOL (decl)
157 || (flag_merge_constants >= 2
158 && TREE_READONLY (decl) && !TREE_THIS_VOLATILE (decl))))
159 return false;
160 return true;
161 }
162
163 /* Get the cgraph_function_version_info node corresponding to node. */
164 cgraph_function_version_info *
165 cgraph_node::function_version (void)
166 {
167 cgraph_function_version_info key;
168 key.this_node = this;
169
170 if (cgraph_fnver_htab == NULL)
171 return NULL;
172
173 return cgraph_fnver_htab->find (&key);
174 }
175
176 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
177 corresponding to cgraph_node NODE. */
178 cgraph_function_version_info *
179 cgraph_node::insert_new_function_version (void)
180 {
181 version_info_node = NULL;
182 version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
183 version_info_node->this_node = this;
184
185 if (cgraph_fnver_htab == NULL)
186 cgraph_fnver_htab = hash_table<function_version_hasher>::create_ggc (2);
187
188 *cgraph_fnver_htab->find_slot (version_info_node, INSERT)
189 = version_info_node;
190 return version_info_node;
191 }
192
193 /* Remove the cgraph_function_version_info node given by DECL_V. */
194 static void
195 delete_function_version (cgraph_function_version_info *decl_v)
196 {
197 if (decl_v == NULL)
198 return;
199
200 if (decl_v->prev != NULL)
201 decl_v->prev->next = decl_v->next;
202
203 if (decl_v->next != NULL)
204 decl_v->next->prev = decl_v->prev;
205
206 if (cgraph_fnver_htab != NULL)
207 cgraph_fnver_htab->remove_elt (decl_v);
208 }
209
210 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
211 DECL is a duplicate declaration. */
212 void
213 cgraph_node::delete_function_version_by_decl (tree decl)
214 {
215 cgraph_node *decl_node = cgraph_node::get (decl);
216
217 if (decl_node == NULL)
218 return;
219
220 delete_function_version (decl_node->function_version ());
221
222 decl_node->remove ();
223 }
224
225 /* Record that DECL1 and DECL2 are semantically identical function
226 versions. */
227 void
228 cgraph_node::record_function_versions (tree decl1, tree decl2)
229 {
230 cgraph_node *decl1_node = cgraph_node::get_create (decl1);
231 cgraph_node *decl2_node = cgraph_node::get_create (decl2);
232 cgraph_function_version_info *decl1_v = NULL;
233 cgraph_function_version_info *decl2_v = NULL;
234 cgraph_function_version_info *before;
235 cgraph_function_version_info *after;
236
237 gcc_assert (decl1_node != NULL && decl2_node != NULL);
238 decl1_v = decl1_node->function_version ();
239 decl2_v = decl2_node->function_version ();
240
241 if (decl1_v != NULL && decl2_v != NULL)
242 return;
243
244 if (decl1_v == NULL)
245 decl1_v = decl1_node->insert_new_function_version ();
246
247 if (decl2_v == NULL)
248 decl2_v = decl2_node->insert_new_function_version ();
249
250 /* Chain decl2_v and decl1_v. All semantically identical versions
251 will be chained together. */
252
253 before = decl1_v;
254 after = decl2_v;
255
256 while (before->next != NULL)
257 before = before->next;
258
259 while (after->prev != NULL)
260 after= after->prev;
261
262 before->next = after;
263 after->prev = before;
264 }
265
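/* Usage sketch (the decl variables are hypothetical): a front end that has
   parsed two versions of the same function, e.g. a default and an AVX2
   variant produced by function multiversioning, ties them together with

     cgraph_node::record_function_versions (default_decl, avx2_decl);

   after which walking the next/prev chain reachable from either node's
   function_version () visits every semantically identical version.  */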
266 /* Initialize the callgraph dump files. */
267
268 void
269 symbol_table::initialize (void)
270 {
271 if (!dump_file)
272 dump_file = dump_begin (TDI_cgraph, NULL);
273
274 if (!ipa_clones_dump_file)
275 ipa_clones_dump_file = dump_begin (TDI_clones, NULL);
276 }
277
278 /* Allocate new callgraph node and insert it into basic data structures. */
279
280 cgraph_node *
281 symbol_table::create_empty (void)
282 {
283 cgraph_node *node = allocate_cgraph_symbol ();
284
285 node->type = SYMTAB_FUNCTION;
286 node->frequency = NODE_FREQUENCY_NORMAL;
287 node->count_materialization_scale = REG_BR_PROB_BASE;
288 cgraph_count++;
289
290 return node;
291 }
292
293 /* Register HOOK to be called with DATA on each removed edge. */
294 cgraph_edge_hook_list *
295 symbol_table::add_edge_removal_hook (cgraph_edge_hook hook, void *data)
296 {
297 cgraph_edge_hook_list *entry;
298 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
299
300 entry = (cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
301 entry->hook = hook;
302 entry->data = data;
303 entry->next = NULL;
304 while (*ptr)
305 ptr = &(*ptr)->next;
306 *ptr = entry;
307 return entry;
308 }
309
310 /* Remove ENTRY from the list of hooks called on removing edges. */
311 void
312 symbol_table::remove_edge_removal_hook (cgraph_edge_hook_list *entry)
313 {
314 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
315
316 while (*ptr != entry)
317 ptr = &(*ptr)->next;
318 *ptr = entry->next;
319 free (entry);
320 }
321
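/* Usage sketch (callback name and data are hypothetical): an IPA pass that
   needs to know when call graph edges disappear registers a hook and keeps
   the returned list entry so it can unregister later:

     static void
     note_edge_removal (cgraph_edge *e, void *data)
     {
       if (dump_file)
         fprintf (dump_file, "removed edge %s -> %s\n",
                  e->caller->dump_name (),
                  e->callee ? e->callee->dump_name () : "<indirect>");
     }

     cgraph_edge_hook_list *cookie
       = symtab->add_edge_removal_hook (note_edge_removal, NULL);
     ...
     symtab->remove_edge_removal_hook (cookie);
*/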
322 /* Call all edge removal hooks. */
323 void
324 symbol_table::call_edge_removal_hooks (cgraph_edge *e)
325 {
326 cgraph_edge_hook_list *entry = m_first_edge_removal_hook;
327 while (entry)
328 {
329 entry->hook (e, entry->data);
330 entry = entry->next;
331 }
332 }
333
334 /* Register HOOK to be called with DATA on each removed node. */
335 cgraph_node_hook_list *
336 symbol_table::add_cgraph_removal_hook (cgraph_node_hook hook, void *data)
337 {
338 cgraph_node_hook_list *entry;
339 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
340
341 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
342 entry->hook = hook;
343 entry->data = data;
344 entry->next = NULL;
345 while (*ptr)
346 ptr = &(*ptr)->next;
347 *ptr = entry;
348 return entry;
349 }
350
351 /* Remove ENTRY from the list of hooks called on removing nodes. */
352 void
353 symbol_table::remove_cgraph_removal_hook (cgraph_node_hook_list *entry)
354 {
355 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
356
357 while (*ptr != entry)
358 ptr = &(*ptr)->next;
359 *ptr = entry->next;
360 free (entry);
361 }
362
363 /* Call all node removal hooks. */
364 void
365 symbol_table::call_cgraph_removal_hooks (cgraph_node *node)
366 {
367 cgraph_node_hook_list *entry = m_first_cgraph_removal_hook;
368 while (entry)
369 {
370 entry->hook (node, entry->data);
371 entry = entry->next;
372 }
373 }
374
375 /* Call all node insertion hooks. */
376 void
377 symbol_table::call_cgraph_insertion_hooks (cgraph_node *node)
378 {
379 cgraph_node_hook_list *entry = m_first_cgraph_insertion_hook;
380 while (entry)
381 {
382 entry->hook (node, entry->data);
383 entry = entry->next;
384 }
385 }
386
387
388 /* Register HOOK to be called with DATA on each inserted node. */
389 cgraph_node_hook_list *
390 symbol_table::add_cgraph_insertion_hook (cgraph_node_hook hook, void *data)
391 {
392 cgraph_node_hook_list *entry;
393 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
394
395 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
396 entry->hook = hook;
397 entry->data = data;
398 entry->next = NULL;
399 while (*ptr)
400 ptr = &(*ptr)->next;
401 *ptr = entry;
402 return entry;
403 }
404
405 /* Remove ENTRY from the list of hooks called on inserted nodes. */
406 void
407 symbol_table::remove_cgraph_insertion_hook (cgraph_node_hook_list *entry)
408 {
409 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
410
411 while (*ptr != entry)
412 ptr = &(*ptr)->next;
413 *ptr = entry->next;
414 free (entry);
415 }
416
417 /* Register HOOK to be called with DATA on each duplicated edge. */
418 cgraph_2edge_hook_list *
419 symbol_table::add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
420 {
421 cgraph_2edge_hook_list *entry;
422 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
423
424 entry = (cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
425 entry->hook = hook;
426 entry->data = data;
427 entry->next = NULL;
428 while (*ptr)
429 ptr = &(*ptr)->next;
430 *ptr = entry;
431 return entry;
432 }
433
434 /* Remove ENTRY from the list of hooks called on duplicating edges. */
435 void
436 symbol_table::remove_edge_duplication_hook (cgraph_2edge_hook_list *entry)
437 {
438 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
439
440 while (*ptr != entry)
441 ptr = &(*ptr)->next;
442 *ptr = entry->next;
443 free (entry);
444 }
445
446 /* Call all edge duplication hooks. */
447 void
448 symbol_table::call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2)
449 {
450 cgraph_2edge_hook_list *entry = m_first_edge_duplicated_hook;
451 while (entry)
452 {
453 entry->hook (cs1, cs2, entry->data);
454 entry = entry->next;
455 }
456 }
457
458 /* Register HOOK to be called with DATA on each duplicated node. */
459 cgraph_2node_hook_list *
460 symbol_table::add_cgraph_duplication_hook (cgraph_2node_hook hook, void *data)
461 {
462 cgraph_2node_hook_list *entry;
463 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
464
465 entry = (cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
466 entry->hook = hook;
467 entry->data = data;
468 entry->next = NULL;
469 while (*ptr)
470 ptr = &(*ptr)->next;
471 *ptr = entry;
472 return entry;
473 }
474
475 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
476 void
477 symbol_table::remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry)
478 {
479 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
480
481 while (*ptr != entry)
482 ptr = &(*ptr)->next;
483 *ptr = entry->next;
484 free (entry);
485 }
486
487 /* Call all node duplication hooks. */
488 void
489 symbol_table::call_cgraph_duplication_hooks (cgraph_node *node,
490 cgraph_node *node2)
491 {
492 cgraph_2node_hook_list *entry = m_first_cgraph_duplicated_hook;
493 while (entry)
494 {
495 entry->hook (node, node2, entry->data);
496 entry = entry->next;
497 }
498 }
499
500 /* Allocate a new cgraph node for DECL and insert it into the symbol table. */
501
502 cgraph_node *
503 cgraph_node::create (tree decl)
504 {
505 cgraph_node *node = symtab->create_empty ();
506 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
507
508 node->decl = decl;
509
510 node->count = profile_count::uninitialized ();
511
512 if ((flag_openacc || flag_openmp)
513 && lookup_attribute ("omp declare target", DECL_ATTRIBUTES (decl)))
514 {
515 node->offloadable = 1;
516 if (ENABLE_OFFLOADING)
517 g->have_offload = true;
518 }
519
520 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
521 node->ifunc_resolver = true;
522
523 node->register_symbol ();
524
525 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
526 {
527 node->origin = cgraph_node::get_create (DECL_CONTEXT (decl));
528 node->next_nested = node->origin->nested;
529 node->origin->nested = node;
530 }
531 return node;
532 }
533
534 /* Try to find a call graph node for declaration DECL and if it does not exist
535 or if it corresponds to an inline clone, create a new one. */
536
537 cgraph_node *
538 cgraph_node::get_create (tree decl)
539 {
540 cgraph_node *first_clone = cgraph_node::get (decl);
541
542 if (first_clone && !first_clone->global.inlined_to)
543 return first_clone;
544
545 cgraph_node *node = cgraph_node::create (decl);
546 if (first_clone)
547 {
548 first_clone->clone_of = node;
549 node->clones = first_clone;
550 symtab->symtab_prevail_in_asm_name_hash (node);
551 node->decl->decl_with_vis.symtab_node = node;
552 if (dump_file)
553 fprintf (dump_file, "Introduced new external node "
554 "(%s) and turned into root of the clone tree.\n",
555 node->dump_name ());
556 }
557 else if (dump_file)
558 fprintf (dump_file, "Introduced new external node "
559 "(%s).\n", node->dump_name ());
560 return node;
561 }
562
563 /* Mark ALIAS as an alias to TARGET.  TARGET may be a FUNCTION_DECL or an
564 assembler name (IDENTIFIER_NODE).  Return the cgraph node for ALIAS. */
565
566 cgraph_node *
567 cgraph_node::create_alias (tree alias, tree target)
568 {
569 cgraph_node *alias_node;
570
571 gcc_assert (TREE_CODE (target) == FUNCTION_DECL
572 || TREE_CODE (target) == IDENTIFIER_NODE);
573 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
574 alias_node = cgraph_node::get_create (alias);
575 gcc_assert (!alias_node->definition);
576 alias_node->alias_target = target;
577 alias_node->definition = true;
578 alias_node->alias = true;
579 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
580 alias_node->transparent_alias = alias_node->weakref = true;
581 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (alias)))
582 alias_node->ifunc_resolver = true;
583 return alias_node;
584 }
585
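/* Usage sketch (decl variables hypothetical): when a front end sees
   __attribute__ ((alias ("target"))) on a function declaration ALIAS_DECL,
   it can record the alias in the call graph with

     cgraph_node::create_alias (alias_decl, target_decl);

   and later resolve it against the target's node via resolve_alias once
   that node exists.  */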
586 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
587 and NULL otherwise.
588 Same body aliases are output whenever the body of DECL is output,
589 and cgraph_node::get (ALIAS) transparently returns
590 cgraph_node::get (DECL). */
591
592 cgraph_node *
593 cgraph_node::create_same_body_alias (tree alias, tree decl)
594 {
595 cgraph_node *n;
596
597 /* If aliases aren't supported by the assembler, fail. */
598 if (!TARGET_SUPPORTS_ALIASES)
599 return NULL;
600
601 /* Langhooks can create same body aliases of symbols not defined.
602 Those are useless. Drop them on the floor. */
603 if (symtab->global_info_ready)
604 return NULL;
605
606 n = cgraph_node::create_alias (alias, decl);
607 n->cpp_implicit_alias = true;
608 if (symtab->cpp_implicit_aliases_done)
609 n->resolve_alias (cgraph_node::get (decl));
610 return n;
611 }
612
613 /* Add a thunk alias into the callgraph.  The alias declaration is ALIAS and it
614 aliases DECL with adjustments made to the first parameter.
615 See comments in struct cgraph_thunk_info for details on the parameters. */
616
617 cgraph_node *
618 cgraph_node::create_thunk (tree alias, tree, bool this_adjusting,
619 HOST_WIDE_INT fixed_offset,
620 HOST_WIDE_INT virtual_value,
621 HOST_WIDE_INT indirect_offset,
622 tree virtual_offset,
623 tree real_alias)
624 {
625 cgraph_node *node;
626
627 node = cgraph_node::get (alias);
628 if (node)
629 node->reset ();
630 else
631 node = cgraph_node::create (alias);
632
633 /* Make sure that VIRTUAL_OFFSET is in sync with VIRTUAL_VALUE. */
634 gcc_checking_assert (virtual_offset
635 ? virtual_value == wi::to_wide (virtual_offset)
636 : virtual_value == 0);
637
638 node->thunk.fixed_offset = fixed_offset;
639 node->thunk.virtual_value = virtual_value;
640 node->thunk.indirect_offset = indirect_offset;
641 node->thunk.alias = real_alias;
642 node->thunk.this_adjusting = this_adjusting;
643 node->thunk.virtual_offset_p = virtual_offset != NULL;
644 node->thunk.thunk_p = true;
645 node->definition = true;
646
647 return node;
648 }
649
650 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
651 Return NULL if there's no such node. */
652
653 cgraph_node *
654 cgraph_node::get_for_asmname (tree asmname)
655 {
656 /* We do not want to look at inline clones. */
657 for (symtab_node *node = symtab_node::get_for_asmname (asmname);
658 node;
659 node = node->next_sharing_asm_name)
660 {
661 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
662 if (cn && !cn->global.inlined_to)
663 return cn;
664 }
665 return NULL;
666 }
667
668 /* Returns a hash value for X (which really is a cgraph_edge). */
669
670 hashval_t
671 cgraph_edge_hasher::hash (cgraph_edge *e)
672 {
673 /* This is a really poor hash function, but it is what htab_hash_pointer
674 uses. */
675 return (hashval_t) ((intptr_t)e->call_stmt >> 3);
676 }
677
678 /* Returns a hash value for the GIMPLE call statement CALL_STMT. */
679
680 hashval_t
681 cgraph_edge_hasher::hash (gimple *call_stmt)
682 {
683 /* This is a really poor hash function, but it is what htab_hash_pointer
684 uses. */
685 return (hashval_t) ((intptr_t)call_stmt >> 3);
686 }
687
688 /* Return nonzero if the call_stmt of cgraph_edge X is the statement Y. */
689
690 inline bool
691 cgraph_edge_hasher::equal (cgraph_edge *x, gimple *y)
692 {
693 return x->call_stmt == y;
694 }
695
696 /* Add call graph edge E to call site hash of its caller. */
697
698 static inline void
699 cgraph_update_edge_in_call_site_hash (cgraph_edge *e)
700 {
701 gimple *call = e->call_stmt;
702 *e->caller->call_site_hash->find_slot_with_hash
703 (call, cgraph_edge_hasher::hash (call), INSERT) = e;
704 }
705
706 /* Add call graph edge E to call site hash of its caller. */
707
708 static inline void
709 cgraph_add_edge_to_call_site_hash (cgraph_edge *e)
710 {
711 /* There are two speculative edges for every statement (one direct,
712 one indirect); always hash the direct one. */
713 if (e->speculative && e->indirect_unknown_callee)
714 return;
715 cgraph_edge **slot = e->caller->call_site_hash->find_slot_with_hash
716 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt), INSERT);
717 if (*slot)
718 {
719 gcc_assert (((cgraph_edge *)*slot)->speculative);
720 if (e->callee)
721 *slot = e;
722 return;
723 }
724 gcc_assert (!*slot || e->speculative);
725 *slot = e;
726 }
727
728 /* Return the callgraph edge representing the GIMPLE_CALL statement
729 CALL_STMT. */
730
731 cgraph_edge *
732 cgraph_node::get_edge (gimple *call_stmt)
733 {
734 cgraph_edge *e, *e2;
735 int n = 0;
736
737 if (call_site_hash)
738 return call_site_hash->find_with_hash
739 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
740
741 /* This loop may turn out to be a performance problem.  In that case, adding
742 hash tables to call nodes with very many edges is probably the best
743 solution.  It is not a good idea to add a pointer into the CALL_EXPR itself
744 because we want to make it possible to have multiple cgraph nodes representing
745 different clones of the same body before the body is actually cloned. */
746 for (e = callees; e; e = e->next_callee)
747 {
748 if (e->call_stmt == call_stmt)
749 break;
750 n++;
751 }
752
753 if (!e)
754 for (e = indirect_calls; e; e = e->next_callee)
755 {
756 if (e->call_stmt == call_stmt)
757 break;
758 n++;
759 }
760
761 if (n > 100)
762 {
763 call_site_hash = hash_table<cgraph_edge_hasher>::create_ggc (120);
764 for (e2 = callees; e2; e2 = e2->next_callee)
765 cgraph_add_edge_to_call_site_hash (e2);
766 for (e2 = indirect_calls; e2; e2 = e2->next_callee)
767 cgraph_add_edge_to_call_site_hash (e2);
768 }
769
770 return e;
771 }
772
773
774 /* Change field call_stmt of edge to NEW_STMT.
775 If UPDATE_SPECULATIVE and E is any component of speculative
776 edge, then update all components. */
777
778 void
779 cgraph_edge::set_call_stmt (gcall *new_stmt, bool update_speculative)
780 {
781 tree decl;
782
783 /* Speculative edges have three components; update all of them
784 when asked to. */
785 if (update_speculative && speculative)
786 {
787 cgraph_edge *direct, *indirect;
788 ipa_ref *ref;
789
790 speculative_call_info (direct, indirect, ref);
791 direct->set_call_stmt (new_stmt, false);
792 indirect->set_call_stmt (new_stmt, false);
793 ref->stmt = new_stmt;
794 return;
795 }
796
797 /* Only direct speculative edges go to call_site_hash. */
798 if (caller->call_site_hash
799 && (!speculative || !indirect_unknown_callee))
800 {
801 caller->call_site_hash->remove_elt_with_hash
802 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
803 }
804
805 cgraph_edge *e = this;
806
807 call_stmt = new_stmt;
808 if (indirect_unknown_callee
809 && (decl = gimple_call_fndecl (new_stmt)))
810 {
811 /* Constant propagation (and possibly also inlining?) can turn an
812 indirect call into a direct one. */
813 cgraph_node *new_callee = cgraph_node::get (decl);
814
815 gcc_checking_assert (new_callee);
816 e = make_direct (new_callee);
817 }
818
819 function *fun = DECL_STRUCT_FUNCTION (e->caller->decl);
820 e->can_throw_external = stmt_can_throw_external (fun, new_stmt);
821 if (e->caller->call_site_hash)
822 cgraph_add_edge_to_call_site_hash (e);
823 }
824
825 /* Allocate a cgraph_edge structure and fill it with data according to the
826 parameters of which only CALLEE can be NULL (when creating an indirect call
827 edge). */
828
829 cgraph_edge *
830 symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
831 gcall *call_stmt, profile_count count,
832 bool indir_unknown_callee)
833 {
834 cgraph_edge *edge;
835
836 /* LTO does not actually have access to the call_stmt since these
837 have not been loaded yet. */
838 if (call_stmt)
839 {
840 /* This is a rather expensive check possibly triggering
841 construction of call stmt hashtable. */
842 cgraph_edge *e;
843 gcc_checking_assert (!(e = caller->get_edge (call_stmt))
844 || e->speculative);
845
846 gcc_assert (is_gimple_call (call_stmt));
847 }
848
849 if (free_edges)
850 {
851 edge = free_edges;
852 free_edges = NEXT_FREE_EDGE (edge);
853 }
854 else
855 {
856 edge = ggc_alloc<cgraph_edge> ();
857 edge->m_summary_id = -1;
858 }
859
860 edges_count++;
861
862 gcc_assert (++edges_max_uid != 0);
863 edge->m_uid = edges_max_uid;
864 edge->aux = NULL;
865 edge->caller = caller;
866 edge->callee = callee;
867 edge->prev_caller = NULL;
868 edge->next_caller = NULL;
869 edge->prev_callee = NULL;
870 edge->next_callee = NULL;
871 edge->lto_stmt_uid = 0;
872
873 edge->count = count;
874
875 edge->call_stmt = call_stmt;
876 edge->can_throw_external
877 = call_stmt ? stmt_can_throw_external (DECL_STRUCT_FUNCTION (caller->decl),
878 call_stmt) : false;
879 if (call_stmt
880 && callee && callee->decl
881 && !gimple_check_call_matching_types (call_stmt, callee->decl,
882 false))
883 {
884 edge->inline_failed = CIF_MISMATCHED_ARGUMENTS;
885 edge->call_stmt_cannot_inline_p = true;
886 }
887 else
888 {
889 edge->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
890 edge->call_stmt_cannot_inline_p = false;
891 }
892
893 edge->indirect_info = NULL;
894 edge->indirect_inlining_edge = 0;
895 edge->speculative = false;
896 edge->indirect_unknown_callee = indir_unknown_callee;
897 if (opt_for_fn (edge->caller->decl, flag_devirtualize)
898 && call_stmt && DECL_STRUCT_FUNCTION (caller->decl))
899 edge->in_polymorphic_cdtor
900 = decl_maybe_in_construction_p (NULL, NULL, call_stmt,
901 caller->decl);
902 else
903 edge->in_polymorphic_cdtor = caller->thunk.thunk_p;
904 if (call_stmt && caller->call_site_hash)
905 cgraph_add_edge_to_call_site_hash (edge);
906
907 return edge;
908 }
909
910 /* Create edge from a given function to CALLEE in the cgraph. */
911
912 cgraph_edge *
913 cgraph_node::create_edge (cgraph_node *callee,
914 gcall *call_stmt, profile_count count)
915 {
916 cgraph_edge *edge = symtab->create_edge (this, callee, call_stmt, count,
917 false);
918
919 initialize_inline_failed (edge);
920
921 edge->next_caller = callee->callers;
922 if (callee->callers)
923 callee->callers->prev_caller = edge;
924 edge->next_callee = callees;
925 if (callees)
926 callees->prev_callee = edge;
927 callees = edge;
928 callee->callers = edge;
929
930 return edge;
931 }
932
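/* Usage sketch (variables hypothetical): a pass that materializes a new
   call statement must also create the matching edge so the call graph
   stays consistent, roughly:

     gcall *call = gimple_build_call (callee_decl, 0);
     gsi_insert_before (&gsi, call, GSI_SAME_STMT);
     cgraph_node *caller = cgraph_node::get (current_function_decl);
     caller->create_edge (cgraph_node::get_create (callee_decl), call,
                          gimple_bb (call)->count);
*/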
933 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
934
935 cgraph_indirect_call_info *
936 cgraph_allocate_init_indirect_info (void)
937 {
938 cgraph_indirect_call_info *ii;
939
940 ii = ggc_cleared_alloc<cgraph_indirect_call_info> ();
941 ii->param_index = -1;
942 return ii;
943 }
944
945 /* Create an indirect edge with a yet-undetermined callee for the call
946 statement CALL_STMT.  Record polymorphic call information when
947 COMPUTE_INDIRECT_INFO is set. */
948
949 cgraph_edge *
950 cgraph_node::create_indirect_edge (gcall *call_stmt, int ecf_flags,
951 profile_count count,
952 bool compute_indirect_info)
953 {
954 cgraph_edge *edge = symtab->create_edge (this, NULL, call_stmt,
955 count, true);
956 tree target;
957
958 initialize_inline_failed (edge);
959
960 edge->indirect_info = cgraph_allocate_init_indirect_info ();
961 edge->indirect_info->ecf_flags = ecf_flags;
962 edge->indirect_info->vptr_changed = true;
963
964 /* Record polymorphic call info. */
965 if (compute_indirect_info
966 && call_stmt
967 && (target = gimple_call_fn (call_stmt))
968 && virtual_method_call_p (target))
969 {
970 ipa_polymorphic_call_context context (decl, target, call_stmt);
971
972 /* Only record types can have virtual calls. */
973 edge->indirect_info->polymorphic = true;
974 edge->indirect_info->param_index = -1;
975 edge->indirect_info->otr_token
976 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
977 edge->indirect_info->otr_type = obj_type_ref_class (target);
978 gcc_assert (TREE_CODE (edge->indirect_info->otr_type) == RECORD_TYPE);
979 edge->indirect_info->context = context;
980 }
981
982 edge->next_callee = indirect_calls;
983 if (indirect_calls)
984 indirect_calls->prev_callee = edge;
985 indirect_calls = edge;
986
987 return edge;
988 }
989
990 /* Remove the edge from the list of the callees of the caller. */
991
992 void
993 cgraph_edge::remove_caller (void)
994 {
995 if (prev_callee)
996 prev_callee->next_callee = next_callee;
997 if (next_callee)
998 next_callee->prev_callee = prev_callee;
999 if (!prev_callee)
1000 {
1001 if (indirect_unknown_callee)
1002 caller->indirect_calls = next_callee;
1003 else
1004 caller->callees = next_callee;
1005 }
1006 if (caller->call_site_hash)
1007 caller->call_site_hash->remove_elt_with_hash
1008 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
1009 }
1010
1011 /* Put the edge onto the free list. */
1012
1013 void
1014 symbol_table::free_edge (cgraph_edge *e)
1015 {
1016 if (e->indirect_info)
1017 ggc_free (e->indirect_info);
1018
1019 /* Clear out the edge so we do not dangle pointers. */
1020 int summary_id = e->m_summary_id;
1021 memset (e, 0, sizeof (*e));
1022 e->m_summary_id = summary_id;
1023 NEXT_FREE_EDGE (e) = free_edges;
1024 free_edges = e;
1025 edges_count--;
1026 }
1027
1028 /* Remove the edge in the cgraph. */
1029
1030 void
1031 cgraph_edge::remove (void)
1032 {
1033 /* Call all edge removal hooks. */
1034 symtab->call_edge_removal_hooks (this);
1035
1036 if (!indirect_unknown_callee)
1037 /* Remove from callers list of the callee. */
1038 remove_callee ();
1039
1040 /* Remove from callees list of the callers. */
1041 remove_caller ();
1042
1043 /* Put the edge onto the free list. */
1044 symtab->free_edge (this);
1045 }
1046
1047 /* Turn the edge into a speculative call calling N2.  Update
1048 the profile so the direct call is taken DIRECT_COUNT
1049 times.
1050
1051 At clone materialization time, the indirect call E will
1052 be expanded as:
1053
1054 if (call_dest == N2)
1055 n2 ();
1056 else
1057 call call_dest
1058
1059 At this time the function just creates the direct call,
1060 the reference representing the if conditional, and attaches
1061 them all to the original indirect call statement.
1062
1063 Return the direct edge created. */
1064
1065 cgraph_edge *
1066 cgraph_edge::make_speculative (cgraph_node *n2, profile_count direct_count)
1067 {
1068 cgraph_node *n = caller;
1069 ipa_ref *ref = NULL;
1070 cgraph_edge *e2;
1071
1072 if (dump_file)
1073 fprintf (dump_file, "Indirect call -> speculative call %s => %s\n",
1074 n->dump_name (), n2->dump_name ());
1075 speculative = true;
1076 e2 = n->create_edge (n2, call_stmt, direct_count);
1077 initialize_inline_failed (e2);
1078 e2->speculative = true;
1079 if (TREE_NOTHROW (n2->decl))
1080 e2->can_throw_external = false;
1081 else
1082 e2->can_throw_external = can_throw_external;
1083 e2->lto_stmt_uid = lto_stmt_uid;
1084 e2->in_polymorphic_cdtor = in_polymorphic_cdtor;
1085 count -= e2->count;
1086 symtab->call_edge_duplication_hooks (this, e2);
1087 ref = n->create_reference (n2, IPA_REF_ADDR, call_stmt);
1088 ref->lto_stmt_uid = lto_stmt_uid;
1089 ref->speculative = speculative;
1090 n2->mark_address_taken ();
1091 return e2;
1092 }
1093
1094 /* A speculative call consists of three components:
1095 1) an indirect edge representing the original call,
1096 2) a direct edge representing the new call, and
1097 3) an ADDR_EXPR reference representing the speculative check.
1098 All three components are attached to a single statement (the indirect
1099 call), and if one of them exists, all of them must exist.
1100
1101 Given a speculative call edge, return all three components.
1102 */
1103
1104 void
1105 cgraph_edge::speculative_call_info (cgraph_edge *&direct,
1106 cgraph_edge *&indirect,
1107 ipa_ref *&reference)
1108 {
1109 ipa_ref *ref;
1110 int i;
1111 cgraph_edge *e2;
1112 cgraph_edge *e = this;
1113
1114 if (!e->indirect_unknown_callee)
1115 for (e2 = e->caller->indirect_calls;
1116 e2->call_stmt != e->call_stmt || e2->lto_stmt_uid != e->lto_stmt_uid;
1117 e2 = e2->next_callee)
1118 ;
1119 else
1120 {
1121 e2 = e;
1122 /* We can take advantage of the call stmt hash. */
1123 if (e2->call_stmt)
1124 {
1125 e = e->caller->get_edge (e2->call_stmt);
1126 gcc_assert (e->speculative && !e->indirect_unknown_callee);
1127 }
1128 else
1129 for (e = e->caller->callees;
1130 e2->call_stmt != e->call_stmt
1131 || e2->lto_stmt_uid != e->lto_stmt_uid;
1132 e = e->next_callee)
1133 ;
1134 }
1135 gcc_assert (e->speculative && e2->speculative);
1136 direct = e;
1137 indirect = e2;
1138
1139 reference = NULL;
1140 for (i = 0; e->caller->iterate_reference (i, ref); i++)
1141 if (ref->speculative
1142 && ((ref->stmt && ref->stmt == e->call_stmt)
1143 || (!ref->stmt && ref->lto_stmt_uid == e->lto_stmt_uid)))
1144 {
1145 reference = ref;
1146 break;
1147 }
1148
1149 /* A speculative edge always consists of all three components: the direct
1150 edge, the indirect edge, and the reference. */
1151
1152 gcc_assert (e && e2 && ref);
1153 }
1154
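/* Usage sketch: code that holds any one component of a speculative call
   can recover all three, as set_call_stmt above does:

     cgraph_edge *direct, *indirect;
     ipa_ref *ref;
     e->speculative_call_info (direct, indirect, ref);
*/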
1155 /* The speculative call edge turned out to be a direct call to CALLEE_DECL.
1156 Remove the speculative call sequence and return the edge representing the call.
1157 It is up to the caller to redirect the call as appropriate. */
1158
1159 cgraph_edge *
1160 cgraph_edge::resolve_speculation (tree callee_decl)
1161 {
1162 cgraph_edge *edge = this;
1163 cgraph_edge *e2;
1164 ipa_ref *ref;
1165
1166 gcc_assert (edge->speculative);
1167 edge->speculative_call_info (e2, edge, ref);
1168 if (!callee_decl
1169 || !ref->referred->semantically_equivalent_p
1170 (symtab_node::get (callee_decl)))
1171 {
1172 if (dump_file)
1173 {
1174 if (callee_decl)
1175 {
1176 fprintf (dump_file, "Speculative indirect call %s => %s has "
1177 "turned out to have contradicting known target ",
1178 edge->caller->dump_name (),
1179 e2->callee->dump_name ());
1180 print_generic_expr (dump_file, callee_decl);
1181 fprintf (dump_file, "\n");
1182 }
1183 else
1184 {
1185 fprintf (dump_file, "Removing speculative call %s => %s\n",
1186 edge->caller->dump_name (),
1187 e2->callee->dump_name ());
1188 }
1189 }
1190 }
1191 else
1192 {
1193 cgraph_edge *tmp = edge;
1194 if (dump_file)
1195 fprintf (dump_file, "Speculative call turned into direct call.\n");
1196 edge = e2;
1197 e2 = tmp;
1198 /* FIXME: If EDGE is inlined, we should scale up the frequencies and counts
1199 in the functions inlined through it. */
1200 }
1201 edge->count += e2->count;
1202 edge->speculative = false;
1203 e2->speculative = false;
1204 ref->remove_reference ();
1205 if (e2->indirect_unknown_callee || e2->inline_failed)
1206 e2->remove ();
1207 else
1208 e2->callee->remove_symbol_and_inline_clones ();
1209 if (edge->caller->call_site_hash)
1210 cgraph_update_edge_in_call_site_hash (edge);
1211 return edge;
1212 }
1213
1214 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1215 CALLEE.  If the edge was speculative, the speculation is resolved first.
1216 Return the resulting direct edge. */
1217
1218 cgraph_edge *
1219 cgraph_edge::make_direct (cgraph_node *callee)
1220 {
1221 cgraph_edge *edge = this;
1222 gcc_assert (indirect_unknown_callee);
1223
1224 /* If we are redirecting speculative call, make it non-speculative. */
1225 if (indirect_unknown_callee && speculative)
1226 {
1227 edge = edge->resolve_speculation (callee->decl);
1228
1229 /* On successful speculation just return the pre-existing direct edge. */
1230 if (!indirect_unknown_callee)
1231 return edge;
1232 }
1233
1234 indirect_unknown_callee = 0;
1235 ggc_free (indirect_info);
1236 indirect_info = NULL;
1237
1238 /* Get the edge out of the indirect edge list. */
1239 if (prev_callee)
1240 prev_callee->next_callee = next_callee;
1241 if (next_callee)
1242 next_callee->prev_callee = prev_callee;
1243 if (!prev_callee)
1244 caller->indirect_calls = next_callee;
1245
1246 /* Put it into the normal callee list. */
1247 prev_callee = NULL;
1248 next_callee = caller->callees;
1249 if (caller->callees)
1250 caller->callees->prev_callee = edge;
1251 caller->callees = edge;
1252
1253 /* Insert to callers list of the new callee. */
1254 edge->set_callee (callee);
1255
1256 if (call_stmt
1257 && !gimple_check_call_matching_types (call_stmt, callee->decl, false))
1258 {
1259 call_stmt_cannot_inline_p = true;
1260 inline_failed = CIF_MISMATCHED_ARGUMENTS;
1261 }
1262
1263 /* We need to re-determine the inlining status of the edge. */
1264 initialize_inline_failed (edge);
1265 return edge;
1266 }
1267
1268 /* If necessary, change the function declaration in the call statement
1269 associated with E so that it corresponds to the edge callee. */
1270
1271 gimple *
1272 cgraph_edge::redirect_call_stmt_to_callee (void)
1273 {
1274 cgraph_edge *e = this;
1275
1276 tree decl = gimple_call_fndecl (e->call_stmt);
1277 gcall *new_stmt;
1278 gimple_stmt_iterator gsi;
1279
1280 if (e->speculative)
1281 {
1282 cgraph_edge *e2;
1283 gcall *new_stmt;
1284 ipa_ref *ref;
1285
1286 e->speculative_call_info (e, e2, ref);
1287 /* If there already is a direct call (i.e. as a result of the inliner's
1288 substitution), forget about speculating. */
1289 if (decl)
1290 e = e->resolve_speculation (decl);
1291 /* If types do not match, speculation was likely wrong.
1292 The direct edge was possibly redirected to the clone with a different
1293 signature. We did not update the call statement yet, so compare it
1294 with the reference that still points to the proper type. */
1295 else if (!gimple_check_call_matching_types (e->call_stmt,
1296 ref->referred->decl,
1297 true))
1298 {
1299 if (dump_file)
1300 fprintf (dump_file, "Not expanding speculative call of %s -> %s\n"
1301 "Type mismatch.\n",
1302 e->caller->dump_name (),
1303 e->callee->dump_name ());
1304 e = e->resolve_speculation ();
1305 /* We are producing the final function body and will throw away the
1306 callgraph edges really soon. Reset the counts/frequencies to
1307 keep verifier happy in the case of roundoff errors. */
1308 e->count = gimple_bb (e->call_stmt)->count;
1309 }
1310 /* Expand speculation into GIMPLE code. */
1311 else
1312 {
1313 if (dump_file)
1314 {
1315 fprintf (dump_file,
1316 "Expanding speculative call of %s -> %s count: ",
1317 e->caller->dump_name (),
1318 e->callee->dump_name ());
1319 e->count.dump (dump_file);
1320 fprintf (dump_file, "\n");
1321 }
1322 gcc_assert (e2->speculative);
1323 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
1324
1325 profile_probability prob = e->count.probability_in (e->count
1326 + e2->count);
1327 if (!prob.initialized_p ())
1328 prob = profile_probability::even ();
1329 new_stmt = gimple_ic (e->call_stmt,
1330 dyn_cast<cgraph_node *> (ref->referred),
1331 prob);
1332 e->speculative = false;
1333 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt,
1334 false);
1335 e->count = gimple_bb (e->call_stmt)->count;
1336 e2->speculative = false;
1337 e2->count = gimple_bb (e2->call_stmt)->count;
1338 ref->speculative = false;
1339 ref->stmt = NULL;
1340 /* The direct and indirect edges are not both in the call site hash;
1341 make sure it gets updated. */
1342 if (e->caller->call_site_hash)
1343 cgraph_update_edge_in_call_site_hash (e2);
1344 pop_cfun ();
1345 /* Continue redirecting E to proper target. */
1346 }
1347 }
1348
1349
1350 if (e->indirect_unknown_callee
1351 || decl == e->callee->decl)
1352 return e->call_stmt;
1353
1354 if (flag_checking && decl)
1355 {
1356 cgraph_node *node = cgraph_node::get (decl);
1357 gcc_assert (!node || !node->clone.combined_args_to_skip);
1358 }
1359
1360 if (symtab->dump_file)
1361 {
1362 fprintf (symtab->dump_file, "updating call of %s -> %s: ",
1363 e->caller->dump_name (), e->callee->dump_name ());
1364 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1365 if (e->callee->clone.combined_args_to_skip)
1366 {
1367 fprintf (symtab->dump_file, " combined args to skip: ");
1368 dump_bitmap (symtab->dump_file,
1369 e->callee->clone.combined_args_to_skip);
1370 }
1371 }
1372
1373 if (e->callee->clone.combined_args_to_skip)
1374 {
1375 int lp_nr;
1376
1377 new_stmt = e->call_stmt;
1378 if (e->callee->clone.combined_args_to_skip)
1379 new_stmt
1380 = gimple_call_copy_skip_args (new_stmt,
1381 e->callee->clone.combined_args_to_skip);
1382 tree old_fntype = gimple_call_fntype (e->call_stmt);
1383 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1384 cgraph_node *origin = e->callee;
1385 while (origin->clone_of)
1386 origin = origin->clone_of;
1387
1388 if ((origin->former_clone_of
1389 && old_fntype == TREE_TYPE (origin->former_clone_of))
1390 || old_fntype == TREE_TYPE (origin->decl))
1391 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1392 else
1393 {
1394 bitmap skip = e->callee->clone.combined_args_to_skip;
1395 tree t = cgraph_build_function_type_skip_args (old_fntype, skip,
1396 false);
1397 gimple_call_set_fntype (new_stmt, t);
1398 }
1399
1400 if (gimple_vdef (new_stmt)
1401 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1402 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1403
1404 gsi = gsi_for_stmt (e->call_stmt);
1405
1406 /* For optimized away parameters, add on the caller side
1407 before the call
1408 DEBUG D#X => parm_Y(D)
1409 stmts and associate D#X with parm in decl_debug_args_lookup
1410 vector to say for debug info that if parameter parm had been passed,
1411 it would have value parm_Y(D). */
1412 if (e->callee->clone.combined_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
1413 {
1414 vec<tree, va_gc> **debug_args
1415 = decl_debug_args_lookup (e->callee->decl);
1416 tree old_decl = gimple_call_fndecl (e->call_stmt);
1417 if (debug_args && old_decl)
1418 {
1419 tree parm;
1420 unsigned i = 0, num;
1421 unsigned len = vec_safe_length (*debug_args);
1422 unsigned nargs = gimple_call_num_args (e->call_stmt);
1423 for (parm = DECL_ARGUMENTS (old_decl), num = 0;
1424 parm && num < nargs;
1425 parm = DECL_CHAIN (parm), num++)
1426 if (bitmap_bit_p (e->callee->clone.combined_args_to_skip, num)
1427 && is_gimple_reg (parm))
1428 {
1429 unsigned last = i;
1430
1431 while (i < len && (**debug_args)[i] != DECL_ORIGIN (parm))
1432 i += 2;
1433 if (i >= len)
1434 {
1435 i = 0;
1436 while (i < last
1437 && (**debug_args)[i] != DECL_ORIGIN (parm))
1438 i += 2;
1439 if (i >= last)
1440 continue;
1441 }
1442 tree ddecl = (**debug_args)[i + 1];
1443 tree arg = gimple_call_arg (e->call_stmt, num);
1444 if (!useless_type_conversion_p (TREE_TYPE (ddecl),
1445 TREE_TYPE (arg)))
1446 {
1447 tree rhs1;
1448 if (!fold_convertible_p (TREE_TYPE (ddecl), arg))
1449 continue;
1450 if (TREE_CODE (arg) == SSA_NAME
1451 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg))
1452 && (rhs1
1453 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (arg)))
1454 && useless_type_conversion_p (TREE_TYPE (ddecl),
1455 TREE_TYPE (rhs1)))
1456 arg = rhs1;
1457 else
1458 arg = fold_convert (TREE_TYPE (ddecl), arg);
1459 }
1460
1461 gimple *def_temp
1462 = gimple_build_debug_bind (ddecl, unshare_expr (arg),
1463 e->call_stmt);
1464 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
1465 }
1466 }
1467 }
1468
1469 gsi_replace (&gsi, new_stmt, false);
1470 /* We need to defer cleaning EH info on the new statement to
1471 fixup-cfg. We may not have dominator information at this point
1472 and thus would end up with unreachable blocks and have no way
1473 to communicate that we need to run CFG cleanup then. */
1474 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1475 if (lp_nr != 0)
1476 {
1477 remove_stmt_from_eh_lp (e->call_stmt);
1478 add_stmt_to_eh_lp (new_stmt, lp_nr);
1479 }
1480 }
1481 else
1482 {
1483 new_stmt = e->call_stmt;
1484 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1485 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1486 }
1487
1488 /* If changing the call to __cxa_pure_virtual or similar noreturn function,
1489 adjust gimple_call_fntype too. */
1490 if (gimple_call_noreturn_p (new_stmt)
1491 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (e->callee->decl)))
1492 && TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl))
1493 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl)))
1494 == void_type_node))
1495 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1496
1497 /* If the call becomes noreturn, remove the LHS if possible. */
1498 tree lhs = gimple_call_lhs (new_stmt);
1499 if (lhs
1500 && gimple_call_noreturn_p (new_stmt)
1501 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (new_stmt)))
1502 || should_remove_lhs_p (lhs)))
1503 {
1504 if (TREE_CODE (lhs) == SSA_NAME)
1505 {
1506 tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl),
1507 TREE_TYPE (lhs), NULL);
1508 var = get_or_create_ssa_default_def
1509 (DECL_STRUCT_FUNCTION (e->caller->decl), var);
1510 gimple *set_stmt = gimple_build_assign (lhs, var);
1511 gsi = gsi_for_stmt (new_stmt);
1512 gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
1513 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
1514 }
1515 gimple_call_set_lhs (new_stmt, NULL_TREE);
1516 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1517 }
1518
1519 /* If new callee has no static chain, remove it. */
1520 if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl))
1521 {
1522 gimple_call_set_chain (new_stmt, NULL);
1523 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1524 }
1525
1526 maybe_remove_unused_call_args (DECL_STRUCT_FUNCTION (e->caller->decl),
1527 new_stmt);
1528
1529 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt, false);
1530
1531 if (symtab->dump_file)
1532 {
1533 fprintf (symtab->dump_file, " updated to:");
1534 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1535 }
1536 return new_stmt;
1537 }
1538
1539 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1540 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1541 of OLD_STMT if it was previously a call statement.
1542 If NEW_STMT is NULL, the call has been dropped without any
1543 replacement. */
1544
1545 static void
1546 cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
1547 gimple *old_stmt, tree old_call,
1548 gimple *new_stmt)
1549 {
1550 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1551 ? gimple_call_fndecl (new_stmt) : 0;
1552
1553 /* If we see only indirect calls, then there is nothing to update. */
1554 if (!new_call && !old_call)
1555 return;
1556 /* See if we turned an indirect call into a direct call or folded a call to
1557 one builtin into a call to a different builtin. */
1558 if (old_call != new_call)
1559 {
1560 cgraph_edge *e = node->get_edge (old_stmt);
1561 cgraph_edge *ne = NULL;
1562 profile_count count;
1563
1564 if (e)
1565 {
1566 /* Keep calls marked as dead dead. */
1567 if (new_stmt && is_gimple_call (new_stmt) && e->callee
1568 && fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
1569 {
1570 node->get_edge (old_stmt)->set_call_stmt
1571 (as_a <gcall *> (new_stmt));
1572 return;
1573 }
1574 /* See if the edge is already there and has the correct callee.  It
1575 might be so because indirect inlining has already updated
1576 it.  We also might have cloned and redirected the edge. */
1577 if (new_call && e->callee)
1578 {
1579 cgraph_node *callee = e->callee;
1580 while (callee)
1581 {
1582 if (callee->decl == new_call
1583 || callee->former_clone_of == new_call)
1584 {
1585 e->set_call_stmt (as_a <gcall *> (new_stmt));
1586 return;
1587 }
1588 callee = callee->clone_of;
1589 }
1590 }
1591
1592 /* Otherwise remove the edge and create a new one; we can't simply redirect
1593 it since the function has changed, so the inline plan and other information
1594 attached to the edge are invalid. */
1595 count = e->count;
1596 if (e->indirect_unknown_callee || e->inline_failed)
1597 e->remove ();
1598 else
1599 e->callee->remove_symbol_and_inline_clones ();
1600 }
1601 else if (new_call)
1602 {
1603 /* We are seeing new direct call; compute profile info based on BB. */
1604 basic_block bb = gimple_bb (new_stmt);
1605 count = bb->count;
1606 }
1607
1608 if (new_call)
1609 {
1610 ne = node->create_edge (cgraph_node::get_create (new_call),
1611 as_a <gcall *> (new_stmt), count);
1612 gcc_assert (ne->inline_failed);
1613 }
1614 }
1615 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1616 else if (old_stmt != new_stmt)
1617 node->get_edge (old_stmt)->set_call_stmt (as_a <gcall *> (new_stmt));
1618 }
1619
1620 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1621 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1622 of OLD_STMT before it was updated (updating can happen in place). */
1623
1624 void
1625 cgraph_update_edges_for_call_stmt (gimple *old_stmt, tree old_decl,
1626 gimple *new_stmt)
1627 {
1628 cgraph_node *orig = cgraph_node::get (cfun->decl);
1629 cgraph_node *node;
1630
1631 gcc_checking_assert (orig);
1632 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1633 if (orig->clones)
1634 for (node = orig->clones; node != orig;)
1635 {
1636 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1637 if (node->clones)
1638 node = node->clones;
1639 else if (node->next_sibling_clone)
1640 node = node->next_sibling_clone;
1641 else
1642 {
1643 while (node != orig && !node->next_sibling_clone)
1644 node = node->clone_of;
1645 if (node != orig)
1646 node = node->next_sibling_clone;
1647 }
1648 }
1649 }
1650
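/* Usage sketch (variable names hypothetical): after folding replaces a call
   statement in the current function (cfun must be set to that function),
   the folder is expected to keep the call graph in sync, roughly:

     tree old_decl = gimple_call_fndecl (old_stmt);
     ... fold old_stmt, producing new_stmt (possibly old_stmt itself) ...
     cgraph_update_edges_for_call_stmt (old_stmt, old_decl, new_stmt);
*/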
1651
1652 /* Remove all callees from the node. */
1653
1654 void
1655 cgraph_node::remove_callees (void)
1656 {
1657 cgraph_edge *e, *f;
1658
1659 /* It is sufficient to remove the edges from the lists of callers of
1660 the callees. The callee list of the node can be zapped with one
1661 assignment. */
1662 for (e = callees; e; e = f)
1663 {
1664 f = e->next_callee;
1665 symtab->call_edge_removal_hooks (e);
1666 if (!e->indirect_unknown_callee)
1667 e->remove_callee ();
1668 symtab->free_edge (e);
1669 }
1670 for (e = indirect_calls; e; e = f)
1671 {
1672 f = e->next_callee;
1673 symtab->call_edge_removal_hooks (e);
1674 if (!e->indirect_unknown_callee)
1675 e->remove_callee ();
1676 symtab->free_edge (e);
1677 }
1678 indirect_calls = NULL;
1679 callees = NULL;
1680 if (call_site_hash)
1681 {
1682 call_site_hash->empty ();
1683 call_site_hash = NULL;
1684 }
1685 }
1686
1687 /* Remove all callers from the node. */
1688
1689 void
1690 cgraph_node::remove_callers (void)
1691 {
1692 cgraph_edge *e, *f;
1693
1694 /* It is sufficient to remove the edges from the lists of callees of
1695 the callers. The caller list of the node can be zapped with one
1696 assignment. */
1697 for (e = callers; e; e = f)
1698 {
1699 f = e->next_caller;
1700 symtab->call_edge_removal_hooks (e);
1701 e->remove_caller ();
1702 symtab->free_edge (e);
1703 }
1704 callers = NULL;
1705 }
1706
1707 /* Helper function for cgraph_release_function_body and free_lang_data.
1708 It releases the body of function DECL without having to inspect its
1709 possibly non-existent symtab node. */
1710
1711 void
1712 release_function_body (tree decl)
1713 {
1714 function *fn = DECL_STRUCT_FUNCTION (decl);
1715 if (fn)
1716 {
1717 if (fn->cfg
1718 && loops_for_fn (fn))
1719 {
1720 fn->curr_properties &= ~PROP_loops;
1721 loop_optimizer_finalize (fn);
1722 }
1723 if (fn->gimple_df)
1724 {
1725 delete_tree_ssa (fn);
1726 fn->eh = NULL;
1727 }
1728 if (fn->cfg)
1729 {
1730 gcc_assert (!dom_info_available_p (fn, CDI_DOMINATORS));
1731 gcc_assert (!dom_info_available_p (fn, CDI_POST_DOMINATORS));
1732 delete_tree_cfg_annotations (fn);
1733 clear_edges (fn);
1734 fn->cfg = NULL;
1735 }
1736 if (fn->value_histograms)
1737 free_histograms (fn);
1738 gimple_set_body (decl, NULL);
1739 /* The struct function has a lot of data hanging off it that would leak
1740 if we did not remove all pointers to it. */
1741 ggc_free (fn);
1742 DECL_STRUCT_FUNCTION (decl) = NULL;
1743 }
1744 DECL_SAVED_TREE (decl) = NULL;
1745 }
1746
1747 /* Release memory used to represent body of function.
1748 Use this only for functions that are released before being translated to
1749 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1750 are free'd in final.c via free_after_compilation().
1751 KEEP_ARGUMENTS is useful only if you want to rebuild the body as a thunk. */
1752
1753 void
1754 cgraph_node::release_body (bool keep_arguments)
1755 {
1756 ipa_transforms_to_apply.release ();
1757 if (!used_as_abstract_origin && symtab->state != PARSING)
1758 {
1759 DECL_RESULT (decl) = NULL;
1760
1761 if (!keep_arguments)
1762 DECL_ARGUMENTS (decl) = NULL;
1763 }
1764 /* If the node is abstract and needed, then do not clear
1765 DECL_INITIAL of its associated function declaration because it's
1766 needed to emit debug info later. */
1767 if (!used_as_abstract_origin && DECL_INITIAL (decl))
1768 DECL_INITIAL (decl) = error_mark_node;
1769 release_function_body (decl);
1770 if (lto_file_data)
1771 {
1772 lto_free_function_in_decl_state_for_node (this);
1773 lto_file_data = NULL;
1774 }
1775 }
1776
1777 /* Remove function from symbol table. */
1778
1779 void
1780 cgraph_node::remove (void)
1781 {
1782 cgraph_node *n;
1783
1784 if (symtab->ipa_clones_dump_file && symtab->cloned_nodes.contains (this))
1785 fprintf (symtab->ipa_clones_dump_file,
1786 "Callgraph removal;%s;%d;%s;%d;%d\n", asm_name (), order,
1787 DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl),
1788 DECL_SOURCE_COLUMN (decl));
1789
1790 symtab->call_cgraph_removal_hooks (this);
1791 remove_callers ();
1792 remove_callees ();
1793 ipa_transforms_to_apply.release ();
1794 delete_function_version (function_version ());
1795
1796 /* Incremental inlining accesses removed nodes stored in the postorder
1797 list. */
1798 force_output = false;
1799 forced_by_abi = false;
1800 for (n = nested; n; n = n->next_nested)
1801 n->origin = NULL;
1802 nested = NULL;
1803 if (origin)
1804 {
1805 cgraph_node **node2 = &origin->nested;
1806
1807 while (*node2 != this)
1808 node2 = &(*node2)->next_nested;
1809 *node2 = next_nested;
1810 }
1811 unregister ();
1812 if (prev_sibling_clone)
1813 prev_sibling_clone->next_sibling_clone = next_sibling_clone;
1814 else if (clone_of)
1815 clone_of->clones = next_sibling_clone;
1816 if (next_sibling_clone)
1817 next_sibling_clone->prev_sibling_clone = prev_sibling_clone;
1818 if (clones)
1819 {
1820 cgraph_node *n, *next;
1821
1822 if (clone_of)
1823 {
1824 for (n = clones; n->next_sibling_clone; n = n->next_sibling_clone)
1825 n->clone_of = clone_of;
1826 n->clone_of = clone_of;
1827 n->next_sibling_clone = clone_of->clones;
1828 if (clone_of->clones)
1829 clone_of->clones->prev_sibling_clone = n;
1830 clone_of->clones = clones;
1831 }
1832 else
1833 {
1834 /* We are removing a node with clones.  This makes the clones inconsistent,
1835 but assume they will be removed subsequently and just keep the clone
1836 tree intact.  This can happen in unreachable function removal since
1837 we remove unreachable functions in random order, not by bottom-up
1838 walk of clone trees. */
1839 for (n = clones; n; n = next)
1840 {
1841 next = n->next_sibling_clone;
1842 n->next_sibling_clone = NULL;
1843 n->prev_sibling_clone = NULL;
1844 n->clone_of = NULL;
1845 }
1846 }
1847 }
1848
1849 /* While all the clones are removed after being processed, the function
1850 itself is kept in the cgraph even after it is compiled.  Check whether
1851 we are done with this body and reclaim it proactively if this is the
1852 case. */
1853 if (symtab->state != LTO_STREAMING)
1854 {
1855 n = cgraph_node::get (decl);
1856 if (!n
1857 || (!n->clones && !n->clone_of && !n->global.inlined_to
1858 && ((symtab->global_info_ready || in_lto_p)
1859 && (TREE_ASM_WRITTEN (n->decl)
1860 || DECL_EXTERNAL (n->decl)
1861 || !n->analyzed
1862 || (!flag_wpa && n->in_other_partition)))))
1863 release_body ();
1864 }
1865 else
1866 {
1867 lto_free_function_in_decl_state_for_node (this);
1868 lto_file_data = NULL;
1869 }
1870
1871 decl = NULL;
1872 if (call_site_hash)
1873 {
1874 call_site_hash->empty ();
1875 call_site_hash = NULL;
1876 }
1877
1878 symtab->release_symbol (this);
1879 }
1880
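/* A minimal sketch (the helper name is made up, and this is not the real
   unreachable-node removal pass) of how cgraph_node::remove is typically
   driven: collect candidates first, since removing nodes while iterating
   with FOR_EACH_FUNCTION would disturb the walk, then remove them.

     static void
     remove_trivially_dead_functions (void)
     {
       cgraph_node *node;
       auto_vec<cgraph_node *> to_remove;

       FOR_EACH_FUNCTION (node)
         if (!node->callers && !node->address_taken
             && node->can_remove_if_no_direct_calls_and_refs_p ())
           to_remove.safe_push (node);

       for (unsigned i = 0; i < to_remove.length (); i++)
         to_remove[i]->remove ();
     }  */
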
1881 /* Likewise indicate that a node has its address taken. */
1882
1883 void
1884 cgraph_node::mark_address_taken (void)
1885 {
1886 /* Indirect inlining can figure out that all uses of the address are
1887 inlined. */
1888 if (global.inlined_to)
1889 {
1890 gcc_assert (cfun->after_inlining);
1891 gcc_assert (callers->indirect_inlining_edge);
1892 return;
1893 }
1894 /* FIXME: The address_taken flag is used both as a shortcut for testing
1895 whether an IPA_REF_ADDR reference exists (and thus it should be set on the
1896 node representing the alias we take the address of) and as a test whether
1897 the address of the object was taken (and thus it should be set on the node
1898 the alias refers to). We should remove the first use and then remove the
1899 following set. */
1900 address_taken = 1;
1901 cgraph_node *node = ultimate_alias_target ();
1902 node->address_taken = 1;
1903 }
1904
1905 /* Return local info for the compiled function. */
1906
1907 cgraph_local_info *
1908 cgraph_node::local_info (tree decl)
1909 {
1910 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1911 cgraph_node *node = get (decl);
1912 if (!node)
1913 return NULL;
1914 return &node->ultimate_alias_target ()->local;
1915 }
1916
1917 /* Return RTL info for the compiled function. */
1918
1919 cgraph_rtl_info *
1920 cgraph_node::rtl_info (tree decl)
1921 {
1922 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1923 cgraph_node *node = get (decl);
1924 if (!node)
1925 return NULL;
1926 enum availability avail;
1927 node = node->ultimate_alias_target (&avail);
1928 if (decl != current_function_decl
1929 && (avail < AVAIL_AVAILABLE
1930 || (node->decl != current_function_decl
1931 && !TREE_ASM_WRITTEN (node->decl))))
1932 return NULL;
1933 /* Allocate if it doesn't exist. */
1934 if (node->rtl == NULL)
1935 node->rtl = ggc_cleared_alloc<cgraph_rtl_info> ();
1936 return node->rtl;
1937 }
1938
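/* Usage sketch for cgraph_node::rtl_info; the helper below is hypothetical,
   and preferred_incoming_stack_boundary is assumed to be one of the fields
   of cgraph_rtl_info.  The record is allocated lazily, so callers must be
   prepared for a NULL result when no RTL information can be provided.

     static unsigned
     incoming_stack_boundary_for (tree fndecl, unsigned default_boundary)
     {
       cgraph_rtl_info *info = cgraph_node::rtl_info (fndecl);
       if (!info || !info->preferred_incoming_stack_boundary)
         return default_boundary;
       return info->preferred_incoming_stack_boundary;
     }  */
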
1939 /* Return a string describing the failure REASON. */
1940
1941 const char*
1942 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1943 {
1944 #undef DEFCIFCODE
1945 #define DEFCIFCODE(code, type, string) string,
1946
1947 static const char *cif_string_table[CIF_N_REASONS] = {
1948 #include "cif-code.def"
1949 };
1950
1951 /* Signedness of an enum type is implementation defined, so cast it
1952 to unsigned before testing. */
1953 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1954 return cif_string_table[reason];
1955 }
1956
1957 /* Return a type describing the failure REASON. */
1958
1959 cgraph_inline_failed_type_t
1960 cgraph_inline_failed_type (cgraph_inline_failed_t reason)
1961 {
1962 #undef DEFCIFCODE
1963 #define DEFCIFCODE(code, type, string) type,
1964
1965 static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
1966 #include "cif-code.def"
1967 };
1968
1969 /* Signedness of an enum type is implementation defined, so cast it
1970 to unsigned before testing. */
1971 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1972 return cif_type_table[reason];
1973 }
1974
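/* A typical (hypothetical) consumer of the two lookup routines above,
   reporting why an edge was not inlined:

     static void
     report_not_inlined (FILE *f, cgraph_edge *e)
     {
       if (!e->inline_failed)
         return;
       fprintf (f, "not inlining %s into %s: %s\n",
                e->callee->dump_name (), e->caller->dump_name (),
                cgraph_inline_failed_string (e->inline_failed));
       if (cgraph_inline_failed_type (e->inline_failed) == CIF_FINAL_ERROR)
         fprintf (f, "  (this failure is permanent)\n");
     }  */
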
1975 /* Names used to print out the availability enum. */
1976 const char * const cgraph_availability_names[] =
1977 {"unset", "not_available", "overwritable", "available", "local"};
1978
1979 /* Output flags of edge to a file F. */
1980
1981 void
1982 cgraph_edge::dump_edge_flags (FILE *f)
1983 {
1984 if (speculative)
1985 fprintf (f, "(speculative) ");
1986 if (!inline_failed)
1987 fprintf (f, "(inlined) ");
1988 if (call_stmt_cannot_inline_p)
1989 fprintf (f, "(call_stmt_cannot_inline_p) ");
1990 if (indirect_inlining_edge)
1991 fprintf (f, "(indirect_inlining) ");
1992 if (count.initialized_p ())
1993 {
1994 fprintf (f, "(");
1995 count.dump (f);
1996 fprintf (f, ",");
1997 fprintf (f, "%.2f per call) ", sreal_frequency ().to_double ());
1998 }
1999 if (can_throw_external)
2000 fprintf (f, "(can throw external) ");
2001 }
2002
2003 /* Dump call graph node to file F. */
2004
2005 void
2006 cgraph_node::dump (FILE *f)
2007 {
2008 cgraph_edge *edge;
2009
2010 dump_base (f);
2011
2012 if (global.inlined_to)
2013 fprintf (f, " Function %s is inline copy in %s\n",
2014 dump_name (),
2015 global.inlined_to->dump_name ());
2016 if (clone_of)
2017 fprintf (f, " Clone of %s\n", clone_of->dump_asm_name ());
2018 if (symtab->function_flags_ready)
2019 fprintf (f, " Availability: %s\n",
2020 cgraph_availability_names [get_availability ()]);
2021
2022 if (profile_id)
2023 fprintf (f, " Profile id: %i\n",
2024 profile_id);
2025 cgraph_function_version_info *vi = function_version ();
2026 if (vi != NULL)
2027 {
2028 fprintf (f, " Version info: ");
2029 if (vi->prev != NULL)
2030 {
2031 fprintf (f, "prev: ");
2032 fprintf (f, "%s ", vi->prev->this_node->dump_asm_name ());
2033 }
2034 if (vi->next != NULL)
2035 {
2036 fprintf (f, "next: ");
2037 fprintf (f, "%s ", vi->next->this_node->dump_asm_name ());
2038 }
2039 if (vi->dispatcher_resolver != NULL_TREE)
2040 fprintf (f, "dispatcher: %s",
2041 lang_hooks.decl_printable_name (vi->dispatcher_resolver, 2));
2042
2043 fprintf (f, "\n");
2044 }
2045 fprintf (f, " Function flags:");
2046 if (count.initialized_p ())
2047 {
2048 fprintf (f, " count:");
2049 count.dump (f);
2050 }
2051 if (tp_first_run > 0)
2052 fprintf (f, " first_run:%i", tp_first_run);
2053 if (origin)
2054 fprintf (f, " nested in:%s", origin->asm_name ());
2055 if (gimple_has_body_p (decl))
2056 fprintf (f, " body");
2057 if (process)
2058 fprintf (f, " process");
2059 if (local.local)
2060 fprintf (f, " local");
2061 if (local.redefined_extern_inline)
2062 fprintf (f, " redefined_extern_inline");
2063 if (only_called_at_startup)
2064 fprintf (f, " only_called_at_startup");
2065 if (only_called_at_exit)
2066 fprintf (f, " only_called_at_exit");
2067 if (tm_clone)
2068 fprintf (f, " tm_clone");
2069 if (calls_comdat_local)
2070 fprintf (f, " calls_comdat_local");
2071 if (icf_merged)
2072 fprintf (f, " icf_merged");
2073 if (merged_comdat)
2074 fprintf (f, " merged_comdat");
2075 if (split_part)
2076 fprintf (f, " split_part");
2077 if (indirect_call_target)
2078 fprintf (f, " indirect_call_target");
2079 if (nonfreeing_fn)
2080 fprintf (f, " nonfreeing_fn");
2081 if (DECL_STATIC_CONSTRUCTOR (decl))
2082 fprintf (f," static_constructor (priority:%i)", get_init_priority ());
2083 if (DECL_STATIC_DESTRUCTOR (decl))
2084 fprintf (f," static_destructor (priority:%i)", get_fini_priority ());
2085 if (frequency == NODE_FREQUENCY_HOT)
2086 fprintf (f, " hot");
2087 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2088 fprintf (f, " unlikely_executed");
2089 if (frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2090 fprintf (f, " executed_once");
2091 if (opt_for_fn (decl, optimize_size))
2092 fprintf (f, " optimize_size");
2093 if (parallelized_function)
2094 fprintf (f, " parallelized_function");
2095
2096 fprintf (f, "\n");
2097
2098 if (thunk.thunk_p)
2099 {
2100 fprintf (f, " Thunk");
2101 if (thunk.alias)
2102 fprintf (f, " of %s (asm:%s)",
2103 lang_hooks.decl_printable_name (thunk.alias, 2),
2104 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2105 fprintf (f, " fixed offset %i virtual value %i indirect_offset %i "
2106 "has virtual offset %i\n",
2107 (int)thunk.fixed_offset,
2108 (int)thunk.virtual_value,
2109 (int)thunk.indirect_offset,
2110 (int)thunk.virtual_offset_p);
2111 }
2112 else if (former_thunk_p ())
2113 fprintf (f, " Former thunk");
2114 if (alias && thunk.alias
2115 && DECL_P (thunk.alias))
2116 {
2117 fprintf (f, " Alias of %s",
2118 lang_hooks.decl_printable_name (thunk.alias, 2));
2119 if (DECL_ASSEMBLER_NAME_SET_P (thunk.alias))
2120 fprintf (f, " (asm:%s)",
2121 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2122 fprintf (f, "\n");
2123 }
2124
2125 fprintf (f, " Called by: ");
2126
2127 profile_count sum = profile_count::zero ();
2128 for (edge = callers; edge; edge = edge->next_caller)
2129 {
2130 fprintf (f, "%s ", edge->caller->dump_name ());
2131 edge->dump_edge_flags (f);
2132 if (edge->count.initialized_p ())
2133 sum += edge->count.ipa ();
2134 }
2135
2136 fprintf (f, "\n Calls: ");
2137 for (edge = callees; edge; edge = edge->next_callee)
2138 {
2139 fprintf (f, "%s ", edge->callee->dump_name ());
2140 edge->dump_edge_flags (f);
2141 }
2142 fprintf (f, "\n");
2143
2144 if (count.ipa ().initialized_p ())
2145 {
2146 bool ok = true;
2147 bool min = false;
2148 ipa_ref *ref;
2149
2150 FOR_EACH_ALIAS (this, ref)
2151 if (dyn_cast <cgraph_node *> (ref->referring)->count.initialized_p ())
2152 sum += dyn_cast <cgraph_node *> (ref->referring)->count.ipa ();
2153
2154 if (global.inlined_to
2155 || (symtab->state < EXPANSION
2156 && ultimate_alias_target () == this && only_called_directly_p ()))
2157 ok = !count.ipa ().differs_from_p (sum);
2158 else if (count.ipa () > profile_count::from_gcov_type (100)
2159 && count.ipa () < sum.apply_scale (99, 100))
2160 ok = false, min = true;
2161 if (!ok)
2162 {
2163 fprintf (f, " Invalid sum of caller counts ");
2164 sum.dump (f);
2165 if (min)
2166 fprintf (f, ", should be at most ");
2167 else
2168 fprintf (f, ", should be ");
2169 count.ipa ().dump (f);
2170 fprintf (f, "\n");
2171 }
2172 }
2173
2174 for (edge = indirect_calls; edge; edge = edge->next_callee)
2175 {
2176 if (edge->indirect_info->polymorphic)
2177 {
2178 fprintf (f, " Polymorphic indirect call of type ");
2179 print_generic_expr (f, edge->indirect_info->otr_type, TDF_SLIM);
2180 fprintf (f, " token:%i", (int) edge->indirect_info->otr_token);
2181 }
2182 else
2183 fprintf (f, " Indirect call");
2184 edge->dump_edge_flags (f);
2185 if (edge->indirect_info->param_index != -1)
2186 {
2187 fprintf (f, " of param:%i", edge->indirect_info->param_index);
2188 if (edge->indirect_info->agg_contents)
2189 fprintf (f, " loaded from %s %s at offset %i",
2190 edge->indirect_info->member_ptr ? "member ptr" : "aggregate",
2191 edge->indirect_info->by_ref ? "passed by reference":"",
2192 (int)edge->indirect_info->offset);
2193 if (edge->indirect_info->vptr_changed)
2194 fprintf (f, " (vptr maybe changed)");
2195 }
2196 fprintf (f, "\n");
2197 if (edge->indirect_info->polymorphic)
2198 edge->indirect_info->context.dump (f);
2199 }
2200 }
2201
2202 /* Dump call graph node NODE to stderr. */
2203
2204 DEBUG_FUNCTION void
2205 cgraph_node::debug (void)
2206 {
2207 dump (stderr);
2208 }
2209
2210 /* Dump the callgraph to file F. */
2211
2212 void
2213 cgraph_node::dump_cgraph (FILE *f)
2214 {
2215 cgraph_node *node;
2216
2217 fprintf (f, "callgraph:\n\n");
2218 FOR_EACH_FUNCTION (node)
2219 node->dump (f);
2220 }
2221
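/* The dumpers above are normally driven from pass code along the lines of
   the following sketch; dump_file is NULL when the pass is not being
   dumped.

     if (dump_file)
       {
         node->dump (dump_file);
         if (dump_flags & TDF_DETAILS)
           cgraph_node::dump_cgraph (dump_file);
       }  */
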
2222 /* Return true when the DECL can possibly be inlined. */
2223
2224 bool
2225 cgraph_function_possibly_inlined_p (tree decl)
2226 {
2227 if (!symtab->global_info_ready)
2228 return !DECL_UNINLINABLE (decl);
2229 return DECL_POSSIBLY_INLINED (decl);
2230 }
2231
2232 /* cgraph_node is no longer a nested function; update cgraph accordingly. */
2233 void
2234 cgraph_node::unnest (void)
2235 {
2236 gcc_assert (origin);
2237 cgraph_node **node2 = &origin->nested;
2238
2239 while (*node2 != this)
2240 node2 = &(*node2)->next_nested;
2241 *node2 = next_nested;
2242 origin = NULL;
2243 }
2244
2245 /* Return function availability. See cgraph.h for description of individual
2246 return values. */
2247 enum availability
2248 cgraph_node::get_availability (symtab_node *ref)
2249 {
2250 if (ref)
2251 {
2252 cgraph_node *cref = dyn_cast <cgraph_node *> (ref);
2253 if (cref)
2254 ref = cref->global.inlined_to;
2255 }
2256 enum availability avail;
2257 if (!analyzed)
2258 avail = AVAIL_NOT_AVAILABLE;
2259 else if (local.local)
2260 avail = AVAIL_LOCAL;
2261 else if (global.inlined_to)
2262 avail = AVAIL_AVAILABLE;
2263 else if (transparent_alias)
2264 ultimate_alias_target (&avail, ref);
2265 else if (ifunc_resolver
2266 || lookup_attribute ("noipa", DECL_ATTRIBUTES (decl)))
2267 avail = AVAIL_INTERPOSABLE;
2268 else if (!externally_visible)
2269 avail = AVAIL_AVAILABLE;
2270 /* If this is a reference from symbol itself and there are no aliases, we
2271 may be sure that the symbol was not interposed by something else because
2272 the symbol itself would be unreachable otherwise.
2273
2274 Also comdat groups are always resolved in groups. */
2275 else if ((this == ref && !has_aliases_p ())
2276 || (ref && get_comdat_group ()
2277 && get_comdat_group () == ref->get_comdat_group ()))
2278 avail = AVAIL_AVAILABLE;
2279 /* Inline functions are safe to be analyzed even if their symbol can
2280 be overwritten at runtime. It is not meaningful to enforce any sane
2281 behavior when replacing an inline function by a different body. */
2282 else if (DECL_DECLARED_INLINE_P (decl))
2283 avail = AVAIL_AVAILABLE;
2284
2285 /* If the function can be overwritten, return OVERWRITABLE. Take
2286 care at least of two notable extensions - the COMDAT functions
2287 used to share template instantiations in C++ (this is symmetric
2288 to the code in cp_cannot_inline_tree_fn and probably should be shared, with
2289 the inlinability hooks completely eliminated). */
2290
2291 else if (decl_replaceable_p (decl) && !DECL_EXTERNAL (decl))
2292 avail = AVAIL_INTERPOSABLE;
2293 else avail = AVAIL_AVAILABLE;
2294
2295 return avail;
2296 }
2297
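/* The usual pattern for consumers of get_availability is to resolve
   aliases first and only trust the target's body when it cannot be
   interposed; in this sketch, node is some cgraph_node and
   analyze_function_body is a placeholder for whatever the IPA analysis
   does with the body.

     enum availability avail;
     cgraph_node *target = node->ultimate_alias_target (&avail);
     if (avail >= AVAIL_AVAILABLE)
       analyze_function_body (target);  */
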
2298 /* Worker for cgraph_node_can_be_local_p. */
2299 static bool
2300 cgraph_node_cannot_be_local_p_1 (cgraph_node *node, void *)
2301 {
2302 return !(!node->force_output
2303 && ((DECL_COMDAT (node->decl)
2304 && !node->forced_by_abi
2305 && !node->used_from_object_file_p ()
2306 && !node->same_comdat_group)
2307 || !node->externally_visible));
2308 }
2309
2310 /* Return true if cgraph_node can be made local for API change.
2311 Extern inline functions and C++ COMDAT functions can be made local
2312 at the expense of possible code size growth if the function is used in multiple
2313 compilation units. */
2314 bool
2315 cgraph_node::can_be_local_p (void)
2316 {
2317 return (!address_taken
2318 && !call_for_symbol_thunks_and_aliases (cgraph_node_cannot_be_local_p_1,
2319 NULL, true));
2320 }
2321
2322 /* Call CALLBACK on cgraph_node, and on thunks and aliases associated with it.
2323 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
2324 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
2325 skipped. */
2326 bool
2327 cgraph_node::call_for_symbol_thunks_and_aliases (bool (*callback)
2328 (cgraph_node *, void *),
2329 void *data,
2330 bool include_overwritable,
2331 bool exclude_virtual_thunks)
2332 {
2333 cgraph_edge *e;
2334 ipa_ref *ref;
2335 enum availability avail = AVAIL_AVAILABLE;
2336
2337 if (include_overwritable
2338 || (avail = get_availability ()) > AVAIL_INTERPOSABLE)
2339 {
2340 if (callback (this, data))
2341 return true;
2342 }
2343 FOR_EACH_ALIAS (this, ref)
2344 {
2345 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2346 if (include_overwritable
2347 || alias->get_availability () > AVAIL_INTERPOSABLE)
2348 if (alias->call_for_symbol_thunks_and_aliases (callback, data,
2349 include_overwritable,
2350 exclude_virtual_thunks))
2351 return true;
2352 }
2353 if (avail <= AVAIL_INTERPOSABLE)
2354 return false;
2355 for (e = callers; e; e = e->next_caller)
2356 if (e->caller->thunk.thunk_p
2357 && (include_overwritable
2358 || e->caller->get_availability () > AVAIL_INTERPOSABLE)
2359 && !(exclude_virtual_thunks
2360 && e->caller->thunk.virtual_offset_p))
2361 if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
2362 include_overwritable,
2363 exclude_virtual_thunks))
2364 return true;
2365
2366 return false;
2367 }
2368
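/* Callbacks passed to the walker above return true to terminate the walk
   early.  A hypothetical example that asks whether the node, or any of its
   thunks and aliases, is externally visible:

     static bool
     externally_visible_p_1 (cgraph_node *node, void *)
     {
       return node->externally_visible;
     }

     bool any_visible
       = node->call_for_symbol_thunks_and_aliases (externally_visible_p_1,
                                                   NULL, true);  */
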
2369 /* Worker to bring NODE local. */
2370
2371 bool
2372 cgraph_node::make_local (cgraph_node *node, void *)
2373 {
2374 gcc_checking_assert (node->can_be_local_p ());
2375 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2376 {
2377 node->make_decl_local ();
2378 node->set_section (NULL);
2379 node->set_comdat_group (NULL);
2380 node->externally_visible = false;
2381 node->forced_by_abi = false;
2382 node->local.local = true;
2383 node->set_section (NULL);
2384 node->unique_name = ((node->resolution == LDPR_PREVAILING_DEF_IRONLY
2385 || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
2386 && !flag_incremental_link);
2387 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2388 gcc_assert (node->get_availability () == AVAIL_LOCAL);
2389 }
2390 return false;
2391 }
2392
2393 /* Bring cgraph node local. */
2394
2395 void
2396 cgraph_node::make_local (void)
2397 {
2398 call_for_symbol_thunks_and_aliases (cgraph_node::make_local, NULL, true);
2399 }
2400
2401 /* Worker to set nothrow flag. */
2402
2403 static void
2404 set_nothrow_flag_1 (cgraph_node *node, bool nothrow, bool non_call,
2405 bool *changed)
2406 {
2407 cgraph_edge *e;
2408
2409 if (nothrow && !TREE_NOTHROW (node->decl))
2410 {
2411 /* With non-call exceptions we can't say for sure that another function
2412 body was not possibly optimized to still throw. */
2413 if (!non_call || node->binds_to_current_def_p ())
2414 {
2415 TREE_NOTHROW (node->decl) = true;
2416 *changed = true;
2417 for (e = node->callers; e; e = e->next_caller)
2418 e->can_throw_external = false;
2419 }
2420 }
2421 else if (!nothrow && TREE_NOTHROW (node->decl))
2422 {
2423 TREE_NOTHROW (node->decl) = false;
2424 *changed = true;
2425 }
2426 ipa_ref *ref;
2427 FOR_EACH_ALIAS (node, ref)
2428 {
2429 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2430 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2431 set_nothrow_flag_1 (alias, nothrow, non_call, changed);
2432 }
2433 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2434 if (e->caller->thunk.thunk_p
2435 && (!nothrow || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2436 set_nothrow_flag_1 (e->caller, nothrow, non_call, changed);
2437 }
2438
2439 /* Set TREE_NOTHROW on cgraph_node's decl and on the decls of its aliases,
2440 if any, to NOTHROW. Return true if any change was done. */
2441
2442 bool
2443 cgraph_node::set_nothrow_flag (bool nothrow)
2444 {
2445 bool changed = false;
2446 bool non_call = opt_for_fn (decl, flag_non_call_exceptions);
2447
2448 if (!nothrow || get_availability () > AVAIL_INTERPOSABLE)
2449 set_nothrow_flag_1 (this, nothrow, non_call, &changed);
2450 else
2451 {
2452 ipa_ref *ref;
2453
2454 FOR_EACH_ALIAS (this, ref)
2455 {
2456 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2457 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2458 set_nothrow_flag_1 (alias, nothrow, non_call, &changed);
2459 }
2460 }
2461 return changed;
2462 }
2463
2464 /* Worker to set malloc flag. */
2465 static void
2466 set_malloc_flag_1 (cgraph_node *node, bool malloc_p, bool *changed)
2467 {
2468 if (malloc_p && !DECL_IS_MALLOC (node->decl))
2469 {
2470 DECL_IS_MALLOC (node->decl) = true;
2471 *changed = true;
2472 }
2473
2474 ipa_ref *ref;
2475 FOR_EACH_ALIAS (node, ref)
2476 {
2477 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2478 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2479 set_malloc_flag_1 (alias, malloc_p, changed);
2480 }
2481
2482 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2483 if (e->caller->thunk.thunk_p
2484 && (!malloc_p || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2485 set_malloc_flag_1 (e->caller, malloc_p, changed);
2486 }
2487
2488 /* Set DECL_IS_MALLOC on NODE's decl and on NODE's aliases if any. */
2489
2490 bool
2491 cgraph_node::set_malloc_flag (bool malloc_p)
2492 {
2493 bool changed = false;
2494
2495 if (!malloc_p || get_availability () > AVAIL_INTERPOSABLE)
2496 set_malloc_flag_1 (this, malloc_p, &changed);
2497 else
2498 {
2499 ipa_ref *ref;
2500
2501 FOR_EACH_ALIAS (this, ref)
2502 {
2503 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2504 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2505 set_malloc_flag_1 (alias, malloc_p, &changed);
2506 }
2507 }
2508 return changed;
2509 }
2510
2511 /* Worker for set_const_flag. */
2512
2513 static void
2514 set_const_flag_1 (cgraph_node *node, bool set_const, bool looping,
2515 bool *changed)
2516 {
2517 /* Static constructors and destructors without a side effect can be
2518 optimized out. */
2519 if (set_const && !looping)
2520 {
2521 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2522 {
2523 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2524 *changed = true;
2525 }
2526 if (DECL_STATIC_DESTRUCTOR (node->decl))
2527 {
2528 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2529 *changed = true;
2530 }
2531 }
2532 if (!set_const)
2533 {
2534 if (TREE_READONLY (node->decl))
2535 {
2536 TREE_READONLY (node->decl) = 0;
2537 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2538 *changed = true;
2539 }
2540 }
2541 else
2542 {
2543 /* Consider function:
2544
2545 bool a(int *p)
2546 {
2547 return *p==*p;
2548 }
2549
2550 During early optimization we will turn this into:
2551
2552 bool a(int *p)
2553 {
2554 return true;
2555 }
2556
2557 Now this function may be detected as CONST; however, when interposed
2558 it may end up being just pure. We must always assume the worst
2559 scenario here. */
2560 if (TREE_READONLY (node->decl))
2561 {
2562 if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2563 {
2564 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2565 *changed = true;
2566 }
2567 }
2568 else if (node->binds_to_current_def_p ())
2569 {
2570 TREE_READONLY (node->decl) = true;
2571 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2572 DECL_PURE_P (node->decl) = false;
2573 *changed = true;
2574 }
2575 else
2576 {
2577 if (dump_file && (dump_flags & TDF_DETAILS))
2578 fprintf (dump_file, "Dropping state to PURE because function does "
2579 "not bind to current def.\n");
2580 if (!DECL_PURE_P (node->decl))
2581 {
2582 DECL_PURE_P (node->decl) = true;
2583 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2584 *changed = true;
2585 }
2586 else if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2587 {
2588 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2589 *changed = true;
2590 }
2591 }
2592 }
2593
2594 ipa_ref *ref;
2595 FOR_EACH_ALIAS (node, ref)
2596 {
2597 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2598 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2599 set_const_flag_1 (alias, set_const, looping, changed);
2600 }
2601 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2602 if (e->caller->thunk.thunk_p
2603 && (!set_const || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2604 {
2605 /* Virtual thunks access virtual offset in the vtable, so they can
2606 only be pure, never const. */
2607 if (set_const
2608 && (e->caller->thunk.virtual_offset_p
2609 || !node->binds_to_current_def_p (e->caller)))
2610 *changed |= e->caller->set_pure_flag (true, looping);
2611 else
2612 set_const_flag_1 (e->caller, set_const, looping, changed);
2613 }
2614 }
2615
2616 /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
2617 If SET_CONST is false, clear the flag.
2618
2619 When setting the flag, be careful about possible interposition: do not
2620 set the flag for functions that can be interposed, and only set the pure
2621 flag for functions that may bind to a different definition.
2622
2623 Return true if any change was done. */
2624
2625 bool
2626 cgraph_node::set_const_flag (bool set_const, bool looping)
2627 {
2628 bool changed = false;
2629 if (!set_const || get_availability () > AVAIL_INTERPOSABLE)
2630 set_const_flag_1 (this, set_const, looping, &changed);
2631 else
2632 {
2633 ipa_ref *ref;
2634
2635 FOR_EACH_ALIAS (this, ref)
2636 {
2637 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2638 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2639 set_const_flag_1 (alias, set_const, looping, &changed);
2640 }
2641 }
2642 return changed;
2643 }
2644
2645 /* Info used by set_pure_flag_1. */
2646
2647 struct set_pure_flag_info
2648 {
2649 bool pure;
2650 bool looping;
2651 bool changed;
2652 };
2653
2654 /* Worker for set_pure_flag. */
2655
2656 static bool
2657 set_pure_flag_1 (cgraph_node *node, void *data)
2658 {
2659 struct set_pure_flag_info *info = (struct set_pure_flag_info *)data;
2660 /* Static constructors and destructors without a side effect can be
2661 optimized out. */
2662 if (info->pure && !info->looping)
2663 {
2664 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2665 {
2666 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2667 info->changed = true;
2668 }
2669 if (DECL_STATIC_DESTRUCTOR (node->decl))
2670 {
2671 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2672 info->changed = true;
2673 }
2674 }
2675 if (info->pure)
2676 {
2677 if (!DECL_PURE_P (node->decl) && !TREE_READONLY (node->decl))
2678 {
2679 DECL_PURE_P (node->decl) = true;
2680 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = info->looping;
2681 info->changed = true;
2682 }
2683 else if (DECL_LOOPING_CONST_OR_PURE_P (node->decl)
2684 && !info->looping)
2685 {
2686 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2687 info->changed = true;
2688 }
2689 }
2690 else
2691 {
2692 if (DECL_PURE_P (node->decl))
2693 {
2694 DECL_PURE_P (node->decl) = false;
2695 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2696 info->changed = true;
2697 }
2698 }
2699 return false;
2700 }
2701
2702 /* Set DECL_PURE_P on cgraph_node's decl and on the decls of its aliases,
2703 if any, to PURE.
2704
2705 When setting the flag, be careful about possible interposition.
2706 Return true if any change was done. */
2707
2708 bool
2709 cgraph_node::set_pure_flag (bool pure, bool looping)
2710 {
2711 struct set_pure_flag_info info = {pure, looping, false};
2712 if (!pure)
2713 looping = false;
2714 call_for_symbol_thunks_and_aliases (set_pure_flag_1, &info, !pure, true);
2715 return info.changed;
2716 }
2717
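/* The flag setters above (set_nothrow_flag, set_malloc_flag, set_const_flag
   and set_pure_flag) are intended to be driven from IPA propagation roughly
   as in this simplified sketch; the *_result_p and looping_p values stand
   for whatever the analysis computed.

     bool changed = false;
     if (const_result_p)
       changed |= node->set_const_flag (true, looping_p);
     else if (pure_result_p)
       changed |= node->set_pure_flag (true, looping_p);
     if (nothrow_result_p)
       changed |= node->set_nothrow_flag (true);  */
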
2718 /* Return true when cgraph_node cannot return or throw and thus
2719 it is safe to ignore its side effects for IPA analysis. */
2720
2721 bool
2722 cgraph_node::cannot_return_p (void)
2723 {
2724 int flags = flags_from_decl_or_type (decl);
2725 if (!opt_for_fn (decl, flag_exceptions))
2726 return (flags & ECF_NORETURN) != 0;
2727 else
2728 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2729 == (ECF_NORETURN | ECF_NOTHROW));
2730 }
2731
2732 /* Return true when a call of the edge cannot lead to return from the
2733 caller and thus it is safe to ignore its side effects for IPA analysis
2734 when computing side effects of the caller.
2735 FIXME: We could actually mark all edges that have no reaching
2736 path to the exit block or throw to get better results. */
2737 bool
2738 cgraph_edge::cannot_lead_to_return_p (void)
2739 {
2740 if (caller->cannot_return_p ())
2741 return true;
2742 if (indirect_unknown_callee)
2743 {
2744 int flags = indirect_info->ecf_flags;
2745 if (!opt_for_fn (caller->decl, flag_exceptions))
2746 return (flags & ECF_NORETURN) != 0;
2747 else
2748 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2749 == (ECF_NORETURN | ECF_NOTHROW));
2750 }
2751 else
2752 return callee->cannot_return_p ();
2753 }
2754
2755 /* Return true if the call can be hot. */
2756
2757 bool
2758 cgraph_edge::maybe_hot_p (void)
2759 {
2760 if (!maybe_hot_count_p (NULL, count.ipa ()))
2761 return false;
2762 if (caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
2763 || (callee
2764 && callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
2765 return false;
2766 if (caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
2767 && (callee
2768 && callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE))
2769 return false;
2770 if (opt_for_fn (caller->decl, optimize_size))
2771 return false;
2772 if (caller->frequency == NODE_FREQUENCY_HOT)
2773 return true;
2774 /* If the profile is not known yet, be conservative.
2775 FIXME: this predicate is used by the early inliner and can do better there. */
2776 if (symtab->state < IPA_SSA)
2777 return true;
2778 if (caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE
2779 && sreal_frequency () * 2 < 3)
2780 return false;
2781 if (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) == 0
2782 || sreal_frequency () * PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) <= 1)
2783 return false;
2784 return true;
2785 }
2786
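/* maybe_hot_p is typically used to gate size/speed trade-offs, e.g. in an
   inliner-style heuristic (a sketch only; expected_growth and size_budget
   are placeholders, not real parameters):

     if (!e->maybe_hot_p ()
         || e->sreal_frequency () * expected_growth > size_budget)
       want_inline_p = false;  */
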
2787 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2788
2789 static bool
2790 nonremovable_p (cgraph_node *node, void *)
2791 {
2792 return !node->can_remove_if_no_direct_calls_and_refs_p ();
2793 }
2794
2795 /* Return true if whole comdat group can be removed if there are no direct
2796 calls to THIS. */
2797
2798 bool
2799 cgraph_node::can_remove_if_no_direct_calls_p (bool will_inline)
2800 {
2801 struct ipa_ref *ref;
2802
2803 /* For local symbols or non-comdat group it is the same as
2804 can_remove_if_no_direct_calls_p. */
2805 if (!externally_visible || !same_comdat_group)
2806 {
2807 if (DECL_EXTERNAL (decl))
2808 return true;
2809 if (address_taken)
2810 return false;
2811 return !call_for_symbol_and_aliases (nonremovable_p, NULL, true);
2812 }
2813
2814 if (will_inline && address_taken)
2815 return false;
2816
2817 /* Otherwise check if we can remove the symbol itself and then verify
2818 that only uses of the comdat group are direct calls to THIS
2819 or its aliases. */
2820 if (!can_remove_if_no_direct_calls_and_refs_p ())
2821 return false;
2822
2823 /* Check that all refs come from within the comdat group. */
2824 for (int i = 0; iterate_referring (i, ref); i++)
2825 if (ref->referring->get_comdat_group () != get_comdat_group ())
2826 return false;
2827
2828 struct cgraph_node *target = ultimate_alias_target ();
2829 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2830 next != this; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2831 {
2832 if (!externally_visible)
2833 continue;
2834 if (!next->alias
2835 && !next->can_remove_if_no_direct_calls_and_refs_p ())
2836 return false;
2837
2838 /* If we see different symbol than THIS, be sure to check calls. */
2839 if (next->ultimate_alias_target () != target)
2840 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2841 if (e->caller->get_comdat_group () != get_comdat_group ()
2842 || will_inline)
2843 return false;
2844
2845 /* If function is not being inlined, we care only about
2846 references outside of the comdat group. */
2847 if (!will_inline)
2848 for (int i = 0; next->iterate_referring (i, ref); i++)
2849 if (ref->referring->get_comdat_group () != get_comdat_group ())
2850 return false;
2851 }
2852 return true;
2853 }
2854
2855 /* Return true when function cgraph_node can be expected to be removed
2856 from the program when direct calls in this compilation unit are removed.
2857
2858 As a special case, COMDAT functions are
2859 cgraph_can_remove_if_no_direct_calls_p while they are not
2860 cgraph_only_called_directly_p (it is possible they are called from
2861 another unit).
2862
2863 This function behaves as cgraph_only_called_directly_p because eliminating
2864 all uses of a COMDAT function does not necessarily make it disappear from
2865 the program unless we are compiling the whole program or we do LTO. In
2866 that case we know we win since dynamic linking will not really discard the
2867 linkonce section. */
2868
2869 bool
2870 cgraph_node::will_be_removed_from_program_if_no_direct_calls_p
2871 (bool will_inline)
2872 {
2873 gcc_assert (!global.inlined_to);
2874 if (DECL_EXTERNAL (decl))
2875 return true;
2876
2877 if (!in_lto_p && !flag_whole_program)
2878 {
2879 /* If the symbol is in comdat group, we need to verify that whole comdat
2880 group becomes unreachable. Technically we could skip references from
2881 within the group, too. */
2882 if (!only_called_directly_p ())
2883 return false;
2884 if (same_comdat_group && externally_visible)
2885 {
2886 struct cgraph_node *target = ultimate_alias_target ();
2887
2888 if (will_inline && address_taken)
2889 return true;
2890 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2891 next != this;
2892 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2893 {
2894 if (!externally_visible)
2895 continue;
2896 if (!next->alias
2897 && !next->only_called_directly_p ())
2898 return false;
2899
2900 /* If we see different symbol than THIS,
2901 be sure to check calls. */
2902 if (next->ultimate_alias_target () != target)
2903 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2904 if (e->caller->get_comdat_group () != get_comdat_group ()
2905 || will_inline)
2906 return false;
2907 }
2908 }
2909 return true;
2910 }
2911 else
2912 return can_remove_if_no_direct_calls_p (will_inline);
2913 }
2914
2915
2916 /* Worker for cgraph_only_called_directly_p. */
2917
2918 static bool
2919 cgraph_not_only_called_directly_p_1 (cgraph_node *node, void *)
2920 {
2921 return !node->only_called_directly_or_aliased_p ();
2922 }
2923
2924 /* Return true when function cgraph_node and all its aliases are only called
2925 directly.
2926 i.e. it is not externally visible, its address is not taken, and
2927 it is not used in any other non-standard way. */
2928
2929 bool
2930 cgraph_node::only_called_directly_p (void)
2931 {
2932 gcc_assert (ultimate_alias_target () == this);
2933 return !call_for_symbol_and_aliases (cgraph_not_only_called_directly_p_1,
2934 NULL, true);
2935 }
2936
2937
2938 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
2939
2940 static bool
2941 collect_callers_of_node_1 (cgraph_node *node, void *data)
2942 {
2943 vec<cgraph_edge *> *redirect_callers = (vec<cgraph_edge *> *)data;
2944 cgraph_edge *cs;
2945 enum availability avail;
2946 node->ultimate_alias_target (&avail);
2947
2948 if (avail > AVAIL_INTERPOSABLE)
2949 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
2950 if (!cs->indirect_inlining_edge
2951 && !cs->caller->thunk.thunk_p)
2952 redirect_callers->safe_push (cs);
2953 return false;
2954 }
2955
2956 /* Collect all callers of cgraph_node and its aliases that are known to lead to
2957 cgraph_node (i.e. are not overwritable). */
2958
2959 vec<cgraph_edge *>
2960 cgraph_node::collect_callers (void)
2961 {
2962 vec<cgraph_edge *> redirect_callers = vNULL;
2963 call_for_symbol_thunks_and_aliases (collect_callers_of_node_1,
2964 &redirect_callers, false);
2965 return redirect_callers;
2966 }
2967
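/* collect_callers is typically used when redirecting every call site that
   is known to reach this node, e.g. to a newly created clone (a sketch;
   new_node stands for whatever clone the caller built):

     vec<cgraph_edge *> callers = node->collect_callers ();
     for (unsigned i = 0; i < callers.length (); i++)
       callers[i]->redirect_callee (new_node);
     callers.release ();  */
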
2968
2969 /* Return TRUE if NODE2 is a clone of NODE or is equivalent to it. Return
2970 optimistically true if this cannot be determined. */
2971
2972 static bool
2973 clone_of_p (cgraph_node *node, cgraph_node *node2)
2974 {
2975 bool skipped_thunk = false;
2976 node = node->ultimate_alias_target ();
2977 node2 = node2->ultimate_alias_target ();
2978
2979 /* There are no virtual clones of thunks, so check former_clone_of or
2980 whether we might have skipped thunks because these adjustments are no
2981 longer necessary. */
2982 while (node->thunk.thunk_p || node->former_thunk_p ())
2983 {
2984 if (node2->former_clone_of == node->decl)
2985 return true;
2986 if (!node->thunk.this_adjusting)
2987 return false;
2988 /* In case of instrumented expanded thunks, which can have multiple calls
2989 in them, we do not know how to continue and just have to be
2990 optimistic. */
2991 if (node->callees->next_callee)
2992 return true;
2993 node = node->callees->callee->ultimate_alias_target ();
2994 skipped_thunk = true;
2995 }
2996
2997 if (skipped_thunk)
2998 {
2999 if (!node2->clone.args_to_skip
3000 || !bitmap_bit_p (node2->clone.args_to_skip, 0))
3001 return false;
3002 if (node2->former_clone_of == node->decl)
3003 return true;
3004 else if (!node2->clone_of)
3005 return false;
3006 }
3007
3008 while (node2 && node->decl != node2->decl)
3009 node2 = node2->clone_of;
3010 return node2 != NULL;
3011 }
3012
3013 /* Verify edge count and frequency. */
3014
3015 bool
3016 cgraph_edge::verify_count ()
3017 {
3018 bool error_found = false;
3019 if (!count.verify ())
3020 {
3021 error ("caller edge count invalid");
3022 error_found = true;
3023 }
3024 return error_found;
3025 }
3026
3027 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
3028 static void
3029 cgraph_debug_gimple_stmt (function *this_cfun, gimple *stmt)
3030 {
3031 bool fndecl_was_null = false;
3032 /* debug_gimple_stmt needs correct cfun */
3033 if (cfun != this_cfun)
3034 set_cfun (this_cfun);
3035 /* ...and an actual current_function_decl */
3036 if (!current_function_decl)
3037 {
3038 current_function_decl = this_cfun->decl;
3039 fndecl_was_null = true;
3040 }
3041 debug_gimple_stmt (stmt);
3042 if (fndecl_was_null)
3043 current_function_decl = NULL;
3044 }
3045
3046 /* Verify that call graph edge corresponds to DECL from the associated
3047 statement. Return true if the verification should fail. */
3048
3049 bool
3050 cgraph_edge::verify_corresponds_to_fndecl (tree decl)
3051 {
3052 cgraph_node *node;
3053
3054 if (!decl || callee->global.inlined_to)
3055 return false;
3056 if (symtab->state == LTO_STREAMING)
3057 return false;
3058 node = cgraph_node::get (decl);
3059
3060 /* We do not know if a node from a different partition is an alias or what
3061 it aliases and therefore cannot do the former_clone_of check reliably.
3062 When body_removed is set, we have lost all information about what it was
3063 an alias or thunk of and also cannot proceed. */
3064 if (!node
3065 || node->body_removed
3066 || node->in_other_partition
3067 || callee->icf_merged
3068 || callee->in_other_partition)
3069 return false;
3070
3071 node = node->ultimate_alias_target ();
3072
3073 /* Optimizers can redirect unreachable calls or calls triggering undefined
3074 behavior to builtin_unreachable. */
3075
3076 if (fndecl_built_in_p (callee->decl, BUILT_IN_UNREACHABLE))
3077 return false;
3078
3079 if (callee->former_clone_of != node->decl
3080 && (node != callee->ultimate_alias_target ())
3081 && !clone_of_p (node, callee))
3082 return true;
3083 else
3084 return false;
3085 }
3086
3087 /* Verify the consistency of a given cgraph node. */
3088 DEBUG_FUNCTION void
3089 cgraph_node::verify_node (void)
3090 {
3091 cgraph_edge *e;
3092 function *this_cfun = DECL_STRUCT_FUNCTION (decl);
3093 basic_block this_block;
3094 gimple_stmt_iterator gsi;
3095 bool error_found = false;
3096
3097 if (seen_error ())
3098 return;
3099
3100 timevar_push (TV_CGRAPH_VERIFY);
3101 error_found |= verify_base ();
3102 for (e = callees; e; e = e->next_callee)
3103 if (e->aux)
3104 {
3105 error ("aux field set for edge %s->%s",
3106 identifier_to_locale (e->caller->name ()),
3107 identifier_to_locale (e->callee->name ()));
3108 error_found = true;
3109 }
3110 if (!count.verify ())
3111 {
3112 error ("cgraph count invalid");
3113 error_found = true;
3114 }
3115 if (global.inlined_to && same_comdat_group)
3116 {
3117 error ("inline clone in same comdat group list");
3118 error_found = true;
3119 }
3120 if (!definition && !in_other_partition && local.local)
3121 {
3122 error ("local symbols must be defined");
3123 error_found = true;
3124 }
3125 if (global.inlined_to && externally_visible)
3126 {
3127 error ("externally visible inline clone");
3128 error_found = true;
3129 }
3130 if (global.inlined_to && address_taken)
3131 {
3132 error ("inline clone with address taken");
3133 error_found = true;
3134 }
3135 if (global.inlined_to && force_output)
3136 {
3137 error ("inline clone is forced to output");
3138 error_found = true;
3139 }
3140 for (e = indirect_calls; e; e = e->next_callee)
3141 {
3142 if (e->aux)
3143 {
3144 error ("aux field set for indirect edge from %s",
3145 identifier_to_locale (e->caller->name ()));
3146 error_found = true;
3147 }
3148 if (!e->indirect_unknown_callee
3149 || !e->indirect_info)
3150 {
3151 error ("An indirect edge from %s is not marked as indirect or has no "
3152 "associated indirect_info, the corresponding statement is: ",
3153 identifier_to_locale (e->caller->name ()));
3154 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3155 error_found = true;
3156 }
3157 }
3158 bool check_comdat = comdat_local_p ();
3159 for (e = callers; e; e = e->next_caller)
3160 {
3161 if (e->verify_count ())
3162 error_found = true;
3163 if (check_comdat
3164 && !in_same_comdat_group_p (e->caller))
3165 {
3166 error ("comdat-local function called by %s outside its comdat",
3167 identifier_to_locale (e->caller->name ()));
3168 error_found = true;
3169 }
3170 if (!e->inline_failed)
3171 {
3172 if (global.inlined_to
3173 != (e->caller->global.inlined_to
3174 ? e->caller->global.inlined_to : e->caller))
3175 {
3176 error ("inlined_to pointer is wrong");
3177 error_found = true;
3178 }
3179 if (callers->next_caller)
3180 {
3181 error ("multiple inline callers");
3182 error_found = true;
3183 }
3184 }
3185 else
3186 if (global.inlined_to)
3187 {
3188 error ("inlined_to pointer set for noninline callers");
3189 error_found = true;
3190 }
3191 }
3192 for (e = callees; e; e = e->next_callee)
3193 {
3194 if (e->verify_count ())
3195 error_found = true;
3196 if (gimple_has_body_p (e->caller->decl)
3197 && !e->caller->global.inlined_to
3198 && !e->speculative
3199 /* Optimized out calls are redirected to __builtin_unreachable. */
3200 && (e->count.nonzero_p ()
3201 || ! e->callee->decl
3202 || !fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
3203 && count
3204 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3205 && (!e->count.ipa_p ()
3206 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3207 {
3208 error ("caller edge count does not match BB count");
3209 fprintf (stderr, "edge count: ");
3210 e->count.dump (stderr);
3211 fprintf (stderr, "\n bb count: ");
3212 gimple_bb (e->call_stmt)->count.dump (stderr);
3213 fprintf (stderr, "\n");
3214 error_found = true;
3215 }
3216 }
3217 for (e = indirect_calls; e; e = e->next_callee)
3218 {
3219 if (e->verify_count ())
3220 error_found = true;
3221 if (gimple_has_body_p (e->caller->decl)
3222 && !e->caller->global.inlined_to
3223 && !e->speculative
3224 && e->count.ipa_p ()
3225 && count
3226 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3227 && (!e->count.ipa_p ()
3228 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3229 {
3230 error ("indirect call count does not match BB count");
3231 fprintf (stderr, "edge count: ");
3232 e->count.dump (stderr);
3233 fprintf (stderr, "\n bb count: ");
3234 gimple_bb (e->call_stmt)->count.dump (stderr);
3235 fprintf (stderr, "\n");
3236 error_found = true;
3237 }
3238 }
3239 if (!callers && global.inlined_to)
3240 {
3241 error ("inlined_to pointer is set but no predecessors found");
3242 error_found = true;
3243 }
3244 if (global.inlined_to == this)
3245 {
3246 error ("inlined_to pointer refers to itself");
3247 error_found = true;
3248 }
3249
3250 if (clone_of)
3251 {
3252 cgraph_node *first_clone = clone_of->clones;
3253 if (first_clone != this)
3254 {
3255 if (prev_sibling_clone->clone_of != clone_of)
3256 {
3257 error ("cgraph_node has wrong clone_of");
3258 error_found = true;
3259 }
3260 }
3261 }
3262 if (clones)
3263 {
3264 cgraph_node *n;
3265 for (n = clones; n; n = n->next_sibling_clone)
3266 if (n->clone_of != this)
3267 break;
3268 if (n)
3269 {
3270 error ("cgraph_node has wrong clone list");
3271 error_found = true;
3272 }
3273 }
3274 if ((prev_sibling_clone || next_sibling_clone) && !clone_of)
3275 {
3276 error ("cgraph_node is in clone list but it is not clone");
3277 error_found = true;
3278 }
3279 if (!prev_sibling_clone && clone_of && clone_of->clones != this)
3280 {
3281 error ("cgraph_node has wrong prev_clone pointer");
3282 error_found = true;
3283 }
3284 if (prev_sibling_clone && prev_sibling_clone->next_sibling_clone != this)
3285 {
3286 error ("double linked list of clones corrupted");
3287 error_found = true;
3288 }
3289
3290 if (analyzed && alias)
3291 {
3292 bool ref_found = false;
3293 int i;
3294 ipa_ref *ref = NULL;
3295
3296 if (callees)
3297 {
3298 error ("Alias has call edges");
3299 error_found = true;
3300 }
3301 for (i = 0; iterate_reference (i, ref); i++)
3302 if (ref->use != IPA_REF_ALIAS)
3303 {
3304 error ("Alias has non-alias reference");
3305 error_found = true;
3306 }
3307 else if (ref_found)
3308 {
3309 error ("Alias has more than one alias reference");
3310 error_found = true;
3311 }
3312 else
3313 ref_found = true;
3314 if (!ref_found)
3315 {
3316 error ("Analyzed alias has no reference");
3317 error_found = true;
3318 }
3319 }
3320
3321 if (analyzed && thunk.thunk_p)
3322 {
3323 if (!callees)
3324 {
3325 error ("No edge out of thunk node");
3326 error_found = true;
3327 }
3328 else if (callees->next_callee)
3329 {
3330 error ("More than one edge out of thunk node");
3331 error_found = true;
3332 }
3333 if (gimple_has_body_p (decl) && !global.inlined_to)
3334 {
3335 error ("Thunk is not supposed to have body");
3336 error_found = true;
3337 }
3338 }
3339 else if (analyzed && gimple_has_body_p (decl)
3340 && !TREE_ASM_WRITTEN (decl)
3341 && (!DECL_EXTERNAL (decl) || global.inlined_to)
3342 && !flag_wpa)
3343 {
3344 if (this_cfun->cfg)
3345 {
3346 hash_set<gimple *> stmts;
3347 int i;
3348 ipa_ref *ref = NULL;
3349
3350 /* Reach the trees by walking over the CFG, and note the
3351 enclosing basic-blocks in the call edges. */
3352 FOR_EACH_BB_FN (this_block, this_cfun)
3353 {
3354 for (gsi = gsi_start_phis (this_block);
3355 !gsi_end_p (gsi); gsi_next (&gsi))
3356 stmts.add (gsi_stmt (gsi));
3357 for (gsi = gsi_start_bb (this_block);
3358 !gsi_end_p (gsi);
3359 gsi_next (&gsi))
3360 {
3361 gimple *stmt = gsi_stmt (gsi);
3362 stmts.add (stmt);
3363 if (is_gimple_call (stmt))
3364 {
3365 cgraph_edge *e = get_edge (stmt);
3366 tree decl = gimple_call_fndecl (stmt);
3367 if (e)
3368 {
3369 if (e->aux)
3370 {
3371 error ("shared call_stmt:");
3372 cgraph_debug_gimple_stmt (this_cfun, stmt);
3373 error_found = true;
3374 }
3375 if (!e->indirect_unknown_callee)
3376 {
3377 if (e->verify_corresponds_to_fndecl (decl))
3378 {
3379 error ("edge points to wrong declaration:");
3380 debug_tree (e->callee->decl);
3381 fprintf (stderr," Instead of:");
3382 debug_tree (decl);
3383 error_found = true;
3384 }
3385 }
3386 else if (decl)
3387 {
3388 error ("an indirect edge with unknown callee "
3389 "corresponding to a call_stmt with "
3390 "a known declaration:");
3391 error_found = true;
3392 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3393 }
3394 e->aux = (void *)1;
3395 }
3396 else if (decl)
3397 {
3398 error ("missing callgraph edge for call stmt:");
3399 cgraph_debug_gimple_stmt (this_cfun, stmt);
3400 error_found = true;
3401 }
3402 }
3403 }
3404 }
3405 for (i = 0; iterate_reference (i, ref); i++)
3406 if (ref->stmt && !stmts.contains (ref->stmt))
3407 {
3408 error ("reference to dead statement");
3409 cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
3410 error_found = true;
3411 }
3412 }
3413 else
3414 /* No CFG available?! */
3415 gcc_unreachable ();
3416
3417 for (e = callees; e; e = e->next_callee)
3418 {
3419 if (!e->aux)
3420 {
3421 error ("edge %s->%s has no corresponding call_stmt",
3422 identifier_to_locale (e->caller->name ()),
3423 identifier_to_locale (e->callee->name ()));
3424 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3425 error_found = true;
3426 }
3427 e->aux = 0;
3428 }
3429 for (e = indirect_calls; e; e = e->next_callee)
3430 {
3431 if (!e->aux && !e->speculative)
3432 {
3433 error ("an indirect edge from %s has no corresponding call_stmt",
3434 identifier_to_locale (e->caller->name ()));
3435 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3436 error_found = true;
3437 }
3438 e->aux = 0;
3439 }
3440 }
3441 if (error_found)
3442 {
3443 dump (stderr);
3444 internal_error ("verify_cgraph_node failed");
3445 }
3446 timevar_pop (TV_CGRAPH_VERIFY);
3447 }
3448
3449 /* Verify whole cgraph structure. */
3450 DEBUG_FUNCTION void
3451 cgraph_node::verify_cgraph_nodes (void)
3452 {
3453 cgraph_node *node;
3454
3455 if (seen_error ())
3456 return;
3457
3458 FOR_EACH_FUNCTION (node)
3459 node->verify ();
3460 }
3461
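/* Verification is expensive, so it is normally run only when internal
   checking is enabled, e.g.:

     if (flag_checking)
       cgraph_node::verify_cgraph_nodes ();  */
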
3462 /* Walk the alias chain to return the function cgraph_node is an alias of.
3463 Walk through thunks, too.
3464 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3465 When REF is non-NULL, assume that reference happens in symbol REF
3466 when determining the availability. */
3467
3468 cgraph_node *
3469 cgraph_node::function_symbol (enum availability *availability,
3470 struct symtab_node *ref)
3471 {
3472 cgraph_node *node = ultimate_alias_target (availability, ref);
3473
3474 while (node->thunk.thunk_p)
3475 {
3476 ref = node;
3477 node = node->callees->callee;
3478 if (availability)
3479 {
3480 enum availability a;
3481 a = node->get_availability (ref);
3482 if (a < *availability)
3483 *availability = a;
3484 }
3485 node = node->ultimate_alias_target (availability, ref);
3486 }
3487 return node;
3488 }
3489
3490 /* Walk the alias chain to return the function cgraph_node is an alias of.
3491 Walk through non-virtual thunks, too. Thus we return either a function
3492 or a virtual thunk node.
3493 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3494 When REF is non-NULL, assume that reference happens in symbol REF
3495 when determining the availability. */
3496
3497 cgraph_node *
3498 cgraph_node::function_or_virtual_thunk_symbol
3499 (enum availability *availability,
3500 struct symtab_node *ref)
3501 {
3502 cgraph_node *node = ultimate_alias_target (availability, ref);
3503
3504 while (node->thunk.thunk_p && !node->thunk.virtual_offset_p)
3505 {
3506 ref = node;
3507 node = node->callees->callee;
3508 if (availability)
3509 {
3510 enum availability a;
3511 a = node->get_availability (ref);
3512 if (a < *availability)
3513 *availability = a;
3514 }
3515 node = node->ultimate_alias_target (availability, ref);
3516 }
3517 return node;
3518 }
3519
3520 /* When doing LTO, read cgraph_node's body from disk if it is not already
3521 present. */
3522
3523 bool
3524 cgraph_node::get_untransformed_body (void)
3525 {
3526 lto_file_decl_data *file_data;
3527 const char *data, *name;
3528 size_t len;
3529 tree decl = this->decl;
3530
3531 /* Check if the body is already there. Either we have a gimple body or
3532 the function is a thunk and in that case we set DECL_ARGUMENTS. */
3533 if (DECL_ARGUMENTS (decl) || gimple_has_body_p (decl))
3534 return false;
3535
3536 gcc_assert (in_lto_p && !DECL_RESULT (decl));
3537
3538 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3539
3540 file_data = lto_file_data;
3541 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3542
3543 /* We may have renamed the declaration, e.g., a static function. */
3544 name = lto_get_decl_name_mapping (file_data, name);
3545 struct lto_in_decl_state *decl_state
3546 = lto_get_function_in_decl_state (file_data, decl);
3547
3548 data = lto_get_section_data (file_data, LTO_section_function_body,
3549 name, &len, decl_state->compressed);
3550 if (!data)
3551 fatal_error (input_location, "%s: section %s is missing",
3552 file_data->file_name,
3553 name);
3554
3555 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
3556
3557 if (!quiet_flag)
3558 fprintf (stderr, " in:%s", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
3559 lto_input_function_body (file_data, this, data);
3560 lto_stats.num_function_bodies++;
3561 lto_free_section_data (file_data, LTO_section_function_body, name,
3562 data, len, decl_state->compressed);
3563 lto_free_function_in_decl_state_for_node (this);
3564 /* Keep lto file data so ipa-inline-analysis knows about cross module
3565 inlining. */
3566
3567 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3568
3569 return true;
3570 }
3571
3572 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
3573 if it is not already present. When some IPA transformations are scheduled,
3574 apply them. */
3575
3576 bool
3577 cgraph_node::get_body (void)
3578 {
3579 bool updated;
3580
3581 updated = get_untransformed_body ();
3582
3583 /* Getting transformed body makes no sense for inline clones;
3584 we should never use this on real clones because they are materialized
3585 early.
3586 TODO: Materializing clones here will likely lead to smaller LTRANS
3587 footprint. */
3588 gcc_assert (!global.inlined_to && !clone_of);
3589 if (ipa_transforms_to_apply.exists ())
3590 {
3591 opt_pass *saved_current_pass = current_pass;
3592 FILE *saved_dump_file = dump_file;
3593 const char *saved_dump_file_name = dump_file_name;
3594 dump_flags_t saved_dump_flags = dump_flags;
3595 dump_file_name = NULL;
3596 set_dump_file (NULL);
3597
3598 push_cfun (DECL_STRUCT_FUNCTION (decl));
3599 execute_all_ipa_transforms ();
3600 cgraph_edge::rebuild_edges ();
3601 free_dominance_info (CDI_DOMINATORS);
3602 free_dominance_info (CDI_POST_DOMINATORS);
3603 pop_cfun ();
3604 updated = true;
3605
3606 current_pass = saved_current_pass;
3607 set_dump_file (saved_dump_file);
3608 dump_file_name = saved_dump_file_name;
3609 dump_flags = saved_dump_flags;
3610 }
3611 return updated;
3612 }
3613
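/* In an LTRANS-style consumer the two routines above are used roughly as
   in the sketch below: bodies are read lazily, and get_body also applies
   any pending IPA transformations.

     cgraph_node *node;
     FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
       if (!node->global.inlined_to && !node->clone_of)
         node->get_body ();  */
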
3614 /* Return the DECL_STRUCT_FUNCTION of the function. */
3615
3616 struct function *
3617 cgraph_node::get_fun (void)
3618 {
3619 cgraph_node *node = this;
3620 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
3621
3622 while (!fun && node->clone_of)
3623 {
3624 node = node->clone_of;
3625 fun = DECL_STRUCT_FUNCTION (node->decl);
3626 }
3627
3628 return fun;
3629 }
3630
3631 /* Verify if the type of the argument matches that of the function
3632 declaration. If we cannot verify this or there is a mismatch,
3633 return false. */
3634
3635 static bool
3636 gimple_check_call_args (gimple *stmt, tree fndecl, bool args_count_match)
3637 {
3638 tree parms, p;
3639 unsigned int i, nargs;
3640
3641 /* Calls to internal functions always match their signature. */
3642 if (gimple_call_internal_p (stmt))
3643 return true;
3644
3645 nargs = gimple_call_num_args (stmt);
3646
3647 /* Get argument types for verification. */
3648 if (fndecl)
3649 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3650 else
3651 parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
3652
3653 /* Verify if the type of the argument matches that of the function
3654 declaration. If we cannot verify this or there is a mismatch,
3655 return false. */
3656 if (fndecl && DECL_ARGUMENTS (fndecl))
3657 {
3658 for (i = 0, p = DECL_ARGUMENTS (fndecl);
3659 i < nargs;
3660 i++, p = DECL_CHAIN (p))
3661 {
3662 tree arg;
3663 /* We cannot distinguish a varargs function from the case
3664 of excess parameters; still, deferring the inlining decision
3665 to the callee is possible. */
3666 if (!p)
3667 break;
3668 arg = gimple_call_arg (stmt, i);
3669 if (p == error_mark_node
3670 || DECL_ARG_TYPE (p) == error_mark_node
3671 || arg == error_mark_node
3672 || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
3673 && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
3674 return false;
3675 }
3676 if (args_count_match && p)
3677 return false;
3678 }
3679 else if (parms)
3680 {
3681 for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
3682 {
3683 tree arg;
3684 /* If this is a varargs function defer inlining decision
3685 to callee. */
3686 if (!p)
3687 break;
3688 arg = gimple_call_arg (stmt, i);
3689 if (TREE_VALUE (p) == error_mark_node
3690 || arg == error_mark_node
3691 || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
3692 || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
3693 && !fold_convertible_p (TREE_VALUE (p), arg)))
3694 return false;
3695 }
3696 }
3697 else
3698 {
3699 if (nargs != 0)
3700 return false;
3701 }
3702 return true;
3703 }
3704
3705 /* Verify if the type of the argument and lhs of CALL_STMT matches
3706 that of the function declaration CALLEE. If ARGS_COUNT_MATCH is
3707 true, the arg count needs to be the same.
3708 If we cannot verify this or there is a mismatch, return false. */
3709
3710 bool
3711 gimple_check_call_matching_types (gimple *call_stmt, tree callee,
3712 bool args_count_match)
3713 {
3714 tree lhs;
3715
3716 if ((DECL_RESULT (callee)
3717 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
3718 && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
3719 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
3720 TREE_TYPE (lhs))
3721 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
3722 || !gimple_check_call_args (call_stmt, callee, args_count_match))
3723 return false;
3724 return true;
3725 }
3726
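/* A hypothetical caller of the check above when turning an indirect call
   into a direct one; a type mismatch means the resulting direct call could
   not be expanded correctly, so the transformation is skipped.

     if (fndecl
         && gimple_check_call_matching_types (e->call_stmt, fndecl, true))
       e->make_direct (cgraph_node::get_create (fndecl));  */
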
3727 /* Reset all state within cgraph.c so that we can rerun the compiler
3728 within the same process. For use by toplev::finalize. */
3729
3730 void
3731 cgraph_c_finalize (void)
3732 {
3733 symtab = NULL;
3734
3735 x_cgraph_nodes_queue = NULL;
3736
3737 cgraph_fnver_htab = NULL;
3738 version_info_node = NULL;
3739 }
3740
3741 /* A worker for call_for_symbol_and_aliases. */
3742
3743 bool
3744 cgraph_node::call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
3745 void *),
3746 void *data,
3747 bool include_overwritable)
3748 {
3749 ipa_ref *ref;
3750 FOR_EACH_ALIAS (this, ref)
3751 {
3752 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
3753 if (include_overwritable
3754 || alias->get_availability () > AVAIL_INTERPOSABLE)
3755 if (alias->call_for_symbol_and_aliases (callback, data,
3756 include_overwritable))
3757 return true;
3758 }
3759 return false;
3760 }
3761
3762 /* Return true if NODE has a thunk. */
3763
3764 bool
3765 cgraph_node::has_thunk_p (cgraph_node *node, void *)
3766 {
3767 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
3768 if (e->caller->thunk.thunk_p)
3769 return true;
3770 return false;
3771 }
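
/* Editorial note (hypothetical usage, not in the original source): because
   has_thunk_p has the call_for_symbol_and_aliases callback signature, a
   caller can ask whether a node or any of its aliases is called through a
   thunk roughly like this:
     bool called_through_thunk
       = node->call_for_symbol_and_aliases (cgraph_node::has_thunk_p,
					    NULL, true);  */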
3772
3773 /* Expected frequency of executions within the function. */
3774
3775 sreal
3776 cgraph_edge::sreal_frequency ()
3777 {
3778 return count.to_sreal_scale (caller->global.inlined_to
3779 ? caller->global.inlined_to->count
3780 : caller->count);
3781 }
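
/* Editorial sketch, not part of the original file: sreal_frequency expresses
   the edge count relative to the entry count of the function the caller ends
   up in after inlining, so a value of about 1.0 means "executed roughly once
   per invocation".  A hypothetical dump helper (dump_edge_frequency is an
   invented name; the member calls are real):  */

static void
dump_edge_frequency (FILE *f, cgraph_edge *e)
{
  /* dump_name may reuse an internal buffer, so print the two names in
     separate calls.  */
  fprintf (f, "  %s -> ", e->caller->dump_name ());
  fprintf (f, "%s", e->callee ? e->callee->dump_name () : "<indirect>");
  /* to_double is only for human-readable output; internal consumers keep
     working in sreal.  */
  fprintf (f, " frequency %.4f\n", e->sreal_frequency ().to_double ());
}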
3782
3783
3784 /* During LTO stream-in this can be used to check whether the call can
3785 possibly be internal to the current translation unit. */
3786
3787 bool
3788 cgraph_edge::possibly_call_in_translation_unit_p (void)
3789 {
3790 gcc_checking_assert (in_lto_p && caller->prevailing_p ());
3791
3792 /* During incremental linking we may end up getting the function body later. */
3793 if (flag_incremental_link == INCREMENTAL_LINK_LTO)
3794 return true;
3795
3796 /* We could be smarter here and avoid streaming in indirect calls we cannot
3797 track, but that would require arranging to stream the indirect call
3798 summary first. */
3799 if (!callee)
3800 return true;
3801
3802 /* If the callee is local to the original translation unit, it will be defined. */
3803 if (!TREE_PUBLIC (callee->decl) && !DECL_EXTERNAL (callee->decl))
3804 return true;
3805
3806 /* Otherwise we need to look up the prevailing symbol (the symbol table is
3807 not merged yet) and see if it is a definition. We could also resolve
3808 aliases, but that is probably not too important. */
3809 symtab_node *node = callee;
3810 for (int n = 10; node->previous_sharing_asm_name && n; n--)
3811 node = node->previous_sharing_asm_name;
3812 if (node->previous_sharing_asm_name)
3813 node = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (callee->decl));
3814 gcc_assert (TREE_PUBLIC (node->decl));
3815 return node->get_availability () >= AVAIL_AVAILABLE;
3816 }
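
/* Editorial note (hypothetical usage, not in the original source): an LTO
   summary reader can use the predicate above to skip per-call data for calls
   that cannot remain inside the unit, along the lines of
     if (!edge->possibly_call_in_translation_unit_p ())
       ... skip or drop the per-call summary ...
   where the skipping code stands in for whatever the particular reader
   does.  */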
3817
3818 /* A stashed copy of "symtab" for use by selftest::symbol_table_test.
3819 This needs to be a global so that it can be a GC root, and thus
3820 prevent the stashed copy from being garbage-collected if the GC runs
3821 during a symbol_table_test. */
3822
3823 symbol_table *saved_symtab;
3824
3825 #if CHECKING_P
3826
3827 namespace selftest {
3828
3829 /* class selftest::symbol_table_test. */
3830
3831 /* Constructor. Store the old value of symtab, and create a new one. */
3832
3833 symbol_table_test::symbol_table_test ()
3834 {
3835 gcc_assert (saved_symtab == NULL);
3836 saved_symtab = symtab;
3837 symtab = new (ggc_cleared_alloc <symbol_table> ()) symbol_table ();
3838 }
3839
3840 /* Destructor. Restore the old value of symtab. */
3841
3842 symbol_table_test::~symbol_table_test ()
3843 {
3844 gcc_assert (saved_symtab != NULL);
3845 symtab = saved_symtab;
3846 saved_symtab = NULL;
3847 }
3848
3849 /* Verify that symbol_table_test works. */
3850
3851 static void
3852 test_symbol_table_test ()
3853 {
3854 /* Simulate running two selftests involving symbol tables. */
3855 for (int i = 0; i < 2; i++)
3856 {
3857 symbol_table_test stt;
3858 tree test_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
3859 get_identifier ("test_decl"),
3860 build_function_type_list (void_type_node,
3861 NULL_TREE));
3862 cgraph_node *node = cgraph_node::get_create (test_decl);
3863 gcc_assert (node);
3864
3865 /* Verify that the node has order 0 on both iterations,
3866 and thus that nodes have predictable dump names in selftests. */
3867 ASSERT_EQ (node->order, 0);
3868 ASSERT_STREQ (node->dump_name (), "test_decl/0");
3869 }
3870 }
3871
3872 /* Run all of the selftests within this file. */
3873
3874 void
3875 cgraph_c_tests ()
3876 {
3877 test_symbol_table_test ();
3878 }
3879
3880 } // namespace selftest
3881
3882 #endif /* CHECKING_P */
3883
3884 #include "gt-cgraph.h"