1 /* Callgraph handling code.
2 Copyright (C) 2003-2015 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #ifndef GCC_CGRAPH_H
22 #define GCC_CGRAPH_H
23
24 #include "ipa-ref.h"
25 #include "plugin-api.h"
26
27 class ipa_opt_pass_d;
28 typedef ipa_opt_pass_d *ipa_opt_pass;
29
30 /* Symbol table consists of functions and variables.
31 TODO: add labels and CONST_DECLs. */
32 enum symtab_type
33 {
34 SYMTAB_SYMBOL,
35 SYMTAB_FUNCTION,
36 SYMTAB_VARIABLE
37 };
38
39 /* Section names are stored as reference counted strings in GGC safe hashtable
40 (to make them survive through PCH). */
41
42 struct GTY((for_user)) section_hash_entry
43 {
44 int ref_count;
45   char *name;  /* As long as this data structure stays in GGC, we cannot put
46		  the string at the tail of the structure or GGC dies in a horrible
47		  way.  */
48 };
49
50 struct section_name_hasher : ggc_ptr_hash<section_hash_entry>
51 {
52 typedef const char *compare_type;
53
54 static hashval_t hash (section_hash_entry *);
55 static bool equal (section_hash_entry *, const char *);
56 };
57
58 enum availability
59 {
60 /* Not yet set by cgraph_function_body_availability. */
61 AVAIL_UNSET,
62 /* Function body/variable initializer is unknown. */
63 AVAIL_NOT_AVAILABLE,
64 /* Function body/variable initializer is known but might be replaced
65      by a different one from another compilation unit and thus needs to
66      be dealt with care.  Like AVAIL_NOT_AVAILABLE it can have
67      arbitrary side effects on escaping variables and functions, while
68      like AVAIL_AVAILABLE it might access static variables.  */
69 AVAIL_INTERPOSABLE,
70 /* Function body/variable initializer is known and will be used in final
71 program. */
72 AVAIL_AVAILABLE,
73   /* Function body/variable initializer is known and all its uses are
74      explicitly visible within the current unit (i.e. its address is never taken
75      and it is not exported to other units).  Currently used only for functions.  */
76 AVAIL_LOCAL
77 };
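/* Availability is normally queried through ultimate_alias_target (declared
   in symtab_node below) rather than read directly, so that aliases are
   walked first.  A minimal usage sketch, illustrative only (NODE stands for
   some symtab_node *):

     enum availability avail;
     symtab_node *target = node->ultimate_alias_target (&avail);
     if (avail >= AVAIL_AVAILABLE)
       {
         // The definition of TARGET is the one the final program will use;
         // optimizations may rely on its body/initializer.
       }
     else if (avail == AVAIL_INTERPOSABLE)
       {
         // A definition is visible, but the linker may substitute another
         // one; only conservative assumptions are valid.
       }  */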
78
79 /* Classification of symbols WRT partitioning. */
80 enum symbol_partitioning_class
81 {
82 /* External declarations are ignored by partitioning algorithms and they are
83 added into the boundary later via compute_ltrans_boundary. */
84 SYMBOL_EXTERNAL,
85   /* Partitioned symbols are put into one of the partitions.  */
86 SYMBOL_PARTITION,
87 /* Duplicated symbols (such as comdat or constant pool references) are
88 copied into every node needing them via add_symbol_to_partition. */
89 SYMBOL_DUPLICATE
90 };
91
92 /* Base of all entries in the symbol table.
93    The symtab_node is inherited by cgraph and varpool nodes.  */
94 class GTY((desc ("%h.type"), tag ("SYMTAB_SYMBOL"),
95 chain_next ("%h.next"), chain_prev ("%h.previous")))
96 symtab_node
97 {
98 public:
99 /* Return name. */
100 const char *name () const;
101
102 /* Return asm name. */
103 const char * asm_name () const;
104
105 /* Add node into symbol table. This function is not used directly, but via
106 cgraph/varpool node creation routines. */
107 void register_symbol (void);
108
109 /* Remove symbol from symbol table. */
110 void remove (void);
111
112 /* Dump symtab node to F. */
113 void dump (FILE *f);
114
115 /* Dump symtab node to stderr. */
116 void DEBUG_FUNCTION debug (void);
117
118 /* Verify consistency of node. */
119 void DEBUG_FUNCTION verify (void);
120
121   /* Return an ipa reference from this symtab_node to
122      REFERRED_NODE.  USE_TYPE specifies the type of the use.  */
124 ipa_ref *create_reference (symtab_node *referred_node,
125 enum ipa_ref_use use_type);
126
127   /* Return an ipa reference from this symtab_node to
128      REFERRED_NODE.  USE_TYPE specifies the type of the use
129      and STMT the statement (if it exists).  */
130 ipa_ref *create_reference (symtab_node *referred_node,
131 enum ipa_ref_use use_type, gimple *stmt);
132
133 /* If VAL is a reference to a function or a variable, add a reference from
134      this symtab_node to the corresponding symbol table node.  USE_TYPE specifies
135      the type of the use and STMT the statement (if it exists).  Return the new
136 reference or NULL if none was created. */
137 ipa_ref *maybe_create_reference (tree val, enum ipa_ref_use use_type,
138 gimple *stmt);
139
140 /* Clone all references from symtab NODE to this symtab_node. */
141 void clone_references (symtab_node *node);
142
143   /* Clone all referring entries from symtab NODE to this symtab_node.  */
147 void clone_referring (symtab_node *node);
148
149 /* Clone reference REF to this symtab_node and set its stmt to STMT. */
150 ipa_ref *clone_reference (ipa_ref *ref, gimple *stmt);
151
152 /* Find the structure describing a reference to REFERRED_NODE
153 and associated with statement STMT. */
154 ipa_ref *find_reference (symtab_node *referred_node, gimple *stmt,
155 unsigned int lto_stmt_uid);
156
157 /* Remove all references that are associated with statement STMT. */
158 void remove_stmt_references (gimple *stmt);
159
160 /* Remove all stmt references in non-speculative references.
161      Those are not maintained during inlining & cloning.
162      The exceptions are speculative references that are updated along
163 with callgraph edges associated with them. */
164 void clear_stmts_in_references (void);
165
166 /* Remove all references in ref list. */
167 void remove_all_references (void);
168
169 /* Remove all referring items in ref list. */
170 void remove_all_referring (void);
171
172 /* Dump references in ref list to FILE. */
173 void dump_references (FILE *file);
174
175 /* Dump referring in list to FILE. */
176 void dump_referring (FILE *);
177
178 /* Get number of references for this node. */
179 inline unsigned num_references (void)
180 {
181 return ref_list.references ? ref_list.references->length () : 0;
182 }
183
184 /* Iterates I-th reference in the list, REF is also set. */
185 ipa_ref *iterate_reference (unsigned i, ipa_ref *&ref);
186
187 /* Iterates I-th referring item in the list, REF is also set. */
188 ipa_ref *iterate_referring (unsigned i, ipa_ref *&ref);
189
190 /* Iterates I-th referring alias item in the list, REF is also set. */
191 ipa_ref *iterate_direct_aliases (unsigned i, ipa_ref *&ref);
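  /* A sketch of the usual iteration idiom for the three iterators above
     (the FOR_EACH_ALIAS macro below expands to the same pattern for
     iterate_direct_aliases); illustrative only, for some symtab_node *NODE:

       ipa_ref *ref = NULL;
       for (unsigned i = 0; node->iterate_reference (i, ref); i++)
         {
           // REF now describes the I-th entity this node refers to;
           // ref->use records the kind of use given at creation time.
         }  */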
192
193   /* Return true if this symtab node and TARGET represent
194      semantically equivalent symbols.  */
195 bool semantically_equivalent_p (symtab_node *target);
196
197 /* Classify symbol symtab node for partitioning. */
198 enum symbol_partitioning_class get_partitioning_class (void);
199
200 /* Return comdat group. */
201 tree get_comdat_group ()
202 {
203 return x_comdat_group;
204 }
205
206 /* Return comdat group as identifier_node. */
207 tree get_comdat_group_id ()
208 {
209 if (x_comdat_group && TREE_CODE (x_comdat_group) != IDENTIFIER_NODE)
210 x_comdat_group = DECL_ASSEMBLER_NAME (x_comdat_group);
211 return x_comdat_group;
212 }
213
214 /* Set comdat group. */
215 void set_comdat_group (tree group)
216 {
217 gcc_checking_assert (!group || TREE_CODE (group) == IDENTIFIER_NODE
218 || DECL_P (group));
219 x_comdat_group = group;
220 }
221
222 /* Return section as string. */
223 const char * get_section ()
224 {
225 if (!x_section)
226 return NULL;
227 return x_section->name;
228 }
229
230 /* Remove node from same comdat group. */
231 void remove_from_same_comdat_group (void);
232
233 /* Add this symtab_node to the same comdat group that OLD is in. */
234 void add_to_same_comdat_group (symtab_node *old_node);
235
236 /* Dissolve the same_comdat_group list in which NODE resides. */
237 void dissolve_same_comdat_group_list (void);
238
239   /* Return true when the symtab_node is known to be used from another (non-LTO)
240 object file. Known only when doing LTO via linker plugin. */
241 bool used_from_object_file_p (void);
242
243 /* Walk the alias chain to return the symbol NODE is alias of.
244 If NODE is not an alias, return NODE.
245 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
246 symtab_node *ultimate_alias_target (enum availability *avail = NULL);
247
248 /* Return next reachable static symbol with initializer after NODE. */
249 inline symtab_node *next_defined_symbol (void);
250
251 /* Add reference recording that symtab node is alias of TARGET.
252 The function can fail in the case of aliasing cycles; in this case
253 it returns false. */
254 bool resolve_alias (symtab_node *target);
255
256   /* The C++ FE sometimes changes linkage flags after producing same-body
257      aliases.  */
258 void fixup_same_cpp_alias_visibility (symtab_node *target);
259
260 /* Call callback on symtab node and aliases associated to this node.
261 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
262 skipped. */
263 bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
264 void *data,
265 bool include_overwrite);
266
267   /* If the node cannot be interposed by the static or dynamic linker to point
268      to a different definition, return this symbol.  Otherwise look for an alias
269      with that property and, if none exists, introduce a new one.  */
270 symtab_node *noninterposable_alias (void);
271
272 /* Return node that alias is aliasing. */
273 inline symtab_node *get_alias_target (void);
274
275 /* Set section for symbol and its aliases. */
276 void set_section (const char *section);
277
278 /* Set section, do not recurse into aliases.
279 When one wants to change section of symbol and its aliases,
280 use set_section. */
281 void set_section_for_node (const char *section);
282
283 /* Set initialization priority to PRIORITY. */
284 void set_init_priority (priority_type priority);
285
286 /* Return the initialization priority. */
287 priority_type get_init_priority ();
288
289 /* Return availability of NODE. */
290 enum availability get_availability (void);
291
292 /* Make DECL local. */
293 void make_decl_local (void);
294
295 /* Return desired alignment of the definition. This is NOT alignment useful
296 to access THIS, because THIS may be interposable and DECL_ALIGN should
297      be used instead.  It however must be guaranteed when outputting the
298      definition of THIS.  */
299 unsigned int definition_alignment ();
300
301 /* Return true if alignment can be increased. */
302 bool can_increase_alignment_p ();
303
304 /* Increase alignment of symbol to ALIGN. */
305 void increase_alignment (unsigned int align);
306
307 /* Return true if list contains an alias. */
308 bool has_aliases_p (void);
309
310   /* Return true when the symbol is a real symbol, i.e. it is not an inline
311      clone or an abstract function kept for debug info purposes only.  */
312 bool real_symbol_p (void);
313
314   /* Determine if the symbol declaration is needed.  That is, visible to
315      something either outside this translation unit or to something magic in
316      the system configury.  This function is used just during symbol creation.  */
317 bool needed_p (void);
318
319 /* Return true when there are references to the node. */
320 bool referred_to_p (bool include_self = true);
321
322 /* Return true if NODE can be discarded by linker from the binary. */
323 inline bool
324 can_be_discarded_p (void)
325 {
326 return (DECL_EXTERNAL (decl)
327 || (get_comdat_group ()
328 && resolution != LDPR_PREVAILING_DEF
329 && resolution != LDPR_PREVAILING_DEF_IRONLY
330 && resolution != LDPR_PREVAILING_DEF_IRONLY_EXP));
331 }
332
333 /* Return true if NODE is local to a particular COMDAT group, and must not
334 be named from outside the COMDAT. This is used for C++ decloned
335 constructors. */
336 inline bool comdat_local_p (void)
337 {
338 return (same_comdat_group && !TREE_PUBLIC (decl));
339 }
340
341 /* Return true if ONE and TWO are part of the same COMDAT group. */
342 inline bool in_same_comdat_group_p (symtab_node *target);
343
344 /* Return true if symbol is known to be nonzero. */
345 bool nonzero_address ();
346
347   /* Return 0 if the symbol is known to have a different address than S2,
348      1 if it is known to have the same address as S2,
349      and 2 otherwise.  */
350 int equal_address_to (symtab_node *s2);
351
352 /* Return true if symbol's address may possibly be compared to other
353 symbol's address. */
354 bool address_matters_p ();
355
356   /* Return true if NODE's address can be compared.  This uses properties
357      of NODE only and does not check whether the address is actually taken in
358      an interesting way.  For that, use address_matters_p instead.  */
359 bool address_can_be_compared_p (void);
360
361 /* Return symbol table node associated with DECL, if any,
362 and NULL otherwise. */
363 static inline symtab_node *get (const_tree decl)
364 {
365 #ifdef ENABLE_CHECKING
366 /* Check that we are called for sane type of object - functions
367 and static or external variables. */
368 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL
369 || (TREE_CODE (decl) == VAR_DECL
370 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
371 || in_lto_p)));
372 /* Check that the mapping is sane - perhaps this check can go away,
373 	 but at the moment frontends tend to corrupt the mapping by calling
374 memcpy/memset on the tree nodes. */
375 gcc_checking_assert (!decl->decl_with_vis.symtab_node
376 || decl->decl_with_vis.symtab_node->decl == decl);
377 #endif
378 return decl->decl_with_vis.symtab_node;
379 }
380
381 /* Try to find a symtab node for declaration DECL and if it does not
382 exist or if it corresponds to an inline clone, create a new one. */
383 static inline symtab_node * get_create (tree node);
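  /* A usage sketch for the two lookups above, illustrative only (DECL stands
     for some FUNCTION_DECL or VAR_DECL):

       symtab_node *snode = symtab_node::get (decl);
       if (!snode)
         // DECL has not been entered into the symbol table yet.
         snode = symtab_node::get_create (decl);  */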
384
385 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
386 Return NULL if there's no such node. */
387 static symtab_node *get_for_asmname (const_tree asmname);
388
389 /* Dump symbol table to F. */
390 static void dump_table (FILE *);
391
392 /* Dump symbol table to stderr. */
393 static inline DEBUG_FUNCTION void debug_symtab (void)
394 {
395 dump_table (stderr);
396 }
397
398 /* Verify symbol table for internal consistency. */
399 static DEBUG_FUNCTION void verify_symtab_nodes (void);
400
401 /* Type of the symbol. */
402 ENUM_BITFIELD (symtab_type) type : 8;
403
404 /* The symbols resolution. */
405 ENUM_BITFIELD (ld_plugin_symbol_resolution) resolution : 8;
406
407 /*** Flags representing the symbol type. ***/
408
409 /* True when symbol corresponds to a definition in current unit.
410      Set via finalize_function or finalize_decl.  */
411 unsigned definition : 1;
412 /* True when symbol is an alias.
413      Set by assemble_alias.  */
414 unsigned alias : 1;
415 /* True when alias is a weakref. */
416 unsigned weakref : 1;
417   /* The C++ frontend produces same-body aliases and extra name aliases for
418      virtual functions and vtables that are obviously equivalent.
419      Those aliases are a bit special, especially because the C++ frontend
420      visibility code is so ugly it cannot get them right the first time
421      and their visibility needs to be copied from their "masters" at
422      the end of parsing.  */
423 unsigned cpp_implicit_alias : 1;
424 /* Set once the definition was analyzed. The list of references and
425 other properties are built during analysis. */
426 unsigned analyzed : 1;
427 /* Set for write-only variables. */
428 unsigned writeonly : 1;
429 /* Visibility of symbol was used for further optimization; do not
430 permit further changes. */
431 unsigned refuse_visibility_changes : 1;
432
433 /*** Visibility and linkage flags. ***/
434
435 /* Set when function is visible by other units. */
436 unsigned externally_visible : 1;
437 /* Don't reorder to other symbols having this set. */
438 unsigned no_reorder : 1;
439 /* The symbol will be assumed to be used in an invisible way (like
440      by a toplevel asm statement).  */
441 unsigned force_output : 1;
442   /* Like FORCE_OUTPUT, but in this case it is the ABI that requires the symbol
443      to be exported.  Unlike FORCE_OUTPUT this flag gets cleared for symbols
444      promoted to static, and it does not inhibit optimization.  */
445 unsigned forced_by_abi : 1;
446 /* True when the name is known to be unique and thus it does not need mangling. */
447 unsigned unique_name : 1;
448 /* Specify whether the section was set by user or by
449 compiler via -ffunction-sections. */
450 unsigned implicit_section : 1;
451 /* True when body and other characteristics have been removed by
452 symtab_remove_unreachable_nodes. */
453 unsigned body_removed : 1;
454
455 /*** WHOPR Partitioning flags.
456 These flags are used at ltrans stage when only part of the callgraph is
457 available. ***/
458
459 /* Set when variable is used from other LTRANS partition. */
460 unsigned used_from_other_partition : 1;
461 /* Set when function is available in the other LTRANS partition.
462 During WPA output it is used to mark nodes that are present in
463 multiple partitions. */
464 unsigned in_other_partition : 1;
465
466
467
468 /*** other flags. ***/
469
470 /* Set when symbol has address taken. */
471 unsigned address_taken : 1;
472 /* Set when init priority is set. */
473 unsigned in_init_priority_hash : 1;
474
475 /* Set when symbol needs to be streamed into LTO bytecode for LTO, or in case
476 of offloading, for separate compilation for a different target. */
477 unsigned need_lto_streaming : 1;
478
479 /* Set when symbol can be streamed into bytecode for offloading. */
480 unsigned offloadable : 1;
481
482
483 /* Ordering of all symtab entries. */
484 int order;
485
486 /* Declaration representing the symbol. */
487 tree decl;
488
489 /* Linked list of symbol table entries starting with symtab_nodes. */
490 symtab_node *next;
491 symtab_node *previous;
492
493   /* Linked list of symbols with the same asm name.  There may be multiple
494      entries for a single symbol name during LTO, because symbols are renamed
495      only after partitioning.
496
497      Because inline clones are kept in the assembler name hash, they also
498      produce duplicate entries.
499
500      There are also several long-standing bugs where frontends and builtin
501      code produce duplicated decls.  */
502 symtab_node *next_sharing_asm_name;
503 symtab_node *previous_sharing_asm_name;
504
505 /* Circular list of nodes in the same comdat group if non-NULL. */
506 symtab_node *same_comdat_group;
507
508 /* Vectors of referring and referenced entities. */
509 ipa_ref_list ref_list;
510
511   /* Alias target.  May be either a DECL pointer or an ASSEMBLER_NAME pointer
512      depending on what was known to the frontend at creation time.
513      Once the alias is resolved, this pointer becomes NULL.  */
514 tree alias_target;
515
516 /* File stream where this node is being written to. */
517 struct lto_file_decl_data * lto_file_data;
518
519 PTR GTY ((skip)) aux;
520
521 /* Comdat group the symbol is in. Can be private if GGC allowed that. */
522 tree x_comdat_group;
523
524 /* Section name. Again can be private, if allowed. */
525 section_hash_entry *x_section;
526
527 protected:
528 /* Dump base fields of symtab nodes to F. Not to be used directly. */
529 void dump_base (FILE *);
530
531 /* Verify common part of symtab node. */
532 bool DEBUG_FUNCTION verify_base (void);
533
534 /* Remove node from symbol table. This function is not used directly, but via
535 cgraph/varpool node removal routines. */
536 void unregister (void);
537
538 /* Return the initialization and finalization priority information for
539 DECL. If there is no previous priority information, a freshly
540 allocated structure is returned. */
541 struct symbol_priority_map *priority_info (void);
542
543 /* Worker for call_for_symbol_and_aliases_1. */
544 bool call_for_symbol_and_aliases_1 (bool (*callback) (symtab_node *, void *),
545 void *data,
546 bool include_overwrite);
547 private:
548 /* Worker for set_section. */
549 static bool set_section (symtab_node *n, void *s);
550
551 /* Worker for symtab_resolve_alias. */
552 static bool set_implicit_section (symtab_node *n, void *);
553
554 /* Worker searching noninterposable alias. */
555 static bool noninterposable_alias (symtab_node *node, void *data);
556
557 /* Worker for ultimate_alias_target. */
558 symtab_node *ultimate_alias_target_1 (enum availability *avail = NULL);
559 };
560
561 /* Walk all aliases for NODE. */
562 #define FOR_EACH_ALIAS(node, alias) \
563 for (unsigned x_i = 0; node->iterate_direct_aliases (x_i, alias); x_i++)
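/* For example, a pass that needs to visit all direct aliases of a symbol NODE
   might do the following (illustrative sketch; the referring side of an
   IPA_REF_ALIAS reference is the alias itself):

     ipa_ref *ref = NULL;
     FOR_EACH_ALIAS (node, ref)
       {
         symtab_node *alias = ref->referring;
         // Process ALIAS here.
       }  */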
564
565 /* This is the information that is put into the cgraph local structure
566 to recover a function. */
567 struct lto_file_decl_data;
568
569 extern const char * const cgraph_availability_names[];
570 extern const char * const ld_plugin_symbol_resolution_names[];
571 extern const char * const tls_model_names[];
572
573 /* Information about thunk, used only for same body aliases. */
574
575 struct GTY(()) cgraph_thunk_info {
576 /* Information about the thunk. */
577 HOST_WIDE_INT fixed_offset;
578 HOST_WIDE_INT virtual_value;
579 tree alias;
580 bool this_adjusting;
581 bool virtual_offset_p;
582 bool add_pointer_bounds_args;
583 /* Set to true when alias node is thunk. */
584 bool thunk_p;
585 };
586
587 /* Information about the function collected locally.
588 Available after function is analyzed. */
589
590 struct GTY(()) cgraph_local_info {
591 /* Set when function is visible in current compilation unit only and
592 its address is never taken. */
593 unsigned local : 1;
594
595   /* False when there is something that makes versioning impossible.  */
596 unsigned versionable : 1;
597
598 /* False when function calling convention and signature can not be changed.
599 This is the case when __builtin_apply_args is used. */
600 unsigned can_change_signature : 1;
601
602 /* True when the function has been originally extern inline, but it is
603 redefined now. */
604 unsigned redefined_extern_inline : 1;
605
606 /* True if the function may enter serial irrevocable mode. */
607 unsigned tm_may_enter_irr : 1;
608 };
609
610 /* Information about the function that needs to be computed globally
611 once compilation is finished. Available only with -funit-at-a-time. */
612
613 struct GTY(()) cgraph_global_info {
614 /* For inline clones this points to the function they will be
615 inlined into. */
616 cgraph_node *inlined_to;
617 };
618
619 /* Represent which DECL tree (or reference to such tree)
620 will be replaced by another tree while versioning. */
621 struct GTY(()) ipa_replace_map
622 {
623 /* The tree that will be replaced. */
624 tree old_tree;
625 /* The new (replacing) tree. */
626 tree new_tree;
627 /* Parameter number to replace, when old_tree is NULL. */
628 int parm_num;
629 /* True when a substitution should be done, false otherwise. */
630 bool replace_p;
631 /* True when we replace a reference to old_tree. */
632 bool ref_p;
633 };
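/* One way a replacement map can be filled in before being passed as the
   TREE_MAP argument of create_virtual_clone or create_version_clone_with_body
   (declared below); a sketch assuming the usual ggc_alloc/vec_safe_push
   helpers, with REPLACEMENT a hypothetical tree value, illustrative only:

     vec<ipa_replace_map *, va_gc> *tree_map = NULL;
     ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
     map->old_tree = NULL_TREE;      // identify the parameter by number instead
     map->parm_num = 0;              // first parameter of the original decl
     map->new_tree = replacement;    // hypothetical value the clone will use
     map->replace_p = true;
     map->ref_p = false;
     vec_safe_push (tree_map, map);  */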
634
635 struct GTY(()) cgraph_clone_info
636 {
637 vec<ipa_replace_map *, va_gc> *tree_map;
638 bitmap args_to_skip;
639 bitmap combined_args_to_skip;
640 };
641
642 enum cgraph_simd_clone_arg_type
643 {
644 SIMD_CLONE_ARG_TYPE_VECTOR,
645 SIMD_CLONE_ARG_TYPE_UNIFORM,
646 /* These are only for integer/pointer arguments passed by value. */
647 SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP,
648 SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP,
649 /* These 3 are only for reference type arguments or arguments passed
650 by reference. */
651 SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP,
652 SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_CONSTANT_STEP,
653 SIMD_CLONE_ARG_TYPE_LINEAR_VAL_CONSTANT_STEP,
654 SIMD_CLONE_ARG_TYPE_MASK
655 };
656
657 /* Function arguments in the original function of a SIMD clone.
658 Supplementary data for `struct simd_clone'. */
659
660 struct GTY(()) cgraph_simd_clone_arg {
661 /* Original function argument as it originally existed in
662 DECL_ARGUMENTS. */
663 tree orig_arg;
664
665   /* orig_arg's type (or for extern functions, the type from
666 TYPE_ARG_TYPES). */
667 tree orig_type;
668
669 /* If argument is a vector, this holds the vector version of
670 orig_arg that after adjusting the argument types will live in
671 DECL_ARGUMENTS. Otherwise, this is NULL.
672
673 This basically holds:
674 vector(simdlen) __typeof__(orig_arg) new_arg. */
675 tree vector_arg;
676
677   /* vector_arg's type (or for extern functions, the new vector type).  */
678 tree vector_type;
679
680 /* If argument is a vector, this holds the array where the simd
681 argument is held while executing the simd clone function. This
682 is a local variable in the cloned function. Its content is
683 copied from vector_arg upon entry to the clone.
684
685 This basically holds:
686 __typeof__(orig_arg) simd_array[simdlen]. */
687 tree simd_array;
688
689 /* A SIMD clone's argument can be either linear (constant or
690 variable), uniform, or vector. */
691 enum cgraph_simd_clone_arg_type arg_type;
692
693 /* For arg_type SIMD_CLONE_ARG_TYPE_LINEAR_*CONSTANT_STEP this is
694 the constant linear step, if arg_type is
695 SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP, this is index of
696 the uniform argument holding the step, otherwise 0. */
697 HOST_WIDE_INT linear_step;
698
699 /* Variable alignment if available, otherwise 0. */
700 unsigned int alignment;
701 };
702
703 /* Specific data for a SIMD function clone. */
704
705 struct GTY(()) cgraph_simd_clone {
706 /* Number of words in the SIMD lane associated with this clone. */
707 unsigned int simdlen;
708
709 /* Number of annotated function arguments in `args'. This is
710 usually the number of named arguments in FNDECL. */
711 unsigned int nargs;
712
713 /* Max hardware vector size in bits for integral vectors. */
714 unsigned int vecsize_int;
715
716 /* Max hardware vector size in bits for floating point vectors. */
717 unsigned int vecsize_float;
718
719 /* The mangling character for a given vector size. This is used
720 to determine the ISA mangling bit as specified in the Intel
721 Vector ABI. */
722 unsigned char vecsize_mangle;
723
724 /* True if this is the masked, in-branch version of the clone,
725 otherwise false. */
726 unsigned int inbranch : 1;
727
728 /* True if this is a Cilk Plus variant. */
729 unsigned int cilk_elemental : 1;
730
731 /* Doubly linked list of SIMD clones. */
732 cgraph_node *prev_clone, *next_clone;
733
734 /* Original cgraph node the SIMD clones were created for. */
735 cgraph_node *origin;
736
737 /* Annotated function arguments for the original function. */
738 cgraph_simd_clone_arg GTY((length ("%h.nargs"))) args[1];
739 };
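/* ARGS above is a trailing array: a descriptor for N annotated arguments is
   allocated as a single block of roughly

     sizeof (cgraph_simd_clone) + (N - 1) * sizeof (cgraph_simd_clone_arg)

   bytes, which is why the GTY length annotation refers to %h.nargs.  */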
740
741 /* Function Multiversioning info. */
742 struct GTY((for_user)) cgraph_function_version_info {
743 /* The cgraph_node for which the function version info is stored. */
744 cgraph_node *this_node;
745 /* Chains all the semantically identical function versions. The
746 first function in this chain is the version_info node of the
747 default function. */
748 cgraph_function_version_info *prev;
749 /* If this version node corresponds to a dispatcher for function
750 versions, this points to the version info node of the default
751 function, the first node in the chain. */
752 cgraph_function_version_info *next;
753 /* If this node corresponds to a function version, this points
754 to the dispatcher function decl, which is the function that must
755 be called to execute the right function version at run-time.
756
757 If this cgraph node is a dispatcher (if dispatcher_function is
758 true, in the cgraph_node struct) for function versions, this
759 points to resolver function, which holds the function body of the
760 dispatcher. The dispatcher decl is an alias to the resolver
761 function decl. */
762 tree dispatcher_resolver;
763 };
764
765 #define DEFCIFCODE(code, type, string) CIF_ ## code,
766 /* Reasons for inlining failures. */
767
768 enum cgraph_inline_failed_t {
769 #include "cif-code.def"
770 CIF_N_REASONS
771 };
772
773 enum cgraph_inline_failed_type_t
774 {
775 CIF_FINAL_NORMAL = 0,
776 CIF_FINAL_ERROR
777 };
778
779 struct cgraph_edge;
780
781 struct cgraph_edge_hasher : ggc_ptr_hash<cgraph_edge>
782 {
783 typedef gimple *compare_type;
784
785 static hashval_t hash (cgraph_edge *);
786 static hashval_t hash (gimple *);
787 static bool equal (cgraph_edge *, gimple *);
788 };
789
790 /* The cgraph data structure.
791 Each function decl has assigned cgraph_node listing callees and callers. */
792
793 struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node {
794 public:
795 /* Remove the node from cgraph and all inline clones inlined into it.
796      However, skip removal of FORBIDDEN_NODE and return true if it needs to be
797      removed.  This allows calling the function from an outer loop walking the
798      clone tree.  */
799 bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL);
800
801 /* Record all references from cgraph_node that are taken
802 in statement STMT. */
803 void record_stmt_references (gimple *stmt);
804
805 /* Like cgraph_set_call_stmt but walk the clone tree and update all
806 clones sharing the same function body.
807      When UPDATE_SPECULATIVE is true, all three components of a
808      speculative edge get updated.  Otherwise we update only the direct
809      call.  */
810 void set_call_stmt_including_clones (gimple *old_stmt, gcall *new_stmt,
811 bool update_speculative = true);
812
813 /* Walk the alias chain to return the function cgraph_node is alias of.
814 Walk through thunk, too.
815 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
816 cgraph_node *function_symbol (enum availability *avail = NULL);
817
818 /* Walk the alias chain to return the function cgraph_node is alias of.
819 Walk through non virtual thunks, too. Thus we return either a function
820 or a virtual thunk node.
821 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
822 cgraph_node *function_or_virtual_thunk_symbol
823 (enum availability *avail = NULL);
824
825 /* Create node representing clone of N executed COUNT times. Decrease
826 the execution counts from original node too.
827 The new clone will have decl set to DECL that may or may not be the same
828 as decl of N.
829
830 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
831 function's profile to reflect the fact that part of execution is handled
832 by node.
833      When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
834      the new clone.  Otherwise the caller is responsible for doing so later.
835
836 If the new node is being inlined into another one, NEW_INLINED_TO should be
837 the outline function the new one is (even indirectly) inlined to.
838 All hooks will see this in node's global.inlined_to, when invoked.
839 Can be NULL if the node is not inlined. */
840 cgraph_node *create_clone (tree decl, gcov_type count, int freq,
841 bool update_original,
842 vec<cgraph_edge *> redirect_callers,
843 bool call_duplication_hook,
844 cgraph_node *new_inlined_to,
845 bitmap args_to_skip);
846
847 /* Create callgraph node clone with new declaration. The actual body will
848 be copied later at compilation stage. */
849 cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
850 vec<ipa_replace_map *, va_gc> *tree_map,
851 bitmap args_to_skip, const char * suffix);
852
853 /* cgraph node being removed from symbol table; see if its entry can be
854      replaced by another inline clone.  */
855 cgraph_node *find_replacement (void);
856
857 /* Create a new cgraph node which is the new version of
858 callgraph node. REDIRECT_CALLERS holds the callers
859 edges which should be redirected to point to
860 NEW_VERSION. ALL the callees edges of the node
861 are cloned to the new version node. Return the new
862 version node.
863
864      If non-NULL, BBS_TO_COPY determines which basic blocks
865      are copied, to prevent duplication of calls that are dead
866      in the clone.
867
868 cgraph_node *create_version_clone (tree new_decl,
869 vec<cgraph_edge *> redirect_callers,
870 bitmap bbs_to_copy);
871
872 /* Perform function versioning.
873 Function versioning includes copying of the tree and
874 a callgraph update (creating a new cgraph node and updating
875 its callees and callers).
876
877 REDIRECT_CALLERS varray includes the edges to be redirected
878 to the new version.
879
880 TREE_MAP is a mapping of tree nodes we want to replace with
881 new ones (according to results of prior analysis).
882
883      If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
884      from the new version.
885      If SKIP_RETURN is true, the new version will return void.
886      If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
887      If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
888
889 Return the new version's cgraph node. */
890 cgraph_node *create_version_clone_with_body
891 (vec<cgraph_edge *> redirect_callers,
892 vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
893 bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
894 const char *clone_name);
895
896 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
897 corresponding to cgraph_node. */
898 cgraph_function_version_info *insert_new_function_version (void);
899
900 /* Get the cgraph_function_version_info node corresponding to node. */
901 cgraph_function_version_info *function_version (void);
902
903 /* Discover all functions and variables that are trivially needed, analyze
904      them as well as all functions and variables referred to by them.  */
905 void analyze (void);
906
907 /* Add thunk alias into callgraph. The alias declaration is ALIAS and it
908      aliases DECL with adjustments made to the first parameter.
909      See comments in thunk_adjust for details on the parameters.  */
910 cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
911 HOST_WIDE_INT fixed_offset,
912 HOST_WIDE_INT virtual_value,
913 tree virtual_offset,
914 tree real_alias);
915
916
917 /* Return node that alias is aliasing. */
918 inline cgraph_node *get_alias_target (void);
919
920   /* Given a function symbol, walk the alias chain to return the function the
921      node is an alias of.  Do not walk through thunks.
922 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
923
924 cgraph_node *ultimate_alias_target (availability *availability = NULL);
925
926 /* Expand thunk NODE to gimple if possible.
927      When FORCE_GIMPLE_THUNK is true, a gimple thunk is created and
928      no assembler is produced.
929      When OUTPUT_ASM_THUNKS is true, also produce assembler for
930      thunks that are not lowered.
931 bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk);
932
933 /* Call expand_thunk on all callers that are thunks and analyze those
934 nodes that were expanded. */
935 void expand_all_artificial_thunks ();
936
937 /* Assemble thunks and aliases associated to node. */
938 void assemble_thunks_and_aliases (void);
939
940 /* Expand function specified by node. */
941 void expand (void);
942
943   /* As a GCC extension we allow redefinition of the function.  The
944      semantics when the two bodies differ are not well defined.
945      We replace the old body with the new body, so in unit-at-a-time mode
946      we always use the new body, while in normal mode we may end up with
947      the old body inlined into some functions and the new body expanded and
948      inlined in others.  */
949 void reset (void);
950
951 /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
952 kind of wrapper method. */
953 void create_wrapper (cgraph_node *target);
954
955   /* Verify consistency of the cgraph node.  */
956 void DEBUG_FUNCTION verify_node (void);
957
958 /* Remove function from symbol table. */
959 void remove (void);
960
961 /* Dump call graph node to file F. */
962 void dump (FILE *f);
963
964 /* Dump call graph node to stderr. */
965 void DEBUG_FUNCTION debug (void);
966
967 /* When doing LTO, read cgraph_node's body from disk if it is not already
968 present. */
969 bool get_untransformed_body (void);
970
971 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
972 if it is not already present. When some IPA transformations are scheduled,
973 apply them. */
974 bool get_body (void);
975
976 /* Release memory used to represent body of function.
977 Use this only for functions that are released before being translated to
978 target code (i.e. RTL). Functions that are compiled to RTL and beyond
979 are free'd in final.c via free_after_compilation(). */
980 void release_body (bool keep_arguments = false);
981
982 /* Return the DECL_STRUCT_FUNCTION of the function. */
983 struct function *get_fun (void);
984
985   /* cgraph_node is no longer a nested function; update cgraph accordingly.  */
986 void unnest (void);
987
988 /* Bring cgraph node local. */
989 void make_local (void);
990
991   /* Indicate that the node has its address taken.  */
992 void mark_address_taken (void);
993
994   /* Set finalization priority to PRIORITY.  */
995 void set_fini_priority (priority_type priority);
996
997 /* Return the finalization priority. */
998 priority_type get_fini_priority (void);
999
1000 /* Create edge from a given function to CALLEE in the cgraph. */
1001 cgraph_edge *create_edge (cgraph_node *callee,
1002 gcall *call_stmt, gcov_type count,
1003 int freq);
1004
1005 /* Create an indirect edge with a yet-undetermined callee where the call
1006 statement destination is a formal parameter of the caller with index
1007 PARAM_INDEX. */
1008 cgraph_edge *create_indirect_edge (gcall *call_stmt, int ecf_flags,
1009 gcov_type count, int freq,
1010 bool compute_indirect_info = true);
1011
1012   /* Like cgraph_create_edge, walk the clone tree and update all clones sharing
1013      the same function body.  If clones already have an edge for OLD_STMT, only
1014      update the edge the same way cgraph_set_call_stmt_including_clones does.  */
1015 void create_edge_including_clones (cgraph_node *callee,
1016 gimple *old_stmt, gcall *stmt,
1017 gcov_type count,
1018 int freq,
1019 cgraph_inline_failed_t reason);
1020
1021 /* Return the callgraph edge representing the GIMPLE_CALL statement
1022 CALL_STMT. */
1023 cgraph_edge *get_edge (gimple *call_stmt);
1024
1025 /* Collect all callers of cgraph_node and its aliases that are known to lead
1026 to NODE (i.e. are not overwritable). */
1027 vec<cgraph_edge *> collect_callers (void);
1028
1029 /* Remove all callers from the node. */
1030 void remove_callers (void);
1031
1032 /* Remove all callees from the node. */
1033 void remove_callees (void);
1034
1035 /* Return function availability. See cgraph.h for description of individual
1036 return values. */
1037 enum availability get_availability (void);
1038
1039 /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
1040 if any to NOTHROW. */
1041 void set_nothrow_flag (bool nothrow);
1042
1043 /* Set TREE_READONLY on cgraph_node's decl and on aliases of the node
1044 if any to READONLY. */
1045 void set_const_flag (bool readonly, bool looping);
1046
1047 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
1048 if any to PURE. */
1049 void set_pure_flag (bool pure, bool looping);
1050
1051 /* Call callback on function and aliases associated to the function.
1052 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1053 skipped. */
1054
1055 bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
1056 void *),
1057 void *data, bool include_overwritable);
1058
1059 /* Call callback on cgraph_node, thunks and aliases associated to NODE.
1060 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1061 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
1062 skipped. */
1063 bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node,
1064 void *data),
1065 void *data,
1066 bool include_overwritable,
1067 bool exclude_virtual_thunks = false);
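  /* Callbacks for the walkers above return a bool; by convention returning
     true terminates the walk early and is propagated as the walker's return
     value.  A sketch of a hypothetical callback counting nodes whose address
     is taken, illustrative only:

       static bool
       note_addressable (cgraph_node *node, void *data)
       {
         int *count = (int *) data;
         if (node->address_taken)
           ++*count;
         return false;   // keep walking remaining thunks/aliases
       }

       int count = 0;
       node->call_for_symbol_thunks_and_aliases (note_addressable, &count, true);  */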
1068
1069 /* Likewise indicate that a node is needed, i.e. reachable via some
1070 external means. */
1071 inline void mark_force_output (void);
1072
1073 /* Return true when function can be marked local. */
1074 bool local_p (void);
1075
1076 /* Return true if cgraph_node can be made local for API change.
1077 Extern inline functions and C++ COMDAT functions can be made local
1078 at the expense of possible code size growth if function is used in multiple
1079 compilation units. */
1080 bool can_be_local_p (void);
1081
1082 /* Return true when cgraph_node can not return or throw and thus
1083 it is safe to ignore its side effects for IPA analysis. */
1084 bool cannot_return_p (void);
1085
1086 /* Return true when function cgraph_node and all its aliases are only called
1087      directly, i.e. it is not externally visible, its address was not taken
1088      and it is not used in any other non-standard way.  */
1090 bool only_called_directly_p (void);
1091
1092   /* Return true when the function is only called directly or it has an alias,
1093      i.e. it is not externally visible, its address was not taken and
1094      it is not used in any other non-standard way.  */
1095 inline bool only_called_directly_or_aliased_p (void);
1096
1097 /* Return true when function cgraph_node can be expected to be removed
1098 from program when direct calls in this compilation unit are removed.
1099
1100 As a special case COMDAT functions are
1101      cgraph_can_remove_if_no_direct_calls_p while they are not
1102      cgraph_only_called_directly_p (it is possible they are called from another
1103      unit).
1104
1105      This function behaves as cgraph_only_called_directly_p because eliminating
1106      all uses of a COMDAT function does not necessarily make it disappear from
1107      the program unless we are compiling the whole program or we do LTO.  In that
1108      case we know we win since dynamic linking will not really discard the
1109      linkonce section.
1110
1111 If WILL_INLINE is true, assume that function will be inlined into all the
1112 direct calls. */
1113 bool will_be_removed_from_program_if_no_direct_calls_p
1114 (bool will_inline = false);
1115
1116 /* Return true when function can be removed from callgraph
1117 if all direct calls and references are eliminated. The function does
1118 not take into account comdat groups. */
1119 bool can_remove_if_no_direct_calls_and_refs_p (void);
1120
1121 /* Return true when function cgraph_node and its aliases can be removed from
1122 callgraph if all direct calls are eliminated.
1123 If WILL_INLINE is true, assume that function will be inlined into all the
1124 direct calls. */
1125 bool can_remove_if_no_direct_calls_p (bool will_inline = false);
1126
1127 /* Return true when callgraph node is a function with Gimple body defined
1128      in the current unit.  Functions can also be defined externally or they
1129      can be thunks with no Gimple representation.
1130
1131 Note that at WPA stage, the function body may not be present in memory. */
1132 inline bool has_gimple_body_p (void);
1133
1134 /* Return true if function should be optimized for size. */
1135 bool optimize_for_size_p (void);
1136
1137 /* Dump the callgraph to file F. */
1138 static void dump_cgraph (FILE *f);
1139
1140 /* Dump the call graph to stderr. */
1141 static inline
1142 void debug_cgraph (void)
1143 {
1144 dump_cgraph (stderr);
1145 }
1146
1147 /* Record that DECL1 and DECL2 are semantically identical function
1148 versions. */
1149 static void record_function_versions (tree decl1, tree decl2);
1150
1151 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
1152 DECL is a duplicate declaration. */
1153 static void delete_function_version (tree decl);
1154
1155 /* Add the function FNDECL to the call graph.
1156 Unlike finalize_function, this function is intended to be used
1157      by the middle end and allows insertion of a new function at an arbitrary
1158      point of compilation.  The function can be either in high, low or SSA form
1159 GIMPLE.
1160
1161 The function is assumed to be reachable and have address taken (so no
1162 API breaking optimizations are performed on it).
1163
1164 Main work done by this function is to enqueue the function for later
1165      processing, to avoid the need for the passes to be re-entrant.  */
1166 static void add_new_function (tree fndecl, bool lowered);
1167
1168 /* Return callgraph node for given symbol and check it is a function. */
1169 static inline cgraph_node *get (const_tree decl)
1170 {
1171 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
1172 return dyn_cast <cgraph_node *> (symtab_node::get (decl));
1173 }
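  /* A usage sketch, illustrative only (FNDECL stands for some FUNCTION_DECL):

       cgraph_node *fn = cgraph_node::get (fndecl);
       if (fn && fn->has_gimple_body_p ())
         {
           // FNDECL is defined in this unit; at LTO time the body may still
           // need to be read in via get_untransformed_body before use.
         }  */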
1174
1175 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
1176 logic in effect. If NO_COLLECT is true, then our caller cannot stand to
1177 have the garbage collector run at the moment. We would need to either
1178 create a new GC context, or just not compile right now. */
1179 static void finalize_function (tree, bool);
1180
1181 /* Return cgraph node assigned to DECL. Create new one when needed. */
1182 static cgraph_node * create (tree decl);
1183
1184 /* Try to find a call graph node for declaration DECL and if it does not
1185 exist or if it corresponds to an inline clone, create a new one. */
1186 static cgraph_node * get_create (tree);
1187
1188 /* Return local info for the compiled function. */
1189 static cgraph_local_info *local_info (tree decl);
1190
1191   /* Return RTL info for the compiled function.  */
1192 static struct cgraph_rtl_info *rtl_info (tree);
1193
1194 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
1195 Return NULL if there's no such node. */
1196 static cgraph_node *get_for_asmname (tree asmname);
1197
1198 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if
1199 successful and NULL otherwise.
1200 Same body aliases are output whenever the body of DECL is output,
1201 and cgraph_node::get (ALIAS) transparently
1202 returns cgraph_node::get (DECL). */
1203 static cgraph_node * create_same_body_alias (tree alias, tree decl);
1204
1205 /* Verify whole cgraph structure. */
1206 static void DEBUG_FUNCTION verify_cgraph_nodes (void);
1207
1208 /* Worker to bring NODE local. */
1209 static bool make_local (cgraph_node *node, void *);
1210
1211   /* Mark ALIAS as an alias to TARGET.  Return the new alias cgraph node.  */
1214 static cgraph_node *create_alias (tree alias, tree target);
1215
1216 /* Return true if NODE has thunk. */
1217 static bool has_thunk_p (cgraph_node *node, void *);
1218
1219 cgraph_edge *callees;
1220 cgraph_edge *callers;
1221 /* List of edges representing indirect calls with a yet undetermined
1222 callee. */
1223 cgraph_edge *indirect_calls;
1224   /* For nested functions, points to the function the node is nested in.  */
1225 cgraph_node *origin;
1226 /* Points to first nested function, if any. */
1227 cgraph_node *nested;
1228 /* Pointer to the next function with same origin, if any. */
1229 cgraph_node *next_nested;
1230 /* Pointer to the next clone. */
1231 cgraph_node *next_sibling_clone;
1232 cgraph_node *prev_sibling_clone;
1233 cgraph_node *clones;
1234 cgraph_node *clone_of;
1235 /* If instrumentation_clone is 1 then instrumented_version points
1236 to the original function used to make instrumented version.
1237 Otherwise points to instrumented version of the function. */
1238 cgraph_node *instrumented_version;
1239 /* If instrumentation_clone is 1 then orig_decl is the original
1240 function declaration. */
1241 tree orig_decl;
1242   /* For functions with many call sites it holds a map from call statement
1243      to the edge, to speed up get_edge.  */
1244 hash_table<cgraph_edge_hasher> *GTY(()) call_site_hash;
1245   /* Declaration this node used to be a clone of.  */
1246 tree former_clone_of;
1247
1248 /* If this is a SIMD clone, this points to the SIMD specific
1249 information for it. */
1250 cgraph_simd_clone *simdclone;
1251 /* If this function has SIMD clones, this points to the first clone. */
1252 cgraph_node *simd_clones;
1253
1254 /* Interprocedural passes scheduled to have their transform functions
1255 applied next time we execute local pass on them. We maintain it
1256 per-function in order to allow IPA passes to introduce new functions. */
1257 vec<ipa_opt_pass> GTY((skip)) ipa_transforms_to_apply;
1258
1259 cgraph_local_info local;
1260 cgraph_global_info global;
1261 struct cgraph_rtl_info *rtl;
1262 cgraph_clone_info clone;
1263 cgraph_thunk_info thunk;
1264
1265 /* Expected number of executions: calculated in profile.c. */
1266 gcov_type count;
1267 /* How to scale counts at materialization time; used to merge
1268 LTO units with different number of profile runs. */
1269 int count_materialization_scale;
1270 /* Unique id of the node. */
1271 int uid;
1272 /* Summary unique id of the node. */
1273 int summary_uid;
1274 /* ID assigned by the profiling. */
1275 unsigned int profile_id;
1276 /* Time profiler: first run of function. */
1277 int tp_first_run;
1278
1279 /* Set when decl is an abstract function pointed to by the
1280 ABSTRACT_DECL_ORIGIN of a reachable function. */
1281 unsigned used_as_abstract_origin : 1;
1282 /* Set once the function is lowered (i.e. its CFG is built). */
1283 unsigned lowered : 1;
1284 /* Set once the function has been instantiated and its callee
1285 lists created. */
1286 unsigned process : 1;
1287 /* How commonly executed the node is. Initialized during branch
1288 probabilities pass. */
1289 ENUM_BITFIELD (node_frequency) frequency : 2;
1290 /* True when function can only be called at startup (from static ctor). */
1291 unsigned only_called_at_startup : 1;
1292   /* True when function can only be called at exit (from static dtor).  */
1293 unsigned only_called_at_exit : 1;
1294 /* True when function is the transactional clone of a function which
1295 is called only from inside transactions. */
1296 /* ?? We should be able to remove this. We have enough bits in
1297 cgraph to calculate it. */
1298 unsigned tm_clone : 1;
1299 /* True if this decl is a dispatcher for function versions. */
1300 unsigned dispatcher_function : 1;
1301 /* True if this decl calls a COMDAT-local function. This is set up in
1302 compute_inline_parameters and inline_call. */
1303 unsigned calls_comdat_local : 1;
1304 /* True if node has been created by merge operation in IPA-ICF. */
1305 unsigned icf_merged: 1;
1306 /* True when function is clone created for Pointer Bounds Checker
1307 instrumentation. */
1308 unsigned instrumentation_clone : 1;
1309 /* True if call to node can't result in a call to free, munmap or
1310 other operation that could make previously non-trapping memory
1311 accesses trapping. */
1312 unsigned nonfreeing_fn : 1;
1313   /* True if multiple COMDAT bodies were merged by lto-symtab.  */
1314 unsigned merged : 1;
1315 /* True if function was created to be executed in parallel. */
1316 unsigned parallelized_function : 1;
1317 /* True if function is part split out by ipa-split. */
1318 unsigned split_part : 1;
1319
1320 private:
1321 /* Worker for call_for_symbol_and_aliases. */
1322 bool call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
1323 void *),
1324 void *data, bool include_overwritable);
1325 };
1326
1327 /* A cgraph node set is a collection of cgraph nodes. A cgraph node
1328 can appear in multiple sets. */
1329 struct cgraph_node_set_def
1330 {
1331 hash_map<cgraph_node *, size_t> *map;
1332 vec<cgraph_node *> nodes;
1333 };
1334
1335 typedef cgraph_node_set_def *cgraph_node_set;
1336 typedef struct varpool_node_set_def *varpool_node_set;
1337
1338 class varpool_node;
1339
1340 /* A varpool node set is a collection of varpool nodes. A varpool node
1341 can appear in multiple sets. */
1342 struct varpool_node_set_def
1343 {
1344 hash_map<varpool_node *, size_t> * map;
1345 vec<varpool_node *> nodes;
1346 };
1347
1348 /* Iterator structure for cgraph node sets. */
1349 struct cgraph_node_set_iterator
1350 {
1351 cgraph_node_set set;
1352 unsigned index;
1353 };
1354
1355 /* Iterator structure for varpool node sets. */
1356 struct varpool_node_set_iterator
1357 {
1358 varpool_node_set set;
1359 unsigned index;
1360 };
1361
1362 /* Context of a polymorphic call.  It represents information about the type of
1363    the instance that may reach the call.  This is used by ipa-devirt walkers of the
1364 type inheritance graph. */
1365
1366 class GTY(()) ipa_polymorphic_call_context {
1367 public:
1368 /* The called object appears in an object of type OUTER_TYPE
1369 at offset OFFSET. When information is not 100% reliable, we
1370 use SPECULATIVE_OUTER_TYPE and SPECULATIVE_OFFSET. */
1371 HOST_WIDE_INT offset;
1372 HOST_WIDE_INT speculative_offset;
1373 tree outer_type;
1374 tree speculative_outer_type;
1375 /* True if outer object may be in construction or destruction. */
1376 unsigned maybe_in_construction : 1;
1377 /* True if outer object may be of derived type. */
1378 unsigned maybe_derived_type : 1;
1379 /* True if speculative outer object may be of derived type. We always
1380 speculate that construction does not happen. */
1381 unsigned speculative_maybe_derived_type : 1;
1382 /* True if the context is invalid and all calls should be redirected
1383 to BUILTIN_UNREACHABLE. */
1384 unsigned invalid : 1;
1385 /* True if the outer type is dynamic. */
1386 unsigned dynamic : 1;
1387
1388 /* Build empty "I know nothing" context. */
1389 ipa_polymorphic_call_context ();
1390 /* Build polymorphic call context for indirect call E. */
1391 ipa_polymorphic_call_context (cgraph_edge *e);
1392 /* Build polymorphic call context for IP invariant CST.
1393      If specified, OTR_TYPE specifies the type of the polymorphic call
1394      that takes CST+OFFSET as a parameter.  */
1395 ipa_polymorphic_call_context (tree cst, tree otr_type = NULL,
1396 HOST_WIDE_INT offset = 0);
1397 /* Build context for pointer REF contained in FNDECL at statement STMT.
1398      If INSTANCE is non-NULL, return a pointer to the object described by
1399 the context. */
1400 ipa_polymorphic_call_context (tree fndecl, tree ref, gimple *stmt,
1401 tree *instance = NULL);
1402
1403 /* Look for vtable stores or constructor calls to work out dynamic type
1404 of memory location. */
1405 bool get_dynamic_type (tree, tree, tree, gimple *);
1406
1407 /* Make context non-speculative. */
1408 void clear_speculation ();
1409
1410   /* Produce a context specifying all derived types of OTR_TYPE.  If OTR_TYPE is
1411 NULL, the context is set to dummy "I know nothing" setting. */
1412 void clear_outer_type (tree otr_type = NULL);
1413
1414 /* Walk container types and modify context to point to actual class
1415 containing OTR_TYPE (if non-NULL) as base class.
1416 Return true if resulting context is valid.
1417
1418 When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
1419      valid only via allocation of a new polymorphic type inside, by means
1420 of placement new.
1421
1422 When CONSIDER_BASES is false, only look for actual fields, not base types
1423 of TYPE. */
1424 bool restrict_to_inner_class (tree otr_type,
1425 bool consider_placement_new = true,
1426 bool consider_bases = true);
1427
1428 /* Adjust all offsets in contexts by given number of bits. */
1429 void offset_by (HOST_WIDE_INT);
1430   /* Use when we cannot track a dynamic type change.  This speculatively assumes
1431      the type change is not happening.  */
1432 void possible_dynamic_type_change (bool, tree otr_type = NULL);
1433   /* Assume that both THIS and a given context are valid and strengthen THIS
1434      if possible.  Return true if any strengthening was made.
1435 If actual type the context is being used in is known, OTR_TYPE should be
1436 set accordingly. This improves quality of combined result. */
1437 bool combine_with (ipa_polymorphic_call_context, tree otr_type = NULL);
1438 bool meet_with (ipa_polymorphic_call_context, tree otr_type = NULL);
1439
1440 /* Return TRUE if context is fully useless. */
1441 bool useless_p () const;
1442 /* Return TRUE if this context conveys the same information as X. */
1443 bool equal_to (const ipa_polymorphic_call_context &x) const;
1444
1445 /* Dump human readable context to F. If NEWLINE is true, it will be
1446 terminated by a newline. */
1447 void dump (FILE *f, bool newline = true) const;
1448 void DEBUG_FUNCTION debug () const;
1449
1450 /* LTO streaming. */
1451 void stream_out (struct output_block *) const;
1452 void stream_in (struct lto_input_block *, struct data_in *data_in);
1453
1454 private:
1455 bool combine_speculation_with (tree, HOST_WIDE_INT, bool, tree);
1456 bool meet_speculation_with (tree, HOST_WIDE_INT, bool, tree);
1457 void set_by_decl (tree, HOST_WIDE_INT);
1458 bool set_by_invariant (tree, tree, HOST_WIDE_INT);
1459 bool speculation_consistent_p (tree, HOST_WIDE_INT, bool, tree) const;
1460 void make_speculative (tree otr_type = NULL);
1461 };
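
/* Illustrative sketch only (not part of the interface): a pass looking at
   a polymorphic indirect call edge E might build its context, strengthen it
   with extra knowledge KNOWN_CTX obtained elsewhere (for example from IPA
   propagation), and dump the result:

     ipa_polymorphic_call_context ctx (e);
     ctx.combine_with (known_ctx, e->indirect_info->otr_type);
     if (!ctx.useless_p ())
       ctx.dump (dump_file);

   KNOWN_CTX and DUMP_FILE stand for data the pass already has.  */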
1462
1463 /* Structure containing additional information about an indirect call. */
1464
1465 struct GTY(()) cgraph_indirect_call_info
1466 {
1467 /* When agg_contents is set, an offset where the call pointer is located
1468 within the aggregate. */
1469 HOST_WIDE_INT offset;
1470 /* Context of the polymorphic call; use only when POLYMORPHIC flag is set. */
1471 ipa_polymorphic_call_context context;
1472 /* OBJ_TYPE_REF_TOKEN of a polymorphic call (if polymorphic is set). */
1473 HOST_WIDE_INT otr_token;
1474 /* Type of the object from OBJ_TYPE_REF_OBJECT. */
1475 tree otr_type;
1476 /* Index of the parameter that is called. */
1477 int param_index;
1478 /* ECF flags determined from the caller. */
1479 int ecf_flags;
1480 /* Profile_id of common target obtained from profile. */
1481 int common_target_id;
1482 /* Probability that call will land in function with COMMON_TARGET_ID. */
1483 int common_target_probability;
1484
1485 /* Set when the call is a virtual call with the parameter being the
1486 associated object pointer rather than a simple direct call. */
1487 unsigned polymorphic : 1;
1488 /* Set when the call is a call of a pointer loaded from contents of an
1489 aggregate at offset. */
1490 unsigned agg_contents : 1;
1491 /* Set when this is a call through a member pointer. */
1492 unsigned member_ptr : 1;
1493 /* When the previous bit is set, this one determines whether the destination
1494 is loaded from a parameter passed by reference. */
1495 unsigned by_ref : 1;
1496 /* For polymorphic calls this specifies whether the virtual table pointer
1497 may have changed between function entry and the call. */
1498 unsigned vptr_changed : 1;
1499 };
1500
1501 struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
1502 for_user)) cgraph_edge {
1503 friend class cgraph_node;
1504
1505 /* Remove the edge in the cgraph. */
1506 void remove (void);
1507
1508 /* Change field call_stmt of edge to NEW_STMT.
1509 If UPDATE_SPECULATIVE and E is any component of speculative
1510 edge, then update all components. */
1511 void set_call_stmt (gcall *new_stmt, bool update_speculative = true);
1512
1513 /* Redirect callee of the edge to N. The function does not update underlying
1514 call expression. */
1515 void redirect_callee (cgraph_node *n);
1516
1517 /* If the edge does not lead to a thunk, simply redirect it to N. Otherwise
1518 create one or more equivalent thunks for N and redirect E to the first in
1519 the chain. Note that it is then necessary to call
1520 n->expand_all_artificial_thunks once all callers are redirected. */
1521 void redirect_callee_duplicating_thunks (cgraph_node *n);
1522
1523 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1524 CALLEE. */
1527 cgraph_edge *make_direct (cgraph_node *callee);
1528
1529 /* Turn edge into speculative call calling N2. Update
1530 the profile so the direct call is taken DIRECT_COUNT times
1531 with DIRECT_FREQUENCY. */
1532 cgraph_edge *make_speculative (cgraph_node *n2, gcov_type direct_count,
1533 int direct_frequency);
1534
1535 /* Given speculative call edge, return all three components. */
1536 void speculative_call_info (cgraph_edge *&direct, cgraph_edge *&indirect,
1537 ipa_ref *&reference);
1538
1539 /* Speculative call edge turned out to be a direct call to CALLEE_DECL.
1540 Remove the speculative call sequence and return the edge representing the
1541 call. It is up to the caller to redirect the call as appropriate. */
1542 cgraph_edge *resolve_speculation (tree callee_decl = NULL);
1543
1544 /* If necessary, change the function declaration in the call statement
1545 associated with the edge so that it corresponds to the edge callee. */
1546 gimple *redirect_call_stmt_to_callee (void);
1547
1548 /* Create a clone of the edge in node N, represented by CALL_STMT in the
1549 callgraph. */
1550 cgraph_edge * clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
1551 gcov_type count_scale, int freq_scale, bool update_original);
1552
1553 /* Verify edge count and frequency. */
1554 bool verify_count_and_frequency ();
1555
1556 /* Return true when a call of the edge cannot lead to a return from the caller
1557 and thus it is safe to ignore its side effects for IPA analysis
1558 when computing side effects of the caller. */
1559 bool cannot_lead_to_return_p (void);
1560
1561 /* Return true when the edge represents a direct recursion. */
1562 bool recursive_p (void);
1563
1564 /* Return true if the call can be hot. */
1565 bool maybe_hot_p (void);
1566
1567 /* Rebuild cgraph edges for current function node. This needs to be run after
1568 passes that don't update the cgraph. */
1569 static unsigned int rebuild_edges (void);
1570
1571 /* Rebuild cgraph references for current function node. This needs to be run
1572 after passes that don't update the cgraph. */
1573 static void rebuild_references (void);
1574
1575 /* Expected number of executions: calculated in profile.c. */
1576 gcov_type count;
1577 cgraph_node *caller;
1578 cgraph_node *callee;
1579 cgraph_edge *prev_caller;
1580 cgraph_edge *next_caller;
1581 cgraph_edge *prev_callee;
1582 cgraph_edge *next_callee;
1583 gcall *call_stmt;
1584 /* Additional information about an indirect call. Not cleared when an edge
1585 becomes direct. */
1586 cgraph_indirect_call_info *indirect_info;
1587 PTR GTY ((skip (""))) aux;
1588 /* When equal to CIF_OK, inline this call. Otherwise, points to the
1589 explanation why function was not inlined. */
1590 enum cgraph_inline_failed_t inline_failed;
1591 /* The stmt_uid of call_stmt. This is used by LTO to recover the call_stmt
1592 when the function is serialized in. */
1593 unsigned int lto_stmt_uid;
1594 /* Expected frequency of executions within the function.
1595 When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
1596 per function call. The range is 0 to CGRAPH_FREQ_MAX. */
1597 int frequency;
1598 /* Unique id of the edge. */
1599 int uid;
1600 /* Whether this edge was made direct by indirect inlining. */
1601 unsigned int indirect_inlining_edge : 1;
1602 /* Whether this edge describes an indirect call with an undetermined
1603 callee. */
1604 unsigned int indirect_unknown_callee : 1;
1606 /* True if the corresponding CALL stmt cannot be inlined. */
1607 unsigned int call_stmt_cannot_inline_p : 1;
1608 /* Can this call throw externally? */
1609 unsigned int can_throw_external : 1;
1610 /* Edges with SPECULATIVE flag represent indirect calls that were
1611 speculatively turned into direct calls (e.g. by profile feedback).
1612 The final code sequence will have form:
1613
1614 if (call_target == expected_fn)
1615 expected_fn ();
1616 else
1617 call_target ();
1618
1619 Every speculative call is represented by three components attached
1620 to the same call statement:
1621 1) a direct call (to expected_fn)
1622 2) an indirect call (to call_target)
1623 3) an IPA_REF_ADDR reference to expected_fn.
1624
1625 Optimizers may later redirect the direct call to a clone, so 1) and 3)
1626 do not necessarily have to agree on the destination. */
1627 unsigned int speculative : 1;
1628 /* Set to true when caller is a constructor or destructor of polymorphic
1629 type. */
1630 unsigned in_polymorphic_cdtor : 1;
1631
1632 private:
1633 /* Remove the edge from the list of the callers of the callee. */
1634 void remove_caller (void);
1635
1636 /* Remove the edge from the list of the callees of the caller. */
1637 void remove_callee (void);
1638
1639 /* Set callee N of call graph edge and add it to the corresponding set of
1640 callers. */
1641 void set_callee (cgraph_node *n);
1642
1643 /* Output flags of edge to a file F. */
1644 void dump_edge_flags (FILE *f);
1645
1646 /* Verify that call graph edge corresponds to DECL from the associated
1647 statement. Return true if the verification should fail. */
1648 bool verify_corresponds_to_fndecl (tree decl);
1649 };
1650
1651 #define CGRAPH_FREQ_BASE 1000
1652 #define CGRAPH_FREQ_MAX 100000
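
/* For example, with CGRAPH_FREQ_BASE of 1000, a call site executed on
   average 2.5 times per invocation of its caller gets frequency 2500,
   while one executed once per ten invocations gets frequency 100;
   frequencies are capped at CGRAPH_FREQ_MAX.  */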
1653
1654 /* The varpool data structure.
1655 Each static variable decl has an assigned varpool_node. */
1656
1657 class GTY((tag ("SYMTAB_VARIABLE"))) varpool_node : public symtab_node {
1658 public:
1659 /* Dump given varpool node to F. */
1660 void dump (FILE *f);
1661
1662 /* Dump given varpool node to stderr. */
1663 void DEBUG_FUNCTION debug (void);
1664
1665 /* Remove variable from symbol table. */
1666 void remove (void);
1667
1668 /* Remove node initializer when it is no longer needed. */
1669 void remove_initializer (void);
1670
1671 void analyze (void);
1672
1673 /* Return variable availability. */
1674 availability get_availability (void);
1675
1676 /* When doing LTO, read variable's constructor from disk if
1677 it is not already present. */
1678 tree get_constructor (void);
1679
1680 /* Return true if variable has constructor that can be used for folding. */
1681 bool ctor_useable_for_folding_p (void);
1682
1683 /* For a given variable pool node, walk the alias chain to return the
1684 variable it is an alias of. Do not walk through thunks.
1685 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
1686 inline varpool_node *ultimate_alias_target
1687 (availability *availability = NULL);
1688
1689 /* Return node that alias is aliasing. */
1690 inline varpool_node *get_alias_target (void);
1691
1692 /* Output one variable, if necessary. Return whether we output it. */
1693 bool assemble_decl (void);
1694
1695 /* For variables in named sections make sure get_variable_section
1696 is called before we switch to those sections. Then section
1697 conflicts between read-only sections and read-only sections requiring
1698 relocations can be resolved. */
1699 void finalize_named_section_flags (void);
1700
1701 /* Call callback on varpool symbol and aliases associated with the varpool symbol.
1702 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1703 skipped. */
1704 bool call_for_symbol_and_aliases (bool (*callback) (varpool_node *, void *),
1705 void *data,
1706 bool include_overwritable);
1707
1708 /* Return true when variable should be considered externally visible. */
1709 bool externally_visible_p (void);
1710
1711 /* Return true when all references to variable must be visible
1712 in ipa_ref_list, i.e. if the variable is not externally visible or not
1713 used in some magic way (asm statement or such).
1715 The magic uses are all summarized in force_output flag. */
1716 inline bool all_refs_explicit_p ();
1717
1718 /* Return true when variable can be removed from variable pool
1719 if all references are eliminated. */
1720 inline bool can_remove_if_no_refs_p (void);
1721
1722 /* Add the variable DECL to the varpool.
1723 Unlike finalize_decl, this function is intended to be used by the middle
1724 end and allows insertion of new variables at an arbitrary point of
1725 compilation. */
1726 static void add (tree decl);
1727
1728 /* Return varpool node for given symbol and check it is a variable. */
1729 static inline varpool_node *get (const_tree decl);
1730
1731 /* Mark DECL as finalized. By finalizing the declaration, the frontend
1732 instructs the middle end to output the variable to the asm file, if it is
1733 needed or externally visible. */
1734 static void finalize_decl (tree decl);
1735
1736 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
1737 Extra name aliases are output whenever DECL is output. */
1738 static varpool_node * create_extra_name_alias (tree alias, tree decl);
1739
1740 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
1741 Extra name aliases are output whenever DECL is output. */
1742 static varpool_node * create_alias (tree, tree);
1743
1744 /* Dump the variable pool to F. */
1745 static void dump_varpool (FILE *f);
1746
1747 /* Dump the variable pool to stderr. */
1748 static void DEBUG_FUNCTION debug_varpool (void);
1749
1750 /* Allocate a new varpool node and insert it into basic data structures. */
1751 static varpool_node *create_empty (void);
1752
1753 /* Return varpool node assigned to DECL. Create new one when needed. */
1754 static varpool_node *get_create (tree decl);
1755
1756 /* Given an assembler name, lookup node. */
1757 static varpool_node *get_for_asmname (tree asmname);
1758
1759 /* Set when variable is scheduled to be assembled. */
1760 unsigned output : 1;
1761
1762 /* Set when variable has statically initialized pointer
1763 or is a static bounds variable and needs initialization. */
1764 unsigned need_bounds_init : 1;
1765
1766 /* Set if the variable is dynamically initialized, except for
1767 function local statics. */
1768 unsigned dynamically_initialized : 1;
1769
1770 ENUM_BITFIELD(tls_model) tls_model : 3;
1771
1772 /* Set if the variable is known to be used by a single function only.
1773 This is computed by the ipa_single_use pass and used by late optimizations
1774 in places where optimization would be valid for a local static variable
1775 if we did not do any inter-procedural code movement. */
1776 unsigned used_by_single_function : 1;
1777
1778 private:
1779 /* Assemble aliases associated with the varpool node. */
1780 void assemble_aliases (void);
1781
1782 /* Worker for call_for_symbol_and_aliases. */
1783 bool call_for_symbol_and_aliases_1 (bool (*callback) (varpool_node *, void *),
1784 void *data,
1785 bool include_overwritable);
1786 };
1787
1788 /* Every top level asm statement is put into an asm_node. */
1789
1790 struct GTY(()) asm_node {
1791
1792
1793 /* Next asm node. */
1794 asm_node *next;
1795 /* String for this asm node. */
1796 tree asm_str;
1797 /* Ordering of all cgraph nodes. */
1798 int order;
1799 };
1800
1801 /* Report whether or not THIS symtab node is a function, aka cgraph_node. */
1802
1803 template <>
1804 template <>
1805 inline bool
1806 is_a_helper <cgraph_node *>::test (symtab_node *p)
1807 {
1808 return p && p->type == SYMTAB_FUNCTION;
1809 }
1810
1811 /* Report whether or not THIS symtab node is a variable, aka varpool_node. */
1812
1813 template <>
1814 template <>
1815 inline bool
1816 is_a_helper <varpool_node *>::test (symtab_node *p)
1817 {
1818 return p && p->type == SYMTAB_VARIABLE;
1819 }
1820
1821 /* Macros to access the next item in the list of free cgraph nodes and
1822 edges. */
1823 #define NEXT_FREE_NODE(NODE) dyn_cast<cgraph_node *> ((NODE)->next)
1824 #define SET_NEXT_FREE_NODE(NODE,NODE2) ((NODE))->next = NODE2
1825 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
1826
1827 typedef void (*cgraph_edge_hook)(cgraph_edge *, void *);
1828 typedef void (*cgraph_node_hook)(cgraph_node *, void *);
1829 typedef void (*varpool_node_hook)(varpool_node *, void *);
1830 typedef void (*cgraph_2edge_hook)(cgraph_edge *, cgraph_edge *, void *);
1831 typedef void (*cgraph_2node_hook)(cgraph_node *, cgraph_node *, void *);
1832
1833 struct cgraph_edge_hook_list;
1834 struct cgraph_node_hook_list;
1835 struct varpool_node_hook_list;
1836 struct cgraph_2edge_hook_list;
1837 struct cgraph_2node_hook_list;
1838
1839 /* Map from a symbol to initialization/finalization priorities. */
1840 struct GTY(()) symbol_priority_map {
1841 priority_type init;
1842 priority_type fini;
1843 };
1844
1845 enum symtab_state
1846 {
1847 /* Frontend is parsing and finalizing functions. */
1848 PARSING,
1849 /* Callgraph is being constructed. It is safe to add new functions. */
1850 CONSTRUCTION,
1851 /* Callgraph is being streamed-in at LTO time. */
1852 LTO_STREAMING,
1853 /* Callgraph is built and early IPA passes are being run. */
1854 IPA,
1855 /* Callgraph is built and all functions are transformed to SSA form. */
1856 IPA_SSA,
1857 /* All inline decisions are done; it is now possible to remove extern inline
1858 functions and virtual call targets. */
1859 IPA_SSA_AFTER_INLINING,
1860 /* Functions are now ordered and being passed to RTL expanders. */
1861 EXPANSION,
1862 /* All cgraph expansion is done. */
1863 FINISHED
1864 };
1865
1866 struct asmname_hasher : ggc_ptr_hash <symtab_node>
1867 {
1868 typedef const_tree compare_type;
1869
1870 static hashval_t hash (symtab_node *n);
1871 static bool equal (symtab_node *n, const_tree t);
1872 };
1873
1874 class GTY((tag ("SYMTAB"))) symbol_table
1875 {
1876 public:
1877 friend class symtab_node;
1878 friend class cgraph_node;
1879 friend class cgraph_edge;
1880
1881 symbol_table (): cgraph_max_summary_uid (1)
1882 {
1883 }
1884
1885 /* Initialize callgraph dump file. */
1886 void initialize (void);
1887
1888 /* Register a top-level asm statement ASM_STR. */
1889 inline asm_node *finalize_toplevel_asm (tree asm_str);
1890
1891 /* Analyze the whole compilation unit once it is parsed completely. */
1892 void finalize_compilation_unit (void);
1893
1894 /* The C++ frontend produces same body aliases all over the place, even before
1895 PCH gets streamed out. It relies on us linking the aliases with their
1896 functions in order to do the fixups, but ipa-ref is not PCH safe.
1897 Consequently we first produce aliases without links, but once the C++ FE is
1898 sure it won't stream PCH we build the links via this function. */
1899 void process_same_body_aliases (void);
1900
1901 /* Perform simple optimizations based on callgraph. */
1902 void compile (void);
1903
1904 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
1905 functions into the callgraph so that they look like ordinary reachable
1906 functions inserted into the callgraph already at construction time. */
1907 void process_new_functions (void);
1908
1909 /* Once all functions from the compilation unit are in memory, produce all
1910 clones and update all calls. We might also do this on demand if we don't
1911 want to bring all functions to memory prior to compilation, but the current
1912 WHOPR implementation does that and it is a bit easier to keep everything
1913 right in this order. */
1914 void materialize_all_clones (void);
1915
1916 /* Register a symbol NODE. */
1917 inline void register_symbol (symtab_node *node);
1918
1919 inline void
1920 clear_asm_symbols (void)
1921 {
1922 asmnodes = NULL;
1923 asm_last_node = NULL;
1924 }
1925
1926 /* Perform reachability analysis and reclaim all unreachable nodes. */
1927 bool remove_unreachable_nodes (FILE *file);
1928
1929 /* Optimization of function bodies might've rendered some variables as
1930 unnecessary, so we want to avoid compiling them. Re-do
1931 reachability starting from variables that are either externally visible
1932 or were referred to from the asm output routines. */
1933 void remove_unreferenced_decls (void);
1934
1935 /* Unregister a symbol NODE. */
1936 inline void unregister (symtab_node *node);
1937
1938 /* Allocate new callgraph node and insert it into basic data structures. */
1939 cgraph_node *create_empty (void);
1940
1941 /* Release a callgraph NODE with UID and put it into the list
1942 of free nodes. */
1943 void release_symbol (cgraph_node *node, int uid);
1944
1945 /* Output all variables enqueued to be assembled. */
1946 bool output_variables (void);
1947
1948 /* Weakrefs may be associated with external decls and thus not output
1949 at expansion time. Emit all necessary aliases. */
1950 void output_weakrefs (void);
1951
1952 /* Return first symbol in the symbol table. */
1953 inline symtab_node *first_symbol (void);
1954
1955 /* Return first assembler symbol. */
1956 inline asm_node *
1957 first_asm_symbol (void)
1958 {
1959 return asmnodes;
1960 }
1961
1962 /* Return first static symbol with definition. */
1963 inline symtab_node *first_defined_symbol (void);
1964
1965 /* Return first variable. */
1966 inline varpool_node *first_variable (void);
1967
1968 /* Return next variable after NODE. */
1969 inline varpool_node *next_variable (varpool_node *node);
1970
1971 /* Return first static variable with initializer. */
1972 inline varpool_node *first_static_initializer (void);
1973
1974 /* Return next static variable with initializer after NODE. */
1975 inline varpool_node *next_static_initializer (varpool_node *node);
1976
1977 /* Return first static variable with definition. */
1978 inline varpool_node *first_defined_variable (void);
1979
1980 /* Return next static variable with definition after NODE. */
1981 inline varpool_node *next_defined_variable (varpool_node *node);
1982
1983 /* Return first function with body defined. */
1984 inline cgraph_node *first_defined_function (void);
1985
1986 /* Return next function with body defined after NODE. */
1987 inline cgraph_node *next_defined_function (cgraph_node *node);
1988
1989 /* Return first function. */
1990 inline cgraph_node *first_function (void);
1991
1992 /* Return next function. */
1993 inline cgraph_node *next_function (cgraph_node *node);
1994
1995 /* Return first function with gimple body defined. */
1996 cgraph_node *first_function_with_gimple_body (void);
1997
1998 /* Return next function with gimple body defined after NODE. */
1999 inline cgraph_node *next_function_with_gimple_body (cgraph_node *node);
2000
2001 /* Register HOOK to be called with DATA on each removed edge. */
2002 cgraph_edge_hook_list *add_edge_removal_hook (cgraph_edge_hook hook,
2003 void *data);
2004
2005 /* Remove ENTRY from the list of hooks called on removing edges. */
2006 void remove_edge_removal_hook (cgraph_edge_hook_list *entry);
2007
2008 /* Register HOOK to be called with DATA on each removed node. */
2009 cgraph_node_hook_list *add_cgraph_removal_hook (cgraph_node_hook hook,
2010 void *data);
2011
2012 /* Remove ENTRY from the list of hooks called on removing nodes. */
2013 void remove_cgraph_removal_hook (cgraph_node_hook_list *entry);
2014
2015 /* Register HOOK to be called with DATA on each removed node. */
2016 varpool_node_hook_list *add_varpool_removal_hook (varpool_node_hook hook,
2017 void *data);
2018
2019 /* Remove ENTRY from the list of hooks called on removing nodes. */
2020 void remove_varpool_removal_hook (varpool_node_hook_list *entry);
2021
2022 /* Register HOOK to be called with DATA on each inserted node. */
2023 cgraph_node_hook_list *add_cgraph_insertion_hook (cgraph_node_hook hook,
2024 void *data);
2025
2026 /* Remove ENTRY from the list of hooks called on inserted nodes. */
2027 void remove_cgraph_insertion_hook (cgraph_node_hook_list *entry);
2028
2029 /* Register HOOK to be called with DATA on each inserted node. */
2030 varpool_node_hook_list *add_varpool_insertion_hook (varpool_node_hook hook,
2031 void *data);
2032
2033 /* Remove ENTRY from the list of hooks called on inserted nodes. */
2034 void remove_varpool_insertion_hook (varpool_node_hook_list *entry);
2035
2036 /* Register HOOK to be called with DATA on each duplicated edge. */
2037 cgraph_2edge_hook_list *add_edge_duplication_hook (cgraph_2edge_hook hook,
2038 void *data);
2039 /* Remove ENTRY from the list of hooks called on duplicating edges. */
2040 void remove_edge_duplication_hook (cgraph_2edge_hook_list *entry);
2041
2042 /* Register HOOK to be called with DATA on each duplicated node. */
2043 cgraph_2node_hook_list *add_cgraph_duplication_hook (cgraph_2node_hook hook,
2044 void *data);
2045
2046 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
2047 void remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry);
2048
2049 /* Call all edge removal hooks. */
2050 void call_edge_removal_hooks (cgraph_edge *e);
2051
2052 /* Call all node insertion hooks. */
2053 void call_cgraph_insertion_hooks (cgraph_node *node);
2054
2055 /* Call all node removal hooks. */
2056 void call_cgraph_removal_hooks (cgraph_node *node);
2057
2058 /* Call all node duplication hooks. */
2059 void call_cgraph_duplication_hooks (cgraph_node *node, cgraph_node *node2);
2060
2061 /* Call all edge duplication hooks. */
2062 void call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2);
2063
2064 /* Call all node removal hooks. */
2065 void call_varpool_removal_hooks (varpool_node *node);
2066
2067 /* Call all node insertion hooks. */
2068 void call_varpool_insertion_hooks (varpool_node *node);
2069
2070 /* Arrange node to be first in its entry of assembler_name_hash. */
2071 void symtab_prevail_in_asm_name_hash (symtab_node *node);
2072
2073 /* Initialize the asm name hash unless it is already initialized. */
2074 void symtab_initialize_asm_name_hash (void);
2075
2076 /* Set the DECL_ASSEMBLER_NAME and update symtab hashtables. */
2077 void change_decl_assembler_name (tree decl, tree name);
2078
2079 int cgraph_count;
2080 int cgraph_max_uid;
2081 int cgraph_max_summary_uid;
2082
2083 int edges_count;
2084 int edges_max_uid;
2085
2086 symtab_node* GTY(()) nodes;
2087 asm_node* GTY(()) asmnodes;
2088 asm_node* GTY(()) asm_last_node;
2089 cgraph_node* GTY(()) free_nodes;
2090
2091 /* Head of a linked list of unused (freed) call graph edges.
2092 Do not GTY((delete)) this list so UIDs get reliably recycled. */
2093 cgraph_edge * GTY(()) free_edges;
2094
2095 /* The order index of the next symtab node to be created. This is
2096 used so that we can sort the cgraph nodes in order by when we saw
2097 them, to support -fno-toplevel-reorder. */
2098 int order;
2099
2100 /* Set when whole unit has been analyzed so we can access global info. */
2101 bool global_info_ready;
2102 /* What state callgraph is in right now. */
2103 enum symtab_state state;
2104 /* Set when the cgraph is fully built and the basic flags are computed. */
2105 bool function_flags_ready;
2106
2107 bool cpp_implicit_aliases_done;
2108
2109 /* Hash table used to hold sections. */
2110 hash_table<section_name_hasher> *GTY(()) section_hash;
2111
2112 /* Hash table used to convert assembler names into nodes. */
2113 hash_table<asmname_hasher> *assembler_name_hash;
2114
2115 /* Hash table used to hold init priorities. */
2116 hash_map<symtab_node *, symbol_priority_map> *init_priority_hash;
2117
2118 FILE* GTY ((skip)) dump_file;
2119
2120 private:
2121 /* Allocate new callgraph node. */
2122 inline cgraph_node * allocate_cgraph_symbol (void);
2123
2124 /* Allocate a cgraph_edge structure and fill it with data according to the
2125 parameters of which only CALLEE can be NULL (when creating an indirect call
2126 edge). */
2127 cgraph_edge *create_edge (cgraph_node *caller, cgraph_node *callee,
2128 gcall *call_stmt, gcov_type count, int freq,
2129 bool indir_unknown_callee);
2130
2131 /* Put the edge onto the free list. */
2132 void free_edge (cgraph_edge *e);
2133
2134 /* Insert NODE to assembler name hash. */
2135 void insert_to_assembler_name_hash (symtab_node *node, bool with_clones);
2136
2137 /* Remove NODE from assembler name hash. */
2138 void unlink_from_assembler_name_hash (symtab_node *node, bool with_clones);
2139
2140 /* Hash asmnames ignoring the user specified marks. */
2141 static hashval_t decl_assembler_name_hash (const_tree asmname);
2142
2143 /* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */
2144 static bool decl_assembler_name_equal (tree decl, const_tree asmname);
2145
2146 friend struct asmname_hasher;
2147
2148 /* List of hooks triggered when an edge is removed. */
2149 cgraph_edge_hook_list * GTY((skip)) m_first_edge_removal_hook;
2150 /* List of hooks triggered when a cgraph node is removed. */
2151 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_removal_hook;
2152 /* List of hooks triggered when an edge is duplicated. */
2153 cgraph_2edge_hook_list * GTY((skip)) m_first_edge_duplicated_hook;
2154 /* List of hooks triggered when a node is duplicated. */
2155 cgraph_2node_hook_list * GTY((skip)) m_first_cgraph_duplicated_hook;
2156 /* List of hooks triggered when a function is inserted. */
2157 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_insertion_hook;
2158 /* List of hooks triggered when a variable is inserted. */
2159 varpool_node_hook_list * GTY((skip)) m_first_varpool_insertion_hook;
2160 /* List of hooks triggered when a node is removed. */
2161 varpool_node_hook_list * GTY((skip)) m_first_varpool_removal_hook;
2162 };
2163
2164 extern GTY(()) symbol_table *symtab;
2165
2166 extern vec<cgraph_node *> cgraph_new_nodes;
2167
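/* Illustrative sketch only: an IPA pass that keeps per-node data typically
   registers a removal hook so the data is released together with the node,
   and unregisters the hook when the pass is finished:

     static void
     note_removal (cgraph_node *node, void *data)
     {
       // ... release whatever is associated with NODE ...
     }

     cgraph_node_hook_list *hook
       = symtab->add_cgraph_removal_hook (note_removal, NULL);
     ...
     symtab->remove_cgraph_removal_hook (hook);

   NOTE_REMOVAL is a hypothetical callback used only for this example.  */
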
2168 inline hashval_t
2169 asmname_hasher::hash (symtab_node *n)
2170 {
2171 return symbol_table::decl_assembler_name_hash
2172 (DECL_ASSEMBLER_NAME (n->decl));
2173 }
2174
2175 inline bool
2176 asmname_hasher::equal (symtab_node *n, const_tree t)
2177 {
2178 return symbol_table::decl_assembler_name_equal (n->decl, t);
2179 }
2180
2181 /* In cgraph.c */
2182 void cgraph_c_finalize (void);
2183 void release_function_body (tree);
2184 cgraph_indirect_call_info *cgraph_allocate_init_indirect_info (void);
2185
2186 void cgraph_update_edges_for_call_stmt (gimple *, tree, gimple *);
2187 bool cgraph_function_possibly_inlined_p (tree);
2188
2189 const char* cgraph_inline_failed_string (cgraph_inline_failed_t);
2190 cgraph_inline_failed_type_t cgraph_inline_failed_type (cgraph_inline_failed_t);
2191
2192 extern bool gimple_check_call_matching_types (gimple *, tree, bool);
2193
2194 /* In cgraphunit.c */
2195 void cgraphunit_c_finalize (void);
2196
2197 /* Initialize datastructures so DECL is a function in lowered gimple form.
2198 IN_SSA is true if the gimple is in SSA. */
2199 basic_block init_lowered_empty_function (tree, bool, gcov_type);
2200
2201 /* In cgraphclones.c */
2202
2203 tree clone_function_name_1 (const char *, const char *);
2204 tree clone_function_name (tree decl, const char *);
2205
2206 void tree_function_versioning (tree, tree, vec<ipa_replace_map *, va_gc> *,
2207 bool, bitmap, bool, bitmap, basic_block);
2208
2209 /* In cgraphbuild.c */
2210 int compute_call_stmt_bb_frequency (tree, basic_block bb);
2211 void record_references_in_initializer (tree, bool);
2212
2213 /* In ipa.c */
2214 void cgraph_build_static_cdtor (char which, tree body, int priority);
2215 bool ipa_discover_readonly_nonaddressable_vars (void);
2216
2217 /* In varpool.c */
2218 tree ctor_for_folding (tree);
2219
2220 /* In tree-chkp.c */
2221 extern bool chkp_function_instrumented_p (tree fndecl);
2222
2223 /* Return true when the symbol is a real symbol, i.e. it is not an inline
2224 clone or an abstract function kept for debug info purposes only. */
2225 inline bool
2226 symtab_node::real_symbol_p (void)
2227 {
2228 cgraph_node *cnode;
2229
2230 if (DECL_ABSTRACT_P (decl))
2231 return false;
2232 if (!is_a <cgraph_node *> (this))
2233 return true;
2234 cnode = dyn_cast <cgraph_node *> (this);
2235 if (cnode->global.inlined_to)
2236 return false;
2237 return true;
2238 }
2239
2240 /* Return true if DECL should have an entry in the symbol table if used.
2241 Those are functions and static & external variables. */
2242
2243 static inline bool
2244 decl_in_symtab_p (const_tree decl)
2245 {
2246 return (TREE_CODE (decl) == FUNCTION_DECL
2247 || (TREE_CODE (decl) == VAR_DECL
2248 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))));
2249 }
2250
2251 inline bool
2252 symtab_node::in_same_comdat_group_p (symtab_node *target)
2253 {
2254 symtab_node *source = this;
2255
2256 if (cgraph_node *cn = dyn_cast <cgraph_node *> (this))
2257 {
2258 if (cn->global.inlined_to)
2259 source = cn->global.inlined_to;
2260 }
2261 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
2262 {
2263 if (cn->global.inlined_to)
2264 target = cn->global.inlined_to;
2265 }
2266
2267 return source->get_comdat_group () == target->get_comdat_group ();
2268 }
2269
2270 /* Return node that alias is aliasing. */
2271
2272 inline symtab_node *
2273 symtab_node::get_alias_target (void)
2274 {
2275 ipa_ref *ref = NULL;
2276 iterate_reference (0, ref);
2277 if (ref->use == IPA_REF_CHKP)
2278 iterate_reference (1, ref);
2279 gcc_checking_assert (ref->use == IPA_REF_ALIAS);
2280 return ref->referred;
2281 }
2282
2283 /* Return next symbol with definition after this node. */
2284
2285 inline symtab_node *
2286 symtab_node::next_defined_symbol (void)
2287 {
2288 symtab_node *node1 = next;
2289
2290 for (; node1; node1 = node1->next)
2291 if (node1->definition)
2292 return node1;
2293
2294 return NULL;
2295 }
2296
2297 /* Iterates I-th reference in the list, REF is also set. */
2298
2299 inline ipa_ref *
2300 symtab_node::iterate_reference (unsigned i, ipa_ref *&ref)
2301 {
2302 vec_safe_iterate (ref_list.references, i, &ref);
2303
2304 return ref;
2305 }
2306
2307 /* Iterates I-th referring item in the list, REF is also set. */
2308
2309 inline ipa_ref *
2310 symtab_node::iterate_referring (unsigned i, ipa_ref *&ref)
2311 {
2312 ref_list.referring.iterate (i, &ref);
2313
2314 return ref;
2315 }
2316
2317 /* Iterates I-th referring alias item in the list, REF is also set. */
2318
2319 inline ipa_ref *
2320 symtab_node::iterate_direct_aliases (unsigned i, ipa_ref *&ref)
2321 {
2322 ref_list.referring.iterate (i, &ref);
2323
2324 if (ref && ref->use != IPA_REF_ALIAS)
2325 return NULL;
2326
2327 return ref;
2328 }
2329
2330 /* Return true if list contains an alias. */
2331
2332 inline bool
2333 symtab_node::has_aliases_p (void)
2334 {
2335 ipa_ref *ref = NULL;
2336
2337 return (iterate_direct_aliases (0, ref) != NULL);
2338 }
2339
2340 /* Return true when RESOLUTION indicates that the linker will use
2341 the symbol from non-LTO object files. */
2342
2343 inline bool
2344 resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
2345 {
2346 return (resolution == LDPR_PREVAILING_DEF
2347 || resolution == LDPR_PREEMPTED_REG
2348 || resolution == LDPR_RESOLVED_EXEC
2349 || resolution == LDPR_RESOLVED_DYN);
2350 }
2351
2352 /* Return true when symtab_node is known to be used from other (non-LTO)
2353 object file. Known only when doing LTO via linker plugin. */
2354
2355 inline bool
2356 symtab_node::used_from_object_file_p (void)
2357 {
2358 if (!TREE_PUBLIC (decl) || DECL_EXTERNAL (decl))
2359 return false;
2360 if (resolution_used_from_other_file_p (resolution))
2361 return true;
2362 return false;
2363 }
2364
2365 /* Return varpool node for given symbol and check it is a variable. */
2366
2367 inline varpool_node *
2368 varpool_node::get (const_tree decl)
2369 {
2370 gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
2371 return dyn_cast<varpool_node *> (symtab_node::get (decl));
2372 }
2373
2374 /* Register a symbol NODE. */
2375
2376 inline void
2377 symbol_table::register_symbol (symtab_node *node)
2378 {
2379 node->next = nodes;
2380 node->previous = NULL;
2381
2382 if (nodes)
2383 nodes->previous = node;
2384 nodes = node;
2385
2386 node->order = order++;
2387 }
2388
2389 /* Register a top-level asm statement ASM_STR. */
2390
2391 asm_node *
2392 symbol_table::finalize_toplevel_asm (tree asm_str)
2393 {
2394 asm_node *node;
2395
2396 node = ggc_cleared_alloc<asm_node> ();
2397 node->asm_str = asm_str;
2398 node->order = order++;
2399 node->next = NULL;
2400
2401 if (asmnodes == NULL)
2402 asmnodes = node;
2403 else
2404 asm_last_node->next = node;
2405
2406 asm_last_node = node;
2407 return node;
2408 }
2409
2410 /* Unregister a symbol NODE. */
2411 inline void
2412 symbol_table::unregister (symtab_node *node)
2413 {
2414 if (node->previous)
2415 node->previous->next = node->next;
2416 else
2417 nodes = node->next;
2418
2419 if (node->next)
2420 node->next->previous = node->previous;
2421
2422 node->next = NULL;
2423 node->previous = NULL;
2424 }
2425
2426 /* Release a callgraph NODE with UID and put it into the list of free nodes. */
2427
2428 inline void
2429 symbol_table::release_symbol (cgraph_node *node, int uid)
2430 {
2431 cgraph_count--;
2432
2433 /* Clear out the node to NULL all pointers and add the node to the free
2434 list. */
2435 memset (node, 0, sizeof (*node));
2436 node->type = SYMTAB_FUNCTION;
2437 node->uid = uid;
2438 SET_NEXT_FREE_NODE (node, free_nodes);
2439 free_nodes = node;
2440 }
2441
2442 /* Allocate new callgraph node. */
2443
2444 inline cgraph_node *
2445 symbol_table::allocate_cgraph_symbol (void)
2446 {
2447 cgraph_node *node;
2448
2449 if (free_nodes)
2450 {
2451 node = free_nodes;
2452 free_nodes = NEXT_FREE_NODE (node);
2453 }
2454 else
2455 {
2456 node = ggc_cleared_alloc<cgraph_node> ();
2457 node->uid = cgraph_max_uid++;
2458 }
2459
2460 node->summary_uid = cgraph_max_summary_uid++;
2461 return node;
2462 }
2463
2464
2465 /* Return first symbol in the symbol table. */
2466 inline symtab_node *
2467 symbol_table::first_symbol (void)
2468 {
2469 return nodes;
2470 }
2471
2472 /* Walk all symbols. */
2473 #define FOR_EACH_SYMBOL(node) \
2474 for ((node) = symtab->first_symbol (); (node); (node) = (node)->next)
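
/* Illustrative sketch only: walk the whole symbol table and dispatch on the
   kind of each symbol using the is_a/dyn_cast helpers declared above:

     symtab_node *snode;
     FOR_EACH_SYMBOL (snode)
       if (cgraph_node *cnode = dyn_cast <cgraph_node *> (snode))
         ;  // handle a function
       else if (varpool_node *vnode = dyn_cast <varpool_node *> (snode))
         ;  // handle a variable

   The bodies shown are placeholders.  */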
2475
2476 /* Return first static symbol with definition. */
2477 inline symtab_node *
2478 symbol_table::first_defined_symbol (void)
2479 {
2480 symtab_node *node;
2481
2482 for (node = nodes; node; node = node->next)
2483 if (node->definition)
2484 return node;
2485
2486 return NULL;
2487 }
2488
2489 /* Walk all symbols with definitions in current unit. */
2490 #define FOR_EACH_DEFINED_SYMBOL(node) \
2491 for ((node) = symtab->first_defined_symbol (); (node); \
2492 (node) = node->next_defined_symbol ())
2493
2494 /* Return first variable. */
2495 inline varpool_node *
2496 symbol_table::first_variable (void)
2497 {
2498 symtab_node *node;
2499 for (node = nodes; node; node = node->next)
2500 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
2501 return vnode;
2502 return NULL;
2503 }
2504
2505 /* Return next variable after NODE. */
2506 inline varpool_node *
2507 symbol_table::next_variable (varpool_node *node)
2508 {
2509 symtab_node *node1 = node->next;
2510 for (; node1; node1 = node1->next)
2511 if (varpool_node *vnode1 = dyn_cast <varpool_node *> (node1))
2512 return vnode1;
2513 return NULL;
2514 }
2515 /* Walk all variables. */
2516 #define FOR_EACH_VARIABLE(node) \
2517 for ((node) = symtab->first_variable (); \
2518 (node); \
2519 (node) = symtab->next_variable ((node)))
2520
2521 /* Return first static variable with initializer. */
2522 inline varpool_node *
2523 symbol_table::first_static_initializer (void)
2524 {
2525 symtab_node *node;
2526 for (node = nodes; node; node = node->next)
2527 {
2528 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2529 if (vnode && DECL_INITIAL (node->decl))
2530 return vnode;
2531 }
2532 return NULL;
2533 }
2534
2535 /* Return next static variable with initializer after NODE. */
2536 inline varpool_node *
2537 symbol_table::next_static_initializer (varpool_node *node)
2538 {
2539 symtab_node *node1 = node->next;
2540 for (; node1; node1 = node1->next)
2541 {
2542 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2543 if (vnode1 && DECL_INITIAL (node1->decl))
2544 return vnode1;
2545 }
2546 return NULL;
2547 }
2548
2549 /* Walk all static variables with initializer set. */
2550 #define FOR_EACH_STATIC_INITIALIZER(node) \
2551 for ((node) = symtab->first_static_initializer (); (node); \
2552 (node) = symtab->next_static_initializer (node))
2553
2554 /* Return first static variable with definition. */
2555 inline varpool_node *
2556 symbol_table::first_defined_variable (void)
2557 {
2558 symtab_node *node;
2559 for (node = nodes; node; node = node->next)
2560 {
2561 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2562 if (vnode && vnode->definition)
2563 return vnode;
2564 }
2565 return NULL;
2566 }
2567
2568 /* Return next static variable with definition after NODE. */
2569 inline varpool_node *
2570 symbol_table::next_defined_variable (varpool_node *node)
2571 {
2572 symtab_node *node1 = node->next;
2573 for (; node1; node1 = node1->next)
2574 {
2575 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2576 if (vnode1 && vnode1->definition)
2577 return vnode1;
2578 }
2579 return NULL;
2580 }
2581 /* Walk all variables with definitions in current unit. */
2582 #define FOR_EACH_DEFINED_VARIABLE(node) \
2583 for ((node) = symtab->first_defined_variable (); (node); \
2584 (node) = symtab->next_defined_variable (node))
2585
2586 /* Return first function with body defined. */
2587 inline cgraph_node *
2588 symbol_table::first_defined_function (void)
2589 {
2590 symtab_node *node;
2591 for (node = nodes; node; node = node->next)
2592 {
2593 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2594 if (cn && cn->definition)
2595 return cn;
2596 }
2597 return NULL;
2598 }
2599
2600 /* Return next function with body defined after NODE. */
2601 inline cgraph_node *
2602 symbol_table::next_defined_function (cgraph_node *node)
2603 {
2604 symtab_node *node1 = node->next;
2605 for (; node1; node1 = node1->next)
2606 {
2607 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2608 if (cn1 && cn1->definition)
2609 return cn1;
2610 }
2611 return NULL;
2612 }
2613
2614 /* Walk all functions with body defined. */
2615 #define FOR_EACH_DEFINED_FUNCTION(node) \
2616 for ((node) = symtab->first_defined_function (); (node); \
2617 (node) = symtab->next_defined_function ((node)))
2618
2619 /* Return first function. */
2620 inline cgraph_node *
2621 symbol_table::first_function (void)
2622 {
2623 symtab_node *node;
2624 for (node = nodes; node; node = node->next)
2625 if (cgraph_node *cn = dyn_cast <cgraph_node *> (node))
2626 return cn;
2627 return NULL;
2628 }
2629
2630 /* Return next function. */
2631 inline cgraph_node *
2632 symbol_table::next_function (cgraph_node *node)
2633 {
2634 symtab_node *node1 = node->next;
2635 for (; node1; node1 = node1->next)
2636 if (cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1))
2637 return cn1;
2638 return NULL;
2639 }
2640
2641 /* Return first function with gimple body defined. */
2642 inline cgraph_node *
2643 symbol_table::first_function_with_gimple_body (void)
2644 {
2645 symtab_node *node;
2646 for (node = nodes; node; node = node->next)
2647 {
2648 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2649 if (cn && cn->has_gimple_body_p ())
2650 return cn;
2651 }
2652 return NULL;
2653 }
2654
2655 /* Return next function with gimple body defined after NODE. */
2656 inline cgraph_node *
2657 symbol_table::next_function_with_gimple_body (cgraph_node *node)
2658 {
2659 symtab_node *node1 = node->next;
2660 for (; node1; node1 = node1->next)
2661 {
2662 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2663 if (cn1 && cn1->has_gimple_body_p ())
2664 return cn1;
2665 }
2666 return NULL;
2667 }
2668
2669 /* Walk all functions. */
2670 #define FOR_EACH_FUNCTION(node) \
2671 for ((node) = symtab->first_function (); (node); \
2672 (node) = symtab->next_function ((node)))
2673
2674 /* Return true when callgraph node is a function with Gimple body defined
2675 in current unit. Functions can also be defined externally or they
2676 can be thunks with no Gimple representation.
2677
2678 Note that at WPA stage, the function body may not be present in memory. */
2679
2680 inline bool
2681 cgraph_node::has_gimple_body_p (void)
2682 {
2683 return definition && !thunk.thunk_p && !alias;
2684 }
2685
2686 /* Walk all functions with body defined. */
2687 #define FOR_EACH_FUNCTION_WITH_GIMPLE_BODY(node) \
2688 for ((node) = symtab->first_function_with_gimple_body (); (node); \
2689 (node) = symtab->next_function_with_gimple_body (node))
2690
2691 /* Uniquize all constants that appear in memory.
2692 Each constant in memory thus far output is recorded
2693 in `const_desc_table'. */
2694
2695 struct GTY((for_user)) constant_descriptor_tree {
2696 /* A MEM for the constant. */
2697 rtx rtl;
2698
2699 /* The value of the constant. */
2700 tree value;
2701
2702 /* Hash of value. Computing the hash from value each time
2703 hashfn is called can't work properly, as that means recursive
2704 use of the hash table during hash table expansion. */
2705 hashval_t hash;
2706 };
2707
2708 /* Return true when function is only called directly or via its aliases,
2709 i.e. it is not externally visible, its address was not taken and
2710 it is not used in any other non-standard way. */
2711
2712 inline bool
2713 cgraph_node::only_called_directly_or_aliased_p (void)
2714 {
2715 gcc_assert (!global.inlined_to);
2716 return (!force_output && !address_taken
2717 && !used_from_other_partition
2718 && !DECL_VIRTUAL_P (decl)
2719 && !DECL_STATIC_CONSTRUCTOR (decl)
2720 && !DECL_STATIC_DESTRUCTOR (decl)
2721 && !used_from_object_file_p ()
2722 && !externally_visible);
2723 }
2724
2725 /* Return true when function can be removed from callgraph
2726 if all direct calls are eliminated. */
2727
2728 inline bool
2729 cgraph_node::can_remove_if_no_direct_calls_and_refs_p (void)
2730 {
2731 gcc_checking_assert (!global.inlined_to);
2732 /* Instrumentation clones should not be removed before
2733 instrumentation happens. New callers may appear after
2734 instrumentation. */
2735 if (instrumentation_clone
2736 && !chkp_function_instrumented_p (decl))
2737 return false;
2738 /* Extern inlines can always go, we will use the external definition. */
2739 if (DECL_EXTERNAL (decl))
2740 return true;
2741 /* When the function is needed, we cannot remove it. */
2742 if (force_output || used_from_other_partition)
2743 return false;
2744 if (DECL_STATIC_CONSTRUCTOR (decl)
2745 || DECL_STATIC_DESTRUCTOR (decl))
2746 return false;
2747 /* Only COMDAT functions can be removed if externally visible. */
2748 if (externally_visible
2749 && (!DECL_COMDAT (decl)
2750 || forced_by_abi
2751 || used_from_object_file_p ()))
2752 return false;
2753 return true;
2754 }
2755
2756 /* Return true when variable can be removed from variable pool
2757 if all references are eliminated. */
2758
2759 inline bool
2760 varpool_node::can_remove_if_no_refs_p (void)
2761 {
2762 if (DECL_EXTERNAL (decl))
2763 return true;
2764 return (!force_output && !used_from_other_partition
2765 && ((DECL_COMDAT (decl)
2766 && !forced_by_abi
2767 && !used_from_object_file_p ())
2768 || !externally_visible
2769 || DECL_HAS_VALUE_EXPR_P (decl)));
2770 }
2771
2772 /* Return true when all references to variable must be visible in ipa_ref_list,
2773 i.e. if the variable is not externally visible or not used in some magic
2774 way (asm statement or such).
2775 The magic uses are all summarized in force_output flag. */
2776
2777 inline bool
2778 varpool_node::all_refs_explicit_p ()
2779 {
2780 return (definition
2781 && !externally_visible
2782 && !used_from_other_partition
2783 && !force_output);
2784 }
2785
2786 struct tree_descriptor_hasher : ggc_ptr_hash<constant_descriptor_tree>
2787 {
2788 static hashval_t hash (constant_descriptor_tree *);
2789 static bool equal (constant_descriptor_tree *, constant_descriptor_tree *);
2790 };
2791
2792 /* Constant pool accessor function. */
2793 hash_table<tree_descriptor_hasher> *constant_pool_htab (void);
2794
2795 /* Return node that alias is aliasing. */
2796
2797 inline cgraph_node *
2798 cgraph_node::get_alias_target (void)
2799 {
2800 return dyn_cast <cgraph_node *> (symtab_node::get_alias_target ());
2801 }
2802
2803 /* Return node that alias is aliasing. */
2804
2805 inline varpool_node *
2806 varpool_node::get_alias_target (void)
2807 {
2808 return dyn_cast <varpool_node *> (symtab_node::get_alias_target ());
2809 }
2810
2811 /* Walk the alias chain to return the symbol NODE is an alias of.
2812 If NODE is not an alias, return NODE.
2813 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
2814
2815 inline symtab_node *
2816 symtab_node::ultimate_alias_target (enum availability *availability)
2817 {
2818 if (!alias)
2819 {
2820 if (availability)
2821 *availability = get_availability ();
2822 return this;
2823 }
2824
2825 return ultimate_alias_target_1 (availability);
2826 }
2827
2828 /* Given a function symbol, walk the alias chain to return the function it
2829 is an alias of. Do not walk through thunks.
2830 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
2831
2832 inline cgraph_node *
2833 cgraph_node::ultimate_alias_target (enum availability *availability)
2834 {
2835 cgraph_node *n = dyn_cast <cgraph_node *>
2836 (symtab_node::ultimate_alias_target (availability));
2837 if (!n && availability)
2838 *availability = AVAIL_NOT_AVAILABLE;
2839 return n;
2840 }
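
/* Illustrative sketch only: a common pattern is to resolve aliases first
   and only trust the body when the target cannot be replaced at link time:

     enum availability avail;
     cgraph_node *target = node->ultimate_alias_target (&avail);
     if (target && avail >= AVAIL_AVAILABLE)
       ;  // TARGET's body is the one used in the final program

   The comparison relies on the availability enum being ordered from least
   to most available.  */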
2841
2842 /* For a given variable pool node, walk the alias chain to return the
2843 variable it is an alias of. Do not walk through thunks.
2844 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
2845
2846 inline varpool_node *
2847 varpool_node::ultimate_alias_target (availability *availability)
2848 {
2849 varpool_node *n = dyn_cast <varpool_node *>
2850 (symtab_node::ultimate_alias_target (availability));
2851
2852 if (!n && availability)
2853 *availability = AVAIL_NOT_AVAILABLE;
2854 return n;
2855 }
2856
2857 /* Set callee N of call graph edge and add it to the corresponding set of
2858 callers. */
2859
2860 inline void
2861 cgraph_edge::set_callee (cgraph_node *n)
2862 {
2863 prev_caller = NULL;
2864 if (n->callers)
2865 n->callers->prev_caller = this;
2866 next_caller = n->callers;
2867 n->callers = this;
2868 callee = n;
2869 }
2870
2871 /* Redirect callee of the edge to N. The function does not update underlying
2872 call expression. */
2873
2874 inline void
2875 cgraph_edge::redirect_callee (cgraph_node *n)
2876 {
2877 /* Remove from callers list of the current callee. */
2878 remove_callee ();
2879
2880 /* Insert to callers list of the new callee. */
2881 set_callee (n);
2882 }
2883
2884 /* Return true when the edge represents a direct recursion. */
2885
2886 inline bool
2887 cgraph_edge::recursive_p (void)
2888 {
2889 cgraph_node *c = callee->ultimate_alias_target ();
2890 if (caller->global.inlined_to)
2891 return caller->global.inlined_to->decl == c->decl;
2892 else
2893 return caller->decl == c->decl;
2894 }
2895
2896 /* Remove the edge from the list of the callers of the callee. */
2897
2898 inline void
2899 cgraph_edge::remove_callee (void)
2900 {
2901 gcc_assert (!indirect_unknown_callee);
2902 if (prev_caller)
2903 prev_caller->next_caller = next_caller;
2904 if (next_caller)
2905 next_caller->prev_caller = prev_caller;
2906 if (!prev_caller)
2907 callee->callers = next_caller;
2908 }
2909
2910 /* Return true if the TM_CLONE bit is set for a given FNDECL. */
2911 static inline bool
2912 decl_is_tm_clone (const_tree fndecl)
2913 {
2914 cgraph_node *n = cgraph_node::get (fndecl);
2915 if (n)
2916 return n->tm_clone;
2917 return false;
2918 }
2919
2920 /* Indicate that a node is needed, i.e. reachable via some
2921 external means. */
2922
2923 inline void
2924 cgraph_node::mark_force_output (void)
2925 {
2926 force_output = 1;
2927 gcc_checking_assert (!global.inlined_to);
2928 }
2929
2930 /* Return true if function should be optimized for size. */
2931
2932 inline bool
2933 cgraph_node::optimize_for_size_p (void)
2934 {
2935 if (opt_for_fn (decl, optimize_size))
2936 return true;
2937 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2938 return true;
2939 else
2940 return false;
2941 }
2942
2943 /* Return symtab_node for NODE or create one if it is not present
2944 in symtab. */
2945
2946 inline symtab_node *
2947 symtab_node::get_create (tree node)
2948 {
2949 if (TREE_CODE (node) == VAR_DECL)
2950 return varpool_node::get_create (node);
2951 else
2952 return cgraph_node::get_create (node);
2953 }
2954
2955 /* Return availability of NODE. */
2956
2957 inline enum availability
2958 symtab_node::get_availability (void)
2959 {
2960 if (is_a <cgraph_node *> (this))
2961 return dyn_cast <cgraph_node *> (this)->get_availability ();
2962 else
2963 return dyn_cast <varpool_node *> (this)->get_availability ();
2964 }
2965
2966 /* Call callback on symtab node and aliases associated with this node.
2967 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2968 skipped. */
2969
2970 inline bool
2971 symtab_node::call_for_symbol_and_aliases (bool (*callback) (symtab_node *,
2972 void *),
2973 void *data,
2974 bool include_overwritable)
2975 {
2976 if (callback (this, data))
2977 return true;
2978 if (has_aliases_p ())
2979 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
2980 return false;
2981 }
2982
2983 /* Call callback on function and aliases associated with the function.
2984 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2985 skipped. */
2986
2987 inline bool
2988 cgraph_node::call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
2989 void *),
2990 void *data,
2991 bool include_overwritable)
2992 {
2993 if (callback (this, data))
2994 return true;
2995 if (has_aliases_p ())
2996 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
2997 return false;
2998 }
2999
3000 /* Call callback on varpool symbol and aliases associated with the varpool symbol.
3001 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
3002 skipped. */
3003
3004 inline bool
3005 varpool_node::call_for_symbol_and_aliases (bool (*callback) (varpool_node *,
3006 void *),
3007 void *data,
3008 bool include_overwritable)
3009 {
3010 if (callback (this, data))
3011 return true;
3012 if (has_aliases_p ())
3013 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3014 return false;
3015 }
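
/* Illustrative sketch only: the CALLBACK given to the walkers above returns
   true to stop the walk early, and the result propagates to the caller.
   For instance, a pass could ask whether a symbol or any of its aliases is
   externally visible:

     static bool
     visible_p (symtab_node *n, void *)
     {
       return n->externally_visible;
     }

     bool any_visible
       = node->call_for_symbol_and_aliases (visible_p, NULL, true);

   VISIBLE_P is a hypothetical callback; the same pattern applies to the
   cgraph_node and varpool_node overloads.  */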
3016
3017 /* Return true if the reference may be used in an address comparison. */
3018
3019 inline bool
3020 ipa_ref::address_matters_p ()
3021 {
3022 if (use != IPA_REF_ADDR)
3023 return false;
3024 /* Addresses taken from virtual tables are never compared. */
3025 if (is_a <varpool_node *> (referring)
3026 && DECL_VIRTUAL_P (referring->decl))
3027 return false;
3028 return referred->address_can_be_compared_p ();
3029 }
3030
3031 /* Build polymorphic call context for indirect call E. */
3032
3033 inline
3034 ipa_polymorphic_call_context::ipa_polymorphic_call_context (cgraph_edge *e)
3035 {
3036 gcc_checking_assert (e->indirect_info->polymorphic);
3037 *this = e->indirect_info->context;
3038 }
3039
3040 /* Build empty "I know nothing" context. */
3041
3042 inline
3043 ipa_polymorphic_call_context::ipa_polymorphic_call_context ()
3044 {
3045 clear_speculation ();
3046 clear_outer_type ();
3047 invalid = false;
3048 }
3049
3050 /* Make context non-speculative. */
3051
3052 inline void
3053 ipa_polymorphic_call_context::clear_speculation ()
3054 {
3055 speculative_outer_type = NULL;
3056 speculative_offset = 0;
3057 speculative_maybe_derived_type = false;
3058 }
3059
3060 /* Produce context specifying all derived types of OTR_TYPE. If OTR_TYPE is
3061 NULL, the context is set to dummy "I know nothing" setting. */
3062
3063 inline void
3064 ipa_polymorphic_call_context::clear_outer_type (tree otr_type)
3065 {
3066 outer_type = otr_type ? TYPE_MAIN_VARIANT (otr_type) : NULL;
3067 offset = 0;
3068 maybe_derived_type = true;
3069 maybe_in_construction = true;
3070 dynamic = true;
3071 }
3072
3073 /* Adjust all offsets in contexts by OFF bits. */
3074
3075 inline void
3076 ipa_polymorphic_call_context::offset_by (HOST_WIDE_INT off)
3077 {
3078 if (outer_type)
3079 offset += off;
3080 if (speculative_outer_type)
3081 speculative_offset += off;
3082 }
3083
3084 /* Return TRUE if context is fully useless. */
3085
3086 inline bool
3087 ipa_polymorphic_call_context::useless_p () const
3088 {
3089 return (!outer_type && !speculative_outer_type);
3090 }
3091
3092 /* Return true if NODE is local. Instrumentation clones are counted as local
3093 only when the original function is local. */
3094
3095 static inline bool
3096 cgraph_local_p (cgraph_node *node)
3097 {
3098 if (!node->instrumentation_clone || !node->instrumented_version)
3099 return node->local.local;
3100
3101 return node->local.local && node->instrumented_version->local.local;
3102 }
3103
3104 /* When using fprintf (or similar), problems can arise with
3105 transient generated strings. Many string-generation APIs
3106 only support one result being alive at once (e.g. by
3107 returning a pointer to a statically-allocated buffer).
3108
3109 If there is more than one generated string within one
3110 fprintf call: the first string gets evicted or overwritten
3111 by the second, before fprintf is fully evaluated.
3112 See e.g. PR/53136.
3113
3114 This function provides a workaround for this, by providing
3115 a simple way to create copies of these transient strings,
3116 without the need to have explicit cleanup:
3117
3118 fprintf (dumpfile, "string 1: %s string 2:%s\n",
3119 xstrdup_for_dump (EXPR_1),
3120 xstrdup_for_dump (EXPR_2));
3121
3122 This is actually a simple wrapper around ggc_strdup, but
3123 the name documents the intent. We require that no GC can occur
3124 within the fprintf call. */
3125
3126 static inline const char *
3127 xstrdup_for_dump (const char *transient_str)
3128 {
3129 return ggc_strdup (transient_str);
3130 }
3131
3132 #endif /* GCC_CGRAPH_H */