/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "stringpool.h"
#include "cgraph.h"
#include "gimplify.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "dbgcnt.h"
#include "debug.h"
#include "stringpool.h"
#include "attribs.h"
/* Return true when NODE has an ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
                       void *)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}

/* Return true when NODE can be a target of an indirect call.  */

static bool
is_indirect_call_target_p (struct cgraph_node *node, void *)
{
  return node->indirect_call_target;
}

/* Look for all functions inlined to NODE and update their inlined_to
   pointers to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->inlined_to)
      {
        e->callee->inlined_to = inlined_to;
        update_inlined_to_pointer (e->callee, inlined_to);
      }
}

/* Add symtab NODE to the queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by a pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable or when we
   find their bodies are needed for further cloning.  In the second case we
   mark them by a pointer to 2 after processing so they are re-queued once
   they become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
              hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *)2 && !reachable->contains (node))
    return;
  node->aux = *first;
  *first = node;
}
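
/* The AUX pointer of a symbol encodes its queue state: NULL means the node
   was never enqueued, a pointer to 2 means it was already processed while in
   the boundary (and may be re-enqueued if it later becomes reachable), and
   anything else links it into the work list terminated by a pointer to 1.
   A minimal stand-alone sketch of this intrusive work-list idiom, using a
   hypothetical wl_node type instead of symtab_node:

     struct wl_node { struct wl_node *aux; };

     static void
     wl_push (struct wl_node *n, struct wl_node **first)
     {
       if (n->aux)
         return;                  // already queued or processed
       n->aux = *first;           // link through the AUX pointer
       *first = n;
     }

     static struct wl_node *
     wl_pop (struct wl_node **first)
     {
       if (*first == (struct wl_node *) (void *) 1)
         return NULL;             // hit the terminator; queue is empty
       struct wl_node *n = *first;
       *first = n->aux;
       return n;
     }  */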

/* Return true if NODE may get inlined later.
   This is used to keep DECL_EXTERNAL function bodies around long enough
   so the inliner can process them.  */

static bool
possible_inline_candidate_p (symtab_node *node)
{
  if (symtab->state >= IPA_SSA_AFTER_INLINING)
    return false;
  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
  if (!cnode)
    return false;
  if (DECL_UNINLINABLE (cnode->decl))
    return false;
  if (opt_for_fn (cnode->decl, optimize))
    return true;
  if (symtab->state >= IPA_SSA)
    return false;
  return lookup_attribute ("always_inline", DECL_ATTRIBUTES (node->decl));
}

/* Process references.  */

static void
process_references (symtab_node *snode,
                    symtab_node **first,
                    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      if (node->definition && !node->in_other_partition
          && ((!DECL_EXTERNAL (node->decl) || node->alias)
              || (possible_inline_candidate_p (node)
                  /* We use variable constructors during late compilation for
                     constant folding.  Keep references alive so partitioning
                     knows about potential references.  */
                  || (VAR_P (node->decl)
                      && (flag_wpa
                          || flag_incremental_link
                               == INCREMENTAL_LINK_LTO)
                      && dyn_cast <varpool_node *> (node)
                           ->ctor_useable_for_folding_p ()))))
        {
          /* Be sure that we will not optimize out alias target
             body.  */
          if (DECL_EXTERNAL (node->decl)
              && node->alias
              && symtab->state < IPA_SSA_AFTER_INLINING)
            reachable->add (body);
          reachable->add (node);
        }
      enqueue_node (node, first, reachable);
    }
}

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to perform trivial devirtualization when no or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
                               struct cgraph_edge *edge,
                               symtab_node **first,
                               hash_set<symtab_node *> *reachable)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
        (edge, &final, &cache_token);

  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
        {
          struct cgraph_node *n = targets[i];

          /* Do not bother to mark virtual methods in anonymous namespace;
             either we will find use of virtual table defining it, or it is
             unused.  */
          if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
              && type_in_anonymous_namespace_p
                   (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
            continue;

          n->indirect_call_target = true;
          symtab_node *body = n->function_symbol ();

          /* Prior to inlining, keep alive bodies of possible targets for
             devirtualization.  */
          if (n->definition
              && (possible_inline_candidate_p (body)
                  && opt_for_fn (body->decl, flag_devirtualize)))
            {
              /* Be sure that we will not optimize out alias target
                 body.  */
              if (DECL_EXTERNAL (n->decl)
                  && n->alias
                  && symtab->state < IPA_SSA_AFTER_INLINING)
                reachable->add (body);
              reachable->add (n);
            }
          /* Even after inlining we want to keep the possible targets in the
             boundary, so late passes can still produce a direct call even if
             the chance for inlining is lost.  */
          enqueue_node (n, first, reachable);
        }
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
        {
          cgraph_node *target, *node = edge->caller;
          if (targets.length () == 1)
            target = targets[0];
          else
            target = cgraph_node::get_create
                       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

          if (dump_enabled_p ())
            {
              dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
                               "devirtualizing call in %s to %s\n",
                               edge->caller->dump_name (),
                               target->dump_name ());
            }
          edge = edge->make_direct (target);
          if (ipa_fn_summaries)
            ipa_update_overall_fn_summary (node->inlined_to
                                           ? node->inlined_to : node);
          else if (edge->call_stmt)
            edge->redirect_call_stmt_to_callee ();
        }
    }
}
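
/* As a sketch of the trivial devirtualization above: for a C++ type that is
   final (or lives in an anonymous namespace), the possible-target set of a
   virtual call is known exactly, so with a single candidate

     struct A final { virtual int f () { return 1; } };
     int call (A *a) { return a->f (); }

   the polymorphic edge for a->f () can be turned into a direct call to A::f
   by make_direct above; with no candidates at all, the call is redirected to
   __builtin_unreachable.  The exact outcome still depends on the front end
   and on flag_devirtualize, so treat this only as an illustration.  */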

/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; the bodies need
     to stay in memory until inlining, in the hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

   - virtual functions are kept in the callgraph even if they seem unreachable,
     in the hope that calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call.  In theory early opts and IPA should work out all important cases.

   - virtual clones need the bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is unreachable
     otherwise.  To avoid the origin from sitting in the callgraph and being
     walked by IPA passes, we turn them into unanalyzed nodes with body
     defined.

     We maintain the set of function declarations whose body needs to stay in
     body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when an offline copy of the
     function or an inline clone is being removed.

   - C++ virtual tables keyed to other units are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that need
   to stay) and symbols that are in the boundary (i.e. external symbols referenced
   by reachable symbols or origins of clones).  The queue is represented
   as a linked list via AUX pointers, terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn the definition into a declaration, but we may keep the function
   body around based on body_needed_for_clonning.

   All symbols that enter the queue have a non-zero AUX pointer and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of the boundary and once
   as a real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set the AUX pointer of processed symbols in the boundary to constant 2.  */

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
  if (flag_checking)
    {
      FOR_EACH_FUNCTION (node)
        gcc_assert (!node->aux);
      FOR_EACH_VARIABLE (vnode)
        gcc_assert (!vnode->aux);
    }
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with the master clone and
     thus cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called
     on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      node->indirect_call_target = false;
      if (node->definition
          && !node->inlined_to
          && !node->in_other_partition
          && !node->can_remove_if_no_direct_calls_and_refs_p ())
        {
          gcc_assert (!node->inlined_to);
          reachable.add (node);
          enqueue_node (node, &first, &reachable);
        }
      else
        gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p()
        && !vnode->in_other_partition)
      {
        reachable.add (vnode);
        enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *)first->aux;

      /* If we are processing a symbol in the boundary, mark its AUX pointer
         for possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
        {
          node->aux = (void *)2;
          if (node->alias && node->analyzed)
            enqueue_node (node->get_alias_target (), &first, &reachable);
        }
      else
        {
          if (TREE_CODE (node->decl) == FUNCTION_DECL
              && DECL_ABSTRACT_ORIGIN (node->decl))
            {
              struct cgraph_node *origin_node
                = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
              if (origin_node && !origin_node->used_as_abstract_origin)
                {
                  origin_node->used_as_abstract_origin = true;
                  gcc_assert (!origin_node->prev_sibling_clone);
                  gcc_assert (!origin_node->next_sibling_clone);
                  for (cgraph_node *n = origin_node->clones; n;
                       n = n->next_sibling_clone)
                    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
                      n->used_as_abstract_origin = true;
                }
            }
          /* If any symbol in a comdat group is reachable, force
             all externally visible symbols in the same comdat
             group to be reachable as well.  Comdat-local symbols
             can be discarded if all uses were inlined.  */
          if (node->same_comdat_group)
            {
              symtab_node *next;
              for (next = node->same_comdat_group;
                   next != node;
                   next = next->same_comdat_group)
                if (!next->comdat_local_p ()
                    && !reachable.add (next))
                  enqueue_node (next, &first, &reachable);
            }
          /* Mark references as reachable.  */
          process_references (node, &first, &reachable);
        }

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
        {
          /* Mark the callees reachable unless they are direct calls to extern
             inline functions we decided to not inline.  */
          if (!in_boundary_p)
            {
              struct cgraph_edge *e;
              /* Keep alive possible targets for devirtualization.  */
              if (opt_for_fn (cnode->decl, optimize)
                  && opt_for_fn (cnode->decl, flag_devirtualize))
                {
                  struct cgraph_edge *next;
                  for (e = cnode->indirect_calls; e; e = next)
                    {
                      next = e->next_callee;
                      if (e->indirect_info->polymorphic)
                        walk_polymorphic_call_targets (&reachable_call_targets,
                                                       e, &first, &reachable);
                    }
                }
              for (e = cnode->callees; e; e = e->next_callee)
                {
                  symtab_node *body = e->callee->function_symbol ();
                  if (e->callee->definition
                      && !e->callee->in_other_partition
                      && (!e->inline_failed
                          || !DECL_EXTERNAL (e->callee->decl)
                          || e->callee->alias
                          || possible_inline_candidate_p (e->callee)))
                    {
                      /* Be sure that we will not optimize out alias target
                         body.  */
                      if (DECL_EXTERNAL (e->callee->decl)
                          && e->callee->alias
                          && symtab->state < IPA_SSA_AFTER_INLINING)
                        reachable.add (body);
                      reachable.add (e->callee);
                    }
                  enqueue_node (e->callee, &first, &reachable);
                }

              /* When an inline clone exists, mark the body to be preserved so
                 that when removing the offline copy of the function we don't
                 kill it.  */
              if (cnode->inlined_to)
                body_needed_for_clonning.add (cnode->decl);

              /* For non-inline clones, force their origins to the boundary and
                 ensure that the body is not removed.  */
              while (cnode->clone_of)
                {
                  bool noninline = cnode->clone_of->decl != cnode->decl;
                  cnode = cnode->clone_of;
                  if (noninline)
                    {
                      body_needed_for_clonning.add (cnode->decl);
                      enqueue_node (cnode, &first, &reachable);
                    }
                }

            }
          else if (cnode->thunk.thunk_p)
            enqueue_node (cnode->callees->callee, &first, &reachable);

          /* If any reachable function has simd clones, mark them as
             reachable as well.  */
          if (cnode->simd_clones)
            {
              cgraph_node *next;
              for (next = cnode->simd_clones;
                   next;
                   next = next->simdclone->next_clone)
                if (in_boundary_p
                    || !reachable.add (next))
                  enqueue_node (next, &first, &reachable);
            }
        }
      /* When we see a constructor of an external variable, keep referred nodes
         in the boundary.  This will also hold initializers of the external
         vars NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
          && DECL_EXTERNAL (node->decl)
          && !vnode->alias
          && in_boundary_p)
        {
          struct ipa_ref *ref = NULL;
          for (int i = 0; node->iterate_reference (i, ref); i++)
            enqueue_node (ref->referred, &first, &reachable);
        }
    }

  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
        {
          if (file)
            fprintf (file, " %s", node->dump_name ());
          node->remove ();
          changed = true;
        }
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
        {
          /* We keep definitions of thunks and aliases in the boundary so
             we can walk to the ultimate alias targets and function symbols
             reliably.  */
          if (node->alias || node->thunk.thunk_p)
            ;
          else if (!body_needed_for_clonning.contains (node->decl))
            {
              /* Make the node a non-clone so that we do not attempt to
                 materialize it later.  */
              if (node->clone_of)
                node->remove_from_clone_tree ();
              node->release_body ();
            }
          else if (!node->clone_of)
            gcc_assert (in_lto_p || DECL_RESULT (node->decl));
          if (node->definition && !node->alias && !node->thunk.thunk_p)
            {
              if (file)
                fprintf (file, " %s", node->dump_name ());
              node->body_removed = true;
              node->analyzed = false;
              node->definition = false;
              node->cpp_implicit_alias = false;
              node->alias = false;
              node->transparent_alias = false;
              node->thunk.thunk_p = false;
              node->weakref = false;
              /* After early inlining we drop always_inline attributes on
                 bodies of functions that are still referenced (have their
                 address taken).  */
              DECL_ATTRIBUTES (node->decl)
                = remove_attribute ("always_inline",
                                    DECL_ATTRIBUTES (node->decl));
              if (!node->in_other_partition)
                node->local = false;
              node->remove_callees ();
              node->remove_all_references ();
              changed = true;
            }
        }
      else
        gcc_assert (node->clone_of || !node->has_gimple_body_p ()
                    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->inlined_to
          && !node->callers)
        {
          gcc_assert (node->clones);
          node->inlined_to = NULL;
          update_inlined_to_pointer (node, node);
        }
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
          /* For can_refer_decl_in_current_unit_p we want to track for
             all external variables if they are defined in another partition
             or not.  */
          && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
        {
          struct ipa_ref *ref = NULL;

          /* First remove the aliases, so varpool::remove can possibly look up
             the constructor and save it for future use.  */
          while (vnode->iterate_direct_aliases (0, ref))
            {
              if (file)
                fprintf (file, " %s", ref->referred->dump_name ());
              ref->referring->remove ();
            }
          if (file)
            fprintf (file, " %s", vnode->dump_name ());
          vnext = next_variable (vnode);
          /* Signal removal to the debug machinery.  */
          if (! flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
            {
              vnode->definition = false;
              (*debug_hooks->late_global_decl) (vnode->decl);
            }
          vnode->remove ();
          changed = true;
        }
      else if (!reachable.contains (vnode) && !vnode->alias)
        {
          tree init;
          if (vnode->definition)
            {
              if (file)
                fprintf (file, " %s", vnode->name ());
              changed = true;
            }
          /* Keep body if it may be useful for constant folding.  */
          if ((flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
              || ((init = ctor_for_folding (vnode->decl)) == error_mark_node))
            vnode->remove_initializer ();
          else
            DECL_INITIAL (vnode->decl) = init;
          vnode->body_removed = true;
          vnode->definition = false;
          vnode->analyzed = false;
          vnode->aux = NULL;

          vnode->remove_from_same_comdat_group ();

          vnode->remove_all_references ();
        }
      else
        vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
        && !node->used_from_other_partition)
      {
        if (!node->call_for_symbol_and_aliases
            (has_addr_references_p, NULL, true))
          {
            if (file)
              fprintf (file, " %s", node->name ());
            node->address_taken = false;
            changed = true;
            if (node->local_p ()
                /* Virtual functions may be kept in cgraph just because
                   of possible later devirtualization.  Do not mark them as
                   local too early so we won't optimize them out before
                   we are done with polymorphic call analysis.  */
                && (symtab->state >= IPA_SSA_AFTER_INLINING
                    || !node->call_for_symbol_and_aliases
                         (is_indirect_call_target_p, NULL, true)))
              {
                node->local = true;
                if (file)
                  fprintf (file, " (local)");
              }
          }
      }
  if (file)
    fprintf (file, "\n");

  symtab_node::checking_verify_symtab_nodes ();

  /* If we removed something, perhaps the profile could be improved.  */
  if (changed && (optimize || in_lto_p) && ipa_call_summaries)
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}
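
/* A small illustration of what this function reclaims.  Given a unit like

     static int unused_helper (void) { return 1; }
     static int used_helper (void) { return 2; }
     int foo (void) { return used_helper (); }

   foo is externally visible and therefore seeds the reachable set;
   used_helper is reached through foo's call edge; unused_helper is never
   enqueued, keeps a NULL AUX pointer and is removed together with its body.
   In the rest of the compiler this function is usually reached through the
   TODO_remove_functions machinery rather than called directly; the exact
   call sites are not shown here.  */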

/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed; also clear EXPLICIT_REFS if the references to the given variable
   do not need to be explicit.  */

void
process_references (varpool_node *vnode,
                    bool *written, bool *address_taken,
                    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
              && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
        *address_taken = true;
        break;
      case IPA_REF_LOAD:
        *read = true;
        break;
      case IPA_REF_STORE:
        *written = true;
        break;
      case IPA_REF_ALIAS:
        process_references (dyn_cast<varpool_node *> (ref->referring), written,
                            address_taken, read, explicit_refs);
        break;
      }
}

/* Set TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set the writeonly bit and clear the initializer, since it will not be
   needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data)
{
  vnode->writeonly = true;
  if (optimize || in_lto_p)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
        {
          if (vnode->num_references ())
            *(bool *)data = true;
          vnode->remove_all_references ();
        }
    }
  return false;
}

/* Clear the addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables that no longer have their address taken, are read-only
   or write-only, and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This cannot be done in between gimplify and omp_expand since the
   readonly flag plays a role in what is shared and what is not.  Currently we
   do this transformation as part of whole program visibility and re-do it at
   the ipa-reference pass (to take cloning into account), but it would
   make sense to do it before early optimizations.  */

bool
ipa_discover_variable_flags (void)
{
  if (!flag_ipa_reference_addressable)
    return false;

  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
        && (TREE_ADDRESSABLE (vnode->decl)
            || !vnode->writeonly
            || !TREE_READONLY (vnode->decl)))
      {
        bool written = false;
        bool address_taken = false;
        bool read = false;
        bool explicit_refs = true;

        process_references (vnode, &written, &address_taken, &read,
                            &explicit_refs);
        if (!explicit_refs)
          continue;
        if (!address_taken)
          {
            if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (non-addressable)", vnode->name ());
            vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
                                                true);
          }
        if (!address_taken && !written
            /* Making a variable in an explicit section read-only can cause a
               section type conflict.
               See e.g. gcc.c-torture/compile/pr23237.c */
            && vnode->get_section () == NULL)
          {
            if (!TREE_READONLY (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (read-only)", vnode->name ());
            vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
          }
        if (!vnode->writeonly && !read && !address_taken && written)
          {
            if (dump_file)
              fprintf (dump_file, " %s (write-only)", vnode->name ());
            vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
                                                true);
          }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}
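
/* For example (a sketch of the flags discovered above), in a unit like

     static int logged;          // only ever stored to
     static int limit = 100;     // only ever read

     void set_logged (int v) { logged = v; }
     int over_limit (int v) { return v > limit; }

   limit is never written and never has its address taken, so it is promoted
   to TREE_READONLY; logged is written but never read or address-taken, so it
   is marked write-only, which lets later passes treat stores to it as
   removable.  Variables placed in an explicit section are skipped for the
   read-only promotion, as noted above.  */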

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).
   BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced.  */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final,
                             tree optimization,
                             tree target)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (!targetm.have_ctors_dtors && final)
    {
      sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
      name = get_file_function_name (which_buf);
    }
  else
    {
      /* Produce a sane name, but one not recognizable by collect2, just in
         case we fail to inline the function.  */
      sprintf (which_buf, "_sub_%c_%.5d_%d", which, priority, counter++);
      name = get_identifier (which_buf);
    }

  decl = build_decl (input_location, FUNCTION_DECL, name,
                     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
                        RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl) = optimization;
  DECL_FUNCTION_SPECIFIC_TARGET (decl) = target;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).
   BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  /* FIXME: We should be able to
     gcc_assert (!in_lto_p);
     because at LTO time the global options are not safe to use.
     Unfortunately ASAN finish_file will produce constructors late and they
     may lead to surprises.  */
  cgraph_build_static_cdtor_1 (which, body, priority, false,
                               optimization_default_node,
                               target_option_default_node);
}
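
/* Typical usage (a sketch; some_fndecl stands for a FUNCTION_DECL supplied by
   the caller): a pass that needs code run at program startup builds a GENERIC
   body and hands it to this function,

     tree body = NULL_TREE;
     append_to_statement_list (build_call_expr (some_fndecl, 0), &body);
     cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);

   The synthesized constructor is added to the call graph like any other new
   function and lowered by the normal pipeline.  */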

/* When the target does not have ctors and dtors, we call all constructors
   and destructors through a special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node, vec<tree> *ctors, vec<tree> *dtors)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    ctors->safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    dtors->safe_push (node->decl);
  node = cgraph_node::get (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructor/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, const vec<tree> &cdtors)
{
  size_t i, j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      do
        {
          priority_type p;
          fn = cdtors[j];
          p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
          if (j == i)
            priority = p;
          else if (p != priority)
            break;
          j++;
        }
      while (j < len);

      /* When there is only one cdtor and the target supports them, do
         nothing.  */
      if (j == i + 1
          && targetm.have_ctors_dtors)
        {
          i++;
          continue;
        }
      /* Find the next batch of constructors/destructors with the same
         initialization priority.  */
      for (;i < j; i++)
        {
          tree call;
          fn = cdtors[i];
          call = build_call_expr (fn, 0);
          if (ctor_p)
            DECL_STATIC_CONSTRUCTOR (fn) = 0;
          else
            DECL_STATIC_DESTRUCTOR (fn) = 0;
          /* We do not want to optimize away pure/const calls here.
             When optimizing, these should be already removed, when not
             optimizing, we want the user to be able to breakpoint in them.  */
          TREE_SIDE_EFFECTS (call) = 1;
          append_to_statement_list (call, &body);
        }
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
         priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true,
                                   DECL_FUNCTION_SPECIFIC_OPTIMIZATION (cdtors[0]),
                                   DECL_FUNCTION_SPECIFIC_TARGET (cdtors[0]));
    }
}
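
/* For instance (the names below are illustrative only; the real ones come
   from get_file_function_name or the "_sub_" pattern above): with
   constructors f (priority 100) and g, h (both DEFAULT_INIT_PRIORITY), this
   emits two synthesized functions roughly equivalent to

     void I_00100 (void) { f (); }
     void I_65535 (void) { g (); h (); }

   each itself marked DECL_STATIC_CONSTRUCTOR with the corresponding priority,
   while f, g and h lose their constructor flag.  */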

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in backwards
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (vec<tree> *ctors, vec<tree> *dtors)
{
  if (!ctors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      ctors->qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, *ctors);
    }

  if (!dtors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      dtors->qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, *dtors);
    }
}

/* Look for constructors and destructors and produce functions calling them.
   This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at link time to merge possibly numerous
   constructors/destructors into a single function to improve code locality and
   reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  /* A vector of FUNCTION_DECLs declared as static constructors.  */
  auto_vec<tree, 20> ctors;
  /* A vector of FUNCTION_DECLs declared as static destructors.  */
  auto_vec<tree, 20> dtors;
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
        || DECL_STATIC_DESTRUCTOR (node->decl))
       record_cdtor_fn (node, &ctors, &dtors);
  build_cdtor_fns (&ctors, &dtors);
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into a single
     function.  */
  return !targetm.have_ctors_dtors || in_lto_p;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}

/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate variables with the single function that may
   access them.

   FUNCTION is the current single user of the variable and VAR is the variable
   in question.  The lattice is stored in SINGLE_USER_MAP.

   We represent:
    - TOP by no entry in SINGLE_USER_MAP
    - BOTTOM by BOTTOM in the AUX pointer (to save lookups)
    - known single user by a cgraph pointer in SINGLE_USER_MAP.  */

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
      hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}
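
/* The meet above implements the usual three-level lattice, with TOP meaning
   "no known user yet" (no SINGLE_USER_MAP entry, or FUNCTION == NULL) and
   BOTTOM meaning "more than one user":

     meet (TOP, x)    == x
     meet (x, TOP)    == x
     meet (x, x)      == x
     meet (x, y)      == BOTTOM   for two distinct known users
     meet (BOTTOM, x) == BOTTOM  */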

/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they are used by the single function
   FUNCTION.  SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
                       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
        {
          if (cnode->inlined_to)
            cnode = cnode->inlined_to;
          if (!function)
            function = cnode;
          else if (function != cnode)
            function = BOTTOM;
        }
      else
        function = meet (function, dyn_cast <varpool_node *> (ref->referring),
                         single_user_map);
    }
  return function;
}

/* Pass setting the used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
        /* Enqueue symbol for dataflow.  */
        var->aux = first;
        first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *)first->aux;

      f = single_user_map.get (var);
      if (f)
        orig_user = *f;
      else
        orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If the user differs, enqueue all references.  */
      if (user != orig_user)
        {
          unsigned int i;
          ipa_ref *ref;

          single_user_map.put (var, user);

          /* Enqueue all aliases for re-processing.  */
          for (i = 0; var->iterate_direct_aliases (i, ref); i++)
            if (!ref->referring->aux)
              {
                ref->referring->aux = first;
                first = dyn_cast <varpool_node *> (ref->referring);
              }
          /* Enqueue all users for re-processing.  */
          for (i = 0; var->iterate_reference (i, ref); i++)
            if (!ref->referred->aux
                && ref->referred->definition
                && is_a <varpool_node *> (ref->referred))
              {
                ref->referred->aux = first;
                first = dyn_cast <varpool_node *> (ref->referred);
              }

          /* If user is BOTTOM, just punt on this var.  */
          if (user == BOTTOM)
            var->aux = BOTTOM;
          else
            var->aux = NULL;
        }
      else
        var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
        {
          /* Not having the single user known means that the VAR is
             unreachable.  Either someone forgot to remove unreachable
             variables or the reachability here is wrong.  */

          gcc_checking_assert (single_user_map.get (var));

          if (dump_file)
            {
              fprintf (dump_file, "Variable %s is used by single function\n",
                       var->dump_name ());
            }
          var->used_by_single_function = true;
        }
      var->aux = NULL;
    }
  return 0;
}

namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}

/* Materialize all clones.  */

namespace {

const pass_data pass_data_materialize_all_clones =
{
  SIMPLE_IPA_PASS, /* type */
  "materialize-all-clones", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_OPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_materialize_all_clones : public simple_ipa_opt_pass
{
public:
  pass_materialize_all_clones (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_materialize_all_clones, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
  {
    symtab->materialize_all_clones ();
    return 0;
  }

}; // class pass_materialize_all_clones

} // anon namespace

simple_ipa_opt_pass *
make_pass_materialize_all_clones (gcc::context *ctxt)
{
  return new pass_materialize_all_clones (ctxt);
}