/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "stringpool.h"
#include "cgraph.h"
#include "gimplify.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "dbgcnt.h"
#include "debug.h"
#include "stringpool.h"
#include "attribs.h"

/* Return true when NODE has ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
                       void *)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}

/* Return true when NODE can be a target of an indirect call.  */

static bool
is_indirect_call_target_p (struct cgraph_node *node, void *)
{
  return node->indirect_call_target;
}

/* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
        e->callee->global.inlined_to = inlined_to;
        update_inlined_to_pointer (e->callee, inlined_to);
      }
}

/* Add symtab NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable or when we
   find their bodies needed for further cloning.  In the second case we mark
   them by pointer to 2 after processing so they are re-queued when they
   become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
              hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *)2 && !reachable->contains (node))
    return;
  node->aux = *first;
  *first = node;
}

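/* An illustrative sketch of how the queue built above is consumed (this is
   the shape of the worklist loop in remove_unreachable_nodes below):

     symtab_node *first = (symtab_node *) (void *) 1;
     enqueue_node (some_node, &first, &reachable);
     while (first != (symtab_node *) (void *) 1)
       {
         symtab_node *node = first;
         first = (symtab_node *) first->aux;
         ... process NODE, possibly enqueueing more symbols ...
       }

   The constant 1 is the list terminator and the constant 2 marks boundary
   symbols that were already processed once.  */
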
/* Process references.  */

static void
process_references (symtab_node *snode,
                    symtab_node **first,
                    bool before_inlining_p,
                    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      if (node->definition && !node->in_other_partition
          && ((!DECL_EXTERNAL (node->decl) || node->alias)
              || (((before_inlining_p
                    && (TREE_CODE (node->decl) != FUNCTION_DECL
                        || (TREE_CODE (node->decl) == FUNCTION_DECL
                            && opt_for_fn (body->decl, optimize))
                        || (symtab->state < IPA_SSA
                            && lookup_attribute
                                 ("always_inline",
                                  DECL_ATTRIBUTES (body->decl))))))
                  /* We use variable constructors during late compilation for
                     constant folding.  Keep references alive so partitioning
                     knows about potential references.  */
                  || (VAR_P (node->decl)
                      && (flag_wpa
                          || flag_incremental_link
                               == INCREMENTAL_LINK_LTO)
                      && dyn_cast <varpool_node *> (node)
                           ->ctor_useable_for_folding_p ()))))
        {
          /* Be sure that we will not optimize out alias target
             body.  */
          if (DECL_EXTERNAL (node->decl)
              && node->alias
              && before_inlining_p)
            reachable->add (body);
          reachable->add (node);
        }
      enqueue_node (node, first, reachable);
    }
}

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to make trivial devirtualization when no target or only one
   target is possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
                               struct cgraph_edge *edge,
                               symtab_node **first,
                               hash_set<symtab_node *> *reachable,
                               bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
        (edge, &final, &cache_token);

  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
        {
          struct cgraph_node *n = targets[i];

          /* Do not bother to mark virtual methods in anonymous namespace;
             either we will find use of virtual table defining it, or it is
             unused.  */
          if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
              && type_in_anonymous_namespace_p
                   (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
            continue;

          n->indirect_call_target = true;
          symtab_node *body = n->function_symbol ();

          /* Prior to inlining, keep alive the bodies of possible targets for
             devirtualization.  */
          if (n->definition
              && (before_inlining_p
                  && opt_for_fn (body->decl, optimize)
                  && opt_for_fn (body->decl, flag_devirtualize)))
            {
              /* Be sure that we will not optimize out alias target
                 body.  */
              if (DECL_EXTERNAL (n->decl)
                  && n->alias
                  && before_inlining_p)
                reachable->add (body);
              reachable->add (n);
            }
          /* Even after inlining we want to keep the possible targets in the
             boundary, so late passes can still produce a direct call even if
             the chance for inlining is lost.  */
          enqueue_node (n, first, reachable);
        }
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivations)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
        {
          cgraph_node *target, *node = edge->caller;
          if (targets.length () == 1)
            target = targets[0];
          else
            target = cgraph_node::get_create
                       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

          if (dump_enabled_p ())
            {
              dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
                               "devirtualizing call in %s to %s\n",
                               edge->caller->dump_name (),
                               target->dump_name ());
            }
          edge = edge->make_direct (target);
          if (ipa_fn_summaries)
            ipa_update_overall_fn_summary (node);
          else if (edge->call_stmt)
            edge->redirect_call_stmt_to_callee ();
        }
    }
}

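/* In other words, when the polymorphic call has a known complete set of
   targets (FINAL) and at most one of them, the code above rewrites the
   indirect edge into a direct one: to the single target when there is one,
   or to __builtin_unreachable when the target set is provably empty.  */
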
/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; their bodies
     need to stay in memory until inlining in the hope that they will be
     inlined.  After inlining we release their bodies and turn them into
     unanalyzed nodes even when they are reachable.

   - virtual functions are kept in the callgraph even if they seem
     unreachable, in the hope that calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't
     inline the call.  In theory early opts and IPA should work out all
     important cases.

   - virtual clones need the bodies of their origins for later
     materialization; this means that we want to keep the body even if the
     origin is otherwise unreachable.  To avoid the origin sitting in the
     callgraph and being walked by IPA passes, we turn it into an unanalyzed
     node with the body defined.

     We maintain the set of function declarations whose bodies need to stay
     in body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when an offline copy of a
     function or an inline clone is being removed.

   - C++ virtual tables keyed to another unit are represented as
     DECL_EXTERNAL variables with DECL_INITIAL set.  We finalize these and
     keep reachable ones around for constant folding purposes.  After
     inlining we however stop walking their references to let everything
     static referenced by them be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that
   need to stay) and symbols that are in the boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as a linked list by the AUX pointer terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn the definition into a declaration, but we may keep the
   function body around based on body_needed_for_clonning.

   All symbols that enter the queue have a non-zero AUX pointer and are in
   the boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of the boundary and once
   as a real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set the AUX pointer of processed symbols in the boundary to the
   constant 2.  */

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;
  bool before_inlining_p = symtab->state < (!optimize && !in_lto_p ? IPA_SSA
                                            : IPA_SSA_AFTER_INLINING);

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
  if (flag_checking)
    {
      FOR_EACH_FUNCTION (node)
        gcc_assert (!node->aux);
      FOR_EACH_VARIABLE (vnode)
        gcc_assert (!vnode->aux);
    }
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with the master clone
     and thus cgraph_can_remove_if_no_direct_calls_and_refs_p should not be
     called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      node->indirect_call_target = false;
      if (node->definition
          && !node->global.inlined_to
          && !node->in_other_partition
          && !node->can_remove_if_no_direct_calls_and_refs_p ())
        {
          gcc_assert (!node->global.inlined_to);
          reachable.add (node);
          enqueue_node (node, &first, &reachable);
        }
      else
        gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p ()
        && !vnode->in_other_partition)
      {
        reachable.add (vnode);
        enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *)first->aux;

      /* If we are processing a symbol in the boundary, mark its AUX pointer
         for possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
        {
          node->aux = (void *)2;
          if (node->alias && node->analyzed)
            enqueue_node (node->get_alias_target (), &first, &reachable);
        }
      else
        {
          if (TREE_CODE (node->decl) == FUNCTION_DECL
              && DECL_ABSTRACT_ORIGIN (node->decl))
            {
              struct cgraph_node *origin_node
                = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
              if (origin_node && !origin_node->used_as_abstract_origin)
                {
                  origin_node->used_as_abstract_origin = true;
                  gcc_assert (!origin_node->prev_sibling_clone);
                  gcc_assert (!origin_node->next_sibling_clone);
                  for (cgraph_node *n = origin_node->clones; n;
                       n = n->next_sibling_clone)
                    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
                      n->used_as_abstract_origin = true;
                }
            }
          /* If any symbol in a comdat group is reachable, force
             all externally visible symbols in the same comdat
             group to be reachable as well.  Comdat-local symbols
             can be discarded if all uses were inlined.  */
          if (node->same_comdat_group)
            {
              symtab_node *next;
              for (next = node->same_comdat_group;
                   next != node;
                   next = next->same_comdat_group)
                if (!next->comdat_local_p ()
                    && !reachable.add (next))
                  enqueue_node (next, &first, &reachable);
            }
          /* Mark references as reachable.  */
          process_references (node, &first, before_inlining_p, &reachable);
        }

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
        {
          /* Mark the callees reachable unless they are direct calls to extern
             inline functions we decided to not inline.  */
          if (!in_boundary_p)
            {
              struct cgraph_edge *e;
              /* Keep alive possible targets for devirtualization.  */
              if (opt_for_fn (cnode->decl, optimize)
                  && opt_for_fn (cnode->decl, flag_devirtualize))
                {
                  struct cgraph_edge *next;
                  for (e = cnode->indirect_calls; e; e = next)
                    {
                      next = e->next_callee;
                      if (e->indirect_info->polymorphic)
                        walk_polymorphic_call_targets (&reachable_call_targets,
                                                       e, &first, &reachable,
                                                       before_inlining_p);
                    }
                }
              for (e = cnode->callees; e; e = e->next_callee)
                {
                  symtab_node *body = e->callee->function_symbol ();
                  if (e->callee->definition
                      && !e->callee->in_other_partition
                      && (!e->inline_failed
                          || !DECL_EXTERNAL (e->callee->decl)
                          || e->callee->alias
                          || (before_inlining_p
                              && (opt_for_fn (body->decl, optimize)
                                  || (symtab->state < IPA_SSA
                                      && lookup_attribute
                                           ("always_inline",
                                            DECL_ATTRIBUTES (body->decl)))))))
                    {
                      /* Be sure that we will not optimize out alias target
                         body.  */
                      if (DECL_EXTERNAL (e->callee->decl)
                          && e->callee->alias
                          && before_inlining_p)
                        reachable.add (body);
                      reachable.add (e->callee);
                    }
                  enqueue_node (e->callee, &first, &reachable);
                }

              /* When an inline clone exists, mark its body to be preserved
                 so that we do not kill it when removing the offline copy of
                 the function.  */
              if (cnode->global.inlined_to)
                body_needed_for_clonning.add (cnode->decl);

              /* For non-inline clones, force their origins to the boundary
                 and ensure that the body is not removed.  */
              while (cnode->clone_of)
                {
                  bool noninline = cnode->clone_of->decl != cnode->decl;
                  cnode = cnode->clone_of;
                  if (noninline)
                    {
                      body_needed_for_clonning.add (cnode->decl);
                      enqueue_node (cnode, &first, &reachable);
                    }
                }

            }
          else if (cnode->thunk.thunk_p)
            enqueue_node (cnode->callees->callee, &first, &reachable);

          /* If any reachable function has simd clones, mark them as
             reachable as well.  */
          if (cnode->simd_clones)
            {
              cgraph_node *next;
              for (next = cnode->simd_clones;
                   next;
                   next = next->simdclone->next_clone)
                if (in_boundary_p
                    || !reachable.add (next))
                  enqueue_node (next, &first, &reachable);
            }
        }
      /* When we see a constructor of an external variable, keep referred
         nodes in the boundary.  This will also hold initializers of the
         external vars NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
          && DECL_EXTERNAL (node->decl)
          && !vnode->alias
          && in_boundary_p)
        {
          struct ipa_ref *ref = NULL;
          for (int i = 0; node->iterate_reference (i, ref); i++)
            enqueue_node (ref->referred, &first, &reachable);
        }
    }

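  /* At this point the worklist is empty: everything in REACHABLE must keep
     its definition, everything else that was enqueued is mere boundary
     material (its AUX is non-NULL), and symbols that were never enqueued at
     all can be removed outright, which is what the loops below do.  */
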
  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
        {
          if (file)
            fprintf (file, " %s", node->dump_name ());
          node->remove ();
          changed = true;
        }
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
        {
          /* We keep definitions of thunks and aliases in the boundary so
             we can walk to the ultimate alias targets and function symbols
             reliably.  */
          if (node->alias || node->thunk.thunk_p)
            ;
          else if (!body_needed_for_clonning.contains (node->decl)
                   && !node->alias && !node->thunk.thunk_p)
            node->release_body ();
          else if (!node->clone_of)
            gcc_assert (in_lto_p || DECL_RESULT (node->decl));
          if (node->definition && !node->alias && !node->thunk.thunk_p)
            {
              if (file)
                fprintf (file, " %s", node->dump_name ());
              node->body_removed = true;
              node->analyzed = false;
              node->definition = false;
              node->cpp_implicit_alias = false;
              node->alias = false;
              node->transparent_alias = false;
              node->thunk.thunk_p = false;
              node->weakref = false;
              /* After early inlining we drop always_inline attributes on
                 bodies of functions that are still referenced (have their
                 address taken).  */
              DECL_ATTRIBUTES (node->decl)
                = remove_attribute ("always_inline",
                                    DECL_ATTRIBUTES (node->decl));
              if (!node->in_other_partition)
                node->local.local = false;
              node->remove_callees ();
              node->remove_all_references ();
              changed = true;
              if (node->thunk.thunk_p
                  && node->thunk.add_pointer_bounds_args)
                {
                  node->thunk.thunk_p = false;
                  node->thunk.add_pointer_bounds_args = false;
                }
            }
        }
      else
        gcc_assert (node->clone_of || !node->has_gimple_body_p ()
                    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so that materializing them allows
     further cloning.  If the function the clone is inlined into is removed,
     we need to turn it into a normal function.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
          && !node->callers)
        {
          gcc_assert (node->clones);
          node->global.inlined_to = NULL;
          update_inlined_to_pointer (node, node);
        }
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
          /* For can_refer_decl_in_current_unit_p we want to track for
             all external variables if they are defined in other partition
             or not.  */
          && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
        {
          struct ipa_ref *ref = NULL;

          /* First remove the aliases, so varpool::remove can possibly lookup
             the constructor and save it for future use.  */
          while (vnode->iterate_direct_aliases (0, ref))
            {
              if (file)
                fprintf (file, " %s", ref->referred->dump_name ());
              ref->referring->remove ();
            }
          if (file)
            fprintf (file, " %s", vnode->dump_name ());
          vnext = next_variable (vnode);
          /* Signal removal to the debug machinery.  */
          if (! flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
            {
              vnode->definition = false;
              (*debug_hooks->late_global_decl) (vnode->decl);
            }
          vnode->remove ();
          changed = true;
        }
      else if (!reachable.contains (vnode) && !vnode->alias)
        {
          tree init;
          if (vnode->definition)
            {
              if (file)
                fprintf (file, " %s", vnode->name ());
              changed = true;
            }
          /* Keep body if it may be useful for constant folding.  */
          if ((flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
              || ((init = ctor_for_folding (vnode->decl)) == error_mark_node))
            vnode->remove_initializer ();
          else
            DECL_INITIAL (vnode->decl) = init;
          vnode->body_removed = true;
          vnode->definition = false;
          vnode->analyzed = false;
          vnode->aux = NULL;

          vnode->remove_from_same_comdat_group ();

          vnode->remove_all_references ();
        }
      else
        vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
        && !node->used_from_other_partition)
      {
        if (!node->call_for_symbol_and_aliases
               (has_addr_references_p, NULL, true))
          {
            if (file)
              fprintf (file, " %s", node->name ());
            node->address_taken = false;
            changed = true;
            if (node->local_p ()
                /* Virtual functions may be kept in cgraph just because
                   of possible later devirtualization.  Do not mark them as
                   local too early so we won't optimize them out before
                   we are done with polymorphic call analysis.  */
                && (!before_inlining_p
                    || !node->call_for_symbol_and_aliases
                          (is_indirect_call_target_p, NULL, true)))
              {
                node->local.local = true;
                if (file)
                  fprintf (file, " (local)");
              }
          }
      }
  if (file)
    fprintf (file, "\n");

  symtab_node::checking_verify_symtab_nodes ();

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && (optimize || in_lto_p) && ipa_call_summaries)
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}

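/* A note on how the function above is used: it returns true whenever it
   removed a symbol, demoted a definition down to a declaration, or cleared
   an address_taken flag, and being a symbol_table member it is reached
   through the global symbol table instance, e.g.
   symtab->remove_unreachable_nodes (dump_file).  */
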
/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to given variable
   do not need to be explicit.  */

void
process_references (varpool_node *vnode,
                    bool *written, bool *address_taken,
                    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
              && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
        *address_taken = true;
        break;
      case IPA_REF_LOAD:
        *read = true;
        break;
      case IPA_REF_STORE:
        *written = true;
        break;
      case IPA_REF_ALIAS:
        process_references (dyn_cast<varpool_node *> (ref->referring), written,
                            address_taken, read, explicit_refs);
        break;
      }
}

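/* As an illustration of the flag computation above: a variable that is only
   ever read leaves *WRITTEN and *ADDRESS_TAKEN false, which is what allows
   ipa_discover_readonly_nonaddressable_vars below to clear TREE_ADDRESSABLE
   and set TREE_READONLY on it (provided all references are explicit and the
   variable is not placed in an explicit section).  */
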
/* Set TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set writeonly bit and clear the initializer, since it will not be needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data)
{
  vnode->writeonly = true;
  if (optimize || in_lto_p)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
        {
          if (vnode->num_references ())
            *(bool *)data = true;
          vnode->remove_all_references ();
        }
    }
  return false;
}

/* Clear the addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables that no longer have their address taken or that are
   read-only and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This cannot be done in between gimplify and omp_expand since
   the readonly flag plays a role in what is shared and what is not.
   Currently we do this transformation as part of whole program visibility
   and re-do it in the ipa-reference pass (to take cloning into account),
   but it would make sense to do it before early optimizations.  */

bool
ipa_discover_readonly_nonaddressable_vars (void)
{
  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
        && (TREE_ADDRESSABLE (vnode->decl)
            || !vnode->writeonly
            || !TREE_READONLY (vnode->decl)))
      {
        bool written = false;
        bool address_taken = false;
        bool read = false;
        bool explicit_refs = true;

        process_references (vnode, &written, &address_taken, &read,
                            &explicit_refs);
        if (!explicit_refs)
          continue;
        if (!address_taken)
          {
            if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (non-addressable)", vnode->name ());
            vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
                                                true);
          }
        if (!address_taken && !written
            /* Making a variable in an explicit section read-only can cause
               section type conflicts.
               See e.g. gcc.c-torture/compile/pr23237.c */
            && vnode->get_section () == NULL)
          {
            if (!TREE_READONLY (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (read-only)", vnode->name ());
            vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
          }
        if (!vnode->writeonly && !read && !address_taken && written)
          {
            if (dump_file)
              fprintf (dump_file, " %s (write-only)", vnode->name ());
            vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
                                                true);
          }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}

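/* As an illustrative example: a file-local variable such as

     static int counter;	// only stored to, never read or address-taken

   is seen by the loop above with written=true, read=false and
   address_taken=false, so it is marked write-only; when optimizing (or in
   LTO) its initializer and outgoing references are dropped, and if there
   were any such references the function returns true so that unreachable
   symbol removal can reclaim what they pointed to.  */
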
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).
   BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced. */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
    /* Produce a sane name, but one not recognizable by collect2, just in
       case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
                     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
                        RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}

898
3a9ed12a 899/* Generate and emit a static constructor or destructor. WHICH must
31db0fe0
ML
900 be one of 'I' (for a constructor) or 'D' (for a destructor).
901 BODY is a STATEMENT_LIST containing GENERIC
d5e254e1
IE
902 statements. PRIORITY is the initialization priority for this
903 constructor or destructor. */
3a9ed12a
JH
904
905void
906cgraph_build_static_cdtor (char which, tree body, int priority)
907{
908 cgraph_build_static_cdtor_1 (which, body, priority, false);
909}
9e97ff61 910
9e97ff61
JH
911/* When target does not have ctors and dtors, we call all constructor
912 and destructor by special initialization/destruction function
913 recognized by collect2.
914
915 When we are going to build this function, collect all constructors and
916 destructors and turn them into normal functions. */
917
918static void
37a51997 919record_cdtor_fn (struct cgraph_node *node, vec<tree> *ctors, vec<tree> *dtors)
9e97ff61 920{
67348ccc 921 if (DECL_STATIC_CONSTRUCTOR (node->decl))
37a51997 922 ctors->safe_push (node->decl);
67348ccc 923 if (DECL_STATIC_DESTRUCTOR (node->decl))
37a51997 924 dtors->safe_push (node->decl);
d52f5295 925 node = cgraph_node::get (node->decl);
67348ccc 926 DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
9e97ff61
JH
927}
928
/* Define global constructor/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, const vec<tree> &cdtors)
{
  size_t i, j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      do
        {
          priority_type p;
          fn = cdtors[j];
          p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
          if (j == i)
            priority = p;
          else if (p != priority)
            break;
          j++;
        }
      while (j < len);

      /* When there is only one cdtor and the target supports them, do
         nothing.  */
      if (j == i + 1
          && targetm.have_ctors_dtors)
        {
          i++;
          continue;
        }
      /* Find the next batch of constructors/destructors with the same
         initialization priority.  */
      for (; i < j; i++)
        {
          tree call;
          fn = cdtors[i];
          call = build_call_expr (fn, 0);
          if (ctor_p)
            DECL_STATIC_CONSTRUCTOR (fn) = 0;
          else
            DECL_STATIC_DESTRUCTOR (fn) = 0;
          /* We do not want to optimize away pure/const calls here.
             When optimizing, these should be already removed; when not
             optimizing, we want the user to be able to set a breakpoint
             in them.  */
          TREE_SIDE_EFFECTS (call) = 1;
          append_to_statement_list (call, &body);
        }
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
         priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}

993
994/* Comparison function for qsort. P1 and P2 are actually of type
995 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
996 used to determine the sort order. */
997
998static int
999compare_ctor (const void *p1, const void *p2)
1000{
1001 tree f1;
1002 tree f2;
1003 int priority1;
1004 int priority2;
1005
1006 f1 = *(const tree *)p1;
1007 f2 = *(const tree *)p2;
1008 priority1 = DECL_INIT_PRIORITY (f1);
1009 priority2 = DECL_INIT_PRIORITY (f2);
1010
1011 if (priority1 < priority2)
1012 return -1;
1013 else if (priority1 > priority2)
1014 return 1;
1015 else
1016 /* Ensure a stable sort. Constructors are executed in backwarding
1017 order to make LTO initialize braries first. */
1018 return DECL_UID (f2) - DECL_UID (f1);
1019}
1020
1021/* Comparison function for qsort. P1 and P2 are actually of type
1022 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1023 used to determine the sort order. */
1024
1025static int
1026compare_dtor (const void *p1, const void *p2)
1027{
1028 tree f1;
1029 tree f2;
1030 int priority1;
1031 int priority2;
1032
1033 f1 = *(const tree *)p1;
1034 f2 = *(const tree *)p2;
1035 priority1 = DECL_FINI_PRIORITY (f1);
1036 priority2 = DECL_FINI_PRIORITY (f2);
1037
1038 if (priority1 < priority2)
1039 return -1;
1040 else if (priority1 > priority2)
1041 return 1;
1042 else
1043 /* Ensure a stable sort. */
1044 return DECL_UID (f1) - DECL_UID (f2);
1045}
1046
/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (vec<tree> *ctors, vec<tree> *dtors)
{
  if (!ctors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      ctors->qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, *ctors);
    }

  if (!dtors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      dtors->qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, *dtors);
    }
}

/* Look for constructors and destructors and produce functions calling them.
   This is needed for targets not supporting ctors or dtors, but we perform
   the transformation also at link time to merge possibly numerous
   constructors/destructors into a single function to improve code locality
   and reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  /* A vector of FUNCTION_DECLs declared as static constructors.  */
  auto_vec<tree, 20> ctors;
  /* A vector of FUNCTION_DECLs declared as static destructors.  */
  auto_vec<tree, 20> dtors;
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
        || DECL_STATIC_DESTRUCTOR (node->decl))
      record_cdtor_fn (node, &ctors, &dtors);
  build_cdtor_fns (&ctors, &dtors);
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into a single
     function.  */
  return !targetm.have_ctors_dtors || in_lto_p;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}

/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate each variable with the single function that may
   access it.

   FUNCTION is the current single user of a variable and VAR is a variable
   that uses it.  The lattice is stored in SINGLE_USER_MAP.

   We represent:
    - TOP by no entry in SINGLE_USER_MAP
    - BOTTOM by BOTTOM in AUX pointer (to save lookups)
    - known single user by cgraph pointer in SINGLE_USER_MAP.  */

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
      hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}

1179
1180/* Propagation step of single-use dataflow.
1181
1182 Check all uses of VNODE and see if they are used by single function FUNCTION.
1183 SINGLE_USER_MAP represents the dataflow lattice. */
1184
1185cgraph_node *
1186propagate_single_user (varpool_node *vnode, cgraph_node *function,
1eb68d2d 1187 hash_map<varpool_node *, cgraph_node *> &single_user_map)
eb6a09a7
JH
1188{
1189 int i;
1190 struct ipa_ref *ref;
1191
1192 gcc_assert (!vnode->externally_visible);
1193
1194 /* If node is an alias, first meet with its target. */
1195 if (vnode->alias)
9041d2e6 1196 function = meet (function, vnode->get_alias_target (), single_user_map);
eb6a09a7
JH
1197
1198 /* Check all users and see if they correspond to a single function. */
d52f5295 1199 for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
eb6a09a7
JH
1200 {
1201 struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
1202 if (cnode)
1203 {
1204 if (cnode->global.inlined_to)
1205 cnode = cnode->global.inlined_to;
1206 if (!function)
1207 function = cnode;
1208 else if (function != cnode)
1209 function = BOTTOM;
1210 }
1211 else
17e0fc92
JH
1212 function = meet (function, dyn_cast <varpool_node *> (ref->referring),
1213 single_user_map);
eb6a09a7
JH
1214 }
1215 return function;
1216}
1217
1218/* Pass setting used_by_single_function flag.
17e0fc92
JH
1219 This flag is set on variable when there is only one function that may
1220 possibly referr to it. */
eb6a09a7
JH
1221
1222static unsigned int
1223ipa_single_use (void)
1224{
1225 varpool_node *first = (varpool_node *) (void *) 1;
1226 varpool_node *var;
1eb68d2d 1227 hash_map<varpool_node *, cgraph_node *> single_user_map;
eb6a09a7
JH
1228
1229 FOR_EACH_DEFINED_VARIABLE (var)
9041d2e6 1230 if (!var->all_refs_explicit_p ())
eb6a09a7
JH
1231 var->aux = BOTTOM;
1232 else
1233 {
1234 /* Enqueue symbol for dataflow. */
1235 var->aux = first;
1236 first = var;
1237 }
1238
1239 /* The actual dataflow. */
1240
1241 while (first != (void *) 1)
1242 {
1243 cgraph_node *user, *orig_user, **f;
1244
1245 var = first;
1246 first = (varpool_node *)first->aux;
1247
1eb68d2d 1248 f = single_user_map.get (var);
eb6a09a7
JH
1249 if (f)
1250 orig_user = *f;
1251 else
1252 orig_user = NULL;
1253 user = propagate_single_user (var, orig_user, single_user_map);
1254
1255 gcc_checking_assert (var->aux != BOTTOM);
1256
1257 /* If user differs, enqueue all references. */
1258 if (user != orig_user)
1259 {
1260 unsigned int i;
1261 ipa_ref *ref;
1262
1eb68d2d 1263 single_user_map.put (var, user);
eb6a09a7
JH
1264
1265 /* Enqueue all aliases for re-processing. */
31de7606
JH
1266 for (i = 0; var->iterate_direct_aliases (i, ref); i++)
1267 if (!ref->referring->aux)
eb6a09a7
JH
1268 {
1269 ref->referring->aux = first;
1270 first = dyn_cast <varpool_node *> (ref->referring);
1271 }
1272 /* Enqueue all users for re-processing. */
d52f5295 1273 for (i = 0; var->iterate_reference (i, ref); i++)
eb6a09a7
JH
1274 if (!ref->referred->aux
1275 && ref->referred->definition
1276 && is_a <varpool_node *> (ref->referred))
1277 {
1278 ref->referred->aux = first;
1279 first = dyn_cast <varpool_node *> (ref->referred);
1280 }
1281
1282 /* If user is BOTTOM, just punt on this var. */
1283 if (user == BOTTOM)
1284 var->aux = BOTTOM;
1285 else
1286 var->aux = NULL;
1287 }
1288 else
1289 var->aux = NULL;
1290 }
1291
1292 FOR_EACH_DEFINED_VARIABLE (var)
1293 {
1294 if (var->aux != BOTTOM)
1295 {
17e0fc92
JH
1296 /* Not having the single user known means that the VAR is
1297 unreachable. Either someone forgot to remove unreachable
1298 variables or the reachability here is wrong. */
1299
b2b29377
MM
1300 gcc_checking_assert (single_user_map.get (var));
1301
eb6a09a7
JH
1302 if (dump_file)
1303 {
464d0118
ML
1304 fprintf (dump_file, "Variable %s is used by single function\n",
1305 var->dump_name ());
eb6a09a7
JH
1306 }
1307 var->used_by_single_function = true;
1308 }
1309 var->aux = NULL;
1310 }
1311 return 0;
1312}
1313
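/* Roughly, the pass above sets used_by_single_function on every defined
   variable whose explicit references can all be attributed to one function,
   counting inline clones as the function they were inlined into and
   counting a reference from another variable as that variable's own single
   user; variables with implicit references or with users in several
   functions are left alone.  */
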
namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}

/* Materialize all clones.  */

namespace {

const pass_data pass_data_materialize_all_clones =
{
  SIMPLE_IPA_PASS, /* type */
  "materialize-all-clones", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_OPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_materialize_all_clones : public simple_ipa_opt_pass
{
public:
  pass_materialize_all_clones (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_materialize_all_clones, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      symtab->materialize_all_clones ();
      return 0;
    }

}; // class pass_materialize_all_clones

} // anon namespace

simple_ipa_opt_pass *
make_pass_materialize_all_clones (gcc::context *ctxt)
{
  return new pass_materialize_all_clones (ctxt);
}