]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa.c
Remove MPX
[thirdparty/gcc.git] / gcc / ipa.c
CommitLineData
ca31b95f 1/* Basic IPA optimizations and utilities.
85ec4feb 2 Copyright (C) 2003-2018 Free Software Foundation, Inc.
ca31b95f
JH
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
ca31b95f
JH
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
ca31b95f
JH
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "target.h"
c7131fb2
AM
25#include "tree.h"
26#include "gimple.h"
957060b5
AM
27#include "alloc-pool.h"
28#include "tree-pass.h"
29#include "stringpool.h"
30#include "cgraph.h"
45b0be94 31#include "gimplify.h"
9e97ff61 32#include "tree-iterator.h"
af8bca3c 33#include "ipa-utils.h"
dd912cb8 34#include "symbol-summary.h"
8bc5448f 35#include "tree-vrp.h"
c582198b 36#include "ipa-prop.h"
27d020cf 37#include "ipa-fnsummary.h"
2b5f0895 38#include "dbgcnt.h"
775669c1 39#include "debug.h"
314e6352
ML
40#include "stringpool.h"
41#include "attribs.h"
e70670cf
JH
42
43/* Return true when NODE has ADDR reference. */
44
45static bool
46has_addr_references_p (struct cgraph_node *node,
4f4ada6a 47 void *)
e70670cf
JH
48{
49 int i;
d122681a 50 struct ipa_ref *ref = NULL;
e70670cf 51
d122681a 52 for (i = 0; node->iterate_referring (i, ref); i++)
e70670cf
JH
53 if (ref->use == IPA_REF_ADDR)
54 return true;
55 return false;
56}
57
4f4ada6a
JH
58/* Return true when NODE can be target of an indirect call. */
59
60static bool
61is_indirect_call_target_p (struct cgraph_node *node, void *)
62{
63 return node->indirect_call_target;
64}
65
d563610d
JH
66/* Look for all functions inlined to NODE and update their inlined_to pointers
67 to INLINED_TO. */
68
69static void
70update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
71{
72 struct cgraph_edge *e;
73 for (e = node->callees; e; e = e->next_callee)
74 if (e->callee->global.inlined_to)
75 {
76 e->callee->global.inlined_to = inlined_to;
77 update_inlined_to_pointer (e->callee, inlined_to);
78 }
79}
80
04142cc3 81/* Add symtab NODE to queue starting at FIRST.
19fb0b86
JH
82
83 The queue is linked via AUX pointers and terminated by pointer to 1.
84 We enqueue nodes at two occasions: when we find them reachable or when we find
85 their bodies needed for further clonning. In the second case we mark them
86 by pointer to 2 after processing so they are re-queue when they become
87 reachable. */
b34fd25c
JH
88
89static void
5e20cdc9 90enqueue_node (symtab_node *node, symtab_node **first,
6e2830c3 91 hash_set<symtab_node *> *reachable)
b34fd25c 92{
19fb0b86 93 /* Node is still in queue; do nothing. */
67348ccc 94 if (node->aux && node->aux != (void *) 2)
19fb0b86
JH
95 return;
96 /* Node was already processed as unreachable, re-enqueue
97 only if it became reachable now. */
6e2830c3 98 if (node->aux == (void *)2 && !reachable->contains (node))
19fb0b86 99 return;
67348ccc 100 node->aux = *first;
b34fd25c
JH
101 *first = node;
102}
103
b34fd25c
JH
/* Walk all references going out of SNODE and enqueue every referred symbol
   on the queue rooted at *FIRST.  Referred symbols whose definitions must
   be retained are also added to REACHABLE:

   - symbols defined in this unit (non-DECL_EXTERNAL, or aliases),
   - before inlining (BEFORE_INLINING_P), external definitions that may
     still be inlined: variables, optimized functions, and always_inline
     functions prior to IPA_SSA,
   - variable constructors usable for constant folding during WPA /
     incremental LTO linking, so partitioning knows about potential
     references.

   When a retained symbol is an external alias, its ultimate target body
   is retained as well so it is not optimized out.  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    bool before_inlining_p,
		    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (((before_inlining_p
		    && (TREE_CODE (node->decl) != FUNCTION_DECL
			|| (TREE_CODE (node->decl) == FUNCTION_DECL
			    && opt_for_fn (body->decl, optimize))
			|| (symtab->state < IPA_SSA
			    && lookup_attribute
				("always_inline",
				 DECL_ATTRIBUTES (body->decl))))))
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (VAR_P (node->decl)
		      && (flag_wpa
			  || flag_incremental_link
				 == INCREMENTAL_LINK_LTO)
		      && dyn_cast <varpool_node *> (node)
			   ->ctor_useable_for_folding_p ()))))
	{
	  /* Be sure that we will not optimize out alias target
	     body.  */
	  if (DECL_EXTERNAL (node->decl)
	      && node->alias
	      && before_inlining_p)
	    reachable->add (body);
	  reachable->add (node);
	}
      /* Referred symbols always enter the queue (possibly only as
	 boundary).  */
      enqueue_node (node, first, reachable);
    }
}
150
3462aa02
JH
/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try trivial devirtualization when no or only one target is
   possible.

   REACHABLE_CALL_TARGETS caches the polymorphic-call cache tokens already
   walked, so each set of possible targets is only processed once.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       hash_set<symtab_node *> *reachable,
			       bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  /* add returns true when the token was already present — in that case the
     target list has been walked before and we skip straight to the
     devirtualization attempt below.  */
  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		   (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
	    continue;

	  n->indirect_call_target = true;
	  symtab_node *body = n->function_symbol ();

	  /* Prior inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (before_inlining_p
		  && opt_for_fn (body->decl, optimize)
		  && opt_for_fn (body->decl, flag_devirtualize)))
	    {
	      /* Be sure that we will not optimize out alias target
		 body.  */
	      if (DECL_EXTERNAL (n->decl)
		  && n->alias
		  && before_inlining_p)
		reachable->add (body);
	      reachable->add (n);
	    }
	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    /* Zero possible targets: the call can never happen, redirect
	       to __builtin_unreachable.  */
	    target = cgraph_node::get_create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (dump_enabled_p ())
	    {
	      location_t locus;
	      if (edge->call_stmt)
		locus = gimple_location (edge->call_stmt);
	      else
		locus = UNKNOWN_LOCATION;
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->dump_name (),
			       target->dump_name ());
	    }
	  edge = edge->make_direct (target);
	  if (ipa_fn_summaries)
	    ipa_update_overall_fn_summary (node);
	  else if (edge->call_stmt)
	    edge->redirect_call_stmt_to_callee ();
	}
    }
}
41817394 247
ca31b95f 248/* Perform reachability analysis and reclaim all unreachable nodes.
04142cc3
JH
249
250 The algorithm is basically mark&sweep but with some extra refinements:
251
252 - reachable extern inline functions needs special handling; the bodies needs
253 to stay in memory until inlining in hope that they will be inlined.
254 After inlining we release their bodies and turn them into unanalyzed
255 nodes even when they are reachable.
256
04142cc3
JH
257 - virtual functions are kept in callgraph even if they seem unreachable in
258 hope calls to them will be devirtualized.
259
260 Again we remove them after inlining. In late optimization some
31519c38 261 devirtualization may happen, but it is not important since we won't inline
04142cc3
JH
262 the call. In theory early opts and IPA should work out all important cases.
263
264 - virtual clones needs bodies of their origins for later materialization;
265 this means that we want to keep the body even if the origin is unreachable
266 otherwise. To avoid origin from sitting in the callgraph and being
267 walked by IPA passes, we turn them into unanalyzed nodes with body
268 defined.
269
270 We maintain set of function declaration where body needs to stay in
271 body_needed_for_clonning
272
273 Inline clones represent special case: their declaration match the
274 declaration of origin and cgraph_remove_node already knows how to
275 reshape callgraph and preserve body when offline copy of function or
276 inline clone is being removed.
277
6649df51
JH
278 - C++ virtual tables keyed to other unit are represented as DECL_EXTERNAL
279 variables with DECL_INITIAL set. We finalize these and keep reachable
280 ones around for constant folding purposes. After inlining we however
281 stop walking their references to let everything static referneced by them
282 to be removed when it is otherwise unreachable.
283
04142cc3
JH
284 We maintain queue of both reachable symbols (i.e. defined symbols that needs
285 to stay) and symbols that are in boundary (i.e. external symbols referenced
286 by reachable symbols or origins of clones). The queue is represented
287 as linked list by AUX pointer terminated by 1.
288
31519c38 289 At the end we keep all reachable symbols. For symbols in boundary we always
04142cc3
JH
290 turn definition into a declaration, but we may keep function body around
291 based on body_needed_for_clonning
292
293 All symbols that enter the queue have AUX pointer non-zero and are in the
294 boundary. Pointer set REACHABLE is used to track reachable symbols.
295
296 Every symbol can be visited twice - once as part of boundary and once
297 as real reachable symbol. enqueue_node needs to decide whether the
298 node needs to be re-queued for second processing. For this purpose
299 we set AUX pointer of processed symbols in the boundary to constant 2. */
ca31b95f
JH
300
301bool
17e0fc92 302symbol_table::remove_unreachable_nodes (FILE *file)
ca31b95f 303{
5e20cdc9 304 symtab_node *first = (symtab_node *) (void *) 1;
96fc428c 305 struct cgraph_node *node, *next;
2c8326a5 306 varpool_node *vnode, *vnext;
ca31b95f 307 bool changed = false;
6e2830c3
TS
308 hash_set<symtab_node *> reachable;
309 hash_set<tree> body_needed_for_clonning;
310 hash_set<void *> reachable_call_targets;
29f1e2b1 311 bool before_inlining_p = symtab->state < (!optimize && !in_lto_p ? IPA_SSA
17e0fc92 312 : IPA_SSA_AFTER_INLINING);
ca31b95f 313
3462aa02 314 timevar_push (TV_IPA_UNREACHABLE);
2bf86c84 315 build_type_inheritance_graph ();
10d22567
ZD
316 if (file)
317 fprintf (file, "\nReclaiming functions:");
b2b29377
MM
318 if (flag_checking)
319 {
320 FOR_EACH_FUNCTION (node)
321 gcc_assert (!node->aux);
322 FOR_EACH_VARIABLE (vnode)
323 gcc_assert (!vnode->aux);
324 }
530f3a1b
JH
325 /* Mark functions whose bodies are obviously needed.
326 This is mostly when they can be referenced externally. Inline clones
327 are special since their declarations are shared with master clone and thus
328 cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them. */
c0c123ef
JH
329 FOR_EACH_FUNCTION (node)
330 {
331 node->used_as_abstract_origin = false;
4f4ada6a 332 node->indirect_call_target = false;
67348ccc 333 if (node->definition
c0c123ef 334 && !node->global.inlined_to
67348ccc 335 && !node->in_other_partition
d52f5295 336 && !node->can_remove_if_no_direct_calls_and_refs_p ())
c0c123ef
JH
337 {
338 gcc_assert (!node->global.inlined_to);
6e2830c3
TS
339 reachable.add (node);
340 enqueue_node (node, &first, &reachable);
c0c123ef
JH
341 }
342 else
67348ccc 343 gcc_assert (!node->aux);
c0c123ef 344 }
530f3a1b
JH
345
346 /* Mark variables that are obviously needed. */
04142cc3 347 FOR_EACH_DEFINED_VARIABLE (vnode)
9041d2e6 348 if (!vnode->can_remove_if_no_refs_p()
67348ccc 349 && !vnode->in_other_partition)
04142cc3 350 {
6e2830c3
TS
351 reachable.add (vnode);
352 enqueue_node (vnode, &first, &reachable);
04142cc3
JH
353 }
354
355 /* Perform reachability analysis. */
5e20cdc9 356 while (first != (symtab_node *) (void *) 1)
b34fd25c 357 {
6e2830c3 358 bool in_boundary_p = !reachable.contains (first);
5e20cdc9 359 symtab_node *node = first;
ca31b95f 360
5e20cdc9 361 first = (symtab_node *)first->aux;
19fb0b86 362
04142cc3
JH
363 /* If we are processing symbol in boundary, mark its AUX pointer for
364 possible later re-processing in enqueue_node. */
365 if (in_boundary_p)
4bd019b8
JH
366 {
367 node->aux = (void *)2;
368 if (node->alias && node->analyzed)
369 enqueue_node (node->get_alias_target (), &first, &reachable);
370 }
04142cc3
JH
371 else
372 {
31dad809
JJ
373 if (TREE_CODE (node->decl) == FUNCTION_DECL
374 && DECL_ABSTRACT_ORIGIN (node->decl))
c0c123ef
JH
375 {
376 struct cgraph_node *origin_node
4ad08ee8
JH
377 = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
378 if (origin_node && !origin_node->used_as_abstract_origin)
379 {
380 origin_node->used_as_abstract_origin = true;
381 gcc_assert (!origin_node->prev_sibling_clone);
382 gcc_assert (!origin_node->next_sibling_clone);
383 for (cgraph_node *n = origin_node->clones; n;
384 n = n->next_sibling_clone)
385 if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
386 n->used_as_abstract_origin = true;
4ad08ee8 387 }
c0c123ef 388 }
04142cc3 389 /* If any symbol in a comdat group is reachable, force
1f26ac87
JM
390 all externally visible symbols in the same comdat
391 group to be reachable as well. Comdat-local symbols
392 can be discarded if all uses were inlined. */
67348ccc 393 if (node->same_comdat_group)
04142cc3 394 {
5e20cdc9 395 symtab_node *next;
67348ccc 396 for (next = node->same_comdat_group;
04142cc3 397 next != node;
67348ccc 398 next = next->same_comdat_group)
d52f5295 399 if (!next->comdat_local_p ()
6e2830c3
TS
400 && !reachable.add (next))
401 enqueue_node (next, &first, &reachable);
04142cc3
JH
402 }
403 /* Mark references as reachable. */
6e2830c3 404 process_references (node, &first, before_inlining_p, &reachable);
04142cc3 405 }
19fb0b86 406
7de90a6c 407 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
b34fd25c 408 {
04142cc3
JH
409 /* Mark the callees reachable unless they are direct calls to extern
410 inline functions we decided to not inline. */
411 if (!in_boundary_p)
8a6295ba 412 {
04142cc3 413 struct cgraph_edge *e;
3462aa02 414 /* Keep alive possible targets for devirtualization. */
2bf86c84
JH
415 if (opt_for_fn (cnode->decl, optimize)
416 && opt_for_fn (cnode->decl, flag_devirtualize))
3462aa02
JH
417 {
418 struct cgraph_edge *next;
419 for (e = cnode->indirect_calls; e; e = next)
420 {
421 next = e->next_callee;
422 if (e->indirect_info->polymorphic)
6e2830c3
TS
423 walk_polymorphic_call_targets (&reachable_call_targets,
424 e, &first, &reachable,
3462aa02
JH
425 before_inlining_p);
426 }
427 }
04142cc3 428 for (e = cnode->callees; e; e = e->next_callee)
ed62e0d9 429 {
17e0fc92 430 symtab_node *body = e->callee->function_symbol ();
67348ccc
DM
431 if (e->callee->definition
432 && !e->callee->in_other_partition
ed62e0d9 433 && (!e->inline_failed
67348ccc
DM
434 || !DECL_EXTERNAL (e->callee->decl)
435 || e->callee->alias
17e0fc92
JH
436 || (before_inlining_p
437 && (opt_for_fn (body->decl, optimize)
438 || (symtab->state < IPA_SSA
439 && lookup_attribute
440 ("always_inline",
441 DECL_ATTRIBUTES (body->decl)))))))
789c2741
JH
442 {
443 /* Be sure that we will not optimize out alias target
444 body. */
445 if (DECL_EXTERNAL (e->callee->decl)
446 && e->callee->alias
447 && before_inlining_p)
17e0fc92 448 reachable.add (body);
6e2830c3 449 reachable.add (e->callee);
789c2741 450 }
6e2830c3 451 enqueue_node (e->callee, &first, &reachable);
93a18a70 452 }
04142cc3
JH
453
454 /* When inline clone exists, mark body to be preserved so when removing
455 offline copy of the function we don't kill it. */
4f63dfc6 456 if (cnode->global.inlined_to)
6e2830c3 457 body_needed_for_clonning.add (cnode->decl);
b66887e4 458
4f63dfc6
JH
459 /* For non-inline clones, force their origins to the boundary and ensure
460 that body is not removed. */
461 while (cnode->clone_of)
462 {
67348ccc 463 bool noninline = cnode->clone_of->decl != cnode->decl;
4f63dfc6
JH
464 cnode = cnode->clone_of;
465 if (noninline)
466 {
6e2830c3
TS
467 body_needed_for_clonning.add (cnode->decl);
468 enqueue_node (cnode, &first, &reachable);
4f63dfc6 469 }
b34fd25c 470 }
0136f8f0
AH
471
472 }
4bd019b8
JH
473 else if (cnode->thunk.thunk_p)
474 enqueue_node (cnode->callees->callee, &first, &reachable);
48de5d37 475
0136f8f0
AH
476 /* If any reachable function has simd clones, mark them as
477 reachable as well. */
478 if (cnode->simd_clones)
479 {
480 cgraph_node *next;
481 for (next = cnode->simd_clones;
482 next;
483 next = next->simdclone->next_clone)
484 if (in_boundary_p
6e2830c3
TS
485 || !reachable.add (next))
486 enqueue_node (next, &first, &reachable);
47cb0d7d 487 }
b34fd25c 488 }
6649df51 489 /* When we see constructor of external variable, keep referred nodes in the
5d59b5e1
LC
490 boundary. This will also hold initializers of the external vars NODE
491 refers to. */
7de90a6c 492 varpool_node *vnode = dyn_cast <varpool_node *> (node);
5d59b5e1 493 if (vnode
67348ccc
DM
494 && DECL_EXTERNAL (node->decl)
495 && !vnode->alias
6649df51 496 && in_boundary_p)
5d59b5e1 497 {
d122681a
ML
498 struct ipa_ref *ref = NULL;
499 for (int i = 0; node->iterate_reference (i, ref); i++)
6e2830c3 500 enqueue_node (ref->referred, &first, &reachable);
5d59b5e1 501 }
ca31b95f
JH
502 }
503
04142cc3 504 /* Remove unreachable functions. */
3dafb85c 505 for (node = first_function (); node; node = next)
ca31b95f 506 {
3dafb85c 507 next = next_function (node);
e70670cf
JH
508
509 /* If node is not needed at all, remove it. */
67348ccc 510 if (!node->aux)
ca31b95f 511 {
10d22567 512 if (file)
464d0118 513 fprintf (file, " %s", node->dump_name ());
d52f5295 514 node->remove ();
04142cc3
JH
515 changed = true;
516 }
e70670cf 517 /* If node is unreachable, remove its body. */
6e2830c3 518 else if (!reachable.contains (node))
04142cc3 519 {
d3f2e41e
JH
520 /* We keep definitions of thunks and aliases in the boundary so
521 we can walk to the ultimate alias targets and function symbols
522 reliably. */
523 if (node->alias || node->thunk.thunk_p)
524 ;
525 else if (!body_needed_for_clonning.contains (node->decl)
526 && !node->alias && !node->thunk.thunk_p)
d52f5295 527 node->release_body ();
4f63dfc6 528 else if (!node->clone_of)
67348ccc 529 gcc_assert (in_lto_p || DECL_RESULT (node->decl));
4bd019b8 530 if (node->definition && !node->alias && !node->thunk.thunk_p)
bb853349 531 {
04142cc3 532 if (file)
464d0118 533 fprintf (file, " %s", node->dump_name ());
3d8d0043 534 node->body_removed = true;
67348ccc
DM
535 node->analyzed = false;
536 node->definition = false;
537 node->cpp_implicit_alias = false;
538 node->alias = false;
71e54687 539 node->transparent_alias = false;
d833415c 540 node->thunk.thunk_p = false;
67348ccc 541 node->weakref = false;
8fe91ca8
JH
542 /* After early inlining we drop always_inline attributes on
543 bodies of functions that are still referenced (have their
544 address taken). */
545 DECL_ATTRIBUTES (node->decl)
546 = remove_attribute ("always_inline",
547 DECL_ATTRIBUTES (node->decl));
67348ccc 548 if (!node->in_other_partition)
51a5c0c2 549 node->local.local = false;
d52f5295 550 node->remove_callees ();
d122681a 551 node->remove_all_references ();
bb853349 552 changed = true;
d5e254e1
IE
553 if (node->thunk.thunk_p
554 && node->thunk.add_pointer_bounds_args)
555 {
556 node->thunk.thunk_p = false;
557 node->thunk.add_pointer_bounds_args = false;
558 }
bb853349 559 }
ca31b95f 560 }
4f63dfc6 561 else
d52f5295 562 gcc_assert (node->clone_of || !node->has_gimple_body_p ()
67348ccc 563 || in_lto_p || DECL_RESULT (node->decl));
ca31b95f 564 }
04142cc3
JH
565
566 /* Inline clones might be kept around so their materializing allows further
567 cloning. If the function the clone is inlined into is removed, we need
568 to turn it into normal cone. */
65c70e6b 569 FOR_EACH_FUNCTION (node)
9187e02d 570 {
9187e02d
JH
571 if (node->global.inlined_to
572 && !node->callers)
573 {
574 gcc_assert (node->clones);
d563610d
JH
575 node->global.inlined_to = NULL;
576 update_inlined_to_pointer (node, node);
9187e02d 577 }
67348ccc 578 node->aux = NULL;
9187e02d 579 }
4a444e58 580
04142cc3 581 /* Remove unreachable variables. */
4a444e58 582 if (file)
04142cc3 583 fprintf (file, "\nReclaiming variables:");
3dafb85c 584 for (vnode = first_variable (); vnode; vnode = vnext)
b34fd25c 585 {
3dafb85c 586 vnext = next_variable (vnode);
67348ccc 587 if (!vnode->aux
b9bd2075
JH
588 /* For can_refer_decl_in_current_unit_p we want to track for
589 all external variables if they are defined in other partition
590 or not. */
67348ccc 591 && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
04142cc3 592 {
d2b35c04
JH
593 struct ipa_ref *ref = NULL;
594
595 /* First remove the aliases, so varpool::remove can possibly lookup
596 the constructor and save it for future use. */
597 while (vnode->iterate_direct_aliases (0, ref))
598 {
599 if (file)
464d0118 600 fprintf (file, " %s", ref->referred->dump_name ());
d2b35c04
JH
601 ref->referring->remove ();
602 }
4a444e58 603 if (file)
464d0118 604 fprintf (file, " %s", vnode->dump_name ());
d2b35c04 605 vnext = next_variable (vnode);
775669c1 606 /* Signal removal to the debug machinery. */
5b42d196 607 if (! flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
775669c1
RB
608 {
609 vnode->definition = false;
610 (*debug_hooks->late_global_decl) (vnode->decl);
611 }
d52f5295 612 vnode->remove ();
4a444e58 613 changed = true;
b34fd25c 614 }
4bd019b8 615 else if (!reachable.contains (vnode) && !vnode->alias)
04142cc3 616 {
6a6dac52 617 tree init;
67348ccc 618 if (vnode->definition)
04142cc3
JH
619 {
620 if (file)
fec39fa6 621 fprintf (file, " %s", vnode->name ());
04142cc3
JH
622 changed = true;
623 }
1acc5591 624 /* Keep body if it may be useful for constant folding. */
5b42d196 625 if ((flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
31db0fe0 626 || ((init = ctor_for_folding (vnode->decl)) == error_mark_node))
1acc5591
JH
627 vnode->remove_initializer ();
628 else
629 DECL_INITIAL (vnode->decl) = init;
3d8d0043 630 vnode->body_removed = true;
67348ccc
DM
631 vnode->definition = false;
632 vnode->analyzed = false;
633 vnode->aux = NULL;
e70670cf 634
d52f5295 635 vnode->remove_from_same_comdat_group ();
7b3376a0 636
d122681a 637 vnode->remove_all_references ();
04142cc3
JH
638 }
639 else
67348ccc 640 vnode->aux = NULL;
b34fd25c 641 }
4a444e58 642
04142cc3 643 /* Now update address_taken flags and try to promote functions to be local. */
bd3cdcc0
JH
644 if (file)
645 fprintf (file, "\nClearing address taken flags:");
65c70e6b 646 FOR_EACH_DEFINED_FUNCTION (node)
67348ccc
DM
647 if (node->address_taken
648 && !node->used_from_other_partition)
bd3cdcc0 649 {
1ede94c5 650 if (!node->call_for_symbol_and_aliases
31db0fe0 651 (has_addr_references_p, NULL, true))
bd3cdcc0
JH
652 {
653 if (file)
fec39fa6 654 fprintf (file, " %s", node->name ());
67348ccc 655 node->address_taken = false;
4a444e58 656 changed = true;
4f4ada6a
JH
657 if (node->local_p ()
658 /* Virtual functions may be kept in cgraph just because
659 of possible later devirtualization. Do not mark them as
660 local too early so we won't optimize them out before
661 we are done with polymorphic call analysis. */
662 && (!before_inlining_p
663 || !node->call_for_symbol_and_aliases
664 (is_indirect_call_target_p, NULL, true)))
4a444e58
JH
665 {
666 node->local.local = true;
667 if (file)
668 fprintf (file, " (local)");
669 }
bd3cdcc0
JH
670 }
671 }
10a5dd5d
JH
672 if (file)
673 fprintf (file, "\n");
b34fd25c 674
b2b29377 675 symtab_node::checking_verify_symtab_nodes ();
4537ec0c 676
a8da72b8 677 /* If we removed something, perhaps profile could be improved. */
29f1e2b1 678 if (changed && (optimize || in_lto_p) && ipa_call_summaries)
a8da72b8 679 FOR_EACH_DEFINED_FUNCTION (node)
08f835dc 680 ipa_propagate_frequency (node);
a8da72b8 681
3462aa02 682 timevar_pop (TV_IPA_UNREACHABLE);
ca31b95f
JH
683 return changed;
684}
f4b3ca72 685
6de88c6a
JH
/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to given variable
   do not need to be explicit.

   All four output flags are only ever set, never cleared back, so callers
   may accumulate results over several variables (aliases recurse into this
   function with the same flag pointers).  The loop stops early once
   *EXPLICIT_REFS is false or all three read/write/address flags are
   already known.  */

void
process_references (varpool_node *vnode,
		    bool *written, bool *address_taken,
		    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  /* Volatile variables (or those whose references are not all explicit)
     cannot be reasoned about via the reference list.  */
  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
	      && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
	*address_taken = true;
	break;
      case IPA_REF_LOAD:
	*read = true;
	break;
      case IPA_REF_STORE:
	*written = true;
	break;
      case IPA_REF_ALIAS:
	/* References through an alias count against the alias itself;
	   recurse so they are attributed to this variable too.  */
	process_references (dyn_cast<varpool_node *> (ref->referring), written,
			    address_taken, read, explicit_refs);
	break;
      }
}
721
722/* Set TREE_READONLY bit. */
723
724bool
725set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
726{
727 TREE_READONLY (vnode->decl) = true;
728 return false;
729}
730
731/* Set writeonly bit and clear the initalizer, since it will not be needed. */
732
733bool
dea91a66 734set_writeonly_bit (varpool_node *vnode, void *data)
6de88c6a
JH
735{
736 vnode->writeonly = true;
29f1e2b1 737 if (optimize || in_lto_p)
6de88c6a
JH
738 {
739 DECL_INITIAL (vnode->decl) = NULL;
740 if (!vnode->alias)
dea91a66
JH
741 {
742 if (vnode->num_references ())
743 *(bool *)data = true;
744 vnode->remove_all_references ();
745 }
6de88c6a
JH
746 }
747 return false;
748}
749
750/* Clear addressale bit of VNODE. */
751
752bool
753clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
754{
755 vnode->address_taken = false;
756 TREE_ADDRESSABLE (vnode->decl) = 0;
757 return false;
758}
759
4a444e58
JH
/* Discover variables that have no longer address taken, are read-only or
   write-only, and update their flags.

   Return true when unreachable symbol removal should be done afterwards
   (set_writeonly_bit may have dropped references).

   FIXME: This can not be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we
   do this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

bool
ipa_discover_readonly_nonaddressable_vars (void)
{
  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  /* Only examine variables that could still be improved: skip aliases and
     those already known non-addressable, write-only and read-only.  */
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read,
			    &explicit_refs);
	/* Without a complete, explicit reference list nothing can be
	   concluded about this variable.  */
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
						true);
	  }
	if (!address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && vnode->get_section () == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
	  }
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->name ());
	    vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
						true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}
822
9e97ff61 823/* Generate and emit a static constructor or destructor. WHICH must
31db0fe0
ML
824 be one of 'I' (for a constructor), 'D' (for a destructor).
825 BODY is a STATEMENT_LIST containing GENERIC
d5e254e1
IE
826 statements. PRIORITY is the initialization priority for this
827 constructor or destructor.
9e97ff61 828
3a9ed12a
JH
829 FINAL specify whether the externally visible name for collect2 should
830 be produced. */
831
832static void
833cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
9e97ff61
JH
834{
835 static int counter = 0;
836 char which_buf[16];
837 tree decl, name, resdecl;
838
839 /* The priority is encoded in the constructor or destructor name.
840 collect2 will sort the names and arrange that they are called at
841 program startup. */
3a9ed12a
JH
842 if (final)
843 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
844 else
845 /* Proudce sane name but one not recognizable by collect2, just for the
846 case we fail to inline the function. */
847 sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
9e97ff61
JH
848 name = get_file_function_name (which_buf);
849
850 decl = build_decl (input_location, FUNCTION_DECL, name,
851 build_function_type_list (void_type_node, NULL_TREE));
852 current_function_decl = decl;
853
854 resdecl = build_decl (input_location,
855 RESULT_DECL, NULL_TREE, void_type_node);
856 DECL_ARTIFICIAL (resdecl) = 1;
857 DECL_RESULT (decl) = resdecl;
858 DECL_CONTEXT (resdecl) = decl;
859
860 allocate_struct_function (decl, false);
861
862 TREE_STATIC (decl) = 1;
863 TREE_USED (decl) = 1;
864 DECL_ARTIFICIAL (decl) = 1;
3f2dd8cd 865 DECL_IGNORED_P (decl) = 1;
9e97ff61
JH
866 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
867 DECL_SAVED_TREE (decl) = body;
3a9ed12a 868 if (!targetm.have_ctors_dtors && final)
9e97ff61
JH
869 {
870 TREE_PUBLIC (decl) = 1;
871 DECL_PRESERVE_P (decl) = 1;
872 }
873 DECL_UNINLINABLE (decl) = 1;
874
875 DECL_INITIAL (decl) = make_node (BLOCK);
01771d43 876 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
9e97ff61
JH
877 TREE_USED (DECL_INITIAL (decl)) = 1;
878
879 DECL_SOURCE_LOCATION (decl) = input_location;
880 cfun->function_end_locus = input_location;
881
882 switch (which)
883 {
884 case 'I':
885 DECL_STATIC_CONSTRUCTOR (decl) = 1;
886 decl_init_priority_insert (decl, priority);
887 break;
888 case 'D':
889 DECL_STATIC_DESTRUCTOR (decl) = 1;
890 decl_fini_priority_insert (decl, priority);
891 break;
892 default:
893 gcc_unreachable ();
894 }
895
896 gimplify_function_tree (decl);
897
d52f5295 898 cgraph_node::add_new_function (decl, false);
9e97ff61
JH
899
900 set_cfun (NULL);
901 current_function_decl = NULL;
902}
903
3a9ed12a 904/* Generate and emit a static constructor or destructor. WHICH must
31db0fe0
ML
905 be one of 'I' (for a constructor) or 'D' (for a destructor).
906 BODY is a STATEMENT_LIST containing GENERIC
d5e254e1
IE
907 statements. PRIORITY is the initialization priority for this
908 constructor or destructor. */
3a9ed12a
JH
909
910void
911cgraph_build_static_cdtor (char which, tree body, int priority)
912{
913 cgraph_build_static_cdtor_1 (which, body, priority, false);
914}
9e97ff61 915
9e97ff61
JH
916/* When target does not have ctors and dtors, we call all constructor
917 and destructor by special initialization/destruction function
918 recognized by collect2.
919
920 When we are going to build this function, collect all constructors and
921 destructors and turn them into normal functions. */
922
923static void
37a51997 924record_cdtor_fn (struct cgraph_node *node, vec<tree> *ctors, vec<tree> *dtors)
9e97ff61 925{
67348ccc 926 if (DECL_STATIC_CONSTRUCTOR (node->decl))
37a51997 927 ctors->safe_push (node->decl);
67348ccc 928 if (DECL_STATIC_DESTRUCTOR (node->decl))
37a51997 929 dtors->safe_push (node->decl);
d52f5295 930 node = cgraph_node::get (node->decl);
67348ccc 931 DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
9e97ff61
JH
932}
933
/* Define global constructors/destructor functions for the CDTORS, of
   which they are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, const vec<tree> &cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      /* Advance J past the run [I, J) of cdtors sharing the same
	 priority; CDTORS is already sorted by priority, so the run is
	 contiguous.  PRIORITY records the run's common priority.  */
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  Each member is turned into a plain
	 function (its static-cdtor flag cleared) and a call to it is
	 appended to BODY.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
998
999/* Comparison function for qsort. P1 and P2 are actually of type
1000 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1001 used to determine the sort order. */
1002
1003static int
1004compare_ctor (const void *p1, const void *p2)
1005{
1006 tree f1;
1007 tree f2;
1008 int priority1;
1009 int priority2;
1010
1011 f1 = *(const tree *)p1;
1012 f2 = *(const tree *)p2;
1013 priority1 = DECL_INIT_PRIORITY (f1);
1014 priority2 = DECL_INIT_PRIORITY (f2);
1015
1016 if (priority1 < priority2)
1017 return -1;
1018 else if (priority1 > priority2)
1019 return 1;
1020 else
1021 /* Ensure a stable sort. Constructors are executed in backwarding
1022 order to make LTO initialize braries first. */
1023 return DECL_UID (f2) - DECL_UID (f1);
1024}
1025
1026/* Comparison function for qsort. P1 and P2 are actually of type
1027 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1028 used to determine the sort order. */
1029
1030static int
1031compare_dtor (const void *p1, const void *p2)
1032{
1033 tree f1;
1034 tree f2;
1035 int priority1;
1036 int priority2;
1037
1038 f1 = *(const tree *)p1;
1039 f2 = *(const tree *)p2;
1040 priority1 = DECL_FINI_PRIORITY (f1);
1041 priority2 = DECL_FINI_PRIORITY (f2);
1042
1043 if (priority1 < priority2)
1044 return -1;
1045 else if (priority1 > priority2)
1046 return 1;
1047 else
1048 /* Ensure a stable sort. */
1049 return DECL_UID (f1) - DECL_UID (f2);
1050}
1051
1052/* Generate functions to call static constructors and destructors
1053 for targets that do not support .ctors/.dtors sections. These
1054 functions have magic names which are detected by collect2. */
1055
1056static void
37a51997 1057build_cdtor_fns (vec<tree> *ctors, vec<tree> *dtors)
9e97ff61 1058{
37a51997 1059 if (!ctors->is_empty ())
9e97ff61
JH
1060 {
1061 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
37a51997
TS
1062 ctors->qsort (compare_ctor);
1063 build_cdtor (/*ctor_p=*/true, *ctors);
9e97ff61
JH
1064 }
1065
37a51997 1066 if (!dtors->is_empty ())
9e97ff61
JH
1067 {
1068 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
37a51997
TS
1069 dtors->qsort (compare_dtor);
1070 build_cdtor (/*ctor_p=*/false, *dtors);
9e97ff61
JH
1071 }
1072}
1073
1074/* Look for constructors and destructors and produce function calling them.
1075 This is needed for targets not supporting ctors or dtors, but we perform the
073a8998 1076 transformation also at linktime to merge possibly numerous
9e97ff61
JH
1077 constructors/destructors into single function to improve code locality and
1078 reduce size. */
1079
1080static unsigned int
1081ipa_cdtor_merge (void)
1082{
37a51997
TS
1083 /* A vector of FUNCTION_DECLs declared as static constructors. */
1084 auto_vec<tree, 20> ctors;
1085 /* A vector of FUNCTION_DECLs declared as static destructors. */
1086 auto_vec<tree, 20> dtors;
9e97ff61 1087 struct cgraph_node *node;
65c70e6b 1088 FOR_EACH_DEFINED_FUNCTION (node)
67348ccc
DM
1089 if (DECL_STATIC_CONSTRUCTOR (node->decl)
1090 || DECL_STATIC_DESTRUCTOR (node->decl))
37a51997
TS
1091 record_cdtor_fn (node, &ctors, &dtors);
1092 build_cdtor_fns (&ctors, &dtors);
9e97ff61
JH
1093 return 0;
1094}
1095
17795822
TS
namespace {

/* Pass descriptor for the constructor/destructor merging IPA pass.  */
const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass wrapper around ipa_cdtor_merge; all summary hooks are NULL
   since the pass keeps no per-function summaries.  */
class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into single
     function.  */
  return !targetm.have_ctors_dtors || in_lto_p;
}

} // anon namespace
1143
27a4cd48
DM
/* Create a new instance of the cdtor-merge pass; called from the pass
   manager (passes.def).  Caller owns the returned object.  */

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}
eb6a09a7
JH
1149
1150/* Invalid pointer representing BOTTOM for single user dataflow. */
1151#define BOTTOM ((cgraph_node *)(size_t) 2)
1152
1153/* Meet operation for single user dataflow.
1154 Here we want to associate variables with sigle function that may access it.
1155
1156 FUNCTION is current single user of a variable, VAR is variable that uses it.
1157 Latttice is stored in SINGLE_USER_MAP.
1158
1159 We represent:
1160 - TOP by no entry in SIGNLE_USER_MAP
1161 - BOTTOM by BOTTOM in AUX pointer (to save lookups)
1162 - known single user by cgraph pointer in SINGLE_USER_MAP. */
1163
1164cgraph_node *
1165meet (cgraph_node *function, varpool_node *var,
1eb68d2d 1166 hash_map<varpool_node *, cgraph_node *> &single_user_map)
eb6a09a7
JH
1167{
1168 struct cgraph_node *user, **f;
1169
1170 if (var->aux == BOTTOM)
1171 return BOTTOM;
1172
1eb68d2d 1173 f = single_user_map.get (var);
eb6a09a7
JH
1174 if (!f)
1175 return function;
1176 user = *f;
1177 if (!function)
1178 return user;
1179 else if (function != user)
1180 return BOTTOM;
1181 else
1182 return function;
1183}
1184
/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they are used by single function FUNCTION.
   SINGLE_USER_MAP represents the dataflow lattice.  Returns the new lattice
   value for VNODE: NULL (TOP), a single cgraph node, or BOTTOM.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
		       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  /* Externally visible variables can never have a single known user.  */
  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
	{
	  /* Attribute references from inline clones to the function they
	     were inlined into.  */
	  if (cnode->global.inlined_to)
	    cnode = cnode->global.inlined_to;
	  if (!function)
	    function = cnode;
	  else if (function != cnode)
	    function = BOTTOM;
	}
      else
	/* The referring symbol is another variable; meet with that
	   variable's current single user.  */
	function = meet (function, dyn_cast <varpool_node *> (ref->referring),
			 single_user_map);
    }
  return function;
}
1222
/* Pass setting used_by_single_function flag.
   This flag is set on variable when there is only one function that may
   possibly refer to it.

   Implemented as a worklist dataflow: variables are chained through their
   AUX pointers, with the sentinel value 1 terminating the list and BOTTOM
   marking variables excluded from the analysis.  */

static unsigned int
ipa_single_use (void)
{
  /* Head of the worklist; the value 1 is the end-of-list sentinel.  */
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      /* Some references may be hidden from us; give up on this var.  */
      var->aux = BOTTOM;
    else
      {
	/* Enqueue symbol for dataflow.  */
	var->aux = first;
	first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      /* Pop the head of the worklist.  */
      var = first;
      first = (varpool_node *)first->aux;

      f = single_user_map.get (var);
      if (f)
	orig_user = *f;
      else
	orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references.  */
      if (user != orig_user)
	{
	  unsigned int i;
	  ipa_ref *ref;

	  single_user_map.put (var, user);

	  /* Enqueue all aliases for re-processing.  */
	  for (i = 0; var->iterate_direct_aliases (i, ref); i++)
	    if (!ref->referring->aux)
	      {
		ref->referring->aux = first;
		first = dyn_cast <varpool_node *> (ref->referring);
	      }
	  /* Enqueue all users for re-processing.  */
	  for (i = 0; var->iterate_reference (i, ref); i++)
	    if (!ref->referred->aux
		&& ref->referred->definition
		&& is_a <varpool_node *> (ref->referred))
	      {
		ref->referred->aux = first;
		first = dyn_cast <varpool_node *> (ref->referred);
	      }

	  /* If user is BOTTOM, just punt on this var.  */
	  if (user == BOTTOM)
	    var->aux = BOTTOM;
	  else
	    var->aux = NULL;
	}
      else
	var->aux = NULL;
    }

  /* Harvest the results: anything not BOTTOM now has a known single
     user recorded in SINGLE_USER_MAP.  */
  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
	{
	  /* Not having the single user known means that the VAR is
	     unreachable.  Either someone forgot to remove unreachable
	     variables or the reachability here is wrong.  */

	  gcc_checking_assert (single_user_map.get (var));

	  if (dump_file)
	    {
	      fprintf (dump_file, "Variable %s is used by single function\n",
		       var->dump_name ());
	    }
	  var->used_by_single_function = true;
	}
      var->aux = NULL;
    }
  return 0;
}
1318
17795822
TS
namespace {

/* Pass descriptor for the single-use variable discovery pass.  */
const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass wrapper around ipa_single_use; no gate, so it always runs,
   and it keeps no per-function summaries (all hooks NULL).  */
class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

} // anon namespace
1356
eb6a09a7
JH
/* Create a new instance of the single-use pass; called from the pass
   manager (passes.def).  Caller owns the returned object.  */

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}
f0251020
RB
1362
/* Materialize all clones.  */

namespace {

/* Pass descriptor for clone materialization.  */
const pass_data pass_data_materialize_all_clones =
{
  SIMPLE_IPA_PASS, /* type */
  "materialize-all-clones", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_OPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Simple IPA pass that asks the symbol table to give every virtual
   clone a real function body.  */
class pass_materialize_all_clones : public simple_ipa_opt_pass
{
public:
  pass_materialize_all_clones (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_materialize_all_clones, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
  {
    symtab->materialize_all_clones ();
    return 0;
  }

}; // class pass_materialize_all_clones

} // anon namespace
1397
/* Create a new instance of the clone-materialization pass; called from
   the pass manager (passes.def).  Caller owns the returned object.  */

simple_ipa_opt_pass *
make_pass_materialize_all_clones (gcc::context *ctxt)
{
  return new pass_materialize_all_clones (ctxt);
}