/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "calls.h"
#include "stringpool.h"
#include "cgraph.h"
#include "tree-pass.h"
#include "hash-map.h"
#include "hash-set.h"
#include "gimple-expr.h"
#include "gimplify.h"
#include "flags.h"
#include "target.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "ipa-inline.h"
#include "tree-inline.h"
#include "profile.h"
#include "params.h"
#include "internal-fn.h"
#include "tree-ssa-alias.h"
#include "gimple.h"
#include "dbgcnt.h"


/* Return true when NODE has ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
                       void *data ATTRIBUTE_UNUSED)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}
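
/* Illustrative note (not part of the original sources): this helper is a
   callback for cgraph_node::call_for_symbol_thunks_and_aliases.  A minimal
   sketch of the intended use, mirroring symtab_remove_unreachable_nodes
   below:

     if (!node->call_for_symbol_thunks_and_aliases (has_addr_references_p,
                                                    NULL, true))
       node->address_taken = false;

   i.e. the address_taken flag may be dropped only when no alias or thunk
   of NODE is referenced by address.  */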

/* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
        e->callee->global.inlined_to = inlined_to;
        update_inlined_to_pointer (e->callee, inlined_to);
      }
}
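
/* Illustrative note (not part of the original sources): the helper above is
   used when the function an inline clone was inlined into gets removed.  A
   minimal sketch of that fix-up, as done near the end of
   symtab_remove_unreachable_nodes:

     if (node->global.inlined_to && !node->callers)
       {
         node->global.inlined_to = NULL;
         update_inlined_to_pointer (node, node);
       }

   which turns NODE back into an offline copy and repoints its inlined
   callees at it.  */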

/* Add symtab NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by a pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable or when we
   find their bodies needed for further cloning.  In the second case we mark
   them by a pointer to 2 after processing, so they are re-queued once they
   become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
              hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *) 2 && !reachable->contains (node))
    return;
  node->aux = *first;
  *first = node;
}
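
/* Illustrative note (not part of the original sources): the AUX-linked
   worklist that enqueue_node builds is drained with the usual pattern,
   where FIRST starts out as the sentinel (symtab_node *) (void *) 1:

     while (first != (symtab_node *) (void *) 1)
       {
         symtab_node *node = first;
         first = (symtab_node *) first->aux;
         ... process NODE ...
       }

   symtab_remove_unreachable_nodes below uses exactly this loop.  */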

/* Process references.  */

static void
process_references (symtab_node *snode,
                    symtab_node **first,
                    bool before_inlining_p,
                    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;

      if (node->definition && !node->in_other_partition
          && ((!DECL_EXTERNAL (node->decl) || node->alias)
              || (((before_inlining_p
                    && (cgraph_state < CGRAPH_STATE_IPA_SSA
                        || !lookup_attribute ("always_inline",
                                              DECL_ATTRIBUTES (node->decl)))))
                  /* We use variable constructors during late compilation for
                     constant folding.  Keep references alive so partitioning
                     knows about potential references.  */
                  || (TREE_CODE (node->decl) == VAR_DECL
                      && flag_wpa
                      && ctor_for_folding (node->decl)
                         != error_mark_node))))
        reachable->add (node);
      enqueue_node (node, first, reachable);
    }
}

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to make trivial devirtualization when no or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
                               struct cgraph_edge *edge,
                               symtab_node **first,
                               hash_set<symtab_node *> *reachable,
                               bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *> targets
    = possible_polymorphic_call_targets
        (edge, &final, &cache_token);

  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
        {
          struct cgraph_node *n = targets[i];

          /* Do not bother to mark virtual methods in anonymous namespace;
             either we will find use of the virtual table defining it, or it is
             unused.  */
          if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
              && type_in_anonymous_namespace_p
                   (method_class_type (TREE_TYPE (n->decl))))
            continue;

          /* Prior to inlining, keep alive bodies of possible targets for
             devirtualization.  */
          if (n->definition
              && (before_inlining_p
                  && (cgraph_state < CGRAPH_STATE_IPA_SSA
                      || !lookup_attribute ("always_inline",
                                            DECL_ATTRIBUTES (n->decl)))))
            reachable->add (n);

          /* Even after inlining we want to keep the possible targets in the
             boundary, so late passes can still produce a direct call even if
             the chance for inlining is lost.  */
          enqueue_node (n, first, reachable);
        }
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
        {
          cgraph_node *target, *node = edge->caller;
          if (targets.length () == 1)
            target = targets[0];
          else
            target = cgraph_node::get_create
                       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

          if (dump_enabled_p ())
            {
              location_t locus = gimple_location (edge->call_stmt);
              dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
                               "devirtualizing call in %s/%i to %s/%i\n",
                               edge->caller->name (), edge->caller->order,
                               target->name (),
                               target->order);
            }
          edge = cgraph_make_edge_direct (edge, target);
          if (inline_summary_vec)
            inline_update_overall_summary (node);
          else if (edge->call_stmt)
            cgraph_redirect_edge_call_stmt_to_callee (edge);
        }
    }
}
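
/* Illustrative note (not part of the original sources): during the
   reachability walk below, the function above is applied to every
   polymorphic indirect call of a reachable function, roughly:

     for (cgraph_edge *e = cnode->indirect_calls; e; e = e->next_callee)
       if (e->indirect_info->polymorphic)
         walk_polymorphic_call_targets (&reachable_call_targets, e,
                                        &first, &reachable,
                                        before_inlining_p);

   REACHABLE_CALL_TARGETS memoizes already visited target sets via the
   cache token returned by possible_polymorphic_call_targets.  */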

/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; their bodies need
     to stay in memory until inlining, in the hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

     BEFORE_INLINING_P specifies whether we are before or after inlining.

   - virtual functions are kept in the callgraph even if they seem unreachable,
     in the hope that calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call.  In theory early opts and IPA should work out all important cases.

   - virtual clones need the bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is otherwise
     unreachable.  To avoid the origin sitting in the callgraph and being
     walked by IPA passes, we turn it into an unanalyzed node with the body
     defined.

     We maintain the set of function declarations whose bodies need to stay in
     body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when an offline copy of a
     function or an inline clone is being removed.

   - C++ virtual tables keyed to other units are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that need
   to stay) and symbols that are in the boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as a linked list by the AUX pointer terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn the definition into a declaration, but we may keep the function
   body around based on body_needed_for_clonning.

   All symbols that enter the queue have the AUX pointer non-zero and are in
   the boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of the boundary and once
   as a real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set the AUX pointer of processed symbols in the boundary to constant 2.  */

bool
symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;

  timevar_push (TV_IPA_UNREACHABLE);
  if (optimize && flag_devirtualize)
    build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->aux);
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);
#endif
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with the master clone and
     thus cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called
     on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      if (node->definition
          && !node->global.inlined_to
          && !node->in_other_partition
          && !node->can_remove_if_no_direct_calls_and_refs_p ())
        {
          gcc_assert (!node->global.inlined_to);
          reachable.add (node);
          enqueue_node (node, &first, &reachable);
        }
      else
        gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p ()
        && !vnode->in_other_partition)
      {
        reachable.add (vnode);
        enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *) first->aux;

      /* If we are processing a symbol in the boundary, mark its AUX pointer
         for possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
        node->aux = (void *) 2;
      else
        {
          if (TREE_CODE (node->decl) == FUNCTION_DECL
              && DECL_ABSTRACT_ORIGIN (node->decl))
            {
              struct cgraph_node *origin_node
                = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (node->decl));
              origin_node->used_as_abstract_origin = true;
              enqueue_node (origin_node, &first, &reachable);
            }
          /* If any symbol in a comdat group is reachable, force
             all externally visible symbols in the same comdat
             group to be reachable as well.  Comdat-local symbols
             can be discarded if all uses were inlined.  */
          if (node->same_comdat_group)
            {
              symtab_node *next;
              for (next = node->same_comdat_group;
                   next != node;
                   next = next->same_comdat_group)
                if (!next->comdat_local_p ()
                    && !reachable.add (next))
                  enqueue_node (next, &first, &reachable);
            }
          /* Mark references as reachable.  */
          process_references (node, &first, before_inlining_p, &reachable);
        }

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
        {
          /* Mark the callees reachable unless they are direct calls to extern
             inline functions we decided to not inline.  */
          if (!in_boundary_p)
            {
              struct cgraph_edge *e;
              /* Keep alive possible targets for devirtualization.  */
              if (optimize && flag_devirtualize)
                {
                  struct cgraph_edge *next;
                  for (e = cnode->indirect_calls; e; e = next)
                    {
                      next = e->next_callee;
                      if (e->indirect_info->polymorphic)
                        walk_polymorphic_call_targets (&reachable_call_targets,
                                                       e, &first, &reachable,
                                                       before_inlining_p);
                    }
                }
              for (e = cnode->callees; e; e = e->next_callee)
                {
                  if (e->callee->definition
                      && !e->callee->in_other_partition
                      && (!e->inline_failed
                          || !DECL_EXTERNAL (e->callee->decl)
                          || e->callee->alias
                          || before_inlining_p))
                    {
                      /* Be sure that we will not optimize out the alias
                         target body.  */
                      if (DECL_EXTERNAL (e->callee->decl)
                          && e->callee->alias
                          && before_inlining_p)
                        reachable.add (e->callee->function_symbol ());
                      reachable.add (e->callee);
                    }
                  enqueue_node (e->callee, &first, &reachable);
                }

              /* When an inline clone exists, mark the body to be preserved
                 so that when removing the offline copy of the function we
                 don't kill it.  */
              if (cnode->global.inlined_to)
                body_needed_for_clonning.add (cnode->decl);

              /* For non-inline clones, force their origins to the boundary
                 and ensure that the body is not removed.  */
              while (cnode->clone_of)
                {
                  bool noninline = cnode->clone_of->decl != cnode->decl;
                  cnode = cnode->clone_of;
                  if (noninline)
                    {
                      body_needed_for_clonning.add (cnode->decl);
                      enqueue_node (cnode, &first, &reachable);
                    }
                }

            }
          /* If any reachable function has simd clones, mark them as
             reachable as well.  */
          if (cnode->simd_clones)
            {
              cgraph_node *next;
              for (next = cnode->simd_clones;
                   next;
                   next = next->simdclone->next_clone)
                if (in_boundary_p
                    || !reachable.add (next))
                  enqueue_node (next, &first, &reachable);
            }
        }
      /* When we see the constructor of an external variable, keep referred
         nodes in the boundary.  This will also hold initializers of the
         external vars NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
          && DECL_EXTERNAL (node->decl)
          && !vnode->alias
          && in_boundary_p)
        {
          struct ipa_ref *ref = NULL;
          for (int i = 0; node->iterate_reference (i, ref); i++)
            enqueue_node (ref->referred, &first, &reachable);
        }
    }

  /* Remove unreachable functions.  */
  for (node = cgraph_first_function (); node; node = next)
    {
      next = cgraph_next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
        {
          if (file)
            fprintf (file, " %s/%i", node->name (), node->order);
          node->remove ();
          changed = true;
        }
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
        {
          if (!body_needed_for_clonning.contains (node->decl))
            node->release_body ();
          else if (!node->clone_of)
            gcc_assert (in_lto_p || DECL_RESULT (node->decl));
          if (node->definition)
            {
              if (file)
                fprintf (file, " %s/%i", node->name (), node->order);
              node->body_removed = true;
              node->analyzed = false;
              node->definition = false;
              node->cpp_implicit_alias = false;
              node->alias = false;
              node->thunk.thunk_p = false;
              node->weakref = false;
              /* After early inlining we drop always_inline attributes on
                 bodies of functions that are still referenced (have their
                 address taken).  */
              DECL_ATTRIBUTES (node->decl)
                = remove_attribute ("always_inline",
                                    DECL_ATTRIBUTES (node->decl));
              if (!node->in_other_partition)
                node->local.local = false;
              node->remove_callees ();
              node->remove_from_same_comdat_group ();
              node->remove_all_references ();
              changed = true;
            }
        }
      else
        gcc_assert (node->clone_of || !node->has_gimple_body_p ()
                    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so that their materializing allows
     further cloning.  If the function the clone is inlined into is removed,
     we need to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
          && !node->callers)
        {
          gcc_assert (node->clones);
          node->global.inlined_to = NULL;
          update_inlined_to_pointer (node, node);
        }
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = varpool_first_variable (); vnode; vnode = vnext)
    {
      vnext = varpool_next_variable (vnode);
      if (!vnode->aux
          /* For can_refer_decl_in_current_unit_p we want to track for
             all external variables if they are defined in another partition
             or not.  */
          && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
        {
          if (file)
            fprintf (file, " %s/%i", vnode->name (), vnode->order);
          vnode->remove ();
          changed = true;
        }
      else if (!reachable.contains (vnode))
        {
          tree init;
          if (vnode->definition)
            {
              if (file)
                fprintf (file, " %s", vnode->name ());
              changed = true;
            }
          vnode->body_removed = true;
          vnode->definition = false;
          vnode->analyzed = false;
          vnode->aux = NULL;

          vnode->remove_from_same_comdat_group ();

          /* Keep body if it may be useful for constant folding.  */
          if ((init = ctor_for_folding (vnode->decl)) == error_mark_node)
            vnode->remove_initializer ();
          else
            DECL_INITIAL (vnode->decl) = init;
          vnode->remove_all_references ();
        }
      else
        vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be
     local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
        && !node->used_from_other_partition)
      {
        if (!node->call_for_symbol_thunks_and_aliases
             (has_addr_references_p, NULL, true))
          {
            if (file)
              fprintf (file, " %s", node->name ());
            node->address_taken = false;
            changed = true;
            if (node->local_p ())
              {
                node->local.local = true;
                if (file)
                  fprintf (file, " (local)");
              }
          }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  /* If we removed something, perhaps the profile could be improved.  */
  if (changed && optimize && inline_edge_summary_vec.exists ())
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}
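
/* Illustrative note (not part of the original sources): a minimal sketch of
   how the reclamation above is typically invoked, assuming an IPA context
   with an optional dump file:

     bool changed = symtab_remove_unreachable_nodes (true, dump_file);

   The first argument is BEFORE_INLINING_P and selects the pre-inlining
   behaviour described in the comment above; the return value reports
   whether any symbol was removed.  */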

/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to the given variable
   do not need to be explicit.  */

void
process_references (varpool_node *vnode,
                    bool *written, bool *address_taken,
                    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
              && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
        *address_taken = true;
        break;
      case IPA_REF_LOAD:
        *read = true;
        break;
      case IPA_REF_STORE:
        *written = true;
        break;
      case IPA_REF_ALIAS:
        process_references (dyn_cast<varpool_node *> (ref->referring), written,
                            address_taken, read, explicit_refs);
        break;
      }
}
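
/* Illustrative note (not part of the original sources): the discovery pass
   below drives this analysis roughly as

     bool written = false, address_taken = false, read = false;
     bool explicit_refs = true;
     process_references (vnode, &written, &address_taken, &read,
                         &explicit_refs);

   and then, when EXPLICIT_REFS stayed true, uses the three flags to clear
   TREE_ADDRESSABLE, set TREE_READONLY or mark the variable write-only.  */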

/* Set TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set writeonly bit and clear the initializer, since it will not be needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->writeonly = true;
  if (optimize)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
        vnode->remove_all_references ();
    }
  return false;
}

/* Clear addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables that no longer have their address taken or that are
   read-only and update their flags.

   FIXME: This cannot be done between gimplify and omp_expand since the
   readonly flag plays a role in what is shared and what is not.  Currently we
   do this transformation as part of whole program visibility and redo it in
   the ipa-reference pass (to take cloning into account), but it would
   make sense to do it before early optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
        && (TREE_ADDRESSABLE (vnode->decl)
            || !vnode->writeonly
            || !TREE_READONLY (vnode->decl)))
      {
        bool written = false;
        bool address_taken = false;
        bool read = false;
        bool explicit_refs = true;

        process_references (vnode, &written, &address_taken, &read, &explicit_refs);
        if (!explicit_refs)
          continue;
        if (!address_taken)
          {
            if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (non-addressable)", vnode->name ());
            vnode->call_for_node_and_aliases (clear_addressable_bit, NULL, true);
          }
        if (!address_taken && !written
            /* Making a variable in an explicit section read-only can cause
               a section type conflict.
               See e.g. gcc.c-torture/compile/pr23237.c */
            && vnode->get_section () == NULL)
          {
            if (!TREE_READONLY (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (read-only)", vnode->name ());
            vnode->call_for_node_and_aliases (set_readonly_bit, NULL, true);
          }
        if (!vnode->writeonly && !read && !address_taken && written)
          {
            if (dump_file)
              fprintf (dump_file, " %s (write-only)", vnode->name ());
            vnode->call_for_node_and_aliases (set_writeonly_bit, NULL, true);
          }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}

/* Free inline summary.  */

namespace {

const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "free-inline-summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  /* Early optimizations may make functions unreachable.  We cannot
     remove unreachable functions as part of the early opts pass because
     TODOs are run before subpasses.  Do it here.  */
  ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
};

class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      inline_free_summary ();
      return 0;
    }

}; // class pass_ipa_free_inline_summary

} // anon namespace

simple_ipa_opt_pass *
make_pass_ipa_free_inline_summary (gcc::context *ctxt)
{
  return new pass_ipa_free_inline_summary (ctxt);
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced.  */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
    /* Produce a sane name, but one not recognizable by collect2, just for
       the case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
                     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
                        RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}
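
/* Illustrative note (not part of the original sources): a minimal sketch of
   how a pass might emit a static initializer with the routine above;
   SOME_INIT_FN stands for a hypothetical FUNCTION_DECL taking no arguments:

     tree body = NULL_TREE;
     append_to_statement_list (build_call_expr (some_init_fn, 0), &body);
     cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);

   The same shape with 'D' produces a static destructor.  */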

/* A vector of FUNCTION_DECLs declared as static constructors.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static vec<tree> static_dtors;

/* When the target does not have ctors and dtors, we call all constructors
   and destructors through a special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    static_ctors.safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    static_dtors.safe_push (node->decl);
  node = cgraph_node::get (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructor/destructor functions for the CDTORS.  The
   CDTORS are sorted by initialization priority.  If CTOR_P is true,
   these are constructors; otherwise, they are destructors.  */

static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i, j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      do
        {
          priority_type p;
          fn = cdtors[j];
          p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
          if (j == i)
            priority = p;
          else if (p != priority)
            break;
          j++;
        }
      while (j < len);

      /* When there is only one cdtor and the target supports them, do
         nothing.  */
      if (j == i + 1
          && targetm.have_ctors_dtors)
        {
          i++;
          continue;
        }
      /* Find the next batch of constructors/destructors with the same
         initialization priority.  */
      for (; i < j; i++)
        {
          tree call;
          fn = cdtors[i];
          call = build_call_expr (fn, 0);
          if (ctor_p)
            DECL_STATIC_CONSTRUCTOR (fn) = 0;
          else
            DECL_STATIC_DESTRUCTOR (fn) = 0;
          /* We do not want to optimize away pure/const calls here.
             When optimizing, these should already be removed; when not
             optimizing, we want the user to be able to breakpoint in them.  */
          TREE_SIDE_EFFECTS (call) = 1;
          append_to_statement_list (call, &body);
        }
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
         priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in backward
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (void)
{
  if (!static_ctors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_ctors.qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!static_dtors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_dtors.qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}

/* Look for constructors and destructors and produce functions calling them.
   This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at link time to merge possibly numerous
   constructors/destructors into a single function to improve code locality
   and reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
        || DECL_STATIC_DESTRUCTOR (node->decl))
      record_cdtor_fn (node);
  build_cdtor_fns ();
  static_ctors.release ();
  static_dtors.release ();
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into a single
     function.  */
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}

/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate variables with a single function that may access
   them.

   FUNCTION is the current single user of a variable, VAR is a variable that
   uses it.  The lattice is stored in SINGLE_USER_MAP.

   We represent:
    - TOP by no entry in SINGLE_USER_MAP
    - BOTTOM by BOTTOM in AUX pointer (to save lookups)
    - known single user by cgraph pointer in SINGLE_USER_MAP.  */

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
      hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}
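
/* Illustrative note (not part of the original sources): the lattice above
   behaves as follows, writing TOP for "no SINGLE_USER_MAP entry yet":

     meet (NULL,  TOP)    == NULL        (still unconstrained)
     meet (NULL,  user)   == user
     meet (user,  user)   == user
     meet (user1, user2)  == BOTTOM      (two distinct users)
     meet (x,     BOTTOM) == BOTTOM

   i.e. a variable keeps a concrete single user only while every reference
   agrees on the same cgraph_node.  */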

/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they are used by single function FUNCTION.
   SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
                       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
        {
          if (cnode->global.inlined_to)
            cnode = cnode->global.inlined_to;
          if (!function)
            function = cnode;
          else if (function != cnode)
            function = BOTTOM;
        }
      else
        function = meet (function, dyn_cast <varpool_node *> (ref->referring),
                         single_user_map);
    }
  return function;
}

/* Pass setting the used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
        /* Enqueue symbol for dataflow.  */
        var->aux = first;
        first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *)first->aux;

      f = single_user_map.get (var);
      if (f)
        orig_user = *f;
      else
        orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references.  */
      if (user != orig_user)
        {
          unsigned int i;
          ipa_ref *ref;

          single_user_map.put (var, user);

          /* Enqueue all aliases for re-processing.  */
          for (i = 0; var->iterate_referring (i, ref); i++)
            if (ref->use == IPA_REF_ALIAS
                && !ref->referring->aux)
              {
                ref->referring->aux = first;
                first = dyn_cast <varpool_node *> (ref->referring);
              }
          /* Enqueue all users for re-processing.  */
          for (i = 0; var->iterate_reference (i, ref); i++)
            if (!ref->referred->aux
                && ref->referred->definition
                && is_a <varpool_node *> (ref->referred))
              {
                ref->referred->aux = first;
                first = dyn_cast <varpool_node *> (ref->referred);
              }

          /* If user is BOTTOM, just punt on this var.  */
          if (user == BOTTOM)
            var->aux = BOTTOM;
          else
            var->aux = NULL;
        }
      else
        var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
        {
#ifdef ENABLE_CHECKING
          if (!single_user_map.get (var))
            gcc_assert (single_user_map.get (var));
#endif
          if (dump_file)
            {
              fprintf (dump_file, "Variable %s/%i is used by single function\n",
                       var->name (), var->order);
            }
          var->used_by_single_function = true;
        }
      var->aux = NULL;
    }
  return 0;
}

namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

bool
pass_ipa_single_use::gate (function *)
{
  return optimize;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}