/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "calls.h"
#include "stringpool.h"
#include "cgraph.h"
#include "tree-pass.h"
#include "hash-map.h"
#include "pointer-set.h"
#include "gimple-expr.h"
#include "gimplify.h"
#include "flags.h"
#include "target.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "ipa-inline.h"
#include "tree-inline.h"
#include "profile.h"
#include "params.h"
#include "internal-fn.h"
#include "tree-ssa-alias.h"
#include "gimple.h"
#include "dbgcnt.h"


/* Return true when NODE has ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
                       void *data ATTRIBUTE_UNUSED)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}

/* Look for all functions inlined to NODE and update their inlined_to
   pointers to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node,
                           struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
        e->callee->global.inlined_to = inlined_to;
        update_inlined_to_pointer (e->callee, inlined_to);
      }
}

/* Add symtab NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable or when we
   find their bodies needed for further cloning.  In the second case we mark
   them by pointer to 2 after processing so they are re-queued when they
   become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
              struct pointer_set_t *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *) 2 && !pointer_set_contains (reachable, node))
    return;
  node->aux = *first;
  *first = node;
}

/* Process references.  */

static void
process_references (symtab_node *snode,
                    symtab_node **first,
                    bool before_inlining_p,
                    struct pointer_set_t *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;

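      /* Every referred symbol is enqueued so it at least stays in the
         boundary; it is recorded as properly reachable only when it is
         defined in this partition and its body is still wanted (it is
         non-external or an alias, is needed before inlining because of
         always_inline, or is a constructor usable for constant folding).  */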
      if (node->definition && !node->in_other_partition
          && ((!DECL_EXTERNAL (node->decl) || node->alias)
              || (((before_inlining_p
                    && (cgraph_state < CGRAPH_STATE_IPA_SSA
                        || !lookup_attribute ("always_inline",
                                              DECL_ATTRIBUTES (node->decl)))))
                  /* We use variable constructors during late compilation for
                     constant folding.  Keep references alive so partitioning
                     knows about potential references.  */
                  || (TREE_CODE (node->decl) == VAR_DECL
                      && flag_wpa
                      && ctor_for_folding (node->decl)
                         != error_mark_node))))
        pointer_set_insert (reachable, node);
      enqueue_node (node, first, reachable);
    }
}

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to perform trivial devirtualization when no or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (pointer_set_t *reachable_call_targets,
                               struct cgraph_edge *edge,
                               symtab_node **first,
                               pointer_set_t *reachable, bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
        (edge, &final, &cache_token);

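  /* pointer_set_insert returns nonzero when CACHE_TOKEN was already in the
     set, so the loop below runs only the first time this particular list of
     targets (identified by its cache token) is encountered.  */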
  if (!pointer_set_insert (reachable_call_targets,
                           cache_token))
    {
      for (i = 0; i < targets.length (); i++)
        {
          struct cgraph_node *n = targets[i];

          /* Do not bother to mark virtual methods in anonymous namespace;
             either we will find use of the virtual table defining it, or it
             is unused.  */
          if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
              && type_in_anonymous_namespace_p
                   (method_class_type (TREE_TYPE (n->decl))))
            continue;

          /* Prior to inlining, keep alive bodies of possible targets for
             devirtualization.  */
          if (n->definition
              && (before_inlining_p
                  && (cgraph_state < CGRAPH_STATE_IPA_SSA
                      || !lookup_attribute ("always_inline",
                                            DECL_ATTRIBUTES (n->decl)))))
            pointer_set_insert (reachable, n);

          /* Even after inlining we want to keep the possible targets in the
             boundary, so late passes can still produce a direct call even if
             the chance for inlining is lost.  */
          enqueue_node (n, first, reachable);
        }
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
        {
          cgraph_node *target, *node = edge->caller;
          if (targets.length () == 1)
            target = targets[0];
          else
            target = cgraph_get_create_node
                       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

          if (dump_enabled_p ())
            {
              location_t locus = gimple_location_safe (edge->call_stmt);
              dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
                               "devirtualizing call in %s/%i to %s/%i\n",
                               edge->caller->name (), edge->caller->order,
                               target->name (),
                               target->order);
            }
          edge = cgraph_make_edge_direct (edge, target);
          if (inline_summary_vec)
            inline_update_overall_summary (node);
          else if (edge->call_stmt)
            cgraph_redirect_edge_call_stmt_to_callee (edge);
        }
    }
}

/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; their bodies
     need to stay in memory until inlining in hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

     BEFORE_INLINING_P specifies whether we are before or after inlining.

   - virtual functions are kept in the callgraph even if they seem
     unreachable, in hope that calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't
     inline the call.  In theory early opts and IPA should work out all
     important cases.

   - virtual clones need the bodies of their origins for later
     materialization; this means that we want to keep the body even if the
     origin is otherwise unreachable.  To avoid the origin from sitting in
     the callgraph and being walked by IPA passes, we turn them into
     unanalyzed nodes with body defined.

     We maintain the set of function declarations whose bodies need to stay
     in body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when an offline copy of a
     function or an inline clone is being removed.

   - C++ virtual tables keyed to other units are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by
     them be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that
   need to stay) and symbols that are in the boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as a linked list by the AUX pointer terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn the definition into a declaration, but we may keep the
   function body around based on body_needed_for_clonning.

   All symbols that enter the queue have the AUX pointer non-zero and are in
   the boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of the boundary and once
   as a real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set the AUX pointer of processed symbols in the boundary to constant
   2.  */

bool
symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  struct pointer_set_t *reachable = pointer_set_create ();
  struct pointer_set_t *body_needed_for_clonning = pointer_set_create ();
  struct pointer_set_t *reachable_call_targets = pointer_set_create ();

  timevar_push (TV_IPA_UNREACHABLE);
  if (optimize && flag_devirtualize)
    build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->aux);
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);
#endif
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with the master clone
     and thus cgraph_can_remove_if_no_direct_calls_and_refs_p should not be
     called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      if (node->definition
          && !node->global.inlined_to
          && !node->in_other_partition
          && !cgraph_can_remove_if_no_direct_calls_and_refs_p (node))
        {
          gcc_assert (!node->global.inlined_to);
          pointer_set_insert (reachable, node);
          enqueue_node (node, &first, reachable);
        }
      else
        gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!varpool_can_remove_if_no_refs (vnode)
        && !vnode->in_other_partition)
      {
        pointer_set_insert (reachable, vnode);
        enqueue_node (vnode, &first, reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
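      /* Symbols inserted into the REACHABLE set were enqueued as properly
         reachable; anything else popped from the queue is only part of the
         boundary.  */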
      bool in_boundary_p = !pointer_set_contains (reachable, first);
      symtab_node *node = first;

      first = (symtab_node *) first->aux;

      /* If we are processing a symbol in the boundary, mark its AUX pointer
         for possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
        node->aux = (void *) 2;
      else
        {
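          /* A function with an abstract origin (e.g. a clone) keeps that
             origin alive at least in the boundary, since the origin
             declaration may still be referred to (for instance by debug
             info).  */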
          if (TREE_CODE (node->decl) == FUNCTION_DECL
              && DECL_ABSTRACT_ORIGIN (node->decl))
            {
              struct cgraph_node *origin_node
                = cgraph_get_create_node (DECL_ABSTRACT_ORIGIN (node->decl));
              origin_node->used_as_abstract_origin = true;
              enqueue_node (origin_node, &first, reachable);
            }
          /* If any symbol in a comdat group is reachable, force
             all externally visible symbols in the same comdat
             group to be reachable as well.  Comdat-local symbols
             can be discarded if all uses were inlined.  */
          if (node->same_comdat_group)
            {
              symtab_node *next;
              for (next = node->same_comdat_group;
                   next != node;
                   next = next->same_comdat_group)
                if (!symtab_comdat_local_p (next)
                    && !pointer_set_insert (reachable, next))
                  enqueue_node (next, &first, reachable);
            }
          /* Mark references as reachable.  */
          process_references (node, &first, before_inlining_p, reachable);
        }

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
        {
          /* Mark the callees reachable unless they are direct calls to extern
             inline functions we decided to not inline.  */
          if (!in_boundary_p)
            {
              struct cgraph_edge *e;
              /* Keep alive possible targets for devirtualization.  */
              if (optimize && flag_devirtualize)
                {
                  struct cgraph_edge *next;
                  for (e = cnode->indirect_calls; e; e = next)
                    {
                      next = e->next_callee;
                      if (e->indirect_info->polymorphic)
                        walk_polymorphic_call_targets (reachable_call_targets,
                                                       e, &first, reachable,
                                                       before_inlining_p);
                    }
                }
              for (e = cnode->callees; e; e = e->next_callee)
                {
                  if (e->callee->definition
                      && !e->callee->in_other_partition
                      && (!e->inline_failed
                          || !DECL_EXTERNAL (e->callee->decl)
                          || e->callee->alias
                          || before_inlining_p))
                    {
                      /* Be sure that we will not optimize out the alias
                         target body.  */
                      if (DECL_EXTERNAL (e->callee->decl)
                          && e->callee->alias
                          && before_inlining_p)
                        {
                          pointer_set_insert (reachable,
                                              cgraph_function_node (e->callee));
                        }
                      pointer_set_insert (reachable, e->callee);
                    }
                  enqueue_node (e->callee, &first, reachable);
                }

              /* When an inline clone exists, mark the body to be preserved
                 so that when removing the offline copy of the function we
                 don't kill it.  */
              if (cnode->global.inlined_to)
                pointer_set_insert (body_needed_for_clonning, cnode->decl);

              /* For non-inline clones, force their origins to the boundary
                 and ensure that the body is not removed.  */
              while (cnode->clone_of)
                {
                  bool noninline = cnode->clone_of->decl != cnode->decl;
                  cnode = cnode->clone_of;
                  if (noninline)
                    {
                      pointer_set_insert (body_needed_for_clonning, cnode->decl);
                      enqueue_node (cnode, &first, reachable);
                    }
                }

            }
          /* If any reachable function has simd clones, mark them as
             reachable as well.  */
          if (cnode->simd_clones)
            {
              cgraph_node *next;
              for (next = cnode->simd_clones;
                   next;
                   next = next->simdclone->next_clone)
                if (in_boundary_p
                    || !pointer_set_insert (reachable, next))
                  enqueue_node (next, &first, reachable);
            }
        }
      /* When we see the constructor of an external variable, keep the
         referred nodes in the boundary.  This will also hold initializers
         of the external vars NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
          && DECL_EXTERNAL (node->decl)
          && !vnode->alias
          && in_boundary_p)
        {
          struct ipa_ref *ref = NULL;
          for (int i = 0; node->iterate_reference (i, ref); i++)
            enqueue_node (ref->referred, &first, reachable);
        }
    }

  /* Remove unreachable functions.  */
  for (node = cgraph_first_function (); node; node = next)
    {
      next = cgraph_next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
        {
          if (file)
            fprintf (file, " %s/%i", node->name (), node->order);
          cgraph_remove_node (node);
          changed = true;
        }
      /* If node is unreachable, remove its body.  */
      else if (!pointer_set_contains (reachable, node))
        {
          if (!pointer_set_contains (body_needed_for_clonning, node->decl))
            cgraph_release_function_body (node);
          else if (!node->clone_of)
            gcc_assert (in_lto_p || DECL_RESULT (node->decl));
          if (node->definition)
            {
              if (file)
                fprintf (file, " %s/%i", node->name (), node->order);
              node->body_removed = true;
              node->analyzed = false;
              node->definition = false;
              node->cpp_implicit_alias = false;
              node->alias = false;
              node->thunk.thunk_p = false;
              node->weakref = false;
              /* After early inlining we drop always_inline attributes on
                 bodies of functions that are still referenced (have their
                 address taken).  */
              DECL_ATTRIBUTES (node->decl)
                = remove_attribute ("always_inline",
                                    DECL_ATTRIBUTES (node->decl));
              if (!node->in_other_partition)
                node->local.local = false;
              cgraph_node_remove_callees (node);
              symtab_remove_from_same_comdat_group (node);
              node->remove_all_references ();
              changed = true;
            }
        }
      else
        gcc_assert (node->clone_of || !cgraph_function_with_gimple_body_p (node)
                    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
          && !node->callers)
        {
          gcc_assert (node->clones);
          node->global.inlined_to = NULL;
          update_inlined_to_pointer (node, node);
        }
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = varpool_first_variable (); vnode; vnode = vnext)
    {
      vnext = varpool_next_variable (vnode);
      if (!vnode->aux
          /* For can_refer_decl_in_current_unit_p we want to track for
             all external variables whether they are defined in some other
             partition or not.  */
          && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
        {
          if (file)
            fprintf (file, " %s/%i", vnode->name (), vnode->order);
          varpool_remove_node (vnode);
          changed = true;
        }
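      /* The variable is unreachable but in the boundary: turn its definition
         into a declaration, keeping the initializer only when it is useful
         for constant folding.  */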
535 else if (!pointer_set_contains (reachable, vnode))
536 {
6a6dac52 537 tree init;
67348ccc 538 if (vnode->definition)
04142cc3
JH
539 {
540 if (file)
fec39fa6 541 fprintf (file, " %s", vnode->name ());
04142cc3
JH
542 changed = true;
543 }
3d8d0043 544 vnode->body_removed = true;
67348ccc
DM
545 vnode->definition = false;
546 vnode->analyzed = false;
547 vnode->aux = NULL;
e70670cf 548
7b3376a0
JH
549 symtab_remove_from_same_comdat_group (vnode);
550
e70670cf 551 /* Keep body if it may be useful for constant folding. */
67348ccc 552 if ((init = ctor_for_folding (vnode->decl)) == error_mark_node)
e70670cf 553 varpool_remove_initializer (vnode);
6a6dac52 554 else
67348ccc 555 DECL_INITIAL (vnode->decl) = init;
d122681a 556 vnode->remove_all_references ();
04142cc3
JH
557 }
558 else
67348ccc 559 vnode->aux = NULL;
b34fd25c 560 }
4a444e58 561
04142cc3
JH
562 pointer_set_destroy (reachable);
563 pointer_set_destroy (body_needed_for_clonning);
3462aa02 564 pointer_set_destroy (reachable_call_targets);
4a444e58 565
04142cc3 566 /* Now update address_taken flags and try to promote functions to be local. */
bd3cdcc0
JH
567 if (file)
568 fprintf (file, "\nClearing address taken flags:");
65c70e6b 569 FOR_EACH_DEFINED_FUNCTION (node)
67348ccc
DM
570 if (node->address_taken
571 && !node->used_from_other_partition)
bd3cdcc0 572 {
41817394 573 if (!cgraph_for_node_and_aliases (node, has_addr_references_p, NULL, true))
bd3cdcc0
JH
574 {
575 if (file)
fec39fa6 576 fprintf (file, " %s", node->name ());
67348ccc 577 node->address_taken = false;
4a444e58
JH
578 changed = true;
579 if (cgraph_local_node_p (node))
580 {
581 node->local.local = true;
582 if (file)
583 fprintf (file, " (local)");
584 }
bd3cdcc0
JH
585 }
586 }
10a5dd5d
JH
587 if (file)
588 fprintf (file, "\n");
b34fd25c 589
873aa8f5 590#ifdef ENABLE_CHECKING
474ffc72 591 verify_symtab ();
873aa8f5 592#endif
4537ec0c 593
a8da72b8 594 /* If we removed something, perhaps profile could be improved. */
9771b263 595 if (changed && optimize && inline_edge_summary_vec.exists ())
a8da72b8 596 FOR_EACH_DEFINED_FUNCTION (node)
08f835dc 597 ipa_propagate_frequency (node);
a8da72b8 598
3462aa02 599 timevar_pop (TV_IPA_UNREACHABLE);
ca31b95f
JH
600 return changed;
601}
f4b3ca72 602
/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to the given
   variable do not need to be explicit.  */

void
process_references (varpool_node *vnode,
                    bool *written, bool *address_taken,
                    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!varpool_all_refs_explicit_p (vnode)
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
              && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
        *address_taken = true;
        break;
      case IPA_REF_LOAD:
        *read = true;
        break;
      case IPA_REF_STORE:
        *written = true;
        break;
      case IPA_REF_ALIAS:
        process_references (varpool (ref->referring), written, address_taken,
                            read, explicit_refs);
        break;
      }
}

/* Set TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set the writeonly bit and clear the initializer, since it will not be
   needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->writeonly = true;
  if (optimize)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
        vnode->remove_all_references ();
    }
  return false;
}

/* Clear the addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables whose address is no longer taken or that are
   read-only, and update their flags.

   FIXME: This can not be done in between gimplify and omp_expand since
   the readonly flag plays a role in what is shared and what is not.
   Currently we do this transformation as part of whole program visibility
   and re-do it at the ipa-reference pass (to take cloning into account),
   but it would make sense to do it before early optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
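  /* Look only at variables whose flags may still change; aliases and
     variables already non-addressable, read-only and write-only are skipped
     by the condition below.  */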
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
        && (TREE_ADDRESSABLE (vnode->decl)
            || !vnode->writeonly
            || !TREE_READONLY (vnode->decl)))
      {
        bool written = false;
        bool address_taken = false;
        bool read = false;
        bool explicit_refs = true;

        process_references (vnode, &written, &address_taken, &read,
                            &explicit_refs);
        if (!explicit_refs)
          continue;
        if (!address_taken)
          {
            if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (non-addressable)", vnode->name ());
            varpool_for_node_and_aliases (vnode, clear_addressable_bit, NULL,
                                          true);
          }
        if (!address_taken && !written
            /* Making a variable in an explicit section readonly can cause
               section type conflicts.
               See e.g. gcc.c-torture/compile/pr23237.c */
            && vnode->get_section () == NULL)
          {
            if (!TREE_READONLY (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (read-only)", vnode->name ());
            varpool_for_node_and_aliases (vnode, set_readonly_bit, NULL, true);
          }
        if (!vnode->writeonly && !read && !address_taken && written)
          {
            if (dump_file)
              fprintf (dump_file, " %s (write-only)", vnode->name ());
            varpool_for_node_and_aliases (vnode, set_writeonly_bit, NULL, true);
          }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}

/* Free inline summary.  */

namespace {

const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "*free_inline_summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      inline_free_summary ();
      return 0;
    }

}; // class pass_ipa_free_inline_summary

} // anon namespace

simple_ipa_opt_pass *
make_pass_ipa_free_inline_summary (gcc::context *ctxt)
{
  return new pass_ipa_free_inline_summary (ctxt);
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced.  */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
    /* Produce a sane name, but one not recognizable by collect2, just in
       case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
                     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
                        RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}

/* A vector of FUNCTION_DECLs declared as static constructors.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static vec<tree> static_dtors;

/* When the target does not have ctors and dtors, we call all constructors
   and destructors from a special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    static_ctors.safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    static_dtors.safe_push (node->decl);
  node = cgraph_get_node (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructors/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      do
        {
          priority_type p;
          fn = cdtors[j];
          p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
          if (j == i)
            priority = p;
          else if (p != priority)
            break;
          j++;
        }
      while (j < len);

      /* When there is only one cdtor and the target supports them, do
         nothing.  */
      if (j == i + 1
          && targetm.have_ctors_dtors)
        {
          i++;
          continue;
        }
      /* Find the next batch of constructors/destructors with the same
         initialization priority.  */
      for (;i < j; i++)
        {
          tree call;
          fn = cdtors[i];
          call = build_call_expr (fn, 0);
          if (ctor_p)
            DECL_STATIC_CONSTRUCTOR (fn) = 0;
          else
            DECL_STATIC_DESTRUCTOR (fn) = 0;
          /* We do not want to optimize away pure/const calls here.
             When optimizing, these should be already removed, when not
             optimizing, we want the user to be able to breakpoint in
             them.  */
          TREE_SIDE_EFFECTS (call) = 1;
          append_to_statement_list (call, &body);
        }
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
         priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in reverse
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (void)
{
  if (!static_ctors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_ctors.qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!static_dtors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_dtors.qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}

/* Look for constructors and destructors and produce functions calling them.
   This is needed for targets not supporting ctors or dtors, but we perform
   the transformation also at linktime to merge possibly numerous
   constructors/destructors into a single function to improve code locality
   and reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
        || DECL_STATIC_DESTRUCTOR (node->decl))
      record_cdtor_fn (node);
  build_cdtor_fns ();
  static_ctors.release ();
  static_dtors.release ();
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into a single
     function.  */
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}

/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate variables with the single function that may
   access them.

   FUNCTION is the current single user of a variable; VAR is another variable
   whose single-user value is met with it.  The lattice is stored in
   SINGLE_USER_MAP.

   We represent:
    - TOP by no entry in SINGLE_USER_MAP
    - BOTTOM by BOTTOM in AUX pointer (to save lookups)
    - known single user by cgraph pointer in SINGLE_USER_MAP.  */

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
      hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}

/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they are used by a single function
   FUNCTION.  SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
                       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, varpool_alias_target (vnode), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0;
       vnode->iterate_referring (i, ref)
       && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
        {
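          /* References made from a body that was inlined elsewhere are
             attributed to the function it was inlined into.  */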
          if (cnode->global.inlined_to)
            cnode = cnode->global.inlined_to;
          if (!function)
            function = cnode;
          else if (function != cnode)
            function = BOTTOM;
        }
      else
        function = meet (function, dyn_cast <varpool_node *> (ref->referring),
                         single_user_map);
    }
  return function;
}

/* Pass setting the used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
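  /* Worklist of variables, chained through their AUX pointers and terminated
     by a pointer to 1, like the reachability queue above.  */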
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!varpool_all_refs_explicit_p (var))
      var->aux = BOTTOM;
    else
      {
        /* Enqueue symbol for dataflow.  */
        var->aux = first;
        first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *)first->aux;

      f = single_user_map.get (var);
      if (f)
        orig_user = *f;
      else
        orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references.  */
      if (user != orig_user)
        {
          unsigned int i;
          ipa_ref *ref;

          single_user_map.put (var, user);

          /* Enqueue all aliases for re-processing.  */
          for (i = 0;
               var->iterate_referring (i, ref); i++)
            if (ref->use == IPA_REF_ALIAS
                && !ref->referring->aux)
              {
                ref->referring->aux = first;
                first = dyn_cast <varpool_node *> (ref->referring);
              }
          /* Enqueue all users for re-processing.  */
          for (i = 0;
               var->iterate_reference (i, ref); i++)
            if (!ref->referred->aux
                && ref->referred->definition
                && is_a <varpool_node *> (ref->referred))
              {
                ref->referred->aux = first;
                first = dyn_cast <varpool_node *> (ref->referred);
              }

          /* If user is BOTTOM, just punt on this var.  */
          if (user == BOTTOM)
            var->aux = BOTTOM;
          else
            var->aux = NULL;
        }
      else
        var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
        {
#ifdef ENABLE_CHECKING
          if (!single_user_map.get (var))
            gcc_assert (single_user_map.get (var));
#endif
          if (dump_file)
            {
              fprintf (dump_file, "Variable %s/%i is used by single function\n",
                       var->name (), var->order);
            }
          var->used_by_single_function = true;
        }
      var->aux = NULL;
    }
  return 0;
}

namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

bool
pass_ipa_single_use::gate (function *)
{
  return optimize;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}