]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ipa.c
Reorganize post-ra pipeline for targets without register allocation.
[thirdparty/gcc.git] / gcc / ipa.c
CommitLineData
65c1a668 1/* Basic IPA optimizations and utilities.
3aea1f79 2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
65c1a668 3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
65c1a668 9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
65c1a668 19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
41a8aa41 24#include "tree.h"
9ed99284 25#include "calls.h"
26#include "stringpool.h"
94ea8568 27#include "predict.h"
28#include "basic-block.h"
d62dd039 29#include "hash-map.h"
1140c305 30#include "is-a.h"
31#include "plugin-api.h"
32#include "vec.h"
33#include "hashtab.h"
431205b7 34#include "hash-set.h"
1140c305 35#include "machmode.h"
36#include "hard-reg-set.h"
37#include "input.h"
38#include "function.h"
39#include "ipa-ref.h"
40#include "cgraph.h"
41#include "tree-pass.h"
bc61cadb 42#include "gimple-expr.h"
a8783bee 43#include "gimplify.h"
8dfbf71d 44#include "flags.h"
a53e7471 45#include "target.h"
46#include "tree-iterator.h"
7771d558 47#include "ipa-utils.h"
1140c305 48#include "alloc-pool.h"
49#include "ipa-prop.h"
91f0ab48 50#include "ipa-inline.h"
9e179a64 51#include "tree-inline.h"
52#include "profile.h"
53#include "params.h"
ceb49bba 54#include "internal-fn.h"
55#include "tree-ssa-alias.h"
56#include "gimple.h"
57#include "dbgcnt.h"
65c1a668 58
15ca8f90 59
60/* Return true when NODE has ADDR reference. */
61
62static bool
63has_addr_references_p (struct cgraph_node *node,
64 void *data ATTRIBUTE_UNUSED)
65{
66 int i;
51ce5652 67 struct ipa_ref *ref = NULL;
15ca8f90 68
51ce5652 69 for (i = 0; node->iterate_referring (i, ref); i++)
15ca8f90 70 if (ref->use == IPA_REF_ADDR)
71 return true;
72 return false;
73}
74
21f41380 75/* Look for all functions inlined to NODE and update their inlined_to pointers
76 to INLINED_TO. */
77
78static void
79update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
80{
81 struct cgraph_edge *e;
82 for (e = node->callees; e; e = e->next_callee)
83 if (e->callee->global.inlined_to)
84 {
85 e->callee->global.inlined_to = inlined_to;
86 update_inlined_to_pointer (e->callee, inlined_to);
87 }
88}
89
91f0ab48 90/* Add symtab NODE to queue starting at FIRST.
9da87cb8 91
92 The queue is linked via AUX pointers and terminated by pointer to 1.
93 We enqueue nodes at two occasions: when we find them reachable or when we find
94 their bodies needed for further clonning. In the second case we mark them
95 by pointer to 2 after processing so they are re-queue when they become
96 reachable. */
6f932b06 97
98static void
452659af 99enqueue_node (symtab_node *node, symtab_node **first,
431205b7 100 hash_set<symtab_node *> *reachable)
6f932b06 101{
9da87cb8 102 /* Node is still in queue; do nothing. */
02774f2d 103 if (node->aux && node->aux != (void *) 2)
9da87cb8 104 return;
105 /* Node was already processed as unreachable, re-enqueue
106 only if it became reachable now. */
431205b7 107 if (node->aux == (void *)2 && !reachable->contains (node))
9da87cb8 108 return;
02774f2d 109 node->aux = *first;
6f932b06 110 *first = node;
111}
112
/* Process all references of SNODE: enqueue every referred symbol to the
   queue headed by *FIRST and, when the referred symbol has a definition
   we must keep, also record it in REACHABLE.  BEFORE_INLINING_P tells
   whether we run before the inliner (bodies of always_inline externs
   must then stay around).  */

static void
process_references (symtab_node *snode,
		    symtab_node **first,
		    bool before_inlining_p,
		    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;

      /* Keep the definition reachable when it is local (or an alias), or
	 when it is external but we are early enough that inlining may
	 still want the body.  */
      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (((before_inlining_p
		    && (symtab->state < IPA_SSA
			|| !lookup_attribute ("always_inline",
					      DECL_ATTRIBUTES (node->decl)))))
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (TREE_CODE (node->decl) == VAR_DECL
		      && flag_wpa
		      && ctor_for_folding (node->decl)
			 != error_mark_node))))
	reachable->add (node);
      /* Referred symbols always enter the boundary, even when external.  */
      enqueue_node (node, first, reachable);
    }
}
144
/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to make trivial devirtualization when no or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       hash_set<symtab_node *> *reachable,
			       bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	 (edge, &final, &cache_token);

  /* CACHE_TOKEN identifies the target list; process each list only once.  */
  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		   (method_class_type (TREE_TYPE (n->decl))))
	    continue;

	  /* Prior inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (before_inlining_p
		  && (symtab->state < IPA_SSA
		      || !lookup_attribute ("always_inline",
					    DECL_ATTRIBUTES (n->decl)))))
	    reachable->add (n);

	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    /* No possible target at all: redirect to __builtin_unreachable.  */
	    target = cgraph_node::get_create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (dump_enabled_p ())
	    {
	      location_t locus;
	      if (edge->call_stmt)
		locus = gimple_location (edge->call_stmt);
	      else
		locus = UNKNOWN_LOCATION;
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s/%i to %s/%i\n",
			       edge->caller->name (), edge->caller->order,
			       target->name (),
			       target->order);
	    }
	  edge = edge->make_direct (target);
	  if (inline_summary_vec)
	    inline_update_overall_summary (node);
	  else if (edge->call_stmt)
	    edge->redirect_call_stmt_to_callee ();
	}
    }
}
36a32361 233
/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions needs special handling; the bodies needs
     to stay in memory until inlining in hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

     BEFORE_INLINING_P specify whether we are before or after inlining.

   - virtual functions are kept in callgraph even if they seem unreachable in
     hope calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call. In theory early opts and IPA should work out all important cases.

   - virtual clones needs bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is unreachable
     otherwise.  To avoid origin from sitting in the callgraph and being
     walked by IPA passes, we turn them into unanalyzed nodes with body
     defined.

     We maintain set of function declaration where body needs to stay in
     body_needed_for_clonning

     Inline clones represent special case: their declaration match the
     declaration of origin and cgraph_remove_node already knows how to
     reshape callgraph and preserve body when offline copy of function or
     inline clone is being removed.

   - C++ virtual tables keyed to other unit are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     to be removed when it is otherwise unreachable.

   We maintain queue of both reachable symbols (i.e. defined symbols that needs
   to stay) and symbols that are in boundary (i.e. external symbols referenced
   by reachable symbols or origins of clones).  The queue is represented
   as linked list by AUX pointer terminated by 1.

   At the end we keep all reachable symbols.  For symbols in boundary we always
   turn definition into a declaration, but we may keep function body around
   based on body_needed_for_clonning

   All symbols that enter the queue have AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of boundary and once
   as real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set AUX pointer of processed symbols in the boundary to constant 2.  */

bool
symbol_table::remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;

  timevar_push (TV_IPA_UNREACHABLE);
  if (optimize && flag_devirtualize)
    build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* AUX pointers double as queue links; they must start out clear.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->aux);
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);
#endif
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      if (node->definition
	  && !node->global.inlined_to
	  && !node->in_other_partition
	  && !node->can_remove_if_no_direct_calls_and_refs_p ())
	{
	  gcc_assert (!node->global.inlined_to);
	  reachable.add (node);
	  enqueue_node (node, &first, &reachable);
	}
      else
	gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p()
	&& !vnode->in_other_partition)
      {
	reachable.add (vnode);
	enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *)first->aux;

      /* If we are processing symbol in boundary, mark its AUX pointer for
	 possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	node->aux = (void *)2;
      else
	{
	  /* Abstract origins must stay around for debug info.  */
	  if (TREE_CODE (node->decl) == FUNCTION_DECL
	      && DECL_ABSTRACT_ORIGIN (node->decl))
	    {
	      struct cgraph_node *origin_node
	      = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (node->decl));
	      origin_node->used_as_abstract_origin = true;
	      enqueue_node (origin_node, &first, &reachable);
	    }
	  /* If any symbol in a comdat group is reachable, force
	     all externally visible symbols in the same comdat
	     group to be reachable as well.  Comdat-local symbols
	     can be discarded if all uses were inlined.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ()
		    && !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (node, &first, before_inlining_p, &reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (optimize && flag_devirtualize)
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      next = e->next_callee;
		      if (e->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       e, &first, &reachable,
						       before_inlining_p);
		    }
		}
	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || before_inlining_p))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && before_inlining_p)
			reachable.add (e->callee->function_symbol ());
		      reachable.add (e->callee);
		    }
		  enqueue_node (e->callee, &first, &reachable);
		}

	      /* When inline clone exists, mark body to be preserved so when removing
		 offline copy of the function we don't kill it.  */
	      if (cnode->global.inlined_to)
		body_needed_for_clonning.add (cnode->decl);

	      /* For non-inline clones, force their origins to the boundary and ensure
		 that body is not removed.  */
	      while (cnode->clone_of)
		{
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      body_needed_for_clonning.add (cnode->decl);
		      enqueue_node (cnode, &first, &reachable);
		    }
		}

	    }
	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !reachable.add (next))
		  enqueue_node (next, &first, &reachable);
	    }
	}
      /* When we see constructor of external variable, keep referred nodes in the
	 boundary.  This will also hold initializers of the external vars NODE
	 refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref = NULL;
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    enqueue_node (ref->referred, &first, &reachable);
	}
    }

  /* Remove unreachable functions.   */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s/%i", node->name (), node->order);
	  node->remove ();
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
	{
	  if (!body_needed_for_clonning.contains (node->decl))
	    node->release_body ();
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition)
	    {
	      if (file)
		fprintf (file, " %s/%i", node->name (), node->order);
	      /* Turn the definition into a mere declaration kept in the
		 boundary.  */
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->thunk.thunk_p = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local.local = false;
	      node->remove_callees ();
	      node->remove_from_same_comdat_group ();
	      node->remove_all_references ();
	      changed = true;
	    }
	}
      else
	gcc_assert (node->clone_of || !node->has_gimple_body_p ()
		    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  if (file)
	    fprintf (file, " %s/%i", vnode->name (), vnode->order);
	  vnode->remove ();
	  changed = true;
	}
      else if (!reachable.contains (vnode))
	{
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->name ());
	      changed = true;
	    }
	  /* Keep body if it may be useful for constant folding.  */
	  if ((init = ctor_for_folding (vnode->decl)) == error_mark_node)
	    vnode->remove_initializer ();
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  vnode->remove_from_same_comdat_group ();

	  vnode->remove_all_references ();
	}
      else
	vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!node->call_for_symbol_thunks_and_aliases
	    (has_addr_references_p, NULL, true))
	  {
	    if (file)
	      fprintf (file, " %s", node->name ());
	    node->address_taken = false;
	    changed = true;
	    if (node->local_p ())
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && optimize && inline_edge_summary_vec.exists ())
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}
f37a5008 613
703ad42c 614/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
615 as needed, also clear EXPLICIT_REFS if the references to given variable
616 do not need to be explicit. */
617
618void
619process_references (varpool_node *vnode,
620 bool *written, bool *address_taken,
621 bool *read, bool *explicit_refs)
622{
623 int i;
624 struct ipa_ref *ref;
625
97221fd7 626 if (!vnode->all_refs_explicit_p ()
703ad42c 627 || TREE_THIS_VOLATILE (vnode->decl))
628 *explicit_refs = false;
629
51ce5652 630 for (i = 0; vnode->iterate_referring (i, ref)
703ad42c 631 && *explicit_refs && (!*written || !*address_taken || !*read); i++)
632 switch (ref->use)
633 {
634 case IPA_REF_ADDR:
635 *address_taken = true;
636 break;
637 case IPA_REF_LOAD:
638 *read = true;
639 break;
640 case IPA_REF_STORE:
641 *written = true;
642 break;
643 case IPA_REF_ALIAS:
415d1b9a 644 process_references (dyn_cast<varpool_node *> (ref->referring), written,
645 address_taken, read, explicit_refs);
703ad42c 646 break;
647 }
648}
649
650/* Set TREE_READONLY bit. */
651
652bool
653set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
654{
655 TREE_READONLY (vnode->decl) = true;
656 return false;
657}
658
659/* Set writeonly bit and clear the initalizer, since it will not be needed. */
660
661bool
662set_writeonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
663{
664 vnode->writeonly = true;
665 if (optimize)
666 {
667 DECL_INITIAL (vnode->decl) = NULL;
668 if (!vnode->alias)
51ce5652 669 vnode->remove_all_references ();
703ad42c 670 }
671 return false;
672}
673
674/* Clear addressale bit of VNODE. */
675
676bool
677clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
678{
679 vnode->address_taken = false;
680 TREE_ADDRESSABLE (vnode->decl) = 0;
681 return false;
682}
683
/* Discover variables that have no longer address taken or that are read only
   and update their flags.

   FIXME: This can not be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    /* Skip aliases and variables that already carry every flag they
       could possibly gain here.  */
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read, &explicit_refs);
	/* If some references are implicit we cannot trust the summary.  */
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)", vnode->name ());
	    vnode->call_for_node_and_aliases (clear_addressable_bit, NULL, true);
	  }
	if (!address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && vnode->get_section () == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->name ());
	    vnode->call_for_node_and_aliases (set_readonly_bit, NULL, true);
	  }
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->name ());
	    vnode->call_for_node_and_aliases (set_writeonly_bit, NULL, true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}
739
/* Free inline summary.  Simple IPA pass that releases the inliner's
   per-function summaries once they are no longer needed.  */

namespace {

const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "free-inline-summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  /* Early optimizations may make function unreachable.  We cannot
     remove unreachable functions as part of the early opts pass because
     TODOs are run before subpasses.  Do it here.  */
  ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
};

class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      inline_free_summary ();
      return 0;
    }

}; // class pass_ipa_free_inline_summary

} // anon namespace

/* Pass factory used by the pass manager (see passes.def).  */

simple_ipa_opt_pass *
make_pass_ipa_free_inline_summary (gcc::context *ctxt)
{
  return new pass_ipa_free_inline_summary (ctxt);
}
783
a53e7471 784/* Generate and emit a static constructor or destructor. WHICH must
bbc26dcc 785 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
786 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
787 initialization priority for this constructor or destructor.
a53e7471 788
62510893 789 FINAL specify whether the externally visible name for collect2 should
790 be produced. */
791
792static void
793cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
a53e7471 794{
795 static int counter = 0;
796 char which_buf[16];
797 tree decl, name, resdecl;
798
799 /* The priority is encoded in the constructor or destructor name.
800 collect2 will sort the names and arrange that they are called at
801 program startup. */
62510893 802 if (final)
803 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
804 else
805 /* Proudce sane name but one not recognizable by collect2, just for the
806 case we fail to inline the function. */
807 sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
a53e7471 808 name = get_file_function_name (which_buf);
809
810 decl = build_decl (input_location, FUNCTION_DECL, name,
811 build_function_type_list (void_type_node, NULL_TREE));
812 current_function_decl = decl;
813
814 resdecl = build_decl (input_location,
815 RESULT_DECL, NULL_TREE, void_type_node);
816 DECL_ARTIFICIAL (resdecl) = 1;
817 DECL_RESULT (decl) = resdecl;
818 DECL_CONTEXT (resdecl) = decl;
819
820 allocate_struct_function (decl, false);
821
822 TREE_STATIC (decl) = 1;
823 TREE_USED (decl) = 1;
824 DECL_ARTIFICIAL (decl) = 1;
825 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
826 DECL_SAVED_TREE (decl) = body;
62510893 827 if (!targetm.have_ctors_dtors && final)
a53e7471 828 {
829 TREE_PUBLIC (decl) = 1;
830 DECL_PRESERVE_P (decl) = 1;
831 }
832 DECL_UNINLINABLE (decl) = 1;
833
834 DECL_INITIAL (decl) = make_node (BLOCK);
835 TREE_USED (DECL_INITIAL (decl)) = 1;
836
837 DECL_SOURCE_LOCATION (decl) = input_location;
838 cfun->function_end_locus = input_location;
839
840 switch (which)
841 {
842 case 'I':
843 DECL_STATIC_CONSTRUCTOR (decl) = 1;
844 decl_init_priority_insert (decl, priority);
845 break;
846 case 'D':
847 DECL_STATIC_DESTRUCTOR (decl) = 1;
848 decl_fini_priority_insert (decl, priority);
849 break;
850 default:
851 gcc_unreachable ();
852 }
853
854 gimplify_function_tree (decl);
855
415d1b9a 856 cgraph_node::add_new_function (decl, false);
a53e7471 857
858 set_cfun (NULL);
859 current_function_decl = NULL;
860}
861
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.

   Public wrapper around cgraph_build_static_cdtor_1 with FINAL == false,
   so the emitted helper gets a name collect2 will not recognize.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}
a53e7471 872
873/* A vector of FUNCTION_DECLs declared as static constructors. */
f1f41a6c 874static vec<tree> static_ctors;
a53e7471 875/* A vector of FUNCTION_DECLs declared as static destructors. */
f1f41a6c 876static vec<tree> static_dtors;
a53e7471 877
878/* When target does not have ctors and dtors, we call all constructor
879 and destructor by special initialization/destruction function
880 recognized by collect2.
881
882 When we are going to build this function, collect all constructors and
883 destructors and turn them into normal functions. */
884
885static void
886record_cdtor_fn (struct cgraph_node *node)
887{
02774f2d 888 if (DECL_STATIC_CONSTRUCTOR (node->decl))
889 static_ctors.safe_push (node->decl);
890 if (DECL_STATIC_DESTRUCTOR (node->decl))
891 static_dtors.safe_push (node->decl);
415d1b9a 892 node = cgraph_node::get (node->decl);
02774f2d 893 DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
a53e7471 894}
895
/* Define global constructors/destructor functions for the CDTORS, of
   which they are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      /* Find the run [i, j) of cdtors sharing the same priority; since
	 CDTORS is sorted, equal priorities are adjacent.  */
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing. */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  /* The originals are now called from the aggregate function, so
	     they must lose their own cdtor status.  */
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
960
961/* Comparison function for qsort. P1 and P2 are actually of type
962 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
963 used to determine the sort order. */
964
965static int
966compare_ctor (const void *p1, const void *p2)
967{
968 tree f1;
969 tree f2;
970 int priority1;
971 int priority2;
972
973 f1 = *(const tree *)p1;
974 f2 = *(const tree *)p2;
975 priority1 = DECL_INIT_PRIORITY (f1);
976 priority2 = DECL_INIT_PRIORITY (f2);
977
978 if (priority1 < priority2)
979 return -1;
980 else if (priority1 > priority2)
981 return 1;
982 else
983 /* Ensure a stable sort. Constructors are executed in backwarding
984 order to make LTO initialize braries first. */
985 return DECL_UID (f2) - DECL_UID (f1);
986}
987
988/* Comparison function for qsort. P1 and P2 are actually of type
989 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
990 used to determine the sort order. */
991
992static int
993compare_dtor (const void *p1, const void *p2)
994{
995 tree f1;
996 tree f2;
997 int priority1;
998 int priority2;
999
1000 f1 = *(const tree *)p1;
1001 f2 = *(const tree *)p2;
1002 priority1 = DECL_FINI_PRIORITY (f1);
1003 priority2 = DECL_FINI_PRIORITY (f2);
1004
1005 if (priority1 < priority2)
1006 return -1;
1007 else if (priority1 > priority2)
1008 return 1;
1009 else
1010 /* Ensure a stable sort. */
1011 return DECL_UID (f1) - DECL_UID (f2);
1012}
1013
/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (void)
{
  /* Sort each worklist by priority, then merge each equal-priority run
     into a single wrapper function via build_cdtor.  Outside of LTO
     this may only happen on targets lacking native ctor/dtor
     support.  */
  if (!static_ctors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_ctors.qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!static_dtors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_dtors.qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}
1035
/* Look for constructors and destructors and produce function calling them.
   This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at linktime to merge possibly numerous
   constructors/destructors into single function to improve code locality and
   reduce size.

   Returns 0 (no TODO flags for the pass manager).  */

static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  /* Collect every defined function flagged as a static ctor/dtor.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
	|| DECL_STATIC_DESTRUCTOR (node->decl))
       record_cdtor_fn (node);
  build_cdtor_fns ();
  /* Free the worklists populated by record_cdtor_fn.  */
  static_ctors.release ();
  static_dtors.release ();
  return 0;
}
1055
namespace {

/* Pass descriptor for the cdtor-merge IPA pass.  */

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass wrapper around ipa_cdtor_merge.  All summary/transform hooks
   are NULL: the pass does its whole work in execute.  */

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into single
     function.  */
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}

} // anon namespace

/* Factory function used by the pass manager.  */

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}
3f1f2be0 1109
1110/* Invalid pointer representing BOTTOM for single user dataflow. */
1111#define BOTTOM ((cgraph_node *)(size_t) 2)
1112
1113/* Meet operation for single user dataflow.
1114 Here we want to associate variables with sigle function that may access it.
1115
1116 FUNCTION is current single user of a variable, VAR is variable that uses it.
1117 Latttice is stored in SINGLE_USER_MAP.
1118
1119 We represent:
1120 - TOP by no entry in SIGNLE_USER_MAP
1121 - BOTTOM by BOTTOM in AUX pointer (to save lookups)
1122 - known single user by cgraph pointer in SINGLE_USER_MAP. */
1123
1124cgraph_node *
1125meet (cgraph_node *function, varpool_node *var,
d62dd039 1126 hash_map<varpool_node *, cgraph_node *> &single_user_map)
3f1f2be0 1127{
1128 struct cgraph_node *user, **f;
1129
1130 if (var->aux == BOTTOM)
1131 return BOTTOM;
1132
d62dd039 1133 f = single_user_map.get (var);
3f1f2be0 1134 if (!f)
1135 return function;
1136 user = *f;
1137 if (!function)
1138 return user;
1139 else if (function != user)
1140 return BOTTOM;
1141 else
1142 return function;
1143}
1144
/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they are used by single function
   FUNCTION.  SINGLE_USER_MAP represents the dataflow lattice.
   Returns the updated lattice value for VNODE: NULL (TOP), a single
   cgraph_node user, or BOTTOM.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
		       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.
     Stop early once we have hit BOTTOM.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
	{
	  /* Attribute uses from inline clones to the function they were
	     inlined into.  */
	  if (cnode->global.inlined_to)
	    cnode = cnode->global.inlined_to;
	  if (!function)
	    function = cnode;
	  else if (function != cnode)
	    function = BOTTOM;
	}
      else
	/* A referring variable: meet with its lattice value.  */
	function = meet (function, dyn_cast <varpool_node *> (ref->referring), single_user_map);
    }
  return function;
}
1181
1182/* Pass setting used_by_single_function flag.
1183 This flag is set on variable when there is only one function that may possibly
1184 referr to it. */
1185
1186static unsigned int
1187ipa_single_use (void)
1188{
1189 varpool_node *first = (varpool_node *) (void *) 1;
1190 varpool_node *var;
d62dd039 1191 hash_map<varpool_node *, cgraph_node *> single_user_map;
3f1f2be0 1192
1193 FOR_EACH_DEFINED_VARIABLE (var)
97221fd7 1194 if (!var->all_refs_explicit_p ())
3f1f2be0 1195 var->aux = BOTTOM;
1196 else
1197 {
1198 /* Enqueue symbol for dataflow. */
1199 var->aux = first;
1200 first = var;
1201 }
1202
1203 /* The actual dataflow. */
1204
1205 while (first != (void *) 1)
1206 {
1207 cgraph_node *user, *orig_user, **f;
1208
1209 var = first;
1210 first = (varpool_node *)first->aux;
1211
d62dd039 1212 f = single_user_map.get (var);
3f1f2be0 1213 if (f)
1214 orig_user = *f;
1215 else
1216 orig_user = NULL;
1217 user = propagate_single_user (var, orig_user, single_user_map);
1218
1219 gcc_checking_assert (var->aux != BOTTOM);
1220
1221 /* If user differs, enqueue all references. */
1222 if (user != orig_user)
1223 {
1224 unsigned int i;
1225 ipa_ref *ref;
1226
d62dd039 1227 single_user_map.put (var, user);
3f1f2be0 1228
1229 /* Enqueue all aliases for re-processing. */
415d1b9a 1230 for (i = 0; var->iterate_referring (i, ref); i++)
3f1f2be0 1231 if (ref->use == IPA_REF_ALIAS
1232 && !ref->referring->aux)
1233 {
1234 ref->referring->aux = first;
1235 first = dyn_cast <varpool_node *> (ref->referring);
1236 }
1237 /* Enqueue all users for re-processing. */
415d1b9a 1238 for (i = 0; var->iterate_reference (i, ref); i++)
3f1f2be0 1239 if (!ref->referred->aux
1240 && ref->referred->definition
1241 && is_a <varpool_node *> (ref->referred))
1242 {
1243 ref->referred->aux = first;
1244 first = dyn_cast <varpool_node *> (ref->referred);
1245 }
1246
1247 /* If user is BOTTOM, just punt on this var. */
1248 if (user == BOTTOM)
1249 var->aux = BOTTOM;
1250 else
1251 var->aux = NULL;
1252 }
1253 else
1254 var->aux = NULL;
1255 }
1256
1257 FOR_EACH_DEFINED_VARIABLE (var)
1258 {
1259 if (var->aux != BOTTOM)
1260 {
1261#ifdef ENABLE_CHECKING
d62dd039 1262 if (!single_user_map.get (var))
1263 gcc_assert (single_user_map.get (var));
3f1f2be0 1264#endif
1265 if (dump_file)
1266 {
1267 fprintf (dump_file, "Variable %s/%i is used by single function\n",
1268 var->name (), var->order);
1269 }
1270 var->used_by_single_function = true;
1271 }
1272 var->aux = NULL;
1273 }
1274 return 0;
1275}
1276
namespace {

/* Pass descriptor for the single-use IPA pass.  */

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass wrapper around ipa_single_use.  All summary/transform hooks
   are NULL: the pass does its whole work in execute.  */

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

bool
pass_ipa_single_use::gate (function *)
{
  /* The single-use flag is only useful to the optimizers.  */
  return optimize;
}

} // anon namespace

/* Factory function used by the pass manager.  */

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}