65c1a668 1/* Basic IPA optimizations and utilities.
fbd26352 2 Copyright (C) 2003-2019 Free Software Foundation, Inc.
65c1a668 3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
65c1a668 9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
65c1a668 19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
9ef16211 23#include "backend.h"
7c29e30e 24#include "target.h"
9ef16211 25#include "tree.h"
26#include "gimple.h"
7c29e30e 27#include "alloc-pool.h"
28#include "tree-pass.h"
29#include "stringpool.h"
30#include "cgraph.h"
a8783bee 31#include "gimplify.h"
a53e7471 32#include "tree-iterator.h"
7771d558 33#include "ipa-utils.h"
2cc80ac3 34#include "symbol-summary.h"
25a8e007 35#include "tree-vrp.h"
1140c305 36#include "ipa-prop.h"
b9a58fc5 37#include "ipa-fnsummary.h"
ceb49bba 38#include "dbgcnt.h"
c72bf911 39#include "debug.h"
30a86690 40#include "stringpool.h"
41#include "attribs.h"
15ca8f90 42
43/* Return true when NODE has ADDR reference. */
44
45static bool
46has_addr_references_p (struct cgraph_node *node,
75e72311 47 void *)
15ca8f90 48{
49 int i;
51ce5652 50 struct ipa_ref *ref = NULL;
15ca8f90 51
51ce5652 52 for (i = 0; node->iterate_referring (i, ref); i++)
15ca8f90 53 if (ref->use == IPA_REF_ADDR)
54 return true;
55 return false;
56}
57
75e72311 58/* Return true when NODE can be target of an indirect call. */
59
60static bool
61is_indirect_call_target_p (struct cgraph_node *node, void *)
62{
63 return node->indirect_call_target;
64}
65
21f41380 66/* Look for all functions inlined to NODE and update their inlined_to pointers
67 to INLINED_TO. */
68
69static void
70update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
71{
72 struct cgraph_edge *e;
73 for (e = node->callees; e; e = e->next_callee)
74 if (e->callee->global.inlined_to)
75 {
76 e->callee->global.inlined_to = inlined_to;
77 update_inlined_to_pointer (e->callee, inlined_to);
78 }
79}
80
91f0ab48 81/* Add symtab NODE to queue starting at FIRST.
9da87cb8 82
83 The queue is linked via AUX pointers and terminated by pointer to 1.
84 We enqueue nodes on two occasions: when we find them reachable or when we find
85 their bodies needed for further cloning. In the second case we mark them
86 by pointer to 2 after processing so they are re-queued when they become
87 reachable. */
6f932b06 88
89static void
452659af 90enqueue_node (symtab_node *node, symtab_node **first,
431205b7 91 hash_set<symtab_node *> *reachable)
6f932b06 92{
9da87cb8 93 /* Node is still in queue; do nothing. */
02774f2d 94 if (node->aux && node->aux != (void *) 2)
9da87cb8 95 return;
96 /* Node was already processed as unreachable, re-enqueue
97 only if it became reachable now. */
431205b7 98 if (node->aux == (void *)2 && !reachable->contains (node))
9da87cb8 99 return;
02774f2d 100 node->aux = *first;
6f932b06 101 *first = node;
102}
103
9da914af 104/* Return true if NODE may get inlined later.
105 This is used to keep DECL_EXTERNAL function bodies around long enough
106 so the inliner can process them. */
107
108static bool
109possible_inline_candidate_p (symtab_node *node)
110{
111 if (symtab->state >= IPA_SSA_AFTER_INLINING)
112 return false;
113 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
114 if (!cnode)
115 return false;
116 if (DECL_UNINLINABLE (cnode->decl))
117 return false;
118 if (opt_for_fn (cnode->decl, optimize))
119 return true;
120 if (symtab->state >= IPA_SSA)
121 return false;
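  /* Unoptimized functions may still be inlined before IPA_SSA when they are
     declared always_inline.  */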
122 return lookup_attribute ("always_inline", DECL_ATTRIBUTES (node->decl));
123}
124
6f932b06 125/* Process references. */
126
127static void
51ce5652 128process_references (symtab_node *snode,
452659af 129 symtab_node **first,
431205b7 130 hash_set<symtab_node *> *reachable)
6f932b06 131{
132 int i;
51ce5652 133 struct ipa_ref *ref = NULL;
134 for (i = 0; snode->iterate_reference (i, ref); i++)
6f932b06 135 {
452659af 136 symtab_node *node = ref->referred;
366970c6 137 symtab_node *body = node->ultimate_alias_target ();
15ca8f90 138
02774f2d 139 if (node->definition && !node->in_other_partition
140 && ((!DECL_EXTERNAL (node->decl) || node->alias)
9da914af 141 || (possible_inline_candidate_p (node)
366970c6 142 /* We use variable constructors during late compilation for
15ca8f90 143 constant folding. Keep references alive so partitioning
144 knows about potential references. */
53e9c5c4 145 || (VAR_P (node->decl)
278cec16 146 && (flag_wpa
147 || flag_incremental_link
148 == INCREMENTAL_LINK_LTO)
149 && dyn_cast <varpool_node *> (node)
150 ->ctor_useable_for_folding_p ()))))
366970c6 151 {
152 /* Be sure that we will not optimize out alias target
153 body. */
154 if (DECL_EXTERNAL (node->decl)
155 && node->alias
9da914af 156 && symtab->state < IPA_SSA_AFTER_INLINING)
366970c6 157 reachable->add (body);
158 reachable->add (node);
159 }
02774f2d 160 enqueue_node (node, first, reachable);
6f932b06 161 }
162}
163
e2fa5d74 164/* EDGE is a polymorphic call. Prior to inlining, mark
 165 all its potential targets as reachable to permit later inlining if
 166 devirtualization happens. After inlining still keep their declarations
 167 around, so we can devirtualize to a direct call.
 168
 169 Also try to perform trivial devirtualization when no target or only one
 170 target is possible. */
171
172static void
431205b7 173walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
e2fa5d74 174 struct cgraph_edge *edge,
452659af 175 symtab_node **first,
9da914af 176 hash_set<symtab_node *> *reachable)
e2fa5d74 177{
178 unsigned int i;
179 void *cache_token;
180 bool final;
181 vec <cgraph_node *>targets
182 = possible_polymorphic_call_targets
183 (edge, &final, &cache_token);
184
431205b7 185 if (!reachable_call_targets->add (cache_token))
e2fa5d74 186 {
9af5ce0c 187 for (i = 0; i < targets.length (); i++)
e2fa5d74 188 {
189 struct cgraph_node *n = targets[i];
190
191 /* Do not bother to mark virtual methods in anonymous namespace;
192 either we will find use of virtual table defining it, or it is
193 unused. */
02774f2d 194 if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
e2fa5d74 195 && type_in_anonymous_namespace_p
1fda15e2 196 (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
e2fa5d74 197 continue;
198
75e72311 199 n->indirect_call_target = true;
200 symtab_node *body = n->function_symbol ();
366970c6 201
e2fa5d74 202 /* Prior to inlining, keep alive the bodies of possible targets for
203 devirtualization. */
75e72311 204 if (n->definition
9da914af 205 && (possible_inline_candidate_p (body)
75e72311 206 && opt_for_fn (body->decl, flag_devirtualize)))
207 {
208 /* Be sure that we will not optimize out alias target
209 body. */
210 if (DECL_EXTERNAL (n->decl)
211 && n->alias
9da914af 212 && symtab->state < IPA_SSA_AFTER_INLINING)
75e72311 213 reachable->add (body);
214 reachable->add (n);
215 }
e2fa5d74 216 /* Even after inlining we want to keep the possible targets in the
217 boundary, so late passes can still produce direct call even if
218 the chance for inlining is lost. */
02774f2d 219 enqueue_node (n, first, reachable);
e2fa5d74 220 }
221 }
222
223 /* Very trivial devirtualization; when the type is
224 final or anonymous (so we know all its derived types)
225 and there is only one possible virtual call target,
226 make the edge direct. */
227 if (final)
228 {
ceb49bba 229 if (targets.length () <= 1 && dbg_cnt (devirt))
e2fa5d74 230 {
749c5b03 231 cgraph_node *target, *node = edge->caller;
e2fa5d74 232 if (targets.length () == 1)
233 target = targets[0];
234 else
415d1b9a 235 target = cgraph_node::get_create
e2fa5d74 236 (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
237
ceb49bba 238 if (dump_enabled_p ())
c309657f 239 {
240 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
0e388735 241 "devirtualizing call in %s to %s\n",
242 edge->caller->dump_name (),
243 target->dump_name ());
ceb49bba 244 }
35ee1c66 245 edge = edge->make_direct (target);
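	  /* With function summaries around, refresh the caller's overall
	     summary; otherwise redirect the call statement to the new
	     callee right away.  */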
1297cbcd 246 if (ipa_fn_summaries)
247 ipa_update_overall_fn_summary (node);
6469adde 248 else if (edge->call_stmt)
1e42d5c6 249 edge->redirect_call_stmt_to_callee ();
e2fa5d74 250 }
251 }
252}
36a32361 253
65c1a668 254/* Perform reachability analysis and reclaim all unreachable nodes.
91f0ab48 255
256 The algorithm is basically mark&sweep but with some extra refinements:
257
258 - reachable extern inline functions need special handling; the bodies need
259 to stay in memory until inlining in the hope that they will be inlined.
260 After inlining we release their bodies and turn them into unanalyzed
261 nodes even when they are reachable.
262
91f0ab48 263 - virtual functions are kept in the callgraph even if they seem unreachable in
 264 the hope that calls to them will be devirtualized.
265
266 Again we remove them after inlining. In late optimization some
6bcfabf2 267 devirtualization may happen, but it is not important since we won't inline
91f0ab48 268 the call. In theory early opts and IPA should work out all important cases.
269
270 - virtual clones need bodies of their origins for later materialization;
271 this means that we want to keep the body even if the origin is unreachable
272 otherwise. To keep such origins from sitting in the callgraph and being
273 walked by IPA passes, we turn them into unanalyzed nodes with the body
274 defined.
275
276 We maintain the set of function declarations whose bodies need to stay in
277 body_needed_for_clonning.
278
279 Inline clones represent a special case: their declarations match the
280 declaration of the origin, and cgraph_remove_node already knows how to
281 reshape the callgraph and preserve the body when an offline copy of a
282 function or an inline clone is being removed.
283
aa419a52 284 - C++ virtual tables keyed to another unit are represented as DECL_EXTERNAL
 285 variables with DECL_INITIAL set. We finalize these and keep reachable
 286 ones around for constant folding purposes. After inlining, however, we
 287 stop walking their references to let everything static referenced by them
 288 be removed when it is otherwise unreachable.
289
91f0ab48 290 We maintain a queue of both reachable symbols (i.e. defined symbols that need
 291 to stay) and symbols that are in the boundary (i.e. external symbols referenced
 292 by reachable symbols or origins of clones). The queue is represented
 293 as a linked list through the AUX pointer, terminated by 1.
294
6bcfabf2 295 At the end we keep all reachable symbols. For symbols in the boundary we always
91f0ab48 296 turn the definition into a declaration, but we may keep the function body around
 297 based on body_needed_for_clonning.
298
299 All symbols that enter the queue have a non-zero AUX pointer and are in the
300 boundary. The pointer set REACHABLE is used to track reachable symbols.
301
302 Every symbol can be visited twice - once as part of the boundary and once
303 as a real reachable symbol. enqueue_node needs to decide whether the
304 node needs to be re-queued for the second processing. For this purpose
305 we set the AUX pointer of processed symbols in the boundary to constant 2. */
65c1a668 306
307bool
366970c6 308symbol_table::remove_unreachable_nodes (FILE *file)
65c1a668 309{
452659af 310 symtab_node *first = (symtab_node *) (void *) 1;
f4ec5ce1 311 struct cgraph_node *node, *next;
098f44bc 312 varpool_node *vnode, *vnext;
65c1a668 313 bool changed = false;
431205b7 314 hash_set<symtab_node *> reachable;
315 hash_set<tree> body_needed_for_clonning;
316 hash_set<void *> reachable_call_targets;
65c1a668 317
e2fa5d74 318 timevar_push (TV_IPA_UNREACHABLE);
d1f68cd8 319 build_type_inheritance_graph ();
3f5be5f4 320 if (file)
321 fprintf (file, "\nReclaiming functions:");
382ecba7 322 if (flag_checking)
323 {
324 FOR_EACH_FUNCTION (node)
325 gcc_assert (!node->aux);
326 FOR_EACH_VARIABLE (vnode)
327 gcc_assert (!vnode->aux);
328 }
7f74ac6b 329 /* Mark functions whose bodies are obviously needed.
330 This is mostly when they can be referenced externally. Inline clones
331 are special since their declarations are shared with master clone and thus
332 cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them. */
abb1a237 333 FOR_EACH_FUNCTION (node)
334 {
335 node->used_as_abstract_origin = false;
75e72311 336 node->indirect_call_target = false;
02774f2d 337 if (node->definition
abb1a237 338 && !node->global.inlined_to
02774f2d 339 && !node->in_other_partition
415d1b9a 340 && !node->can_remove_if_no_direct_calls_and_refs_p ())
abb1a237 341 {
342 gcc_assert (!node->global.inlined_to);
431205b7 343 reachable.add (node);
344 enqueue_node (node, &first, &reachable);
abb1a237 345 }
346 else
02774f2d 347 gcc_assert (!node->aux);
abb1a237 348 }
7f74ac6b 349
350 /* Mark variables that are obviously needed. */
91f0ab48 351 FOR_EACH_DEFINED_VARIABLE (vnode)
97221fd7 352 if (!vnode->can_remove_if_no_refs_p()
02774f2d 353 && !vnode->in_other_partition)
91f0ab48 354 {
431205b7 355 reachable.add (vnode);
356 enqueue_node (vnode, &first, &reachable);
91f0ab48 357 }
358
359 /* Perform reachability analysis. */
452659af 360 while (first != (symtab_node *) (void *) 1)
6f932b06 361 {
431205b7 362 bool in_boundary_p = !reachable.contains (first);
452659af 363 symtab_node *node = first;
65c1a668 364
452659af 365 first = (symtab_node *)first->aux;
9da87cb8 366
91f0ab48 367 /* If we are processing symbol in boundary, mark its AUX pointer for
368 possible later re-processing in enqueue_node. */
369 if (in_boundary_p)
c5e076fc 370 {
371 node->aux = (void *)2;
372 if (node->alias && node->analyzed)
373 enqueue_node (node->get_alias_target (), &first, &reachable);
374 }
91f0ab48 375 else
376 {
9f0b7378 377 if (TREE_CODE (node->decl) == FUNCTION_DECL
378 && DECL_ABSTRACT_ORIGIN (node->decl))
abb1a237 379 {
380 struct cgraph_node *origin_node
ca92a251 381 = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
382 if (origin_node && !origin_node->used_as_abstract_origin)
383 {
384 origin_node->used_as_abstract_origin = true;
385 gcc_assert (!origin_node->prev_sibling_clone);
386 gcc_assert (!origin_node->next_sibling_clone);
387 for (cgraph_node *n = origin_node->clones; n;
388 n = n->next_sibling_clone)
389 if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
390 n->used_as_abstract_origin = true;
ca92a251 391 }
abb1a237 392 }
91f0ab48 393 /* If any symbol in a comdat group is reachable, force
468088ac 394 all externally visible symbols in the same comdat
395 group to be reachable as well. Comdat-local symbols
396 can be discarded if all uses were inlined. */
02774f2d 397 if (node->same_comdat_group)
91f0ab48 398 {
452659af 399 symtab_node *next;
02774f2d 400 for (next = node->same_comdat_group;
91f0ab48 401 next != node;
02774f2d 402 next = next->same_comdat_group)
415d1b9a 403 if (!next->comdat_local_p ()
431205b7 404 && !reachable.add (next))
405 enqueue_node (next, &first, &reachable);
91f0ab48 406 }
407 /* Mark references as reachable. */
9da914af 408 process_references (node, &first, &reachable);
91f0ab48 409 }
9da87cb8 410
13cbeaac 411 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
6f932b06 412 {
91f0ab48 413 /* Mark the callees reachable unless they are direct calls to extern
414 inline functions we decided to not inline. */
415 if (!in_boundary_p)
e12f85b7 416 {
91f0ab48 417 struct cgraph_edge *e;
e2fa5d74 418 /* Keep alive possible targets for devirtualization. */
d1f68cd8 419 if (opt_for_fn (cnode->decl, optimize)
420 && opt_for_fn (cnode->decl, flag_devirtualize))
e2fa5d74 421 {
422 struct cgraph_edge *next;
423 for (e = cnode->indirect_calls; e; e = next)
424 {
425 next = e->next_callee;
426 if (e->indirect_info->polymorphic)
431205b7 427 walk_polymorphic_call_targets (&reachable_call_targets,
9da914af 428 e, &first, &reachable);
e2fa5d74 429 }
430 }
91f0ab48 431 for (e = cnode->callees; e; e = e->next_callee)
71ca01ff 432 {
366970c6 433 symtab_node *body = e->callee->function_symbol ();
02774f2d 434 if (e->callee->definition
435 && !e->callee->in_other_partition
71ca01ff 436 && (!e->inline_failed
02774f2d 437 || !DECL_EXTERNAL (e->callee->decl)
438 || e->callee->alias
9da914af 439 || possible_inline_candidate_p (e->callee)))
89ae81e0 440 {
441 /* Be sure that we will not optimize out alias target
442 body. */
443 if (DECL_EXTERNAL (e->callee->decl)
444 && e->callee->alias
9da914af 445 && symtab->state < IPA_SSA_AFTER_INLINING)
366970c6 446 reachable.add (body);
431205b7 447 reachable.add (e->callee);
89ae81e0 448 }
431205b7 449 enqueue_node (e->callee, &first, &reachable);
da751785 450 }
91f0ab48 451
452 /* When an inline clone exists, mark the body to be preserved so that when
453 removing the offline copy of the function we don't kill it. */
b9b49047 454 if (cnode->global.inlined_to)
431205b7 455 body_needed_for_clonning.add (cnode->decl);
61c2c7b1 456
b9b49047 457 /* For non-inline clones, force their origins to the boundary and ensure
458 that body is not removed. */
459 while (cnode->clone_of)
460 {
02774f2d 461 bool noninline = cnode->clone_of->decl != cnode->decl;
b9b49047 462 cnode = cnode->clone_of;
463 if (noninline)
464 {
431205b7 465 body_needed_for_clonning.add (cnode->decl);
466 enqueue_node (cnode, &first, &reachable);
b9b49047 467 }
6f932b06 468 }
d09768a4 469
470 }
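	  /* For thunks in the boundary, keep their target function in the
	     boundary as well.  */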
c5e076fc 471 else if (cnode->thunk.thunk_p)
472 enqueue_node (cnode->callees->callee, &first, &reachable);
7f0dce59 473
d09768a4 474 /* If any reachable function has simd clones, mark them as
475 reachable as well. */
476 if (cnode->simd_clones)
477 {
478 cgraph_node *next;
479 for (next = cnode->simd_clones;
480 next;
481 next = next->simdclone->next_clone)
482 if (in_boundary_p
431205b7 483 || !reachable.add (next))
484 enqueue_node (next, &first, &reachable);
ee3f5fc0 485 }
6f932b06 486 }
aa419a52 487 /* When we see the constructor of an external variable, keep the referred
2dc9831f 488 nodes in the boundary. This will also hold the initializers of the
 489 external vars NODE refers to. */
13cbeaac 490 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2dc9831f 491 if (vnode
02774f2d 492 && DECL_EXTERNAL (node->decl)
493 && !vnode->alias
aa419a52 494 && in_boundary_p)
2dc9831f 495 {
51ce5652 496 struct ipa_ref *ref = NULL;
497 for (int i = 0; node->iterate_reference (i, ref); i++)
431205b7 498 enqueue_node (ref->referred, &first, &reachable);
2dc9831f 499 }
65c1a668 500 }
501
91f0ab48 502 /* Remove unreachable functions. */
35ee1c66 503 for (node = first_function (); node; node = next)
65c1a668 504 {
35ee1c66 505 next = next_function (node);
15ca8f90 506
507 /* If node is not needed at all, remove it. */
02774f2d 508 if (!node->aux)
65c1a668 509 {
3f5be5f4 510 if (file)
0e388735 511 fprintf (file, " %s", node->dump_name ());
415d1b9a 512 node->remove ();
91f0ab48 513 changed = true;
514 }
15ca8f90 515 /* If node is unreachable, remove its body. */
431205b7 516 else if (!reachable.contains (node))
91f0ab48 517 {
6cb8fb82 518 /* We keep definitions of thunks and aliases in the boundary so
519 we can walk to the ultimate alias targets and function symbols
520 reliably. */
521 if (node->alias || node->thunk.thunk_p)
522 ;
523 else if (!body_needed_for_clonning.contains (node->decl)
524 && !node->alias && !node->thunk.thunk_p)
415d1b9a 525 node->release_body ();
b9b49047 526 else if (!node->clone_of)
02774f2d 527 gcc_assert (in_lto_p || DECL_RESULT (node->decl));
c5e076fc 528 if (node->definition && !node->alias && !node->thunk.thunk_p)
7fb046a4 529 {
91f0ab48 530 if (file)
0e388735 531 fprintf (file, " %s", node->dump_name ());
fa4052b3 532 node->body_removed = true;
02774f2d 533 node->analyzed = false;
534 node->definition = false;
535 node->cpp_implicit_alias = false;
536 node->alias = false;
e0dec29d 537 node->transparent_alias = false;
95d0bdb9 538 node->thunk.thunk_p = false;
02774f2d 539 node->weakref = false;
f0d26d57 540 /* After early inlining we drop always_inline attributes on
541 bodies of functions that are still referenced (have their
542 address taken). */
543 DECL_ATTRIBUTES (node->decl)
544 = remove_attribute ("always_inline",
545 DECL_ATTRIBUTES (node->decl));
02774f2d 546 if (!node->in_other_partition)
281dea26 547 node->local.local = false;
415d1b9a 548 node->remove_callees ();
51ce5652 549 node->remove_all_references ();
7fb046a4 550 changed = true;
551 }
65c1a668 552 }
b9b49047 553 else
415d1b9a 554 gcc_assert (node->clone_of || !node->has_gimple_body_p ()
02774f2d 555 || in_lto_p || DECL_RESULT (node->decl));
65c1a668 556 }
91f0ab48 557
558 /* Inline clones might be kept around so that materializing them allows further
559 cloning. If the function the clone is inlined into is removed, we need
560 to turn it into a normal clone. */
7c455d87 561 FOR_EACH_FUNCTION (node)
ccf4ab6b 562 {
ccf4ab6b 563 if (node->global.inlined_to
564 && !node->callers)
565 {
566 gcc_assert (node->clones);
21f41380 567 node->global.inlined_to = NULL;
568 update_inlined_to_pointer (node, node);
ccf4ab6b 569 }
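      /* Clear the AUX pointer used as the reachability worklist link.  */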
02774f2d 570 node->aux = NULL;
ccf4ab6b 571 }
8dfbf71d 572
91f0ab48 573 /* Remove unreachable variables. */
8dfbf71d 574 if (file)
91f0ab48 575 fprintf (file, "\nReclaiming variables:");
35ee1c66 576 for (vnode = first_variable (); vnode; vnode = vnext)
6f932b06 577 {
35ee1c66 578 vnext = next_variable (vnode);
02774f2d 579 if (!vnode->aux
f1a7feee 580 /* For can_refer_decl_in_current_unit_p we want to track for
 581 all external variables whether they are defined in another partition
 582 or not. */
02774f2d 583 && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
91f0ab48 584 {
e7610412 585 struct ipa_ref *ref = NULL;
586
587 /* First remove the aliases, so varpool::remove can possibly look up
588 the constructor and save it for future use. */
589 while (vnode->iterate_direct_aliases (0, ref))
590 {
591 if (file)
0e388735 592 fprintf (file, " %s", ref->referred->dump_name ());
e7610412 593 ref->referring->remove ();
594 }
8dfbf71d 595 if (file)
0e388735 596 fprintf (file, " %s", vnode->dump_name ());
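	  /* Removing the aliases above may have removed the node cached in
	     VNEXT; recompute it.  */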
e7610412 597 vnext = next_variable (vnode);
c72bf911 598 /* Signal removal to the debug machinery. */
278cec16 599 if (! flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
c72bf911 600 {
601 vnode->definition = false;
602 (*debug_hooks->late_global_decl) (vnode->decl);
603 }
415d1b9a 604 vnode->remove ();
8dfbf71d 605 changed = true;
6f932b06 606 }
c5e076fc 607 else if (!reachable.contains (vnode) && !vnode->alias)
91f0ab48 608 {
df8d3e89 609 tree init;
02774f2d 610 if (vnode->definition)
91f0ab48 611 {
612 if (file)
f1c8b4d7 613 fprintf (file, " %s", vnode->name ());
91f0ab48 614 changed = true;
615 }
38889e98 616 /* Keep body if it may be useful for constant folding. */
278cec16 617 if ((flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
1e42d5c6 618 || ((init = ctor_for_folding (vnode->decl)) == error_mark_node))
38889e98 619 vnode->remove_initializer ();
620 else
621 DECL_INITIAL (vnode->decl) = init;
fa4052b3 622 vnode->body_removed = true;
02774f2d 623 vnode->definition = false;
624 vnode->analyzed = false;
625 vnode->aux = NULL;
15ca8f90 626
415d1b9a 627 vnode->remove_from_same_comdat_group ();
04f65f92 628
51ce5652 629 vnode->remove_all_references ();
91f0ab48 630 }
631 else
02774f2d 632 vnode->aux = NULL;
6f932b06 633 }
8dfbf71d 634
91f0ab48 635 /* Now update address_taken flags and try to promote functions to be local. */
cdedc740 636 if (file)
637 fprintf (file, "\nClearing address taken flags:");
7c455d87 638 FOR_EACH_DEFINED_FUNCTION (node)
02774f2d 639 if (node->address_taken
640 && !node->used_from_other_partition)
cdedc740 641 {
7feaa33e 642 if (!node->call_for_symbol_and_aliases
1e42d5c6 643 (has_addr_references_p, NULL, true))
cdedc740 644 {
645 if (file)
f1c8b4d7 646 fprintf (file, " %s", node->name ());
02774f2d 647 node->address_taken = false;
8dfbf71d 648 changed = true;
75e72311 649 if (node->local_p ()
650 /* Virtual functions may be kept in cgraph just because
651 of possible later devirtualization. Do not mark them as
652 local too early so we won't optimize them out before
653 we are done with polymorphic call analysis. */
9da914af 654 && (symtab->state >= IPA_SSA_AFTER_INLINING
75e72311 655 || !node->call_for_symbol_and_aliases
656 (is_indirect_call_target_p, NULL, true)))
8dfbf71d 657 {
658 node->local.local = true;
659 if (file)
660 fprintf (file, " (local)");
661 }
cdedc740 662 }
663 }
c7b2cc59 664 if (file)
665 fprintf (file, "\n");
6f932b06 666
382ecba7 667 symtab_node::checking_verify_symtab_nodes ();
34e5cced 668
f8bfd7f7 669 /* If we removed something, perhaps the profile could be improved. */
5f4e4f36 670 if (changed && (optimize || in_lto_p) && ipa_call_summaries)
f8bfd7f7 671 FOR_EACH_DEFINED_FUNCTION (node)
6eaf903b 672 ipa_propagate_frequency (node);
f8bfd7f7 673
e2fa5d74 674 timevar_pop (TV_IPA_UNREACHABLE);
65c1a668 675 return changed;
676}
f37a5008 677
703ad42c 678/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
679 as needed, also clear EXPLICIT_REFS if the references to the given variable
680 do not need to be explicit. */
681
682void
683process_references (varpool_node *vnode,
684 bool *written, bool *address_taken,
685 bool *read, bool *explicit_refs)
686{
687 int i;
688 struct ipa_ref *ref;
689
97221fd7 690 if (!vnode->all_refs_explicit_p ()
703ad42c 691 || TREE_THIS_VOLATILE (vnode->decl))
692 *explicit_refs = false;
693
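  /* Scan the referrers, stopping early once all three flags are set or
     once the references are known not to be explicit.  */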
51ce5652 694 for (i = 0; vnode->iterate_referring (i, ref)
703ad42c 695 && *explicit_refs && (!*written || !*address_taken || !*read); i++)
696 switch (ref->use)
697 {
698 case IPA_REF_ADDR:
699 *address_taken = true;
700 break;
701 case IPA_REF_LOAD:
702 *read = true;
703 break;
704 case IPA_REF_STORE:
705 *written = true;
706 break;
707 case IPA_REF_ALIAS:
415d1b9a 708 process_references (dyn_cast<varpool_node *> (ref->referring), written,
709 address_taken, read, explicit_refs);
703ad42c 710 break;
711 }
712}
713
714/* Set TREE_READONLY bit. */
715
716bool
717set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
718{
719 TREE_READONLY (vnode->decl) = true;
720 return false;
721}
722
723/* Set the writeonly bit and clear the initializer, since it will not be needed. */
724
725bool
30d89cd0 726set_writeonly_bit (varpool_node *vnode, void *data)
703ad42c 727{
728 vnode->writeonly = true;
5f4e4f36 729 if (optimize || in_lto_p)
703ad42c 730 {
731 DECL_INITIAL (vnode->decl) = NULL;
732 if (!vnode->alias)
30d89cd0 733 {
734 if (vnode->num_references ())
735 *(bool *)data = true;
736 vnode->remove_all_references ();
737 }
703ad42c 738 }
739 return false;
740}
741
742/* Clear the addressable bit of VNODE. */
743
744bool
745clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
746{
747 vnode->address_taken = false;
748 TREE_ADDRESSABLE (vnode->decl) = 0;
749 return false;
750}
751
8c8b8b86 752/* Discover variables that no longer have their address taken, or are read-only
 753 or write-only, and update their flags.
8dfbf71d 754
8c8b8b86 755 Return true when unreachable symbol removal should be done.
30d89cd0 756
f4d3c071 757 FIXME: This cannot be done in between gimplify and omp_expand since the
8dfbf71d 758 readonly flag plays a role in what is shared and what is not. Currently we do
023a28e1 759 this transformation as part of whole-program visibility and redo it at the
 760 ipa-reference pass (to take cloning into account), but it would
 761 make sense to do it before early optimizations. */
8dfbf71d 762
30d89cd0 763bool
8c8b8b86 764ipa_discover_variable_flags (void)
8dfbf71d 765{
8c8b8b86 766 if (!flag_ipa_reference_addressable)
767 return false;
768
30d89cd0 769 bool remove_p = false;
098f44bc 770 varpool_node *vnode;
8dfbf71d 771 if (dump_file)
772 fprintf (dump_file, "Clearing variable flags:");
7c455d87 773 FOR_EACH_VARIABLE (vnode)
703ad42c 774 if (!vnode->alias
02774f2d 775 && (TREE_ADDRESSABLE (vnode->decl)
703ad42c 776 || !vnode->writeonly
02774f2d 777 || !TREE_READONLY (vnode->decl)))
8dfbf71d 778 {
779 bool written = false;
780 bool address_taken = false;
703ad42c 781 bool read = false;
782 bool explicit_refs = true;
783
30d89cd0 784 process_references (vnode, &written, &address_taken, &read,
785 &explicit_refs);
703ad42c 786 if (!explicit_refs)
787 continue;
788 if (!address_taken)
8dfbf71d 789 {
703ad42c 790 if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
4206bfac 791 fprintf (dump_file, " %s (non-addressable)", vnode->name ());
50f2a18b 792 vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
793 true);
8dfbf71d 794 }
703ad42c 795 if (!address_taken && !written
8dfbf71d 796 /* Making variable in explicit section readonly can cause section
797 type conflict.
798 See e.g. gcc.c-torture/compile/pr23237.c */
71e19e54 799 && vnode->get_section () == NULL)
8dfbf71d 800 {
703ad42c 801 if (!TREE_READONLY (vnode->decl) && dump_file)
f1c8b4d7 802 fprintf (dump_file, " %s (read-only)", vnode->name ());
50f2a18b 803 vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
703ad42c 804 }
4206bfac 805 if (!vnode->writeonly && !read && !address_taken && written)
703ad42c 806 {
807 if (dump_file)
808 fprintf (dump_file, " %s (write-only)", vnode->name ());
50f2a18b 809 vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
810 true);
8dfbf71d 811 }
812 }
813 if (dump_file)
814 fprintf (dump_file, "\n");
30d89cd0 815 return remove_p;
8dfbf71d 816}
817
a53e7471 818/* Generate and emit a static constructor or destructor. WHICH must
1e42d5c6 819 be one of 'I' (for a constructor) or 'D' (for a destructor).
820 BODY is a STATEMENT_LIST containing GENERIC
058a1b7a 821 statements. PRIORITY is the initialization priority for this
822 constructor or destructor.
a53e7471 823
62510893 824 FINAL specifies whether the externally visible name for collect2 should
825 be produced. */
826
827static void
332446ac 828cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final,
829 tree optimization,
830 tree target)
a53e7471 831{
832 static int counter = 0;
833 char which_buf[16];
834 tree decl, name, resdecl;
835
836 /* The priority is encoded in the constructor or destructor name.
837 collect2 will sort the names and arrange that they are called at
838 program startup. */
62510893 839 if (final)
840 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
841 else
842 /* Produce a sane name but one not recognizable by collect2, just in
843 case we fail to inline the function. */
844 sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
a53e7471 845 name = get_file_function_name (which_buf);
846
847 decl = build_decl (input_location, FUNCTION_DECL, name,
848 build_function_type_list (void_type_node, NULL_TREE));
849 current_function_decl = decl;
850
851 resdecl = build_decl (input_location,
852 RESULT_DECL, NULL_TREE, void_type_node);
853 DECL_ARTIFICIAL (resdecl) = 1;
854 DECL_RESULT (decl) = resdecl;
855 DECL_CONTEXT (resdecl) = decl;
856
857 allocate_struct_function (decl, false);
858
859 TREE_STATIC (decl) = 1;
860 TREE_USED (decl) = 1;
332446ac 861 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl) = optimization;
862 DECL_FUNCTION_SPECIFIC_TARGET (decl) = target;
a53e7471 863 DECL_ARTIFICIAL (decl) = 1;
bf20458b 864 DECL_IGNORED_P (decl) = 1;
a53e7471 865 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
866 DECL_SAVED_TREE (decl) = body;
62510893 867 if (!targetm.have_ctors_dtors && final)
a53e7471 868 {
869 TREE_PUBLIC (decl) = 1;
870 DECL_PRESERVE_P (decl) = 1;
871 }
872 DECL_UNINLINABLE (decl) = 1;
873
874 DECL_INITIAL (decl) = make_node (BLOCK);
2a066179 875 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
a53e7471 876 TREE_USED (DECL_INITIAL (decl)) = 1;
877
878 DECL_SOURCE_LOCATION (decl) = input_location;
879 cfun->function_end_locus = input_location;
880
881 switch (which)
882 {
883 case 'I':
884 DECL_STATIC_CONSTRUCTOR (decl) = 1;
885 decl_init_priority_insert (decl, priority);
886 break;
887 case 'D':
888 DECL_STATIC_DESTRUCTOR (decl) = 1;
889 decl_fini_priority_insert (decl, priority);
890 break;
891 default:
892 gcc_unreachable ();
893 }
894
895 gimplify_function_tree (decl);
896
415d1b9a 897 cgraph_node::add_new_function (decl, false);
a53e7471 898
899 set_cfun (NULL);
900 current_function_decl = NULL;
901}
902
62510893 903/* Generate and emit a static constructor or destructor. WHICH must
1e42d5c6 904 be one of 'I' (for a constructor) or 'D' (for a destructor).
905 BODY is a STATEMENT_LIST containing GENERIC
058a1b7a 906 statements. PRIORITY is the initialization priority for this
907 constructor or destructor. */
62510893 908
909void
910cgraph_build_static_cdtor (char which, tree body, int priority)
911{
332446ac 912 cgraph_build_static_cdtor_1 (which, body, priority, false, NULL, NULL);
62510893 913}
a53e7471 914
a53e7471 915/* When the target does not have ctors and dtors, we call all constructors
 916 and destructors through special initialization/destruction functions
 917 recognized by collect2.
 918
 919 When we are going to build these functions, collect all constructors and
 920 destructors and turn them into normal functions. */
921
922static void
5d6bbf55 923record_cdtor_fn (struct cgraph_node *node, vec<tree> *ctors, vec<tree> *dtors)
a53e7471 924{
02774f2d 925 if (DECL_STATIC_CONSTRUCTOR (node->decl))
5d6bbf55 926 ctors->safe_push (node->decl);
02774f2d 927 if (DECL_STATIC_DESTRUCTOR (node->decl))
5d6bbf55 928 dtors->safe_push (node->decl);
415d1b9a 929 node = cgraph_node::get (node->decl);
02774f2d 930 DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
a53e7471 931}
932
933/* Define global constructor/destructor functions for the vector of
934 CDTORS. The CDTORS are sorted by initialization
935 priority. If CTOR_P is true, these are constructors; otherwise,
936 they are destructors. */
937
938static void
5d6bbf55 939build_cdtor (bool ctor_p, const vec<tree> &cdtors)
a53e7471 940{
941 size_t i,j;
f1f41a6c 942 size_t len = cdtors.length ();
a53e7471 943
944 i = 0;
945 while (i < len)
946 {
947 tree body;
948 tree fn;
949 priority_type priority;
950
951 priority = 0;
952 body = NULL_TREE;
953 j = i;
954 do
955 {
956 priority_type p;
f1f41a6c 957 fn = cdtors[j];
a53e7471 958 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
959 if (j == i)
960 priority = p;
961 else if (p != priority)
962 break;
963 j++;
964 }
965 while (j < len);
966
d2435fb0 967 /* When there is only one cdtor and the target supports them, do nothing. */
a53e7471 968 if (j == i + 1
969 && targetm.have_ctors_dtors)
970 {
971 i++;
972 continue;
973 }
974 /* Find the next batch of constructors/destructors with the same
975 initialization priority. */
d2435fb0 976 for (;i < j; i++)
a53e7471 977 {
a53e7471 978 tree call;
f1f41a6c 979 fn = cdtors[i];
a53e7471 980 call = build_call_expr (fn, 0);
981 if (ctor_p)
982 DECL_STATIC_CONSTRUCTOR (fn) = 0;
983 else
984 DECL_STATIC_DESTRUCTOR (fn) = 0;
985 /* We do not want to optimize away pure/const calls here.
986 When optimizing, these should already be removed; when not
987 optimizing, we want the user to be able to set a breakpoint in them. */
988 TREE_SIDE_EFFECTS (call) = 1;
989 append_to_statement_list (call, &body);
a53e7471 990 }
a53e7471 991 gcc_assert (body != NULL_TREE);
992 /* Generate a function to call all the functions of like
993 priority. */
332446ac 994 cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true,
995 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (cdtors[0]),
996 DECL_FUNCTION_SPECIFIC_TARGET (cdtors[0]));
a53e7471 997 }
998}
999
1000/* Comparison function for qsort. P1 and P2 are actually of type
1001 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1002 used to determine the sort order. */
1003
1004static int
1005compare_ctor (const void *p1, const void *p2)
1006{
1007 tree f1;
1008 tree f2;
1009 int priority1;
1010 int priority2;
1011
1012 f1 = *(const tree *)p1;
1013 f2 = *(const tree *)p2;
1014 priority1 = DECL_INIT_PRIORITY (f1);
1015 priority2 = DECL_INIT_PRIORITY (f2);
1016
1017 if (priority1 < priority2)
1018 return -1;
1019 else if (priority1 > priority2)
1020 return 1;
1021 else
1022 /* Ensure a stable sort. Constructors are executed in backwards
1023 order to make LTO initialize libraries first. */
1024 return DECL_UID (f2) - DECL_UID (f1);
1025}
1026
1027/* Comparison function for qsort. P1 and P2 are actually of type
1028 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1029 used to determine the sort order. */
1030
1031static int
1032compare_dtor (const void *p1, const void *p2)
1033{
1034 tree f1;
1035 tree f2;
1036 int priority1;
1037 int priority2;
1038
1039 f1 = *(const tree *)p1;
1040 f2 = *(const tree *)p2;
1041 priority1 = DECL_FINI_PRIORITY (f1);
1042 priority2 = DECL_FINI_PRIORITY (f2);
1043
1044 if (priority1 < priority2)
1045 return -1;
1046 else if (priority1 > priority2)
1047 return 1;
1048 else
1049 /* Ensure a stable sort. */
1050 return DECL_UID (f1) - DECL_UID (f2);
1051}
1052
1053/* Generate functions to call static constructors and destructors
1054 for targets that do not support .ctors/.dtors sections. These
1055 functions have magic names which are detected by collect2. */
1056
1057static void
5d6bbf55 1058build_cdtor_fns (vec<tree> *ctors, vec<tree> *dtors)
a53e7471 1059{
5d6bbf55 1060 if (!ctors->is_empty ())
a53e7471 1061 {
1062 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
5d6bbf55 1063 ctors->qsort (compare_ctor);
1064 build_cdtor (/*ctor_p=*/true, *ctors);
a53e7471 1065 }
1066
5d6bbf55 1067 if (!dtors->is_empty ())
a53e7471 1068 {
1069 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
5d6bbf55 1070 dtors->qsort (compare_dtor);
1071 build_cdtor (/*ctor_p=*/false, *dtors);
a53e7471 1072 }
1073}
1074
1075/* Look for constructors and destructors and produce functions calling them.
1076 This is needed for targets not supporting ctors or dtors, but we perform the
9d75589a 1077 transformation also at linktime to merge possibly numerous
a53e7471 1078 constructors/destructors into a single function to improve code locality and
1079 reduce size. */
1080
1081static unsigned int
1082ipa_cdtor_merge (void)
1083{
5d6bbf55 1084 /* A vector of FUNCTION_DECLs declared as static constructors. */
1085 auto_vec<tree, 20> ctors;
1086 /* A vector of FUNCTION_DECLs declared as static destructors. */
1087 auto_vec<tree, 20> dtors;
a53e7471 1088 struct cgraph_node *node;
7c455d87 1089 FOR_EACH_DEFINED_FUNCTION (node)
02774f2d 1090 if (DECL_STATIC_CONSTRUCTOR (node->decl)
1091 || DECL_STATIC_DESTRUCTOR (node->decl))
5d6bbf55 1092 record_cdtor_fn (node, &ctors, &dtors);
1093 build_cdtor_fns (&ctors, &dtors);
a53e7471 1094 return 0;
1095}
1096
7620bc82 1097namespace {
1098
1099const pass_data pass_data_ipa_cdtor_merge =
a53e7471 1100{
cbe8bda8 1101 IPA_PASS, /* type */
1102 "cdtor", /* name */
1103 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 1104 TV_CGRAPHOPT, /* tv_id */
1105 0, /* properties_required */
1106 0, /* properties_provided */
1107 0, /* properties_destroyed */
1108 0, /* todo_flags_start */
1109 0, /* todo_flags_finish */
a53e7471 1110};
cbe8bda8 1111
7620bc82 1112class pass_ipa_cdtor_merge : public ipa_opt_pass_d
cbe8bda8 1113{
1114public:
9af5ce0c 1115 pass_ipa_cdtor_merge (gcc::context *ctxt)
1116 : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
1117 NULL, /* generate_summary */
1118 NULL, /* write_summary */
1119 NULL, /* read_summary */
1120 NULL, /* write_optimization_summary */
1121 NULL, /* read_optimization_summary */
1122 NULL, /* stmt_fixup */
1123 0, /* function_transform_todo_flags_start */
1124 NULL, /* function_transform */
1125 NULL) /* variable_transform */
cbe8bda8 1126 {}
1127
1128 /* opt_pass methods: */
31315c24 1129 virtual bool gate (function *);
65b0537f 1130 virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }
cbe8bda8 1131
1132}; // class pass_ipa_cdtor_merge
1133
31315c24 1134bool
1135pass_ipa_cdtor_merge::gate (function *)
1136{
1137 /* Perform the pass when we have no ctors/dtors support
1138 or at LTO time to merge multiple constructors into a single
1139 function. */
5f4e4f36 1140 return !targetm.have_ctors_dtors || in_lto_p;
31315c24 1141}
1142
7620bc82 1143} // anon namespace
1144
cbe8bda8 1145ipa_opt_pass_d *
1146make_pass_ipa_cdtor_merge (gcc::context *ctxt)
1147{
1148 return new pass_ipa_cdtor_merge (ctxt);
1149}
3f1f2be0 1150
1151/* Invalid pointer representing BOTTOM for single user dataflow. */
1152#define BOTTOM ((cgraph_node *)(size_t) 2)
1153
1154/* Meet operation for single user dataflow.
1155 Here we want to associate variables with a single function that may access them.
1156
1157 FUNCTION is the current single user of a variable, VAR is a variable that uses it.
1158 Lattice is stored in SINGLE_USER_MAP.
1159
1160 We represent:
1161 - TOP by no entry in SINGLE_USER_MAP
1162 - BOTTOM by BOTTOM in AUX pointer (to save lookups)
1163 - known single user by cgraph pointer in SINGLE_USER_MAP. */
1164
1165cgraph_node *
1166meet (cgraph_node *function, varpool_node *var,
d62dd039 1167 hash_map<varpool_node *, cgraph_node *> &single_user_map)
3f1f2be0 1168{
1169 struct cgraph_node *user, **f;
1170
1171 if (var->aux == BOTTOM)
1172 return BOTTOM;
1173
d62dd039 1174 f = single_user_map.get (var);
3f1f2be0 1175 if (!f)
1176 return function;
1177 user = *f;
1178 if (!function)
1179 return user;
1180 else if (function != user)
1181 return BOTTOM;
1182 else
1183 return function;
1184}
1185
1186/* Propagation step of single-use dataflow.
1187
1188 Check all uses of VNODE and see if they all come from the single function FUNCTION.
1189 SINGLE_USER_MAP represents the dataflow lattice. */
1190
1191cgraph_node *
1192propagate_single_user (varpool_node *vnode, cgraph_node *function,
d62dd039 1193 hash_map<varpool_node *, cgraph_node *> &single_user_map)
3f1f2be0 1194{
1195 int i;
1196 struct ipa_ref *ref;
1197
1198 gcc_assert (!vnode->externally_visible);
1199
1200 /* If node is an alias, first meet with its target. */
1201 if (vnode->alias)
97221fd7 1202 function = meet (function, vnode->get_alias_target (), single_user_map);
3f1f2be0 1203
1204 /* Check all users and see if they correspond to a single function. */
415d1b9a 1205 for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
3f1f2be0 1206 {
1207 struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
1208 if (cnode)
1209 {
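	  /* A use from an inline clone counts as a use from the function it
	     was inlined into.  */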
1210 if (cnode->global.inlined_to)
1211 cnode = cnode->global.inlined_to;
1212 if (!function)
1213 function = cnode;
1214 else if (function != cnode)
1215 function = BOTTOM;
1216 }
1217 else
366970c6 1218 function = meet (function, dyn_cast <varpool_node *> (ref->referring),
1219 single_user_map);
3f1f2be0 1220 }
1221 return function;
1222}
1223
1224/* Pass setting used_by_single_function flag.
366970c6 1225 This flag is set on a variable when there is only one function that may
 1226 possibly refer to it. */
3f1f2be0 1227
1228static unsigned int
1229ipa_single_use (void)
1230{
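  /* Worklist of variables, linked through their AUX pointers and
     terminated by pointer 1.  */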
1231 varpool_node *first = (varpool_node *) (void *) 1;
1232 varpool_node *var;
d62dd039 1233 hash_map<varpool_node *, cgraph_node *> single_user_map;
3f1f2be0 1234
1235 FOR_EACH_DEFINED_VARIABLE (var)
97221fd7 1236 if (!var->all_refs_explicit_p ())
3f1f2be0 1237 var->aux = BOTTOM;
1238 else
1239 {
1240 /* Enqueue symbol for dataflow. */
1241 var->aux = first;
1242 first = var;
1243 }
1244
1245 /* The actual dataflow. */
1246
1247 while (first != (void *) 1)
1248 {
1249 cgraph_node *user, *orig_user, **f;
1250
1251 var = first;
1252 first = (varpool_node *)first->aux;
1253
d62dd039 1254 f = single_user_map.get (var);
3f1f2be0 1255 if (f)
1256 orig_user = *f;
1257 else
1258 orig_user = NULL;
1259 user = propagate_single_user (var, orig_user, single_user_map);
1260
1261 gcc_checking_assert (var->aux != BOTTOM);
1262
1263 /* If user differs, enqueue all references. */
1264 if (user != orig_user)
1265 {
1266 unsigned int i;
1267 ipa_ref *ref;
1268
d62dd039 1269 single_user_map.put (var, user);
3f1f2be0 1270
1271 /* Enqueue all aliases for re-processing. */
50f2a18b 1272 for (i = 0; var->iterate_direct_aliases (i, ref); i++)
1273 if (!ref->referring->aux)
3f1f2be0 1274 {
1275 ref->referring->aux = first;
1276 first = dyn_cast <varpool_node *> (ref->referring);
1277 }
1278 /* Enqueue all users for re-processing. */
415d1b9a 1279 for (i = 0; var->iterate_reference (i, ref); i++)
3f1f2be0 1280 if (!ref->referred->aux
1281 && ref->referred->definition
1282 && is_a <varpool_node *> (ref->referred))
1283 {
1284 ref->referred->aux = first;
1285 first = dyn_cast <varpool_node *> (ref->referred);
1286 }
1287
1288 /* If user is BOTTOM, just punt on this var. */
1289 if (user == BOTTOM)
1290 var->aux = BOTTOM;
1291 else
1292 var->aux = NULL;
1293 }
1294 else
1295 var->aux = NULL;
1296 }
1297
1298 FOR_EACH_DEFINED_VARIABLE (var)
1299 {
1300 if (var->aux != BOTTOM)
1301 {
366970c6 1302 /* Not having the single user known means that the VAR is
1303 unreachable. Either someone forgot to remove unreachable
1304 variables or the reachability here is wrong. */
1305
382ecba7 1306 gcc_checking_assert (single_user_map.get (var));
1307
3f1f2be0 1308 if (dump_file)
1309 {
0e388735 1310 fprintf (dump_file, "Variable %s is used by single function\n",
1311 var->dump_name ());
3f1f2be0 1312 }
1313 var->used_by_single_function = true;
1314 }
1315 var->aux = NULL;
1316 }
1317 return 0;
1318}
1319
7620bc82 1320namespace {
1321
1322const pass_data pass_data_ipa_single_use =
3f1f2be0 1323{
1324 IPA_PASS, /* type */
1325 "single-use", /* name */
1326 OPTGROUP_NONE, /* optinfo_flags */
3f1f2be0 1327 TV_CGRAPHOPT, /* tv_id */
1328 0, /* properties_required */
1329 0, /* properties_provided */
1330 0, /* properties_destroyed */
1331 0, /* todo_flags_start */
1332 0, /* todo_flags_finish */
1333};
1334
7620bc82 1335class pass_ipa_single_use : public ipa_opt_pass_d
3f1f2be0 1336{
1337public:
1338 pass_ipa_single_use (gcc::context *ctxt)
1339 : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
1340 NULL, /* generate_summary */
1341 NULL, /* write_summary */
1342 NULL, /* read_summary */
1343 NULL, /* write_optimization_summary */
1344 NULL, /* read_optimization_summary */
1345 NULL, /* stmt_fixup */
1346 0, /* function_transform_todo_flags_start */
1347 NULL, /* function_transform */
1348 NULL) /* variable_transform */
1349 {}
1350
1351 /* opt_pass methods: */
3f1f2be0 1352 virtual unsigned int execute (function *) { return ipa_single_use (); }
1353
1354}; // class pass_ipa_single_use
1355
7620bc82 1356} // anon namespace
1357
3f1f2be0 1358ipa_opt_pass_d *
1359make_pass_ipa_single_use (gcc::context *ctxt)
1360{
1361 return new pass_ipa_single_use (ctxt);
1362}
b35a87b1 1363
1364/* Materialize all clones. */
1365
1366namespace {
1367
1368const pass_data pass_data_materialize_all_clones =
1369{
1370 SIMPLE_IPA_PASS, /* type */
1371 "materialize-all-clones", /* name */
1372 OPTGROUP_NONE, /* optinfo_flags */
1373 TV_IPA_OPT, /* tv_id */
1374 0, /* properties_required */
1375 0, /* properties_provided */
1376 0, /* properties_destroyed */
1377 0, /* todo_flags_start */
1378 0, /* todo_flags_finish */
1379};
1380
1381class pass_materialize_all_clones : public simple_ipa_opt_pass
1382{
1383public:
1384 pass_materialize_all_clones (gcc::context *ctxt)
1385 : simple_ipa_opt_pass (pass_data_materialize_all_clones, ctxt)
1386 {}
1387
1388 /* opt_pass methods: */
1389 virtual unsigned int execute (function *)
1390 {
1391 symtab->materialize_all_clones ();
1392 return 0;
1393 }
1394
1395}; // class pass_materialize_all_clones
1396
1397} // anon namespace
1398
1399simple_ipa_opt_pass *
1400make_pass_materialize_all_clones (gcc::context *ctxt)
1401{
1402 return new pass_materialize_all_clones (ctxt);
1403}