/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "hard-reg-set.h"
#include "alias.h"
#include "options.h"
#include "fold-const.h"
#include "calls.h"
#include "stringpool.h"
#include "cgraph.h"
#include "tree-pass.h"
#include "gimplify.h"
#include "flags.h"
#include "target.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-inline.h"
#include "tree-inline.h"
#include "profile.h"
#include "params.h"
#include "internal-fn.h"
#include "dbgcnt.h"


/* Return true when NODE has ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
                       void *data ATTRIBUTE_UNUSED)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}

/* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
        e->callee->global.inlined_to = inlined_to;
        update_inlined_to_pointer (e->callee, inlined_to);
      }
}

/* Add symtab NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable or when we
   find their bodies needed for further cloning.  In the second case we mark
   them by pointer to 2 after processing so they are re-queued once they
   become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
              hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *)2 && !reachable->contains (node))
    return;
  node->aux = *first;
  *first = node;
}

/* Process references.  */

static void
process_references (symtab_node *snode,
                    symtab_node **first,
                    bool before_inlining_p,
                    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      if (node->definition && !node->in_other_partition
          && ((!DECL_EXTERNAL (node->decl) || node->alias)
              || (((before_inlining_p
                    && ((TREE_CODE (node->decl) != FUNCTION_DECL
                         && optimize)
                        || (TREE_CODE (node->decl) == FUNCTION_DECL
                            && opt_for_fn (body->decl, optimize))
                        || (symtab->state < IPA_SSA
                            && lookup_attribute
                                 ("always_inline",
                                  DECL_ATTRIBUTES (body->decl))))))
                  /* We use variable constructors during late compilation for
                     constant folding.  Keep references alive so partitioning
                     knows about potential references.  */
                  || (TREE_CODE (node->decl) == VAR_DECL
                      && flag_wpa
                      && ctor_for_folding (node->decl)
                         != error_mark_node))))
        {
          /* Be sure that we will not optimize out alias target
             body.  */
          if (DECL_EXTERNAL (node->decl)
              && node->alias
              && before_inlining_p)
            reachable->add (body);
          reachable->add (node);
        }
      enqueue_node (node, first, reachable);
    }
}

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to perform trivial devirtualization when no or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
                               struct cgraph_edge *edge,
                               symtab_node **first,
                               hash_set<symtab_node *> *reachable,
                               bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
        (edge, &final, &cache_token);

  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
        {
          struct cgraph_node *n = targets[i];

          /* Do not bother to mark virtual methods in anonymous namespace;
             either we will find use of virtual table defining it, or it is
             unused.  */
          if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
              && type_in_anonymous_namespace_p
                   (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
            continue;

          symtab_node *body = n->function_symbol ();

          /* Before inlining, keep alive bodies of possible targets for
             devirtualization.  */
          if (n->definition
              && (before_inlining_p
                  && opt_for_fn (body->decl, optimize)
                  && opt_for_fn (body->decl, flag_devirtualize)))
            {
              /* Be sure that we will not optimize out alias target
                 body.  */
              if (DECL_EXTERNAL (n->decl)
                  && n->alias
                  && before_inlining_p)
                reachable->add (body);
              reachable->add (n);
            }
          /* Even after inlining we want to keep the possible targets in the
             boundary, so late passes can still produce direct call even if
             the chance for inlining is lost.  */
          enqueue_node (n, first, reachable);
        }
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
        {
          cgraph_node *target, *node = edge->caller;
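          /* With no possible target at all, the only valid choice is to
             redirect the call to __builtin_unreachable; with exactly one
             target, call it directly.  */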
          if (targets.length () == 1)
            target = targets[0];
          else
            target = cgraph_node::get_create
                       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

          if (dump_enabled_p ())
            {
              location_t locus;
              if (edge->call_stmt)
                locus = gimple_location (edge->call_stmt);
              else
                locus = UNKNOWN_LOCATION;
              dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
                               "devirtualizing call in %s/%i to %s/%i\n",
                               edge->caller->name (), edge->caller->order,
                               target->name (),
                               target->order);
            }
          edge = edge->make_direct (target);
          if (inline_summaries)
            inline_update_overall_summary (node);
          else if (edge->call_stmt)
            {
              edge->redirect_call_stmt_to_callee ();

              /* Call to __builtin_unreachable shouldn't be instrumented.  */
              if (!targets.length ())
                gimple_call_set_with_bounds (edge->call_stmt, false);
            }
        }
    }
}

/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; the bodies need
     to stay in memory until inlining in hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

   - virtual functions are kept in callgraph even if they seem unreachable in
     hope calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call.  In theory early opts and IPA should work out all important cases.

   - virtual clones need bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is unreachable
     otherwise.  To avoid the origin sitting in the callgraph and being
     walked by IPA passes, we turn it into an unanalyzed node with the body
     defined.

     We maintain the set of function declarations whose bodies need to stay in
     body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when an offline copy of the
     function or an inline clone is being removed.

   - C++ virtual tables keyed to other units are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that need
   to stay) and symbols that are in the boundary (i.e. external symbols referenced
   by reachable symbols or origins of clones).  The queue is represented
   as a linked list by the AUX pointer terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn the definition into a declaration, but we may keep the function
   body around based on body_needed_for_clonning.

   All symbols that enter the queue have the AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of the boundary and once
   as a real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set the AUX pointer of processed symbols in the boundary to constant 2.  */

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;
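  /* Bodies of extern inline functions and of possible devirtualization
     targets only need to stay around while inlining may still happen;
     before_inlining_p tracks that.  */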
  bool before_inlining_p = symtab->state < (!optimize ? IPA_SSA
                                            : IPA_SSA_AFTER_INLINING);

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->aux);
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);
#endif
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      if (node->definition
          && !node->global.inlined_to
          && !node->in_other_partition
          && !node->can_remove_if_no_direct_calls_and_refs_p ())
        {
          gcc_assert (!node->global.inlined_to);
          reachable.add (node);
          enqueue_node (node, &first, &reachable);
        }
      else
        gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p()
        && !vnode->in_other_partition)
      {
        reachable.add (vnode);
        enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *)first->aux;

      /* If we are processing symbol in boundary, mark its AUX pointer for
         possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
        {
          node->aux = (void *)2;
          if (node->alias && node->analyzed)
            enqueue_node (node->get_alias_target (), &first, &reachable);
        }
      else
        {
          if (TREE_CODE (node->decl) == FUNCTION_DECL
              && DECL_ABSTRACT_ORIGIN (node->decl))
            {
              struct cgraph_node *origin_node
                = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
              if (origin_node && !origin_node->used_as_abstract_origin)
                {
                  origin_node->used_as_abstract_origin = true;
                  gcc_assert (!origin_node->prev_sibling_clone);
                  gcc_assert (!origin_node->next_sibling_clone);
                  for (cgraph_node *n = origin_node->clones; n;
                       n = n->next_sibling_clone)
                    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
                      n->used_as_abstract_origin = true;
                }
            }
          /* If any symbol in a comdat group is reachable, force
             all externally visible symbols in the same comdat
             group to be reachable as well.  Comdat-local symbols
             can be discarded if all uses were inlined.  */
          if (node->same_comdat_group)
            {
              symtab_node *next;
              for (next = node->same_comdat_group;
                   next != node;
                   next = next->same_comdat_group)
                if (!next->comdat_local_p ()
                    && !reachable.add (next))
                  enqueue_node (next, &first, &reachable);
            }
          /* Mark references as reachable.  */
          process_references (node, &first, before_inlining_p, &reachable);
        }

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
        {
          /* Mark the callees reachable unless they are direct calls to extern
             inline functions we decided to not inline.  */
          if (!in_boundary_p)
            {
              struct cgraph_edge *e;
              /* Keep alive possible targets for devirtualization.  */
              if (opt_for_fn (cnode->decl, optimize)
                  && opt_for_fn (cnode->decl, flag_devirtualize))
                {
                  struct cgraph_edge *next;
                  for (e = cnode->indirect_calls; e; e = next)
                    {
                      next = e->next_callee;
                      if (e->indirect_info->polymorphic)
                        walk_polymorphic_call_targets (&reachable_call_targets,
                                                       e, &first, &reachable,
                                                       before_inlining_p);
                    }
                }
              for (e = cnode->callees; e; e = e->next_callee)
                {
                  symtab_node *body = e->callee->function_symbol ();
                  if (e->callee->definition
                      && !e->callee->in_other_partition
                      && (!e->inline_failed
                          || !DECL_EXTERNAL (e->callee->decl)
                          || e->callee->alias
                          || (before_inlining_p
                              && (opt_for_fn (body->decl, optimize)
                                  || (symtab->state < IPA_SSA
                                      && lookup_attribute
                                          ("always_inline",
                                           DECL_ATTRIBUTES (body->decl)))))))
                    {
                      /* Be sure that we will not optimize out alias target
                         body.  */
                      if (DECL_EXTERNAL (e->callee->decl)
                          && e->callee->alias
                          && before_inlining_p)
                        reachable.add (body);
                      reachable.add (e->callee);
                    }
                  enqueue_node (e->callee, &first, &reachable);
                }

              /* When inline clone exists, mark body to be preserved so when removing
                 offline copy of the function we don't kill it.  */
              if (cnode->global.inlined_to)
                body_needed_for_clonning.add (cnode->decl);

              /* For instrumentation clones we always need original
                 function node for proper LTO privatization.  */
              if (cnode->instrumentation_clone
                  && cnode->definition)
                {
                  gcc_assert (cnode->instrumented_version || in_lto_p);
                  if (cnode->instrumented_version)
                    {
                      enqueue_node (cnode->instrumented_version, &first,
                                    &reachable);
                      reachable.add (cnode->instrumented_version);
                    }
                }

              /* For non-inline clones, force their origins to the boundary and ensure
                 that body is not removed.  */
              while (cnode->clone_of)
                {
                  bool noninline = cnode->clone_of->decl != cnode->decl;
                  cnode = cnode->clone_of;
                  if (noninline)
                    {
                      body_needed_for_clonning.add (cnode->decl);
                      enqueue_node (cnode, &first, &reachable);
                    }
                }

            }
          else if (cnode->thunk.thunk_p)
            enqueue_node (cnode->callees->callee, &first, &reachable);

          /* If any reachable function has simd clones, mark them as
             reachable as well.  */
          if (cnode->simd_clones)
            {
              cgraph_node *next;
              for (next = cnode->simd_clones;
                   next;
                   next = next->simdclone->next_clone)
                if (in_boundary_p
                    || !reachable.add (next))
                  enqueue_node (next, &first, &reachable);
            }
        }
      /* When we see constructor of external variable, keep referred nodes in the
         boundary.  This will also hold initializers of the external vars NODE
         refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
          && DECL_EXTERNAL (node->decl)
          && !vnode->alias
          && in_boundary_p)
        {
          struct ipa_ref *ref = NULL;
          for (int i = 0; node->iterate_reference (i, ref); i++)
            enqueue_node (ref->referred, &first, &reachable);
        }
    }

  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
        {
          if (file)
            fprintf (file, " %s/%i", node->name (), node->order);
          node->remove ();
          changed = true;
        }
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
        {
          /* We keep definitions of thunks and aliases in the boundary so
             we can walk to the ultimate alias targets and function symbols
             reliably.  */
          if (node->alias || node->thunk.thunk_p)
            ;
          else if (!body_needed_for_clonning.contains (node->decl)
                   && !node->alias && !node->thunk.thunk_p)
            node->release_body ();
          else if (!node->clone_of)
            gcc_assert (in_lto_p || DECL_RESULT (node->decl));
          if (node->definition && !node->alias && !node->thunk.thunk_p)
            {
              if (file)
                fprintf (file, " %s/%i", node->name (), node->order);
              node->body_removed = true;
              node->analyzed = false;
              node->definition = false;
              node->cpp_implicit_alias = false;
              node->alias = false;
              node->thunk.thunk_p = false;
              node->weakref = false;
              /* After early inlining we drop always_inline attributes on
                 bodies of functions that are still referenced (have their
                 address taken).  */
              DECL_ATTRIBUTES (node->decl)
                = remove_attribute ("always_inline",
                                    DECL_ATTRIBUTES (node->decl));
              if (!node->in_other_partition)
                node->local.local = false;
              node->remove_callees ();
              node->remove_all_references ();
              changed = true;
              if (node->thunk.thunk_p
                  && node->thunk.add_pointer_bounds_args)
                {
                  node->thunk.thunk_p = false;
                  node->thunk.add_pointer_bounds_args = false;
                }
            }
        }
      else
        gcc_assert (node->clone_of || !node->has_gimple_body_p ()
                    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
          && !node->callers)
        {
          gcc_assert (node->clones);
          node->global.inlined_to = NULL;
          update_inlined_to_pointer (node, node);
        }
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
          /* For can_refer_decl_in_current_unit_p we want to track for
             all external variables if they are defined in other partition
             or not.  */
          && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
        {
          struct ipa_ref *ref = NULL;

          /* First remove the aliases, so varpool::remove can possibly lookup
             the constructor and save it for future use.  */
          while (vnode->iterate_direct_aliases (0, ref))
            {
              if (file)
                fprintf (file, " %s/%i", ref->referred->name (),
                         ref->referred->order);
              ref->referring->remove ();
            }
          if (file)
            fprintf (file, " %s/%i", vnode->name (), vnode->order);
          vnext = next_variable (vnode);
          vnode->remove ();
          changed = true;
        }
      else if (!reachable.contains (vnode) && !vnode->alias)
        {
          tree init;
          if (vnode->definition)
            {
              if (file)
                fprintf (file, " %s", vnode->name ());
              changed = true;
            }
          /* Keep body if it may be useful for constant folding.  */
          if ((init = ctor_for_folding (vnode->decl)) == error_mark_node
              && !POINTER_BOUNDS_P (vnode->decl))
            vnode->remove_initializer ();
          else
            DECL_INITIAL (vnode->decl) = init;
          vnode->body_removed = true;
          vnode->definition = false;
          vnode->analyzed = false;
          vnode->aux = NULL;

          vnode->remove_from_same_comdat_group ();

          vnode->remove_all_references ();
        }
      else
        vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
        && !node->used_from_other_partition)
      {
        if (!node->call_for_symbol_and_aliases
            (has_addr_references_p, NULL, true)
            && (!node->instrumentation_clone
                || !node->instrumented_version
                || !node->instrumented_version->address_taken))
          {
            if (file)
              fprintf (file, " %s", node->name ());
            node->address_taken = false;
            changed = true;
            if (node->local_p ())
              {
                node->local.local = true;
                if (file)
                  fprintf (file, " (local)");
              }
          }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && optimize && inline_edge_summary_vec.exists ())
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}

/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to given variable
   do not need to be explicit.  */

void
process_references (varpool_node *vnode,
                    bool *written, bool *address_taken,
                    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
              && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
        *address_taken = true;
        break;
      case IPA_REF_LOAD:
        *read = true;
        break;
      case IPA_REF_STORE:
        *written = true;
        break;
      case IPA_REF_ALIAS:
        process_references (dyn_cast<varpool_node *> (ref->referring), written,
                            address_taken, read, explicit_refs);
        break;
      case IPA_REF_CHKP:
        gcc_unreachable ();
      }
}

/* Set TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set writeonly bit and clear the initializer, since it will not be needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data)
{
  vnode->writeonly = true;
  if (optimize)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
        {
          if (vnode->num_references ())
            *(bool *)data = true;
          vnode->remove_all_references ();
        }
    }
  return false;
}

/* Clear addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables that no longer have their address taken or that are
   read-only and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This can not be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

bool
ipa_discover_readonly_nonaddressable_vars (void)
{
  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
        && (TREE_ADDRESSABLE (vnode->decl)
            || !vnode->writeonly
            || !TREE_READONLY (vnode->decl)))
      {
        bool written = false;
        bool address_taken = false;
        bool read = false;
        bool explicit_refs = true;

        process_references (vnode, &written, &address_taken, &read,
                            &explicit_refs);
        if (!explicit_refs)
          continue;
        if (!address_taken)
          {
            if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (non-addressable)", vnode->name ());
            vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
                                                true);
          }
        if (!address_taken && !written
            /* Making variable in explicit section readonly can cause section
               type conflict.
               See e.g. gcc.c-torture/compile/pr23237.c */
            && vnode->get_section () == NULL)
          {
            if (!TREE_READONLY (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (read-only)", vnode->name ());
            vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
          }
        if (!vnode->writeonly && !read && !address_taken && written)
          {
            if (dump_file)
              fprintf (dump_file, " %s (write-only)", vnode->name ());
            vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
                                                true);
          }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}

/* Free inline summary.  */

namespace {

const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "free-inline-summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  /* Early optimizations may make function unreachable.  We can not
     remove unreachable functions as part of the early opts pass because
     TODOs are run before subpasses.  Do it here.  */
  ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
};

class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      inline_free_summary ();
      return 0;
    }

}; // class pass_ipa_free_inline_summary

} // anon namespace

simple_ipa_opt_pass *
make_pass_ipa_free_inline_summary (gcc::context *ctxt)
{
  return new pass_ipa_free_inline_summary (ctxt);
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced.  */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
    /* Produce a sane name but one not recognizable by collect2, just for the
       case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
                     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
                        RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'P':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
                                          NULL,
                                          NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'B':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
                                          NULL,
                                          NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}

/* A vector of FUNCTION_DECLs declared as static constructors.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static vec<tree> static_dtors;

/* When target does not have ctors and dtors, we call all constructors
   and destructors through a special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    static_ctors.safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    static_dtors.safe_push (node->decl);
  node = cgraph_node::get (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructors/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
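      /* Find the end of the run of cdtors that share the same
         initialization priority as cdtors[i].  */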
      j = i;
      do
        {
          priority_type p;
          fn = cdtors[j];
          p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
          if (j == i)
            priority = p;
          else if (p != priority)
            break;
          j++;
        }
      while (j < len);

      /* When there is only one cdtor and the target supports them, do nothing.  */
      if (j == i + 1
          && targetm.have_ctors_dtors)
        {
          i++;
          continue;
        }
      /* Append calls to all the constructors/destructors in this batch
         to BODY.  */
      for (;i < j; i++)
        {
          tree call;
          fn = cdtors[i];
          call = build_call_expr (fn, 0);
          if (ctor_p)
            DECL_STATIC_CONSTRUCTOR (fn) = 0;
          else
            DECL_STATIC_DESTRUCTOR (fn) = 0;
          /* We do not want to optimize away pure/const calls here.
             When optimizing, these should be already removed, when not
             optimizing, we want user to be able to breakpoint in them.  */
          TREE_SIDE_EFFECTS (call) = 1;
          append_to_statement_list (call, &body);
        }
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
         priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in backwards
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (void)
{
  if (!static_ctors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_ctors.qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!static_dtors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_dtors.qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}

/* Look for constructors and destructors and produce functions calling them.
   This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at linktime to merge possibly numerous
   constructors/destructors into a single function to improve code locality and
   reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
        || DECL_STATIC_DESTRUCTOR (node->decl))
      record_cdtor_fn (node);
  build_cdtor_fns ();
  static_ctors.release ();
  static_dtors.release ();
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into single
     function.  */
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}

/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate variables with the single function that may access
   them.

   FUNCTION is the current single user of a variable, VAR is a variable that
   uses it.  The lattice is stored in SINGLE_USER_MAP.

   We represent:
    - TOP by no entry in SINGLE_USER_MAP
    - BOTTOM by BOTTOM in AUX pointer (to save lookups)
    - known single user by cgraph pointer in SINGLE_USER_MAP.  */

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
      hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}

/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they are used by single function FUNCTION.
   SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
                       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
        {
          if (cnode->global.inlined_to)
            cnode = cnode->global.inlined_to;
          if (!function)
            function = cnode;
          else if (function != cnode)
            function = BOTTOM;
        }
      else
        function = meet (function, dyn_cast <varpool_node *> (ref->referring),
                         single_user_map);
    }
  return function;
}

/* Pass setting used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

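  /* The worklist below is chained through the AUX pointers and terminated
     by the constant pointer 1, the same convention the reachability queue
     in remove_unreachable_nodes uses.  */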
  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
        /* Enqueue symbol for dataflow.  */
        var->aux = first;
        first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *)first->aux;

      f = single_user_map.get (var);
      if (f)
        orig_user = *f;
      else
        orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references.  */
      if (user != orig_user)
        {
          unsigned int i;
          ipa_ref *ref;

          single_user_map.put (var, user);

          /* Enqueue all aliases for re-processing.  */
          for (i = 0; var->iterate_direct_aliases (i, ref); i++)
            if (!ref->referring->aux)
              {
                ref->referring->aux = first;
                first = dyn_cast <varpool_node *> (ref->referring);
              }
          /* Enqueue all users for re-processing.  */
          for (i = 0; var->iterate_reference (i, ref); i++)
            if (!ref->referred->aux
                && ref->referred->definition
                && is_a <varpool_node *> (ref->referred))
              {
                ref->referred->aux = first;
                first = dyn_cast <varpool_node *> (ref->referred);
              }

          /* If user is BOTTOM, just punt on this var.  */
          if (user == BOTTOM)
            var->aux = BOTTOM;
          else
            var->aux = NULL;
        }
      else
        var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
        {
#ifdef ENABLE_CHECKING
          /* Not having the single user known means that the VAR is
             unreachable.  Either someone forgot to remove unreachable
             variables or the reachability here is wrong.  */

          gcc_assert (single_user_map.get (var));
#endif
          if (dump_file)
            {
              fprintf (dump_file, "Variable %s/%i is used by single function\n",
                       var->name (), var->order);
            }
          var->used_by_single_function = true;
        }
      var->aux = NULL;
    }
  return 0;
}

namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

bool
pass_ipa_single_use::gate (function *)
{
  return optimize;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}