[thirdparty/gcc.git] / gcc / ipa.c
ca31b95f 1/* Basic IPA optimizations and utilities.
818ab71a 2 Copyright (C) 2003-2016 Free Software Foundation, Inc.
ca31b95f
JH
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
ca31b95f
JH
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
ca31b95f
JH
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "target.h"
c7131fb2
AM
25#include "tree.h"
26#include "gimple.h"
957060b5
AM
27#include "alloc-pool.h"
28#include "tree-pass.h"
29#include "stringpool.h"
30#include "cgraph.h"
45b0be94 31#include "gimplify.h"
9e97ff61 32#include "tree-iterator.h"
af8bca3c 33#include "ipa-utils.h"
dd912cb8 34#include "symbol-summary.h"
c582198b 35#include "ipa-prop.h"
04142cc3 36#include "ipa-inline.h"
2b5f0895 37#include "dbgcnt.h"
ca31b95f 38
e70670cf
JH
39
40/* Return true when NODE has ADDR reference. */
41
42static bool
43has_addr_references_p (struct cgraph_node *node,
4f4ada6a 44 void *)
e70670cf
JH
45{
46 int i;
d122681a 47 struct ipa_ref *ref = NULL;
e70670cf 48
d122681a 49 for (i = 0; node->iterate_referring (i, ref); i++)
e70670cf
JH
50 if (ref->use == IPA_REF_ADDR)
51 return true;
52 return false;
53}
54
4f4ada6a
JH
55/* Return true when NODE can be target of an indirect call. */
56
57static bool
58is_indirect_call_target_p (struct cgraph_node *node, void *)
59{
60 return node->indirect_call_target;
61}
62
d563610d
JH
63/* Look for all functions inlined to NODE and update their inlined_to pointers
64 to INLINED_TO. */
65
66static void
67update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
68{
69 struct cgraph_edge *e;
70 for (e = node->callees; e; e = e->next_callee)
71 if (e->callee->global.inlined_to)
72 {
73 e->callee->global.inlined_to = inlined_to;
74 update_inlined_to_pointer (e->callee, inlined_to);
75 }
76}
77
04142cc3 78/* Add symtab NODE to queue starting at FIRST.
19fb0b86
JH
79
80 The queue is linked via AUX pointers and terminated by pointer to 1.
81 We enqueue nodes on two occasions: when we find them reachable or when we find
82 their bodies needed for further cloning. In the second case we mark them
83 by pointer to 2 after processing so they are re-queued when they become
84 reachable. */
b34fd25c
JH
85
86static void
5e20cdc9 87enqueue_node (symtab_node *node, symtab_node **first,
6e2830c3 88 hash_set<symtab_node *> *reachable)
b34fd25c 89{
19fb0b86 90 /* Node is still in queue; do nothing. */
67348ccc 91 if (node->aux && node->aux != (void *) 2)
19fb0b86
JH
92 return;
93 /* Node was already processed as unreachable, re-enqueue
94 only if it became reachable now. */
6e2830c3 95 if (node->aux == (void *)2 && !reachable->contains (node))
19fb0b86 96 return;
67348ccc 97 node->aux = *first;
b34fd25c
JH
98 *first = node;
99}
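/* Illustrative sketch (not part of ipa.c): the intrusive AUX-pointer worklist
   that enqueue_node maintains, reduced to a self-contained program.  The `sym'
   structure and the `enqueue' helper are hypothetical stand-ins for
   symtab_node and enqueue_node; the (void *) 1 terminator and the (void *) 2
   "processed in boundary" marker follow the convention described above, but
   the reachable-set check that guards re-queueing is omitted.  */

#include <cstdio>

struct sym
{
  const char *name;
  void *aux;	/* NULL = never queued, 1 = end of queue,
		   2 = processed as boundary, otherwise next node in queue.  */
};

static void
enqueue (sym *node, sym **first)
{
  /* Node is still in the queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  node->aux = *first;
  *first = node;
}

int
main ()
{
  sym a = { "a", NULL }, b = { "b", NULL }, c = { "c", NULL };
  sym *first = (sym *) (void *) 1;

  enqueue (&a, &first);
  enqueue (&b, &first);
  enqueue (&c, &first);
  enqueue (&a, &first);		/* Ignored: "a" is already queued.  */

  while (first != (sym *) (void *) 1)
    {
      sym *node = first;
      first = (sym *) node->aux;
      printf ("processing %s\n", node->name);	/* c, b, a (LIFO order).  */
    }
  return 0;
}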
100
b34fd25c
JH
101/* Process references. */
102
103static void
d122681a 104process_references (symtab_node *snode,
5e20cdc9 105 symtab_node **first,
93a18a70 106 bool before_inlining_p,
6e2830c3 107 hash_set<symtab_node *> *reachable)
b34fd25c
JH
108{
109 int i;
d122681a
ML
110 struct ipa_ref *ref = NULL;
111 for (i = 0; snode->iterate_reference (i, ref); i++)
b34fd25c 112 {
5e20cdc9 113 symtab_node *node = ref->referred;
17e0fc92 114 symtab_node *body = node->ultimate_alias_target ();
e70670cf 115
67348ccc
DM
116 if (node->definition && !node->in_other_partition
117 && ((!DECL_EXTERNAL (node->decl) || node->alias)
8fe91ca8 118 || (((before_inlining_p
f1ced6f5
JH
119 && ((TREE_CODE (node->decl) != FUNCTION_DECL
120 && optimize)
121 || (TREE_CODE (node->decl) == FUNCTION_DECL
122 && opt_for_fn (body->decl, optimize))
17e0fc92
JH
123 || (symtab->state < IPA_SSA
124 && lookup_attribute
125 ("always_inline",
126 DECL_ATTRIBUTES (body->decl))))))
127 /* We use variable constructors during late compilation for
e70670cf
JH
128 constant folding. Keep references alive so partitioning
129 knows about potential references. */
67348ccc 130 || (TREE_CODE (node->decl) == VAR_DECL
6a6dac52 131 && flag_wpa
67348ccc 132 && ctor_for_folding (node->decl)
6a6dac52 133 != error_mark_node))))
17e0fc92
JH
134 {
135 /* Be sure that we will not optimize out alias target
136 body. */
137 if (DECL_EXTERNAL (node->decl)
138 && node->alias
139 && before_inlining_p)
140 reachable->add (body);
141 reachable->add (node);
142 }
67348ccc 143 enqueue_node (node, first, reachable);
b34fd25c
JH
144 }
145}
146
3462aa02
JH
147/* EDGE is a polymorphic call. If BEFORE_INLINING_P is set, mark
148 all its potential targets as reachable to permit later inlining if
149 devirtualization happens. After inlining still keep their declarations
150 around, so we can devirtualize to a direct call.
151
152 Also try to perform trivial devirtualization when no or only one target is
153 possible. */
154
155static void
6e2830c3 156walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
3462aa02 157 struct cgraph_edge *edge,
5e20cdc9 158 symtab_node **first,
6e2830c3
TS
159 hash_set<symtab_node *> *reachable,
160 bool before_inlining_p)
3462aa02
JH
161{
162 unsigned int i;
163 void *cache_token;
164 bool final;
165 vec <cgraph_node *>targets
166 = possible_polymorphic_call_targets
167 (edge, &final, &cache_token);
168
6e2830c3 169 if (!reachable_call_targets->add (cache_token))
3462aa02 170 {
c3284718 171 for (i = 0; i < targets.length (); i++)
3462aa02
JH
172 {
173 struct cgraph_node *n = targets[i];
174
175 /* Do not bother to mark virtual methods in anonymous namespace;
176 either we will find use of virtual table defining it, or it is
177 unused. */
67348ccc 178 if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
3462aa02 179 && type_in_anonymous_namespace_p
70e7f2a2 180 (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
3462aa02
JH
181 continue;
182
4f4ada6a
JH
183 n->indirect_call_target = true;
184 symtab_node *body = n->function_symbol ();
17e0fc92 185
3462aa02
JH
186 /* Prior to inlining, keep alive bodies of possible targets for
187 devirtualization. */
4f4ada6a
JH
188 if (n->definition
189 && (before_inlining_p
190 && opt_for_fn (body->decl, optimize)
191 && opt_for_fn (body->decl, flag_devirtualize)))
192 {
193 /* Be sure that we will not optimize out alias target
194 body. */
195 if (DECL_EXTERNAL (n->decl)
196 && n->alias
197 && before_inlining_p)
198 reachable->add (body);
199 reachable->add (n);
200 }
3462aa02
JH
201 /* Even after inlining we want to keep the possible targets in the
202 boundary, so late passes can still produce direct call even if
203 the chance for inlining is lost. */
67348ccc 204 enqueue_node (n, first, reachable);
3462aa02
JH
205 }
206 }
207
208 /* Very trivial devirtualization; when the type is
209 final or anonymous (so we know all its derivations)
210 and there is only one possible virtual call target,
211 make the edge direct. */
212 if (final)
213 {
2b5f0895 214 if (targets.length () <= 1 && dbg_cnt (devirt))
3462aa02 215 {
7b395ddd 216 cgraph_node *target, *node = edge->caller;
3462aa02
JH
217 if (targets.length () == 1)
218 target = targets[0];
219 else
d52f5295 220 target = cgraph_node::get_create
3462aa02
JH
221 (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
222
2b5f0895
XDL
223 if (dump_enabled_p ())
224 {
9189aff7
JH
225 location_t locus;
226 if (edge->call_stmt)
227 locus = gimple_location (edge->call_stmt);
228 else
229 locus = UNKNOWN_LOCATION;
d52f5295 230 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
2b5f0895
XDL
231 "devirtualizing call in %s/%i to %s/%i\n",
232 edge->caller->name (), edge->caller->order,
233 target->name (),
234 target->order);
235 }
3dafb85c 236 edge = edge->make_direct (target);
9a1e784a 237 if (inline_summaries)
7b395ddd 238 inline_update_overall_summary (node);
477145c8 239 else if (edge->call_stmt)
d5e254e1
IE
240 {
241 edge->redirect_call_stmt_to_callee ();
242
243 /* Call to __builtin_unreachable shouldn't be instrumented. */
244 if (!targets.length ())
245 gimple_call_set_with_bounds (edge->call_stmt, false);
246 }
3462aa02
JH
247 }
248 }
249}
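/* Illustrative sketch (not part of ipa.c): the "very trivial" devirtualization
   rule applied at the end of walk_polymorphic_call_targets, reduced to plain
   strings.  `trivial_devirt' and `UNREACHABLE_FN' are hypothetical names; the
   real code operates on a cgraph_edge and redirects it either to the single
   target or to builtin_decl_implicit (BUILT_IN_UNREACHABLE).  */

#include <cstdio>
#include <string>
#include <vector>

static const std::string UNREACHABLE_FN = "__builtin_unreachable";

/* Return the single direct callee if the target list is complete (the type is
   final or anonymous) and has at most one entry; return an empty string when
   the call has to stay indirect.  */
static std::string
trivial_devirt (bool final_p, const std::vector<std::string> &targets)
{
  if (!final_p || targets.size () > 1)
    return "";
  if (targets.empty ())
    return UNREACHABLE_FN;	/* No reachable target at all.  */
  return targets[0];
}

int
main ()
{
  std::vector<std::string> one = { "A::foo" };
  std::vector<std::string> many = { "A::foo", "B::foo" };
  printf ("[%s]\n", trivial_devirt (true, one).c_str ());	/* [A::foo] */
  printf ("[%s]\n", trivial_devirt (true, {}).c_str ());	/* [__builtin_unreachable] */
  printf ("[%s]\n", trivial_devirt (false, one).c_str ());	/* [] - stays indirect */
  printf ("[%s]\n", trivial_devirt (true, many).c_str ());	/* [] - stays indirect */
  return 0;
}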
41817394 250
ca31b95f 251/* Perform reachability analysis and reclaim all unreachable nodes.
04142cc3
JH
252
253 The algorithm is basically mark&sweep but with some extra refinements:
254
255 - reachable extern inline functions need special handling; the bodies need
256 to stay in memory until inlining in the hope that they will be inlined.
257 After inlining we release their bodies and turn them into unanalyzed
258 nodes even when they are reachable.
259
04142cc3
JH
260 - virtual functions are kept in the callgraph even if they seem unreachable, in
261 the hope that calls to them will be devirtualized.
262
263 Again we remove them after inlining. In late optimization some
31519c38 264 devirtualization may happen, but it is not important since we won't inline
04142cc3
JH
265 the call. In theory early opts and IPA should work out all important cases.
266
267 - virtual clones need bodies of their origins for later materialization;
268 this means that we want to keep the body even if the origin is unreachable
269 otherwise. To avoid the origin sitting in the callgraph and being
270 walked by IPA passes, we turn it into an unanalyzed node with the body
271 defined.
272
273 We maintain the set of function declarations whose bodies need to stay in
274 body_needed_for_clonning.
275
276 Inline clones represent a special case: their declaration matches the
277 declaration of the origin, and cgraph_remove_node already knows how to
278 reshape the callgraph and preserve the body when an offline copy of a
279 function or an inline clone is being removed.
280
6649df51
JH
281 - C++ virtual tables keyed to another unit are represented as DECL_EXTERNAL
282 variables with DECL_INITIAL set. We finalize these and keep reachable
283 ones around for constant folding purposes. After inlining, however, we
284 stop walking their references to let everything static referenced by them
285 be removed when it is otherwise unreachable.
286
04142cc3
JH
287 We maintain a queue of both reachable symbols (i.e. defined symbols that need
288 to stay) and symbols that are in the boundary (i.e. external symbols referenced
289 by reachable symbols or origins of clones). The queue is represented
290 as a linked list via the AUX pointer, terminated by 1.
291
31519c38 292 At the end we keep all reachable symbols. For symbols in the boundary we always
04142cc3
JH
293 turn the definition into a declaration, but we may keep the function body around
294 based on body_needed_for_clonning.
295
296 All symbols that enter the queue have AUX pointer non-zero and are in the
297 boundary. Pointer set REACHABLE is used to track reachable symbols.
298
299 Every symbol can be visited twice - once as part of the boundary and once
300 as a real reachable symbol. enqueue_node needs to decide whether the
301 node needs to be re-queued for the second processing. For this purpose
302 we set the AUX pointer of processed symbols in the boundary to constant 2. */
ca31b95f
JH
303
304bool
17e0fc92 305symbol_table::remove_unreachable_nodes (FILE *file)
ca31b95f 306{
5e20cdc9 307 symtab_node *first = (symtab_node *) (void *) 1;
96fc428c 308 struct cgraph_node *node, *next;
2c8326a5 309 varpool_node *vnode, *vnext;
ca31b95f 310 bool changed = false;
6e2830c3
TS
311 hash_set<symtab_node *> reachable;
312 hash_set<tree> body_needed_for_clonning;
313 hash_set<void *> reachable_call_targets;
17e0fc92
JH
314 bool before_inlining_p = symtab->state < (!optimize ? IPA_SSA
315 : IPA_SSA_AFTER_INLINING);
ca31b95f 316
3462aa02 317 timevar_push (TV_IPA_UNREACHABLE);
2bf86c84 318 build_type_inheritance_graph ();
10d22567
ZD
319 if (file)
320 fprintf (file, "\nReclaiming functions:");
b2b29377
MM
321 if (flag_checking)
322 {
323 FOR_EACH_FUNCTION (node)
324 gcc_assert (!node->aux);
325 FOR_EACH_VARIABLE (vnode)
326 gcc_assert (!vnode->aux);
327 }
530f3a1b
JH
328 /* Mark functions whose bodies are obviously needed.
329 This is mostly when they can be referenced externally. Inline clones
330 are special since their declarations are shared with master clone and thus
331 cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them. */
c0c123ef
JH
332 FOR_EACH_FUNCTION (node)
333 {
334 node->used_as_abstract_origin = false;
4f4ada6a 335 node->indirect_call_target = false;
67348ccc 336 if (node->definition
c0c123ef 337 && !node->global.inlined_to
67348ccc 338 && !node->in_other_partition
d52f5295 339 && !node->can_remove_if_no_direct_calls_and_refs_p ())
c0c123ef
JH
340 {
341 gcc_assert (!node->global.inlined_to);
6e2830c3
TS
342 reachable.add (node);
343 enqueue_node (node, &first, &reachable);
c0c123ef
JH
344 }
345 else
67348ccc 346 gcc_assert (!node->aux);
c0c123ef 347 }
530f3a1b
JH
348
349 /* Mark variables that are obviously needed. */
04142cc3 350 FOR_EACH_DEFINED_VARIABLE (vnode)
9041d2e6 351 if (!vnode->can_remove_if_no_refs_p()
67348ccc 352 && !vnode->in_other_partition)
04142cc3 353 {
6e2830c3
TS
354 reachable.add (vnode);
355 enqueue_node (vnode, &first, &reachable);
04142cc3
JH
356 }
357
358 /* Perform reachability analysis. */
5e20cdc9 359 while (first != (symtab_node *) (void *) 1)
b34fd25c 360 {
6e2830c3 361 bool in_boundary_p = !reachable.contains (first);
5e20cdc9 362 symtab_node *node = first;
ca31b95f 363
5e20cdc9 364 first = (symtab_node *)first->aux;
19fb0b86 365
04142cc3
JH
366 /* If we are processing symbol in boundary, mark its AUX pointer for
367 possible later re-processing in enqueue_node. */
368 if (in_boundary_p)
4bd019b8
JH
369 {
370 node->aux = (void *)2;
371 if (node->alias && node->analyzed)
372 enqueue_node (node->get_alias_target (), &first, &reachable);
373 }
04142cc3
JH
374 else
375 {
31dad809
JJ
376 if (TREE_CODE (node->decl) == FUNCTION_DECL
377 && DECL_ABSTRACT_ORIGIN (node->decl))
c0c123ef
JH
378 {
379 struct cgraph_node *origin_node
4ad08ee8
JH
380 = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
381 if (origin_node && !origin_node->used_as_abstract_origin)
382 {
383 origin_node->used_as_abstract_origin = true;
384 gcc_assert (!origin_node->prev_sibling_clone);
385 gcc_assert (!origin_node->next_sibling_clone);
386 for (cgraph_node *n = origin_node->clones; n;
387 n = n->next_sibling_clone)
388 if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
389 n->used_as_abstract_origin = true;
4ad08ee8 390 }
c0c123ef 391 }
04142cc3 392 /* If any symbol in a comdat group is reachable, force
1f26ac87
JM
393 all externally visible symbols in the same comdat
394 group to be reachable as well. Comdat-local symbols
395 can be discarded if all uses were inlined. */
67348ccc 396 if (node->same_comdat_group)
04142cc3 397 {
5e20cdc9 398 symtab_node *next;
67348ccc 399 for (next = node->same_comdat_group;
04142cc3 400 next != node;
67348ccc 401 next = next->same_comdat_group)
d52f5295 402 if (!next->comdat_local_p ()
6e2830c3
TS
403 && !reachable.add (next))
404 enqueue_node (next, &first, &reachable);
04142cc3
JH
405 }
406 /* Mark references as reachable. */
6e2830c3 407 process_references (node, &first, before_inlining_p, &reachable);
04142cc3 408 }
19fb0b86 409
7de90a6c 410 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
b34fd25c 411 {
04142cc3
JH
412 /* Mark the callees reachable unless they are direct calls to extern
413 inline functions we decided to not inline. */
414 if (!in_boundary_p)
8a6295ba 415 {
04142cc3 416 struct cgraph_edge *e;
3462aa02 417 /* Keep alive possible targets for devirtualization. */
2bf86c84
JH
418 if (opt_for_fn (cnode->decl, optimize)
419 && opt_for_fn (cnode->decl, flag_devirtualize))
3462aa02
JH
420 {
421 struct cgraph_edge *next;
422 for (e = cnode->indirect_calls; e; e = next)
423 {
424 next = e->next_callee;
425 if (e->indirect_info->polymorphic)
6e2830c3
TS
426 walk_polymorphic_call_targets (&reachable_call_targets,
427 e, &first, &reachable,
3462aa02
JH
428 before_inlining_p);
429 }
430 }
04142cc3 431 for (e = cnode->callees; e; e = e->next_callee)
ed62e0d9 432 {
17e0fc92 433 symtab_node *body = e->callee->function_symbol ();
67348ccc
DM
434 if (e->callee->definition
435 && !e->callee->in_other_partition
ed62e0d9 436 && (!e->inline_failed
67348ccc
DM
437 || !DECL_EXTERNAL (e->callee->decl)
438 || e->callee->alias
17e0fc92
JH
439 || (before_inlining_p
440 && (opt_for_fn (body->decl, optimize)
441 || (symtab->state < IPA_SSA
442 && lookup_attribute
443 ("always_inline",
444 DECL_ATTRIBUTES (body->decl)))))))
789c2741
JH
445 {
446 /* Be sure that we will not optimize out alias target
447 body. */
448 if (DECL_EXTERNAL (e->callee->decl)
449 && e->callee->alias
450 && before_inlining_p)
17e0fc92 451 reachable.add (body);
6e2830c3 452 reachable.add (e->callee);
789c2741 453 }
6e2830c3 454 enqueue_node (e->callee, &first, &reachable);
93a18a70 455 }
04142cc3
JH
456
457 /* When an inline clone exists, mark the body to be preserved so that when
458 removing the offline copy of the function we don't kill it. */
4f63dfc6 459 if (cnode->global.inlined_to)
6e2830c3 460 body_needed_for_clonning.add (cnode->decl);
b66887e4 461
48de5d37
IE
462 /* For instrumentation clones we always need original
463 function node for proper LTO privatization. */
464 if (cnode->instrumentation_clone
465 && cnode->definition)
466 {
467 gcc_assert (cnode->instrumented_version || in_lto_p);
468 if (cnode->instrumented_version)
469 {
470 enqueue_node (cnode->instrumented_version, &first,
471 &reachable);
472 reachable.add (cnode->instrumented_version);
473 }
474 }
475
4f63dfc6
JH
476 /* For non-inline clones, force their origins to the boundary and ensure
477 that body is not removed. */
478 while (cnode->clone_of)
479 {
67348ccc 480 bool noninline = cnode->clone_of->decl != cnode->decl;
4f63dfc6
JH
481 cnode = cnode->clone_of;
482 if (noninline)
483 {
6e2830c3
TS
484 body_needed_for_clonning.add (cnode->decl);
485 enqueue_node (cnode, &first, &reachable);
4f63dfc6 486 }
b34fd25c 487 }
0136f8f0
AH
488
489 }
4bd019b8
JH
490 else if (cnode->thunk.thunk_p)
491 enqueue_node (cnode->callees->callee, &first, &reachable);
48de5d37 492
0136f8f0
AH
493 /* If any reachable function has simd clones, mark them as
494 reachable as well. */
495 if (cnode->simd_clones)
496 {
497 cgraph_node *next;
498 for (next = cnode->simd_clones;
499 next;
500 next = next->simdclone->next_clone)
501 if (in_boundary_p
6e2830c3
TS
502 || !reachable.add (next))
503 enqueue_node (next, &first, &reachable);
47cb0d7d 504 }
b34fd25c 505 }
6649df51 506 /* When we see constructor of external variable, keep referred nodes in the
5d59b5e1
LC
507 boundary. This will also hold initializers of the external vars NODE
508 refers to. */
7de90a6c 509 varpool_node *vnode = dyn_cast <varpool_node *> (node);
5d59b5e1 510 if (vnode
67348ccc
DM
511 && DECL_EXTERNAL (node->decl)
512 && !vnode->alias
6649df51 513 && in_boundary_p)
5d59b5e1 514 {
d122681a
ML
515 struct ipa_ref *ref = NULL;
516 for (int i = 0; node->iterate_reference (i, ref); i++)
6e2830c3 517 enqueue_node (ref->referred, &first, &reachable);
5d59b5e1 518 }
ca31b95f
JH
519 }
520
04142cc3 521 /* Remove unreachable functions. */
3dafb85c 522 for (node = first_function (); node; node = next)
ca31b95f 523 {
3dafb85c 524 next = next_function (node);
e70670cf
JH
525
526 /* If node is not needed at all, remove it. */
67348ccc 527 if (!node->aux)
ca31b95f 528 {
10d22567 529 if (file)
5bed50e8 530 fprintf (file, " %s/%i", node->name (), node->order);
d52f5295 531 node->remove ();
04142cc3
JH
532 changed = true;
533 }
e70670cf 534 /* If node is unreachable, remove its body. */
6e2830c3 535 else if (!reachable.contains (node))
04142cc3 536 {
d3f2e41e
JH
537 /* We keep definitions of thunks and aliases in the boundary so
538 we can walk to the ultimate alias targets and function symbols
539 reliably. */
540 if (node->alias || node->thunk.thunk_p)
541 ;
542 else if (!body_needed_for_clonning.contains (node->decl)
543 && !node->alias && !node->thunk.thunk_p)
d52f5295 544 node->release_body ();
4f63dfc6 545 else if (!node->clone_of)
67348ccc 546 gcc_assert (in_lto_p || DECL_RESULT (node->decl));
4bd019b8 547 if (node->definition && !node->alias && !node->thunk.thunk_p)
bb853349 548 {
04142cc3 549 if (file)
5bed50e8 550 fprintf (file, " %s/%i", node->name (), node->order);
3d8d0043 551 node->body_removed = true;
67348ccc
DM
552 node->analyzed = false;
553 node->definition = false;
554 node->cpp_implicit_alias = false;
555 node->alias = false;
71e54687 556 node->transparent_alias = false;
d833415c 557 node->thunk.thunk_p = false;
67348ccc 558 node->weakref = false;
8fe91ca8
JH
559 /* After early inlining we drop always_inline attributes on
560 bodies of functions that are still referenced (have their
561 address taken). */
562 DECL_ATTRIBUTES (node->decl)
563 = remove_attribute ("always_inline",
564 DECL_ATTRIBUTES (node->decl));
67348ccc 565 if (!node->in_other_partition)
51a5c0c2 566 node->local.local = false;
d52f5295 567 node->remove_callees ();
d122681a 568 node->remove_all_references ();
bb853349 569 changed = true;
d5e254e1
IE
570 if (node->thunk.thunk_p
571 && node->thunk.add_pointer_bounds_args)
572 {
573 node->thunk.thunk_p = false;
574 node->thunk.add_pointer_bounds_args = false;
575 }
bb853349 576 }
ca31b95f 577 }
4f63dfc6 578 else
d52f5295 579 gcc_assert (node->clone_of || !node->has_gimple_body_p ()
67348ccc 580 || in_lto_p || DECL_RESULT (node->decl));
ca31b95f 581 }
04142cc3
JH
582
583 /* Inline clones might be kept around so that materializing them allows further
584 cloning. If the function the clone is inlined into is removed, we need
585 to turn it into a normal clone. */
65c70e6b 586 FOR_EACH_FUNCTION (node)
9187e02d 587 {
9187e02d
JH
588 if (node->global.inlined_to
589 && !node->callers)
590 {
591 gcc_assert (node->clones);
d563610d
JH
592 node->global.inlined_to = NULL;
593 update_inlined_to_pointer (node, node);
9187e02d 594 }
67348ccc 595 node->aux = NULL;
9187e02d 596 }
4a444e58 597
04142cc3 598 /* Remove unreachable variables. */
4a444e58 599 if (file)
04142cc3 600 fprintf (file, "\nReclaiming variables:");
3dafb85c 601 for (vnode = first_variable (); vnode; vnode = vnext)
b34fd25c 602 {
3dafb85c 603 vnext = next_variable (vnode);
67348ccc 604 if (!vnode->aux
b9bd2075
JH
605 /* For can_refer_decl_in_current_unit_p we want to track for
606 all external variables if they are defined in other partition
607 or not. */
67348ccc 608 && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
04142cc3 609 {
d2b35c04
JH
610 struct ipa_ref *ref = NULL;
611
612 /* First remove the aliases, so varpool::remove can possibly look up
613 the constructor and save it for future use. */
614 while (vnode->iterate_direct_aliases (0, ref))
615 {
616 if (file)
617 fprintf (file, " %s/%i", ref->referred->name (),
618 ref->referred->order);
619 ref->referring->remove ();
620 }
4a444e58 621 if (file)
5bed50e8 622 fprintf (file, " %s/%i", vnode->name (), vnode->order);
d2b35c04 623 vnext = next_variable (vnode);
d52f5295 624 vnode->remove ();
4a444e58 625 changed = true;
b34fd25c 626 }
4bd019b8 627 else if (!reachable.contains (vnode) && !vnode->alias)
04142cc3 628 {
6a6dac52 629 tree init;
67348ccc 630 if (vnode->definition)
04142cc3
JH
631 {
632 if (file)
fec39fa6 633 fprintf (file, " %s", vnode->name ());
04142cc3
JH
634 changed = true;
635 }
1acc5591 636 /* Keep body if it may be useful for constant folding. */
d5e254e1
IE
637 if ((init = ctor_for_folding (vnode->decl)) == error_mark_node
638 && !POINTER_BOUNDS_P (vnode->decl))
1acc5591
JH
639 vnode->remove_initializer ();
640 else
641 DECL_INITIAL (vnode->decl) = init;
3d8d0043 642 vnode->body_removed = true;
67348ccc
DM
643 vnode->definition = false;
644 vnode->analyzed = false;
645 vnode->aux = NULL;
e70670cf 646
d52f5295 647 vnode->remove_from_same_comdat_group ();
7b3376a0 648
d122681a 649 vnode->remove_all_references ();
04142cc3
JH
650 }
651 else
67348ccc 652 vnode->aux = NULL;
b34fd25c 653 }
4a444e58 654
04142cc3 655 /* Now update address_taken flags and try to promote functions to be local. */
bd3cdcc0
JH
656 if (file)
657 fprintf (file, "\nClearing address taken flags:");
65c70e6b 658 FOR_EACH_DEFINED_FUNCTION (node)
67348ccc
DM
659 if (node->address_taken
660 && !node->used_from_other_partition)
bd3cdcc0 661 {
1ede94c5 662 if (!node->call_for_symbol_and_aliases
d5e254e1
IE
663 (has_addr_references_p, NULL, true)
664 && (!node->instrumentation_clone
665 || !node->instrumented_version
666 || !node->instrumented_version->address_taken))
bd3cdcc0
JH
667 {
668 if (file)
fec39fa6 669 fprintf (file, " %s", node->name ());
67348ccc 670 node->address_taken = false;
4a444e58 671 changed = true;
4f4ada6a
JH
672 if (node->local_p ()
673 /* Virtual functions may be kept in cgraph just because
674 of possible later devirtualization. Do not mark them as
675 local too early so we won't optimize them out before
676 we are done with polymorphic call analysis. */
677 && (!before_inlining_p
678 || !node->call_for_symbol_and_aliases
679 (is_indirect_call_target_p, NULL, true)))
4a444e58
JH
680 {
681 node->local.local = true;
682 if (file)
683 fprintf (file, " (local)");
684 }
bd3cdcc0
JH
685 }
686 }
10a5dd5d
JH
687 if (file)
688 fprintf (file, "\n");
b34fd25c 689
b2b29377 690 symtab_node::checking_verify_symtab_nodes ();
4537ec0c 691
a8da72b8 692 /* If we removed something, perhaps profile could be improved. */
9771b263 693 if (changed && optimize && inline_edge_summary_vec.exists ())
a8da72b8 694 FOR_EACH_DEFINED_FUNCTION (node)
08f835dc 695 ipa_propagate_frequency (node);
a8da72b8 696
3462aa02 697 timevar_pop (TV_IPA_UNREACHABLE);
ca31b95f
JH
698 return changed;
699}
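/* Illustrative sketch (not part of ipa.c): the mark & sweep scheme described
   in the comment before symbol_table::remove_unreachable_nodes, run on a toy
   symbol graph.  `symbol', `mark' and the fields below are hypothetical
   stand-ins; the real pass additionally tracks boundary symbols, aliases,
   thunks and bodies needed for cloning, all of which this sketch omits.  */

#include <cstdio>
#include <vector>

struct symbol
{
  const char *name;
  bool externally_visible;	/* Roots of the mark phase.  */
  std::vector<symbol *> refs;	/* Outgoing calls/references.  */
  bool reachable;
};

static void
mark (symbol *s, std::vector<symbol *> &worklist)
{
  if (!s->reachable)
    {
      s->reachable = true;
      worklist.push_back (s);
    }
}

int
main ()
{
  symbol main_fn = { "main", true, {}, false };
  symbol helper = { "helper", false, {}, false };
  symbol dead = { "dead", false, {}, false };
  main_fn.refs.push_back (&helper);
  dead.refs.push_back (&helper);	/* Referenced only from unreachable code.  */

  symbol *all[] = { &main_fn, &helper, &dead };

  /* Mark phase: seed with externally visible symbols, then propagate along
     references until the worklist drains.  */
  std::vector<symbol *> worklist;
  for (symbol *s : all)
    if (s->externally_visible)
      mark (s, worklist);
  while (!worklist.empty ())
    {
      symbol *s = worklist.back ();
      worklist.pop_back ();
      for (symbol *ref : s->refs)
	mark (ref, worklist);
    }

  /* Sweep phase: anything left unmarked can be removed.  */
  for (symbol *s : all)
    printf ("%s: %s\n", s->name, s->reachable ? "kept" : "removed");
  return 0;
}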
f4b3ca72 700
6de88c6a
JH
701/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
702 as needed, also clear EXPLICIT_REFS if the references to given variable
703 do not need to be explicit. */
704
705void
706process_references (varpool_node *vnode,
707 bool *written, bool *address_taken,
708 bool *read, bool *explicit_refs)
709{
710 int i;
711 struct ipa_ref *ref;
712
9041d2e6 713 if (!vnode->all_refs_explicit_p ()
6de88c6a
JH
714 || TREE_THIS_VOLATILE (vnode->decl))
715 *explicit_refs = false;
716
d122681a 717 for (i = 0; vnode->iterate_referring (i, ref)
6de88c6a
JH
718 && *explicit_refs && (!*written || !*address_taken || !*read); i++)
719 switch (ref->use)
720 {
721 case IPA_REF_ADDR:
722 *address_taken = true;
723 break;
724 case IPA_REF_LOAD:
725 *read = true;
726 break;
727 case IPA_REF_STORE:
728 *written = true;
729 break;
730 case IPA_REF_ALIAS:
d52f5295
ML
731 process_references (dyn_cast<varpool_node *> (ref->referring), written,
732 address_taken, read, explicit_refs);
6de88c6a 733 break;
d5e254e1
IE
734 case IPA_REF_CHKP:
735 gcc_unreachable ();
6de88c6a
JH
736 }
737}
738
739/* Set TREE_READONLY bit. */
740
741bool
742set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
743{
744 TREE_READONLY (vnode->decl) = true;
745 return false;
746}
747
748/* Set writeonly bit and clear the initializer, since it will not be needed. */
749
750bool
dea91a66 751set_writeonly_bit (varpool_node *vnode, void *data)
6de88c6a
JH
752{
753 vnode->writeonly = true;
754 if (optimize)
755 {
756 DECL_INITIAL (vnode->decl) = NULL;
757 if (!vnode->alias)
dea91a66
JH
758 {
759 if (vnode->num_references ())
760 *(bool *)data = true;
761 vnode->remove_all_references ();
762 }
6de88c6a
JH
763 }
764 return false;
765}
766
767/* Clear addressable bit of VNODE. */
768
769bool
770clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
771{
772 vnode->address_taken = false;
773 TREE_ADDRESSABLE (vnode->decl) = 0;
774 return false;
775}
776
4a444e58
JH
777/* Discover variables that no longer have their address taken or that are read only
778 and update their flags.
779
dea91a66
JH
780 Return true when unreachable symbol removal should be done.
781
4a444e58
JH
782 FIXME: This cannot be done in between gimplify and omp_expand since
783 the readonly flag plays a role in what is shared and what is not. Currently we do
f10ea640
JH
784 this transformation as part of whole program visibility and redo it at the
785 ipa-reference pass (to take into account cloning), but it would
786 make sense to do it before early optimizations. */
4a444e58 787
dea91a66 788bool
4a444e58
JH
789ipa_discover_readonly_nonaddressable_vars (void)
790{
dea91a66 791 bool remove_p = false;
2c8326a5 792 varpool_node *vnode;
4a444e58
JH
793 if (dump_file)
794 fprintf (dump_file, "Clearing variable flags:");
65c70e6b 795 FOR_EACH_VARIABLE (vnode)
6de88c6a 796 if (!vnode->alias
67348ccc 797 && (TREE_ADDRESSABLE (vnode->decl)
6de88c6a 798 || !vnode->writeonly
67348ccc 799 || !TREE_READONLY (vnode->decl)))
4a444e58
JH
800 {
801 bool written = false;
802 bool address_taken = false;
6de88c6a
JH
803 bool read = false;
804 bool explicit_refs = true;
805
dea91a66
JH
806 process_references (vnode, &written, &address_taken, &read,
807 &explicit_refs);
6de88c6a
JH
808 if (!explicit_refs)
809 continue;
810 if (!address_taken)
4a444e58 811 {
6de88c6a 812 if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
d5ce4663 813 fprintf (dump_file, " %s (non-addressable)", vnode->name ());
31de7606
JH
814 vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
815 true);
4a444e58 816 }
6de88c6a 817 if (!address_taken && !written
4a444e58
JH
818 /* Making variable in explicit section readonly can cause section
819 type conflict.
820 See e.g. gcc.c-torture/compile/pr23237.c */
24d047a3 821 && vnode->get_section () == NULL)
4a444e58 822 {
6de88c6a 823 if (!TREE_READONLY (vnode->decl) && dump_file)
fec39fa6 824 fprintf (dump_file, " %s (read-only)", vnode->name ());
31de7606 825 vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
6de88c6a 826 }
d5ce4663 827 if (!vnode->writeonly && !read && !address_taken && written)
6de88c6a
JH
828 {
829 if (dump_file)
830 fprintf (dump_file, " %s (write-only)", vnode->name ());
31de7606
JH
831 vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
832 true);
4a444e58
JH
833 }
834 }
835 if (dump_file)
836 fprintf (dump_file, "\n");
dea91a66 837 return remove_p;
4a444e58
JH
838}
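/* Illustrative sketch (not part of ipa.c): the per-variable decision rules of
   ipa_discover_readonly_nonaddressable_vars, with the flags gathered by
   process_references reduced to plain booleans.  `var_flags' and `classify'
   are hypothetical names; the real pass works on varpool_node and also walks
   aliases, which this sketch ignores.  */

#include <cstdio>

struct var_flags
{
  bool address_taken, written, read, has_explicit_section;
};

static void
classify (const char *name, var_flags f)
{
  if (!f.address_taken)
    printf ("%s: clear TREE_ADDRESSABLE\n", name);
  /* A variable placed in an explicit section is not made read-only, since
     that could cause a section type conflict
     (see gcc.c-torture/compile/pr23237.c).  */
  if (!f.address_taken && !f.written && !f.has_explicit_section)
    printf ("%s: set TREE_READONLY\n", name);
  if (!f.address_taken && !f.read && f.written)
    printf ("%s: mark write-only (initializer no longer needed)\n", name);
}

int
main ()
{
  classify ("counter", { false, true, false, false });	/* write-only */
  classify ("table", { false, false, true, false });	/* read-only */
  classify ("exported", { true, true, true, false });	/* left untouched */
  return 0;
}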
839
a8da72b8
L
840/* Free inline summary. */
841
17795822
TS
842namespace {
843
844const pass_data pass_data_ipa_free_inline_summary =
a8da72b8 845{
27a4cd48 846 SIMPLE_IPA_PASS, /* type */
8605403e 847 "free-inline-summary", /* name */
27a4cd48 848 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
849 TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
850 0, /* properties_required */
851 0, /* properties_provided */
852 0, /* properties_destroyed */
853 0, /* todo_flags_start */
8605403e
JH
854 /* Early optimizations may make functions unreachable. We cannot
855 remove unreachable functions as part of the early opts pass because
856 TODOs are run before subpasses. Do it here. */
857 ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
a8da72b8
L
858};
859
17795822 860class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
27a4cd48
DM
861{
862public:
c3284718
RS
863 pass_ipa_free_inline_summary (gcc::context *ctxt)
864 : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
27a4cd48
DM
865 {}
866
867 /* opt_pass methods: */
be55bfe6
TS
868 virtual unsigned int execute (function *)
869 {
870 inline_free_summary ();
871 return 0;
872 }
27a4cd48
DM
873
874}; // class pass_ipa_free_inline_summary
875
17795822
TS
876} // anon namespace
877
27a4cd48
DM
878simple_ipa_opt_pass *
879make_pass_ipa_free_inline_summary (gcc::context *ctxt)
880{
881 return new pass_ipa_free_inline_summary (ctxt);
882}
883
9e97ff61 884/* Generate and emit a static constructor or destructor. WHICH must
d5e254e1
IE
885 be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
886 (for chkp static vars constructor) or 'B' (for chkp static bounds
887 constructor). BODY is a STATEMENT_LIST containing GENERIC
888 statements. PRIORITY is the initialization priority for this
889 constructor or destructor.
9e97ff61 890
3a9ed12a
JH
891 FINAL specify whether the externally visible name for collect2 should
892 be produced. */
893
894static void
895cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
9e97ff61
JH
896{
897 static int counter = 0;
898 char which_buf[16];
899 tree decl, name, resdecl;
900
901 /* The priority is encoded in the constructor or destructor name.
902 collect2 will sort the names and arrange that they are called at
903 program startup. */
3a9ed12a
JH
904 if (final)
905 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
906 else
907 /* Produce a sane name but one not recognizable by collect2, just in
908 case we fail to inline the function. */
909 sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
9e97ff61
JH
910 name = get_file_function_name (which_buf);
911
912 decl = build_decl (input_location, FUNCTION_DECL, name,
913 build_function_type_list (void_type_node, NULL_TREE));
914 current_function_decl = decl;
915
916 resdecl = build_decl (input_location,
917 RESULT_DECL, NULL_TREE, void_type_node);
918 DECL_ARTIFICIAL (resdecl) = 1;
919 DECL_RESULT (decl) = resdecl;
920 DECL_CONTEXT (resdecl) = decl;
921
922 allocate_struct_function (decl, false);
923
924 TREE_STATIC (decl) = 1;
925 TREE_USED (decl) = 1;
926 DECL_ARTIFICIAL (decl) = 1;
3f2dd8cd 927 DECL_IGNORED_P (decl) = 1;
9e97ff61
JH
928 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
929 DECL_SAVED_TREE (decl) = body;
3a9ed12a 930 if (!targetm.have_ctors_dtors && final)
9e97ff61
JH
931 {
932 TREE_PUBLIC (decl) = 1;
933 DECL_PRESERVE_P (decl) = 1;
934 }
935 DECL_UNINLINABLE (decl) = 1;
936
937 DECL_INITIAL (decl) = make_node (BLOCK);
938 TREE_USED (DECL_INITIAL (decl)) = 1;
939
940 DECL_SOURCE_LOCATION (decl) = input_location;
941 cfun->function_end_locus = input_location;
942
943 switch (which)
944 {
945 case 'I':
946 DECL_STATIC_CONSTRUCTOR (decl) = 1;
947 decl_init_priority_insert (decl, priority);
948 break;
d5e254e1
IE
949 case 'P':
950 DECL_STATIC_CONSTRUCTOR (decl) = 1;
951 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
952 NULL,
953 NULL_TREE);
954 decl_init_priority_insert (decl, priority);
955 break;
956 case 'B':
957 DECL_STATIC_CONSTRUCTOR (decl) = 1;
958 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
959 NULL,
960 NULL_TREE);
961 decl_init_priority_insert (decl, priority);
962 break;
9e97ff61
JH
963 case 'D':
964 DECL_STATIC_DESTRUCTOR (decl) = 1;
965 decl_fini_priority_insert (decl, priority);
966 break;
967 default:
968 gcc_unreachable ();
969 }
970
971 gimplify_function_tree (decl);
972
d52f5295 973 cgraph_node::add_new_function (decl, false);
9e97ff61
JH
974
975 set_cfun (NULL);
976 current_function_decl = NULL;
977}
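/* Illustrative sketch (not part of ipa.c): how cgraph_build_static_cdtor_1
   encodes the priority into the generated function name, which collect2 later
   sorts to order the calls at program startup.  The stand-alone driver below
   is hypothetical and skips the get_file_function_name mangling applied to
   the real names.  */

#include <cstdio>

int
main ()
{
  int counter = 0;
  char which_buf[16];

  /* Externally visible form recognized by collect2.  */
  sprintf (which_buf, "%c_%.5d_%d", 'I', 65535, counter++);
  printf ("%s\n", which_buf);		/* I_65535_0 */

  /* Form deliberately not recognized by collect2, used when the constructor
     is expected to be inlined instead of called via collect2.  */
  sprintf (which_buf, "sub_%c_%.5d_%d", 'D', 100, counter++);
  printf ("%s\n", which_buf);		/* sub_D_00100_1 */
  return 0;
}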
978
3a9ed12a 979/* Generate and emit a static constructor or destructor. WHICH must
d5e254e1
IE
980 be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
981 (for chkp static vars constructor) or 'B' (for chkp static bounds
982 constructor). BODY is a STATEMENT_LIST containing GENERIC
983 statements. PRIORITY is the initialization priority for this
984 constructor or destructor. */
3a9ed12a
JH
985
986void
987cgraph_build_static_cdtor (char which, tree body, int priority)
988{
989 cgraph_build_static_cdtor_1 (which, body, priority, false);
990}
9e97ff61
JH
991
992/* A vector of FUNCTION_DECLs declared as static constructors. */
9771b263 993static vec<tree> static_ctors;
9e97ff61 994/* A vector of FUNCTION_DECLs declared as static destructors. */
9771b263 995static vec<tree> static_dtors;
9e97ff61
JH
996
997/* When the target does not have ctors and dtors, we call all constructors
998 and destructors via a special initialization/destruction function
999 recognized by collect2.
1000
1001 When we are going to build this function, collect all constructors and
1002 destructors and turn them into normal functions. */
1003
1004static void
1005record_cdtor_fn (struct cgraph_node *node)
1006{
67348ccc
DM
1007 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1008 static_ctors.safe_push (node->decl);
1009 if (DECL_STATIC_DESTRUCTOR (node->decl))
1010 static_dtors.safe_push (node->decl);
d52f5295 1011 node = cgraph_node::get (node->decl);
67348ccc 1012 DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
9e97ff61
JH
1013}
1014
1015/* Define global constructors/destructor functions for the CDTORS, of
1016 which they are LEN. The CDTORS are sorted by initialization
1017 priority. If CTOR_P is true, these are constructors; otherwise,
1018 they are destructors. */
1019
1020static void
9771b263 1021build_cdtor (bool ctor_p, vec<tree> cdtors)
9e97ff61
JH
1022{
1023 size_t i,j;
9771b263 1024 size_t len = cdtors.length ();
9e97ff61
JH
1025
1026 i = 0;
1027 while (i < len)
1028 {
1029 tree body;
1030 tree fn;
1031 priority_type priority;
1032
1033 priority = 0;
1034 body = NULL_TREE;
1035 j = i;
1036 do
1037 {
1038 priority_type p;
9771b263 1039 fn = cdtors[j];
9e97ff61
JH
1040 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
1041 if (j == i)
1042 priority = p;
1043 else if (p != priority)
1044 break;
1045 j++;
1046 }
1047 while (j < len);
1048
48c24aca 1049 /* When there is only one cdtor and target supports them, do nothing. */
9e97ff61
JH
1050 if (j == i + 1
1051 && targetm.have_ctors_dtors)
1052 {
1053 i++;
1054 continue;
1055 }
1056 /* Find the next batch of constructors/destructors with the same
1057 initialization priority. */
48c24aca 1058 for (;i < j; i++)
9e97ff61 1059 {
9e97ff61 1060 tree call;
9771b263 1061 fn = cdtors[i];
9e97ff61
JH
1062 call = build_call_expr (fn, 0);
1063 if (ctor_p)
1064 DECL_STATIC_CONSTRUCTOR (fn) = 0;
1065 else
1066 DECL_STATIC_DESTRUCTOR (fn) = 0;
1067 /* We do not want to optimize away pure/const calls here.
1068 When optimizing, these should already be removed; when not
1069 optimizing, we want the user to be able to set a breakpoint in them. */
1070 TREE_SIDE_EFFECTS (call) = 1;
1071 append_to_statement_list (call, &body);
9e97ff61 1072 }
9e97ff61
JH
1073 gcc_assert (body != NULL_TREE);
1074 /* Generate a function to call all the function of like
1075 priority. */
3a9ed12a 1076 cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
9e97ff61
JH
1077 }
1078}
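/* Illustrative sketch (not part of ipa.c): the batching loop of build_cdtor,
   grouping a priority-sorted constructor list into one wrapper per
   initialization priority.  `ctor' and the printed output are hypothetical;
   the real code builds a GENERIC STATEMENT_LIST of calls and hands it to
   cgraph_build_static_cdtor_1, and it also skips the wrapper when a single
   cdtor exists and the target supports ctors/dtors natively.  */

#include <cstdio>
#include <vector>

struct ctor
{
  const char *name;
  int priority;
};

int
main ()
{
  /* Already sorted by priority, as static_ctors is after qsort.  */
  std::vector<ctor> cdtors = { { "a", 100 }, { "b", 100 }, { "c", 200 } };
  size_t i = 0, len = cdtors.size ();

  while (i < len)
    {
      /* Find the run of entries sharing the priority of cdtors[i].  */
      int priority = cdtors[i].priority;
      size_t j = i + 1;
      while (j < len && cdtors[j].priority == priority)
	j++;

      printf ("wrapper for priority %d calls:", priority);
      for (; i < j; i++)
	printf (" %s", cdtors[i].name);
      printf ("\n");
    }
  return 0;
}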
1079
1080/* Comparison function for qsort. P1 and P2 are actually of type
1081 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1082 used to determine the sort order. */
1083
1084static int
1085compare_ctor (const void *p1, const void *p2)
1086{
1087 tree f1;
1088 tree f2;
1089 int priority1;
1090 int priority2;
1091
1092 f1 = *(const tree *)p1;
1093 f2 = *(const tree *)p2;
1094 priority1 = DECL_INIT_PRIORITY (f1);
1095 priority2 = DECL_INIT_PRIORITY (f2);
1096
1097 if (priority1 < priority2)
1098 return -1;
1099 else if (priority1 > priority2)
1100 return 1;
1101 else
1102 /* Ensure a stable sort. Constructors are executed in reverse
1103 order to make LTO initialize libraries first. */
1104 return DECL_UID (f2) - DECL_UID (f1);
1105}
1106
1107/* Comparison function for qsort. P1 and P2 are actually of type
1108 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1109 used to determine the sort order. */
1110
1111static int
1112compare_dtor (const void *p1, const void *p2)
1113{
1114 tree f1;
1115 tree f2;
1116 int priority1;
1117 int priority2;
1118
1119 f1 = *(const tree *)p1;
1120 f2 = *(const tree *)p2;
1121 priority1 = DECL_FINI_PRIORITY (f1);
1122 priority2 = DECL_FINI_PRIORITY (f2);
1123
1124 if (priority1 < priority2)
1125 return -1;
1126 else if (priority1 > priority2)
1127 return 1;
1128 else
1129 /* Ensure a stable sort. */
1130 return DECL_UID (f1) - DECL_UID (f2);
1131}
1132
1133/* Generate functions to call static constructors and destructors
1134 for targets that do not support .ctors/.dtors sections. These
1135 functions have magic names which are detected by collect2. */
1136
1137static void
1138build_cdtor_fns (void)
1139{
9771b263 1140 if (!static_ctors.is_empty ())
9e97ff61
JH
1141 {
1142 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
9771b263 1143 static_ctors.qsort (compare_ctor);
48c24aca 1144 build_cdtor (/*ctor_p=*/true, static_ctors);
9e97ff61
JH
1145 }
1146
9771b263 1147 if (!static_dtors.is_empty ())
9e97ff61
JH
1148 {
1149 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
9771b263 1150 static_dtors.qsort (compare_dtor);
48c24aca 1151 build_cdtor (/*ctor_p=*/false, static_dtors);
9e97ff61
JH
1152 }
1153}
1154
1155/* Look for constructors and destructors and produce functions calling them.
1156 This is needed for targets not supporting ctors or dtors, but we perform the
073a8998 1157 transformation also at link time to merge possibly numerous
9e97ff61
JH
1158 constructors/destructors into a single function to improve code locality and
1159 reduce size. */
1160
1161static unsigned int
1162ipa_cdtor_merge (void)
1163{
1164 struct cgraph_node *node;
65c70e6b 1165 FOR_EACH_DEFINED_FUNCTION (node)
67348ccc
DM
1166 if (DECL_STATIC_CONSTRUCTOR (node->decl)
1167 || DECL_STATIC_DESTRUCTOR (node->decl))
9e97ff61
JH
1168 record_cdtor_fn (node);
1169 build_cdtor_fns ();
9771b263
DN
1170 static_ctors.release ();
1171 static_dtors.release ();
9e97ff61
JH
1172 return 0;
1173}
1174
17795822
TS
1175namespace {
1176
1177const pass_data pass_data_ipa_cdtor_merge =
9e97ff61 1178{
27a4cd48
DM
1179 IPA_PASS, /* type */
1180 "cdtor", /* name */
1181 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
1182 TV_CGRAPHOPT, /* tv_id */
1183 0, /* properties_required */
1184 0, /* properties_provided */
1185 0, /* properties_destroyed */
1186 0, /* todo_flags_start */
1187 0, /* todo_flags_finish */
9e97ff61 1188};
27a4cd48 1189
17795822 1190class pass_ipa_cdtor_merge : public ipa_opt_pass_d
27a4cd48
DM
1191{
1192public:
c3284718
RS
1193 pass_ipa_cdtor_merge (gcc::context *ctxt)
1194 : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
1195 NULL, /* generate_summary */
1196 NULL, /* write_summary */
1197 NULL, /* read_summary */
1198 NULL, /* write_optimization_summary */
1199 NULL, /* read_optimization_summary */
1200 NULL, /* stmt_fixup */
1201 0, /* function_transform_todo_flags_start */
1202 NULL, /* function_transform */
1203 NULL) /* variable_transform */
27a4cd48
DM
1204 {}
1205
1206 /* opt_pass methods: */
1a3d085c 1207 virtual bool gate (function *);
be55bfe6 1208 virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }
27a4cd48
DM
1209
1210}; // class pass_ipa_cdtor_merge
1211
1a3d085c
TS
1212bool
1213pass_ipa_cdtor_merge::gate (function *)
1214{
1215 /* Perform the pass when we have no ctors/dtors support
1216 or at LTO time to merge multiple constructors into single
1217 function. */
1218 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1219}
1220
17795822
TS
1221} // anon namespace
1222
27a4cd48
DM
1223ipa_opt_pass_d *
1224make_pass_ipa_cdtor_merge (gcc::context *ctxt)
1225{
1226 return new pass_ipa_cdtor_merge (ctxt);
1227}
eb6a09a7
JH
1228
1229/* Invalid pointer representing BOTTOM for single user dataflow. */
1230#define BOTTOM ((cgraph_node *)(size_t) 2)
1231
1232/* Meet operation for single user dataflow.
1233 Here we want to associate variables with the single function that may access them.
1234
1235 FUNCTION is the current single user of a variable, VAR is a variable that uses it.
1236 The lattice is stored in SINGLE_USER_MAP.
1237
1238 We represent:
1239 - TOP by no entry in SINGLE_USER_MAP
1240 - BOTTOM by BOTTOM in AUX pointer (to save lookups)
1241 - known single user by cgraph pointer in SINGLE_USER_MAP. */
1242
1243cgraph_node *
1244meet (cgraph_node *function, varpool_node *var,
1eb68d2d 1245 hash_map<varpool_node *, cgraph_node *> &single_user_map)
eb6a09a7
JH
1246{
1247 struct cgraph_node *user, **f;
1248
1249 if (var->aux == BOTTOM)
1250 return BOTTOM;
1251
1eb68d2d 1252 f = single_user_map.get (var);
eb6a09a7
JH
1253 if (!f)
1254 return function;
1255 user = *f;
1256 if (!function)
1257 return user;
1258 else if (function != user)
1259 return BOTTOM;
1260 else
1261 return function;
1262}
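/* Illustrative sketch (not part of ipa.c): the three-level lattice used by
   the single-use propagation above - TOP (no entry yet), a concrete single
   user, and BOTTOM (several users) - with the meet operation expressed on
   plain strings.  `meet_user', `TOP' and `BOTTOM' here are hypothetical
   stand-ins for the cgraph_node pointers and SINGLE_USER_MAP handling.  */

#include <cstdio>
#include <string>

static const std::string TOP = "";		/* No user seen yet.  */
static const std::string BOTTOM = "<bottom>";	/* Used by several functions.  */

/* Combine the current lattice value with one more observed user.  */
static std::string
meet_user (const std::string &current, const std::string &user)
{
  if (current == BOTTOM || user == BOTTOM)
    return BOTTOM;
  if (current == TOP)
    return user;
  if (user == TOP)
    return current;
  return current == user ? current : BOTTOM;
}

int
main ()
{
  std::string v = TOP;
  v = meet_user (v, "foo");	/* Still a single user.  */
  v = meet_user (v, "foo");
  printf ("%s\n", v.c_str ());	/* foo */
  v = meet_user (v, "bar");	/* A second user: fall to BOTTOM.  */
  printf ("%s\n", v.c_str ());	/* <bottom> */
  return 0;
}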
1263
1264/* Propagation step of single-use dataflow.
1265
1266 Check all uses of VNODE and see if they are used by single function FUNCTION.
1267 SINGLE_USER_MAP represents the dataflow lattice. */
1268
1269cgraph_node *
1270propagate_single_user (varpool_node *vnode, cgraph_node *function,
1eb68d2d 1271 hash_map<varpool_node *, cgraph_node *> &single_user_map)
eb6a09a7
JH
1272{
1273 int i;
1274 struct ipa_ref *ref;
1275
1276 gcc_assert (!vnode->externally_visible);
1277
1278 /* If node is an alias, first meet with its target. */
1279 if (vnode->alias)
9041d2e6 1280 function = meet (function, vnode->get_alias_target (), single_user_map);
eb6a09a7
JH
1281
1282 /* Check all users and see if they correspond to a single function. */
d52f5295 1283 for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
eb6a09a7
JH
1284 {
1285 struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
1286 if (cnode)
1287 {
1288 if (cnode->global.inlined_to)
1289 cnode = cnode->global.inlined_to;
1290 if (!function)
1291 function = cnode;
1292 else if (function != cnode)
1293 function = BOTTOM;
1294 }
1295 else
17e0fc92
JH
1296 function = meet (function, dyn_cast <varpool_node *> (ref->referring),
1297 single_user_map);
eb6a09a7
JH
1298 }
1299 return function;
1300}
1301
1302/* Pass setting used_by_single_function flag.
17e0fc92
JH
1303 This flag is set on a variable when there is only one function that may
1304 possibly refer to it. */
eb6a09a7
JH
1305
1306static unsigned int
1307ipa_single_use (void)
1308{
1309 varpool_node *first = (varpool_node *) (void *) 1;
1310 varpool_node *var;
1eb68d2d 1311 hash_map<varpool_node *, cgraph_node *> single_user_map;
eb6a09a7
JH
1312
1313 FOR_EACH_DEFINED_VARIABLE (var)
9041d2e6 1314 if (!var->all_refs_explicit_p ())
eb6a09a7
JH
1315 var->aux = BOTTOM;
1316 else
1317 {
1318 /* Enqueue symbol for dataflow. */
1319 var->aux = first;
1320 first = var;
1321 }
1322
1323 /* The actual dataflow. */
1324
1325 while (first != (void *) 1)
1326 {
1327 cgraph_node *user, *orig_user, **f;
1328
1329 var = first;
1330 first = (varpool_node *)first->aux;
1331
1eb68d2d 1332 f = single_user_map.get (var);
eb6a09a7
JH
1333 if (f)
1334 orig_user = *f;
1335 else
1336 orig_user = NULL;
1337 user = propagate_single_user (var, orig_user, single_user_map);
1338
1339 gcc_checking_assert (var->aux != BOTTOM);
1340
1341 /* If user differs, enqueue all references. */
1342 if (user != orig_user)
1343 {
1344 unsigned int i;
1345 ipa_ref *ref;
1346
1eb68d2d 1347 single_user_map.put (var, user);
eb6a09a7
JH
1348
1349 /* Enqueue all aliases for re-processing. */
31de7606
JH
1350 for (i = 0; var->iterate_direct_aliases (i, ref); i++)
1351 if (!ref->referring->aux)
eb6a09a7
JH
1352 {
1353 ref->referring->aux = first;
1354 first = dyn_cast <varpool_node *> (ref->referring);
1355 }
1356 /* Enqueue all users for re-processing. */
d52f5295 1357 for (i = 0; var->iterate_reference (i, ref); i++)
eb6a09a7
JH
1358 if (!ref->referred->aux
1359 && ref->referred->definition
1360 && is_a <varpool_node *> (ref->referred))
1361 {
1362 ref->referred->aux = first;
1363 first = dyn_cast <varpool_node *> (ref->referred);
1364 }
1365
1366 /* If user is BOTTOM, just punt on this var. */
1367 if (user == BOTTOM)
1368 var->aux = BOTTOM;
1369 else
1370 var->aux = NULL;
1371 }
1372 else
1373 var->aux = NULL;
1374 }
1375
1376 FOR_EACH_DEFINED_VARIABLE (var)
1377 {
1378 if (var->aux != BOTTOM)
1379 {
17e0fc92
JH
1380 /* Not having the single user known means that the VAR is
1381 unreachable. Either someone forgot to remove unreachable
1382 variables or the reachability here is wrong. */
1383
b2b29377
MM
1384 gcc_checking_assert (single_user_map.get (var));
1385
eb6a09a7
JH
1386 if (dump_file)
1387 {
1388 fprintf (dump_file, "Variable %s/%i is used by single function\n",
1389 var->name (), var->order);
1390 }
1391 var->used_by_single_function = true;
1392 }
1393 var->aux = NULL;
1394 }
1395 return 0;
1396}
1397
17795822
TS
1398namespace {
1399
1400const pass_data pass_data_ipa_single_use =
eb6a09a7
JH
1401{
1402 IPA_PASS, /* type */
1403 "single-use", /* name */
1404 OPTGROUP_NONE, /* optinfo_flags */
eb6a09a7
JH
1405 TV_CGRAPHOPT, /* tv_id */
1406 0, /* properties_required */
1407 0, /* properties_provided */
1408 0, /* properties_destroyed */
1409 0, /* todo_flags_start */
1410 0, /* todo_flags_finish */
1411};
1412
17795822 1413class pass_ipa_single_use : public ipa_opt_pass_d
eb6a09a7
JH
1414{
1415public:
1416 pass_ipa_single_use (gcc::context *ctxt)
1417 : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
1418 NULL, /* generate_summary */
1419 NULL, /* write_summary */
1420 NULL, /* read_summary */
1421 NULL, /* write_optimization_summary */
1422 NULL, /* read_optimization_summary */
1423 NULL, /* stmt_fixup */
1424 0, /* function_transform_todo_flags_start */
1425 NULL, /* function_transform */
1426 NULL) /* variable_transform */
1427 {}
1428
1429 /* opt_pass methods: */
1430 virtual bool gate (function *);
1431 virtual unsigned int execute (function *) { return ipa_single_use (); }
1432
1433}; // class pass_ipa_single_use
1434
1435bool
1436pass_ipa_single_use::gate (function *)
1437{
1438 return optimize;
1439}
1440
17795822
TS
1441} // anon namespace
1442
eb6a09a7
JH
1443ipa_opt_pass_d *
1444make_pass_ipa_single_use (gcc::context *ctxt)
1445{
1446 return new pass_ipa_single_use (ctxt);
1447}