/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "stringpool.h"
#include "cgraph.h"
#include "gimplify.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "dbgcnt.h"
#include "debug.h"

/* Return true when NODE has ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
                       void *)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}

/* Return true when NODE can be target of an indirect call.  */

static bool
is_indirect_call_target_p (struct cgraph_node *node, void *)
{
  return node->indirect_call_target;
}

/* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
        e->callee->global.inlined_to = inlined_to;
        update_inlined_to_pointer (e->callee, inlined_to);
      }
}

/* Add symtab NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1.
   We enqueue nodes at two occasions: when we find them reachable or when we
   find their bodies needed for further cloning.  In the second case we mark
   them by pointer to 2 after processing, so they are re-queued when they
   become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
              hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *)2 && !reachable->contains (node))
    return;
  node->aux = *first;
  *first = node;
}

/* Process references.  */

static void
process_references (symtab_node *snode,
                    symtab_node **first,
                    bool before_inlining_p,
                    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

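      /* Keep the referred symbol reachable when it is defined in this
         partition and either is not external, or its body may still be
         needed: before inlining, for an always_inline function, or for a
         variable whose constructor is useful for constant folding at WPA
         time.  */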
      if (node->definition && !node->in_other_partition
          && ((!DECL_EXTERNAL (node->decl) || node->alias)
              || (((before_inlining_p
                    && ((TREE_CODE (node->decl) != FUNCTION_DECL
                         && optimize)
                        || (TREE_CODE (node->decl) == FUNCTION_DECL
                            && opt_for_fn (body->decl, optimize))
                        || (symtab->state < IPA_SSA
                            && lookup_attribute
                                 ("always_inline",
                                  DECL_ATTRIBUTES (body->decl))))))
                  /* We use variable constructors during late compilation for
                     constant folding.  Keep references alive so partitioning
                     knows about potential references.  */
                  || (VAR_P (node->decl)
                      && flag_wpa
                      && ctor_for_folding (node->decl)
                         != error_mark_node))))
        {
          /* Be sure that we will not optimize out alias target
             body.  */
          if (DECL_EXTERNAL (node->decl)
              && node->alias
              && before_inlining_p)
            reachable->add (body);
          reachable->add (node);
        }
      enqueue_node (node, first, reachable);
    }
}

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to perform trivial devirtualization when no or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
                               struct cgraph_edge *edge,
                               symtab_node **first,
                               hash_set<symtab_node *> *reachable,
                               bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
         (edge, &final, &cache_token);

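  /* Walk each set of possible targets only once; hash_set::add returns
     true when CACHE_TOKEN was already recorded.  */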
  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
        {
          struct cgraph_node *n = targets[i];

          /* Do not bother to mark virtual methods in anonymous namespace;
             either we will find use of virtual table defining it, or it is
             unused.  */
          if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
              && type_in_anonymous_namespace_p
                   (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
            continue;

          n->indirect_call_target = true;
          symtab_node *body = n->function_symbol ();

          /* Prior to inlining, keep alive bodies of possible targets for
             devirtualization.  */
          if (n->definition
              && (before_inlining_p
                  && opt_for_fn (body->decl, optimize)
                  && opt_for_fn (body->decl, flag_devirtualize)))
            {
              /* Be sure that we will not optimize out alias target
                 body.  */
              if (DECL_EXTERNAL (n->decl)
                  && n->alias
                  && before_inlining_p)
                reachable->add (body);
              reachable->add (n);
            }
          /* Even after inlining we want to keep the possible targets in the
             boundary, so late passes can still produce direct call even if
             the chance for inlining is lost.  */
          enqueue_node (n, first, reachable);
        }
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
        {
          cgraph_node *target, *node = edge->caller;
          if (targets.length () == 1)
            target = targets[0];
          else
            target = cgraph_node::get_create
                       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

          if (dump_enabled_p ())
            {
              location_t locus;
              if (edge->call_stmt)
                locus = gimple_location (edge->call_stmt);
              else
                locus = UNKNOWN_LOCATION;
              dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
                               "devirtualizing call in %s/%i to %s/%i\n",
                               edge->caller->name (), edge->caller->order,
                               target->name (),
                               target->order);
            }
          edge = edge->make_direct (target);
          if (ipa_fn_summaries)
            ipa_update_overall_fn_summary (node);
          else if (edge->call_stmt)
            {
              edge->redirect_call_stmt_to_callee ();

              /* Call to __builtin_unreachable shouldn't be instrumented.  */
              if (!targets.length ())
                gimple_call_set_with_bounds (edge->call_stmt, false);
            }
        }
    }
}

/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; their bodies
     need to stay in memory until inlining in hope that they will be inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

   - virtual functions are kept in callgraph even if they seem unreachable in
     hope calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't inline
     the call.  In theory early opts and IPA should work out all important cases.

   - virtual clones need bodies of their origins for later materialization;
     this means that we want to keep the body even if the origin is unreachable
     otherwise.  To avoid origins from sitting in the callgraph and being
     walked by IPA passes, we turn them into unanalyzed nodes with body
     defined.

     We maintain the set of function declarations where the body needs to stay
     in body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when an offline copy of a
     function or an inline clone is being removed.

   - C++ virtual tables keyed to other units are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by them
     be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that
   need to stay) and symbols that are in the boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as a linked list by the AUX pointer terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn the definition into a declaration, but we may keep the function
   body around based on body_needed_for_clonning.

   All symbols that enter the queue have AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of the boundary and once
   as a real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set the AUX pointer of processed symbols in the boundary to constant 2.  */

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;
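  /* True while inlining may still happen, so bodies of extern inline
     functions and possible devirtualization targets must be kept.  */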
  bool before_inlining_p = symtab->state < (!optimize ? IPA_SSA
                                            : IPA_SSA_AFTER_INLINING);

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
  if (flag_checking)
    {
      FOR_EACH_FUNCTION (node)
        gcc_assert (!node->aux);
      FOR_EACH_VARIABLE (vnode)
        gcc_assert (!vnode->aux);
    }
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      node->indirect_call_target = false;
      if (node->definition
          && !node->global.inlined_to
          && !node->in_other_partition
          && !node->can_remove_if_no_direct_calls_and_refs_p ())
        {
          gcc_assert (!node->global.inlined_to);
          reachable.add (node);
          enqueue_node (node, &first, &reachable);
        }
      else
        gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p()
        && !vnode->in_other_partition)
      {
        reachable.add (vnode);
        enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *)first->aux;

      /* If we are processing symbol in boundary, mark its AUX pointer for
         possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
        {
          node->aux = (void *)2;
          if (node->alias && node->analyzed)
            enqueue_node (node->get_alias_target (), &first, &reachable);
        }
      else
        {
          if (TREE_CODE (node->decl) == FUNCTION_DECL
              && DECL_ABSTRACT_ORIGIN (node->decl))
            {
              struct cgraph_node *origin_node
                = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
              if (origin_node && !origin_node->used_as_abstract_origin)
                {
                  origin_node->used_as_abstract_origin = true;
                  gcc_assert (!origin_node->prev_sibling_clone);
                  gcc_assert (!origin_node->next_sibling_clone);
                  for (cgraph_node *n = origin_node->clones; n;
                       n = n->next_sibling_clone)
                    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
                      n->used_as_abstract_origin = true;
                }
            }
          /* If any symbol in a comdat group is reachable, force
             all externally visible symbols in the same comdat
             group to be reachable as well.  Comdat-local symbols
             can be discarded if all uses were inlined.  */
          if (node->same_comdat_group)
            {
              symtab_node *next;
              for (next = node->same_comdat_group;
                   next != node;
                   next = next->same_comdat_group)
                if (!next->comdat_local_p ()
                    && !reachable.add (next))
                  enqueue_node (next, &first, &reachable);
            }
          /* Mark references as reachable.  */
          process_references (node, &first, before_inlining_p, &reachable);
        }

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
        {
          /* Mark the callees reachable unless they are direct calls to extern
             inline functions we decided to not inline.  */
          if (!in_boundary_p)
            {
              struct cgraph_edge *e;
              /* Keep alive possible targets for devirtualization.  */
              if (opt_for_fn (cnode->decl, optimize)
                  && opt_for_fn (cnode->decl, flag_devirtualize))
                {
                  struct cgraph_edge *next;
                  for (e = cnode->indirect_calls; e; e = next)
                    {
                      next = e->next_callee;
                      if (e->indirect_info->polymorphic)
                        walk_polymorphic_call_targets (&reachable_call_targets,
                                                       e, &first, &reachable,
                                                       before_inlining_p);
                    }
                }
              for (e = cnode->callees; e; e = e->next_callee)
                {
                  symtab_node *body = e->callee->function_symbol ();
                  if (e->callee->definition
                      && !e->callee->in_other_partition
                      && (!e->inline_failed
                          || !DECL_EXTERNAL (e->callee->decl)
                          || e->callee->alias
                          || (before_inlining_p
                              && (opt_for_fn (body->decl, optimize)
                                  || (symtab->state < IPA_SSA
                                      && lookup_attribute
                                           ("always_inline",
                                            DECL_ATTRIBUTES (body->decl)))))))
                    {
                      /* Be sure that we will not optimize out alias target
                         body.  */
                      if (DECL_EXTERNAL (e->callee->decl)
                          && e->callee->alias
                          && before_inlining_p)
                        reachable.add (body);
                      reachable.add (e->callee);
                    }
                  enqueue_node (e->callee, &first, &reachable);
                }

              /* When inline clone exists, mark body to be preserved so when removing
                 offline copy of the function we don't kill it.  */
              if (cnode->global.inlined_to)
                body_needed_for_clonning.add (cnode->decl);

              /* For instrumentation clones we always need original
                 function node for proper LTO privatization.  */
              if (cnode->instrumentation_clone
                  && cnode->definition)
                {
                  gcc_assert (cnode->instrumented_version || in_lto_p);
                  if (cnode->instrumented_version)
                    {
                      enqueue_node (cnode->instrumented_version, &first,
                                    &reachable);
                      reachable.add (cnode->instrumented_version);
                    }
                }

              /* For non-inline clones, force their origins to the boundary and ensure
                 that body is not removed.  */
              while (cnode->clone_of)
                {
                  bool noninline = cnode->clone_of->decl != cnode->decl;
                  cnode = cnode->clone_of;
                  if (noninline)
                    {
                      body_needed_for_clonning.add (cnode->decl);
                      enqueue_node (cnode, &first, &reachable);
                    }
                }

            }
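          /* For thunks that survive only in the boundary, still enqueue the
             target so the chain of thunks and aliases can be walked down to
             the function symbol.  */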
          else if (cnode->thunk.thunk_p)
            enqueue_node (cnode->callees->callee, &first, &reachable);

          /* If any reachable function has simd clones, mark them as
             reachable as well.  */
          if (cnode->simd_clones)
            {
              cgraph_node *next;
              for (next = cnode->simd_clones;
                   next;
                   next = next->simdclone->next_clone)
                if (in_boundary_p
                    || !reachable.add (next))
                  enqueue_node (next, &first, &reachable);
            }
        }
      /* When we see constructor of external variable, keep referred nodes in the
         boundary.  This will also hold initializers of the external vars NODE
         refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
          && DECL_EXTERNAL (node->decl)
          && !vnode->alias
          && in_boundary_p)
        {
          struct ipa_ref *ref = NULL;
          for (int i = 0; node->iterate_reference (i, ref); i++)
            enqueue_node (ref->referred, &first, &reachable);
        }
    }

  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
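      /* Fetch NEXT now; NODE may be removed below.  */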
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
        {
          if (file)
            fprintf (file, " %s/%i", node->name (), node->order);
          node->remove ();
          changed = true;
        }
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
        {
          /* We keep definitions of thunks and aliases in the boundary so
             we can walk to the ultimate alias targets and function symbols
             reliably.  */
          if (node->alias || node->thunk.thunk_p)
            ;
          else if (!body_needed_for_clonning.contains (node->decl)
                   && !node->alias && !node->thunk.thunk_p)
            node->release_body ();
          else if (!node->clone_of)
            gcc_assert (in_lto_p || DECL_RESULT (node->decl));
          if (node->definition && !node->alias && !node->thunk.thunk_p)
            {
              if (file)
                fprintf (file, " %s/%i", node->name (), node->order);
              node->body_removed = true;
              node->analyzed = false;
              node->definition = false;
              node->cpp_implicit_alias = false;
              node->alias = false;
              node->transparent_alias = false;
              node->thunk.thunk_p = false;
              node->weakref = false;
              /* After early inlining we drop always_inline attributes on
                 bodies of functions that are still referenced (have their
                 address taken).  */
              DECL_ATTRIBUTES (node->decl)
                = remove_attribute ("always_inline",
                                    DECL_ATTRIBUTES (node->decl));
              if (!node->in_other_partition)
                node->local.local = false;
              node->remove_callees ();
              node->remove_all_references ();
              changed = true;
              if (node->thunk.thunk_p
                  && node->thunk.add_pointer_bounds_args)
                {
                  node->thunk.thunk_p = false;
                  node->thunk.add_pointer_bounds_args = false;
                }
            }
        }
      else
        gcc_assert (node->clone_of || !node->has_gimple_body_p ()
                    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
          && !node->callers)
        {
          gcc_assert (node->clones);
          node->global.inlined_to = NULL;
          update_inlined_to_pointer (node, node);
        }
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
          /* For can_refer_decl_in_current_unit_p we want to track for
             all external variables if they are defined in other partition
             or not.  */
          && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
        {
          struct ipa_ref *ref = NULL;

          /* First remove the aliases, so varpool::remove can possibly lookup
             the constructor and save it for future use.  */
          while (vnode->iterate_direct_aliases (0, ref))
            {
              if (file)
                fprintf (file, " %s/%i", ref->referred->name (),
                         ref->referred->order);
              ref->referring->remove ();
            }
          if (file)
            fprintf (file, " %s/%i", vnode->name (), vnode->order);
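          /* Removing the aliases above may have removed the precomputed
             VNEXT, so recompute it.  */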
          vnext = next_variable (vnode);
          /* Signal removal to the debug machinery.  */
          if (! flag_wpa)
            {
              vnode->definition = false;
              (*debug_hooks->late_global_decl) (vnode->decl);
            }
          vnode->remove ();
          changed = true;
        }
      else if (!reachable.contains (vnode) && !vnode->alias)
        {
          tree init;
          if (vnode->definition)
            {
              if (file)
                fprintf (file, " %s", vnode->name ());
              changed = true;
            }
          /* Keep body if it may be useful for constant folding.  */
          if ((init = ctor_for_folding (vnode->decl)) == error_mark_node
              && !POINTER_BOUNDS_P (vnode->decl))
            vnode->remove_initializer ();
          else
            DECL_INITIAL (vnode->decl) = init;
          vnode->body_removed = true;
          vnode->definition = false;
          vnode->analyzed = false;
          vnode->aux = NULL;

          vnode->remove_from_same_comdat_group ();

          vnode->remove_all_references ();
        }
      else
        vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
        && !node->used_from_other_partition)
      {
        if (!node->call_for_symbol_and_aliases
              (has_addr_references_p, NULL, true)
            && (!node->instrumentation_clone
                || !node->instrumented_version
                || !node->instrumented_version->address_taken))
          {
            if (file)
              fprintf (file, " %s", node->name ());
            node->address_taken = false;
            changed = true;
            if (node->local_p ()
                /* Virtual functions may be kept in cgraph just because
                   of possible later devirtualization.  Do not mark them as
                   local too early so we won't optimize them out before
                   we are done with polymorphic call analysis.  */
                && (!before_inlining_p
                    || !node->call_for_symbol_and_aliases
                          (is_indirect_call_target_p, NULL, true)))
              {
                node->local.local = true;
                if (file)
                  fprintf (file, " (local)");
              }
          }
      }
  if (file)
    fprintf (file, "\n");

  symtab_node::checking_verify_symtab_nodes ();

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && optimize && ipa_call_summaries)
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}

/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to given variable
   do not need to be explicit.  */

void
process_references (varpool_node *vnode,
                    bool *written, bool *address_taken,
                    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
              && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
        *address_taken = true;
        break;
      case IPA_REF_LOAD:
        *read = true;
        break;
      case IPA_REF_STORE:
        *written = true;
        break;
      case IPA_REF_ALIAS:
        process_references (dyn_cast<varpool_node *> (ref->referring), written,
                            address_taken, read, explicit_refs);
        break;
      case IPA_REF_CHKP:
        gcc_unreachable ();
      }
}

/* Set TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set writeonly bit and clear the initializer, since it will not be needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data)
{
  vnode->writeonly = true;
  if (optimize)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
        {
          if (vnode->num_references ())
            *(bool *)data = true;
          vnode->remove_all_references ();
        }
    }
  return false;
}

/* Clear addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables that no longer have their address taken, or that are
   read only, and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This cannot be done in between gimplify and omp_expand since
   the readonly flag plays a role in what is shared and what is not.  Currently
   we do this transformation as part of whole program visibility and re-do it
   at the ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

bool
ipa_discover_readonly_nonaddressable_vars (void)
{
  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
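  /* Look only at variables for which at least one of the flags can still be
     improved; flags on aliases are updated through their targets via
     call_for_symbol_and_aliases below.  */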
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
        && (TREE_ADDRESSABLE (vnode->decl)
            || !vnode->writeonly
            || !TREE_READONLY (vnode->decl)))
      {
        bool written = false;
        bool address_taken = false;
        bool read = false;
        bool explicit_refs = true;

        process_references (vnode, &written, &address_taken, &read,
                            &explicit_refs);
        if (!explicit_refs)
          continue;
        if (!address_taken)
          {
            if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (non-addressable)", vnode->name ());
            vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
                                                true);
          }
        if (!address_taken && !written
            /* Making variable in explicit section readonly can cause section
               type conflict.
               See e.g. gcc.c-torture/compile/pr23237.c */
            && vnode->get_section () == NULL)
          {
            if (!TREE_READONLY (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (read-only)", vnode->name ());
            vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
          }
        if (!vnode->writeonly && !read && !address_taken && written)
          {
            if (dump_file)
              fprintf (dump_file, " %s (write-only)", vnode->name ());
            vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
                                                true);
          }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}

/* Free inline summary.  */

namespace {

const pass_data pass_data_ipa_free_fn_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "free-inline-summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  /* Early optimizations may make function unreachable.  We can not
     remove unreachable functions as part of the early opts pass because
     TODOs are run before subpasses.  Do it here.  */
  ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
};

class pass_ipa_free_fn_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_fn_summary (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_fn_summary, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
  {
    inline_free_summary ();
    return 0;
  }

}; // class pass_ipa_free_fn_summary

} // anon namespace

simple_ipa_opt_pass *
make_pass_ipa_free_fn_summary (gcc::context *ctxt)
{
  return new pass_ipa_free_fn_summary (ctxt);
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced.  */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
    /* Produce sane name but one not recognizable by collect2, just for the
       case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
                     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
                        RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'P':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
                                          NULL,
                                          NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'B':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
                                          NULL,
                                          NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}

/* When target does not have ctors and dtors, we call all constructors
   and destructors by a special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node, vec<tree> *ctors, vec<tree> *dtors)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    ctors->safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    dtors->safe_push (node->decl);
  node = cgraph_node::get (node->decl);
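  /* The recorded constructor/destructor is called from the generated
     wrapper; allow it to be inlined there regardless of the usual limits.  */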
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructors/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, const vec<tree> &cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      do
        {
          priority_type p;
          fn = cdtors[j];
          p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
          if (j == i)
            priority = p;
          else if (p != priority)
            break;
          j++;
        }
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing.  */
      if (j == i + 1
          && targetm.have_ctors_dtors)
        {
          i++;
          continue;
        }
      /* Find the next batch of constructors/destructors with the same
         initialization priority.  */
      for (;i < j; i++)
        {
          tree call;
          fn = cdtors[i];
          call = build_call_expr (fn, 0);
          if (ctor_p)
            DECL_STATIC_CONSTRUCTOR (fn) = 0;
          else
            DECL_STATIC_DESTRUCTOR (fn) = 0;
          /* We do not want to optimize away pure/const calls here.
             When optimizing, these should be already removed, when not
             optimizing, we want user to be able to breakpoint in them.  */
          TREE_SIDE_EFFECTS (call) = 1;
          append_to_statement_list (call, &body);
        }
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
         priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in backwards
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (vec<tree> *ctors, vec<tree> *dtors)
{
  if (!ctors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      ctors->qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, *ctors);
    }

  if (!dtors->is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      dtors->qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, *dtors);
    }
}

/* Look for constructors and destructors and produce functions calling them.
   This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at linktime to merge possibly numerous
   constructors/destructors into single function to improve code locality and
   reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  /* A vector of FUNCTION_DECLs declared as static constructors.  */
  auto_vec<tree, 20> ctors;
  /* A vector of FUNCTION_DECLs declared as static destructors.  */
  auto_vec<tree, 20> dtors;
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
        || DECL_STATIC_DESTRUCTOR (node->decl))
      record_cdtor_fn (node, &ctors, &dtors);
  build_cdtor_fns (&ctors, &dtors);
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into single
     function.  */
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}

/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate variables with a single function that may access
   them.

   FUNCTION is current single user of a variable, VAR is variable that uses it.
   Lattice is stored in SINGLE_USER_MAP.

   We represent:
    - TOP by no entry in SINGLE_USER_MAP
    - BOTTOM by BOTTOM in AUX pointer (to save lookups)
    - known single user by cgraph pointer in SINGLE_USER_MAP.  */

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
      hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}

/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they all come from a single function
   FUNCTION.  SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
                       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
        {
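          /* References from an inline clone count as references from the
             function it was inlined into.  */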
          if (cnode->global.inlined_to)
            cnode = cnode->global.inlined_to;
          if (!function)
            function = cnode;
          else if (function != cnode)
            function = BOTTOM;
        }
      else
        function = meet (function, dyn_cast <varpool_node *> (ref->referring),
                         single_user_map);
    }
  return function;
}

/* Pass setting used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
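  /* Worklist of variables, linked through AUX pointers and terminated by
     the sentinel value 1, as in symbol_table::remove_unreachable_nodes.  */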
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
        /* Enqueue symbol for dataflow.  */
        var->aux = first;
        first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *)first->aux;

      f = single_user_map.get (var);
      if (f)
        orig_user = *f;
      else
        orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references.  */
      if (user != orig_user)
        {
          unsigned int i;
          ipa_ref *ref;

          single_user_map.put (var, user);

          /* Enqueue all aliases for re-processing.  */
          for (i = 0; var->iterate_direct_aliases (i, ref); i++)
            if (!ref->referring->aux)
              {
                ref->referring->aux = first;
                first = dyn_cast <varpool_node *> (ref->referring);
              }
          /* Enqueue all users for re-processing.  */
          for (i = 0; var->iterate_reference (i, ref); i++)
            if (!ref->referred->aux
                && ref->referred->definition
                && is_a <varpool_node *> (ref->referred))
              {
                ref->referred->aux = first;
                first = dyn_cast <varpool_node *> (ref->referred);
              }

          /* If user is BOTTOM, just punt on this var.  */
          if (user == BOTTOM)
            var->aux = BOTTOM;
          else
            var->aux = NULL;
        }
      else
        var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
        {
          /* Not having the single user known means that the VAR is
             unreachable.  Either someone forgot to remove unreachable
             variables or the reachability here is wrong.  */

          gcc_checking_assert (single_user_map.get (var));

          if (dump_file)
            {
              fprintf (dump_file, "Variable %s/%i is used by single function\n",
                       var->name (), var->order);
            }
          var->used_by_single_function = true;
        }
      var->aux = NULL;
    }
  return 0;
}

namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

bool
pass_ipa_single_use::gate (function *)
{
  return optimize;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}

/* Materialize all clones.  */

namespace {

const pass_data pass_data_materialize_all_clones =
{
  SIMPLE_IPA_PASS, /* type */
  "materialize-all-clones", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_OPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_materialize_all_clones : public simple_ipa_opt_pass
{
public:
  pass_materialize_all_clones (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_materialize_all_clones, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
  {
    symtab->materialize_all_clones ();
    return 0;
  }

}; // class pass_materialize_all_clones

} // anon namespace

simple_ipa_opt_pass *
make_pass_materialize_all_clones (gcc::context *ctxt)
{
  return new pass_materialize_all_clones (ctxt);
}