/* Basic IPA optimizations and utilities.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "tree.h"
#include "fold-const.h"
#include "calls.h"
#include "stringpool.h"
#include "predict.h"
#include "basic-block.h"
#include "hard-reg-set.h"
#include "function.h"
#include "cgraph.h"
#include "tree-pass.h"
#include "gimple-expr.h"
#include "gimplify.h"
#include "flags.h"
#include "target.h"
#include "tree-iterator.h"
#include "ipa-utils.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-inline.h"
#include "tree-inline.h"
#include "profile.h"
#include "params.h"
#include "internal-fn.h"
#include "tree-ssa-alias.h"
#include "gimple.h"
#include "dbgcnt.h"


/* Return true when NODE has an ADDR reference.  */

static bool
has_addr_references_p (struct cgraph_node *node,
                       void *data ATTRIBUTE_UNUSED)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}

/* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
        e->callee->global.inlined_to = inlined_to;
        update_inlined_to_pointer (e->callee, inlined_to);
      }
}
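
/* For example: if A was inlined into B and B was in turn inlined into C,
   the recursion above visits A's and B's inline clones alike, so every node
   in the inline tree rooted at NODE ends up with inlined_to == INLINED_TO.  */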

/* Add symtab NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1.
   We enqueue nodes on two occasions: when we find them reachable or when we
   find their bodies needed for further cloning.  In the second case we mark
   them by pointer to 2 after processing, so they are re-queued once they
   become reachable.  */

static void
enqueue_node (symtab_node *node, symtab_node **first,
              hash_set<symtab_node *> *reachable)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *) 2 && !reachable->contains (node))
    return;
  node->aux = *first;
  *first = node;
}
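
/* Summarizing the encoding above, AUX holds one of four values:
     NULL        - the node was never enqueued;
     (void *) 1  - end-of-queue sentinel (stored only in FIRST);
     (void *) 2  - the node was processed as part of the boundary and may be
                   re-queued once it becomes reachable;
     other       - pointer to the next node in the queue.  */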

/* Process references.  */

static void
process_references (symtab_node *snode,
                    symtab_node **first,
                    bool before_inlining_p,
                    hash_set<symtab_node *> *reachable)
{
  int i;
  struct ipa_ref *ref = NULL;
  for (i = 0; snode->iterate_reference (i, ref); i++)
    {
      symtab_node *node = ref->referred;
      symtab_node *body = node->ultimate_alias_target ();

      if (node->definition && !node->in_other_partition
          && ((!DECL_EXTERNAL (node->decl) || node->alias)
              || (((before_inlining_p
                    && ((TREE_CODE (node->decl) != FUNCTION_DECL
                         && optimize)
                        || (TREE_CODE (node->decl) == FUNCTION_DECL
                            && opt_for_fn (body->decl, optimize))
                        || (symtab->state < IPA_SSA
                            && lookup_attribute
                                 ("always_inline",
                                  DECL_ATTRIBUTES (body->decl))))))
                  /* We use variable constructors during late compilation for
                     constant folding.  Keep references alive so partitioning
                     knows about potential references.  */
                  || (TREE_CODE (node->decl) == VAR_DECL
                      && flag_wpa
                      && ctor_for_folding (node->decl)
                         != error_mark_node))))
        {
          /* Be sure that we will not optimize out alias target
             body.  */
          if (DECL_EXTERNAL (node->decl)
              && node->alias
              && before_inlining_p)
            reachable->add (body);
          reachable->add (node);
        }
      enqueue_node (node, first, reachable);
    }
}

/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try trivial devirtualization when no target, or only one target, is
   possible.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
                               struct cgraph_edge *edge,
                               symtab_node **first,
                               hash_set<symtab_node *> *reachable,
                               bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
        (edge, &final, &cache_token);

  if (!reachable_call_targets->add (cache_token))
    {
      for (i = 0; i < targets.length (); i++)
        {
          struct cgraph_node *n = targets[i];

          /* Do not bother to mark virtual methods in anonymous namespace;
             either we will find use of virtual table defining it, or it is
             unused.  */
          if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
              && type_in_anonymous_namespace_p
                   (TYPE_METHOD_BASETYPE (TREE_TYPE (n->decl))))
            continue;

          symtab_node *body = n->function_symbol ();

          /* Prior to inlining, keep alive the bodies of possible targets for
             devirtualization.  */
          if (n->definition
              && (before_inlining_p
                  && opt_for_fn (body->decl, optimize)
                  && opt_for_fn (body->decl, flag_devirtualize)))
            {
              /* Be sure that we will not optimize out alias target
                 body.  */
              if (DECL_EXTERNAL (n->decl)
                  && n->alias
                  && before_inlining_p)
                reachable->add (body);
              reachable->add (n);
            }
          /* Even after inlining we want to keep the possible targets in the
             boundary, so late passes can still produce a direct call even if
             the chance for inlining is lost.  */
          enqueue_node (n, first, reachable);
        }
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all of its derivations)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
        {
          cgraph_node *target, *node = edge->caller;
          if (targets.length () == 1)
            target = targets[0];
          else
            target = cgraph_node::get_create
                       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

          if (dump_enabled_p ())
            {
              location_t locus;
              if (edge->call_stmt)
                locus = gimple_location (edge->call_stmt);
              else
                locus = UNKNOWN_LOCATION;
              dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
                               "devirtualizing call in %s/%i to %s/%i\n",
                               edge->caller->name (), edge->caller->order,
                               target->name (),
                               target->order);
            }
          edge = edge->make_direct (target);
          if (inline_summaries)
            inline_update_overall_summary (node);
          else if (edge->call_stmt)
            {
              edge->redirect_call_stmt_to_callee ();

              /* Call to __builtin_unreachable shouldn't be instrumented.  */
              if (!targets.length ())
                gimple_call_set_with_bounds (edge->call_stmt, false);
            }
        }
    }
}
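
/* As an illustration (hypothetical C++ input, not code from this file):
   for
     struct S final { virtual int f (); };
     int call (S *p) { return p->f (); }
   the type S is final, so the only possible target of p->f () is S::f and
   the indirect edge can be made direct; with no possible target at all the
   call is redirected to __builtin_unreachable instead.  */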

/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

   - reachable extern inline functions need special handling; their bodies
     need to stay in memory until inlining, in the hope that they will be
     inlined.
     After inlining we release their bodies and turn them into unanalyzed
     nodes even when they are reachable.

   - virtual functions are kept in the callgraph even if they seem
     unreachable, in the hope that calls to them will be devirtualized.

     Again we remove them after inlining.  In late optimization some
     devirtualization may happen, but it is not important since we won't
     inline the call.  In theory early opts and IPA should work out all
     important cases.

   - virtual clones need the bodies of their origins for later
     materialization; this means that we want to keep the body even if the
     origin is otherwise unreachable.  To avoid the origin sitting in the
     callgraph and being walked by IPA passes, we turn it into an unanalyzed
     node with the body defined.

     We maintain the set of function declarations whose bodies need to stay
     in body_needed_for_clonning.

     Inline clones represent a special case: their declaration matches the
     declaration of the origin, and cgraph_remove_node already knows how to
     reshape the callgraph and preserve the body when an offline copy of a
     function or an inline clone is being removed.

   - C++ virtual tables keyed to other units are represented as DECL_EXTERNAL
     variables with DECL_INITIAL set.  We finalize these and keep reachable
     ones around for constant folding purposes.  After inlining we however
     stop walking their references to let everything static referenced by
     them be removed when it is otherwise unreachable.

   We maintain a queue of both reachable symbols (i.e. defined symbols that
   need to stay) and symbols that are in the boundary (i.e. external symbols
   referenced by reachable symbols or origins of clones).  The queue is
   represented as a linked list via AUX pointers, terminated by 1.

   At the end we keep all reachable symbols.  For symbols in the boundary we
   always turn the definition into a declaration, but we may keep the
   function body around based on body_needed_for_clonning.

   All symbols that enter the queue have the AUX pointer non-zero and are in
   the boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of the boundary and once
   as a real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set the AUX pointer of processed symbols in the boundary to constant
   2.  */

bool
symbol_table::remove_unreachable_nodes (FILE *file)
{
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  hash_set<symtab_node *> reachable;
  hash_set<tree> body_needed_for_clonning;
  hash_set<void *> reachable_call_targets;
  bool before_inlining_p = symtab->state < (!optimize ? IPA_SSA
                                            : IPA_SSA_AFTER_INLINING);

  timevar_push (TV_IPA_UNREACHABLE);
  build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->aux);
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);
#endif
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with the master clone
     and thus cgraph_can_remove_if_no_direct_calls_and_refs_p should not be
     called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      if (node->definition
          && !node->global.inlined_to
          && !node->in_other_partition
          && !node->can_remove_if_no_direct_calls_and_refs_p ())
        {
          gcc_assert (!node->global.inlined_to);
          reachable.add (node);
          enqueue_node (node, &first, &reachable);
        }
      else
        gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!vnode->can_remove_if_no_refs_p ()
        && !vnode->in_other_partition)
      {
        reachable.add (vnode);
        enqueue_node (vnode, &first, &reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      bool in_boundary_p = !reachable.contains (first);
      symtab_node *node = first;

      first = (symtab_node *) first->aux;

      /* If we are processing a symbol in the boundary, mark its AUX pointer
         for possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
        {
          node->aux = (void *) 2;
          if (node->alias && node->analyzed)
            enqueue_node (node->get_alias_target (), &first, &reachable);
        }
      else
        {
          if (TREE_CODE (node->decl) == FUNCTION_DECL
              && DECL_ABSTRACT_ORIGIN (node->decl))
            {
              struct cgraph_node *origin_node
                = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
              if (origin_node && !origin_node->used_as_abstract_origin)
                {
                  origin_node->used_as_abstract_origin = true;
                  gcc_assert (!origin_node->prev_sibling_clone);
                  gcc_assert (!origin_node->next_sibling_clone);
                  for (cgraph_node *n = origin_node->clones; n;
                       n = n->next_sibling_clone)
                    if (n->decl == DECL_ABSTRACT_ORIGIN (node->decl))
                      n->used_as_abstract_origin = true;
                }
            }
          /* If any symbol in a comdat group is reachable, force
             all externally visible symbols in the same comdat
             group to be reachable as well.  Comdat-local symbols
             can be discarded if all uses were inlined.  */
          if (node->same_comdat_group)
            {
              symtab_node *next;
              for (next = node->same_comdat_group;
                   next != node;
                   next = next->same_comdat_group)
                if (!next->comdat_local_p ()
                    && !reachable.add (next))
                  enqueue_node (next, &first, &reachable);
            }
          /* Mark references as reachable.  */
          process_references (node, &first, before_inlining_p, &reachable);
        }

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
        {
          /* Mark the callees reachable unless they are direct calls to extern
             inline functions we decided to not inline.  */
          if (!in_boundary_p)
            {
              struct cgraph_edge *e;
              /* Keep alive possible targets for devirtualization.  */
              if (opt_for_fn (cnode->decl, optimize)
                  && opt_for_fn (cnode->decl, flag_devirtualize))
                {
                  struct cgraph_edge *next;
                  for (e = cnode->indirect_calls; e; e = next)
                    {
                      next = e->next_callee;
                      if (e->indirect_info->polymorphic)
                        walk_polymorphic_call_targets (&reachable_call_targets,
                                                       e, &first, &reachable,
                                                       before_inlining_p);
                    }
                }
              for (e = cnode->callees; e; e = e->next_callee)
                {
                  symtab_node *body = e->callee->function_symbol ();
                  if (e->callee->definition
                      && !e->callee->in_other_partition
                      && (!e->inline_failed
                          || !DECL_EXTERNAL (e->callee->decl)
                          || e->callee->alias
                          || (before_inlining_p
                              && (opt_for_fn (body->decl, optimize)
                                  || (symtab->state < IPA_SSA
                                      && lookup_attribute
                                           ("always_inline",
                                            DECL_ATTRIBUTES (body->decl)))))))
                    {
                      /* Be sure that we will not optimize out alias target
                         body.  */
                      if (DECL_EXTERNAL (e->callee->decl)
                          && e->callee->alias
                          && before_inlining_p)
                        reachable.add (body);
                      reachable.add (e->callee);
                    }
                  enqueue_node (e->callee, &first, &reachable);
                }

              /* When an inline clone exists, mark its body to be preserved
                 so that when removing the offline copy of the function we
                 don't kill it.  */
              if (cnode->global.inlined_to)
                body_needed_for_clonning.add (cnode->decl);

              /* For instrumentation clones we always need the original
                 function node for proper LTO privatization.  */
              if (cnode->instrumentation_clone
                  && cnode->definition)
                {
                  gcc_assert (cnode->instrumented_version || in_lto_p);
                  if (cnode->instrumented_version)
                    {
                      enqueue_node (cnode->instrumented_version, &first,
                                    &reachable);
                      reachable.add (cnode->instrumented_version);
                    }
                }

              /* For non-inline clones, force their origins to the boundary
                 and ensure that the body is not removed.  */
              while (cnode->clone_of)
                {
                  bool noninline = cnode->clone_of->decl != cnode->decl;
                  cnode = cnode->clone_of;
                  if (noninline)
                    {
                      body_needed_for_clonning.add (cnode->decl);
                      enqueue_node (cnode, &first, &reachable);
                    }
                }

            }
          else if (cnode->thunk.thunk_p)
            enqueue_node (cnode->callees->callee, &first, &reachable);

          /* If any reachable function has simd clones, mark them as
             reachable as well.  */
          if (cnode->simd_clones)
            {
              cgraph_node *next;
              for (next = cnode->simd_clones;
                   next;
                   next = next->simdclone->next_clone)
                if (in_boundary_p
                    || !reachable.add (next))
                  enqueue_node (next, &first, &reachable);
            }
        }
      /* When we see a constructor of an external variable, keep referred
         nodes in the boundary.  This will also hold the initializers of the
         external vars NODE refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
          && DECL_EXTERNAL (node->decl)
          && !vnode->alias
          && in_boundary_p)
        {
          struct ipa_ref *ref = NULL;
          for (int i = 0; node->iterate_reference (i, ref); i++)
            enqueue_node (ref->referred, &first, &reachable);
        }
    }

  /* Remove unreachable functions.  */
  for (node = first_function (); node; node = next)
    {
      next = next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
        {
          if (file)
            fprintf (file, " %s/%i", node->name (), node->order);
          node->remove ();
          changed = true;
        }
      /* If node is unreachable, remove its body.  */
      else if (!reachable.contains (node))
        {
          /* We keep definitions of thunks and aliases in the boundary so
             we can walk to the ultimate alias targets and function symbols
             reliably.  */
          if (node->alias || node->thunk.thunk_p)
            ;
          else if (!body_needed_for_clonning.contains (node->decl)
                   && !node->alias && !node->thunk.thunk_p)
            node->release_body ();
          else if (!node->clone_of)
            gcc_assert (in_lto_p || DECL_RESULT (node->decl));
          if (node->definition && !node->alias && !node->thunk.thunk_p)
            {
              if (file)
                fprintf (file, " %s/%i", node->name (), node->order);
              node->body_removed = true;
              node->analyzed = false;
              node->definition = false;
              node->cpp_implicit_alias = false;
              node->alias = false;
              node->thunk.thunk_p = false;
              node->weakref = false;
              /* After early inlining we drop always_inline attributes on
                 bodies of functions that are still referenced (have their
                 address taken).  */
              DECL_ATTRIBUTES (node->decl)
                = remove_attribute ("always_inline",
                                    DECL_ATTRIBUTES (node->decl));
              if (!node->in_other_partition)
                node->local.local = false;
              node->remove_callees ();
              node->remove_all_references ();
              changed = true;
              if (node->thunk.thunk_p
                  && node->thunk.add_pointer_bounds_args)
                {
                  node->thunk.thunk_p = false;
                  node->thunk.add_pointer_bounds_args = false;
                }
            }
        }
      else
        gcc_assert (node->clone_of || !node->has_gimple_body_p ()
                    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so that materializing them allows
     further cloning.  If the function the clone is inlined into is removed,
     we need to turn it into a normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
          && !node->callers)
        {
          gcc_assert (node->clones);
          node->global.inlined_to = NULL;
          update_inlined_to_pointer (node, node);
        }
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = first_variable (); vnode; vnode = vnext)
    {
      vnext = next_variable (vnode);
      if (!vnode->aux
          /* For can_refer_decl_in_current_unit_p we want to track, for
             all external variables, whether they are defined in another
             partition or not.  */
          && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
        {
          struct ipa_ref *ref = NULL;

          /* First remove the aliases, so varpool::remove can possibly
             lookup the constructor and save it for future use.  */
          while (vnode->iterate_direct_aliases (0, ref))
            {
              if (file)
                fprintf (file, " %s/%i", ref->referred->name (),
                         ref->referred->order);
              ref->referring->remove ();
            }
          if (file)
            fprintf (file, " %s/%i", vnode->name (), vnode->order);
          vnext = next_variable (vnode);
          vnode->remove ();
          changed = true;
        }
      else if (!reachable.contains (vnode) && !vnode->alias)
        {
          tree init;
          if (vnode->definition)
            {
              if (file)
                fprintf (file, " %s", vnode->name ());
              changed = true;
            }
          /* Keep body if it may be useful for constant folding.  */
          if ((init = ctor_for_folding (vnode->decl)) == error_mark_node
              && !POINTER_BOUNDS_P (vnode->decl))
            vnode->remove_initializer ();
          else
            DECL_INITIAL (vnode->decl) = init;
          vnode->body_removed = true;
          vnode->definition = false;
          vnode->analyzed = false;
          vnode->aux = NULL;

          vnode->remove_from_same_comdat_group ();

          vnode->remove_all_references ();
        }
      else
        vnode->aux = NULL;
    }

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
        && !node->used_from_other_partition)
      {
        if (!node->call_for_symbol_and_aliases
               (has_addr_references_p, NULL, true)
            && (!node->instrumentation_clone
                || !node->instrumented_version
                || !node->instrumented_version->address_taken))
          {
            if (file)
              fprintf (file, " %s", node->name ());
            node->address_taken = false;
            changed = true;
            if (node->local_p ())
              {
                node->local.local = true;
                if (file)
                  fprintf (file, " (local)");
              }
          }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && optimize && inline_edge_summary_vec.exists ())
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}

/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to given variable
   do not need to be explicit.  */

void
process_references (varpool_node *vnode,
                    bool *written, bool *address_taken,
                    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!vnode->all_refs_explicit_p ()
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  for (i = 0; vnode->iterate_referring (i, ref)
              && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
        *address_taken = true;
        break;
      case IPA_REF_LOAD:
        *read = true;
        break;
      case IPA_REF_STORE:
        *written = true;
        break;
      case IPA_REF_ALIAS:
        process_references (dyn_cast<varpool_node *> (ref->referring), written,
                            address_taken, read, explicit_refs);
        break;
      case IPA_REF_CHKP:
        gcc_unreachable ();
      }
}
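
/* For instance (a hypothetical variable, purely for illustration): for
     static int x;  ...  x = 1;
   with a single store, no loads and no address-taking, the walk above sets
   only *written; ipa_discover_readonly_nonaddressable_vars below then treats
   such a variable as write-only.  */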

/* Set TREE_READONLY bit.  */

bool
set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  TREE_READONLY (vnode->decl) = true;
  return false;
}

/* Set the writeonly bit and clear the initializer, since it will not be
   needed.  */

bool
set_writeonly_bit (varpool_node *vnode, void *data)
{
  vnode->writeonly = true;
  if (optimize)
    {
      DECL_INITIAL (vnode->decl) = NULL;
      if (!vnode->alias)
        {
          if (vnode->num_references ())
            *(bool *) data = true;
          vnode->remove_all_references ();
        }
    }
  return false;
}

/* Clear the addressable bit of VNODE.  */

bool
clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED)
{
  vnode->address_taken = false;
  TREE_ADDRESSABLE (vnode->decl) = 0;
  return false;
}

/* Discover variables that no longer have their address taken, or that are
   read-only, and update their flags.

   Return true when unreachable symbol removal should be done.

   FIXME: This cannot be done in between gimplify and omp_expand since the
   readonly flag plays a role in what is shared and what is not.  Currently
   we do this transformation as part of whole program visibility and redo it
   at the ipa-reference pass (to take cloning into account), but it would
   make sense to do it before early optimizations.  */

bool
ipa_discover_readonly_nonaddressable_vars (void)
{
  bool remove_p = false;
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
        && (TREE_ADDRESSABLE (vnode->decl)
            || !vnode->writeonly
            || !TREE_READONLY (vnode->decl)))
      {
        bool written = false;
        bool address_taken = false;
        bool read = false;
        bool explicit_refs = true;

        process_references (vnode, &written, &address_taken, &read,
                            &explicit_refs);
        if (!explicit_refs)
          continue;
        if (!address_taken)
          {
            if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (non-addressable)", vnode->name ());
            vnode->call_for_symbol_and_aliases (clear_addressable_bit, NULL,
                                                true);
          }
        if (!address_taken && !written
            /* Making variable in explicit section readonly can cause section
               type conflict.
               See e.g. gcc.c-torture/compile/pr23237.c */
            && vnode->get_section () == NULL)
          {
            if (!TREE_READONLY (vnode->decl) && dump_file)
              fprintf (dump_file, " %s (read-only)", vnode->name ());
            vnode->call_for_symbol_and_aliases (set_readonly_bit, NULL, true);
          }
        if (!vnode->writeonly && !read && !address_taken && written)
          {
            if (dump_file)
              fprintf (dump_file, " %s (write-only)", vnode->name ());
            vnode->call_for_symbol_and_aliases (set_writeonly_bit, &remove_p,
                                                true);
          }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
  return remove_p;
}

/* Free inline summary.  */

namespace {

const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "free-inline-summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  /* Early optimizations may make functions unreachable.  We cannot
     remove unreachable functions as part of the early opts pass because
     TODOs are run before subpasses.  Do it here.  */
  ( TODO_remove_functions | TODO_dump_symtab ), /* todo_flags_finish */
};

class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      inline_free_summary ();
      return 0;
    }

}; // class pass_ipa_free_inline_summary

} // anon namespace

simple_ipa_opt_pass *
make_pass_ipa_free_inline_summary (gcc::context *ctxt)
{
  return new pass_ipa_free_inline_summary (ctxt);
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.

   FINAL specifies whether the externally visible name for collect2 should
   be produced.  */

static void
cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  if (final)
    sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  else
    /* Produce a sane name, but one not recognizable by collect2, just in
       case we fail to inline the function.  */
    sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
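
  /* For example (hypothetical values): WHICH 'I' with PRIORITY 65535 and
     the first counter value yields "I_65535_0" when FINAL and
     "sub_I_65535_0" otherwise; get_file_function_name below then derives
     from the buffer a declaration name unique to this translation unit.  */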
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
                     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
                        RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'P':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("chkp ctor"),
                                          NULL,
                                          NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'B':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("bnd_legacy"),
                                          NULL,
                                          NULL_TREE);
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_node::add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor), 'D' (for a destructor), 'P'
   (for chkp static vars constructor) or 'B' (for chkp static bounds
   constructor).  BODY is a STATEMENT_LIST containing GENERIC
   statements.  PRIORITY is the initialization priority for this
   constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}

/* A vector of FUNCTION_DECLs declared as static constructors.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static vec<tree> static_dtors;

/* When the target does not have ctors and dtors, we call all constructors
   and destructors via special initialization/destruction functions
   recognized by collect2.

   When we are going to build such a function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    static_ctors.safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    static_dtors.safe_push (node->decl);
  node = cgraph_node::get (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructors/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i, j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      do
        {
          priority_type p;
          fn = cdtors[j];
          p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
          if (j == i)
            priority = p;
          else if (p != priority)
            break;
          j++;
        }
      while (j < len);

      /* When there is only one cdtor and the target supports them, do
         nothing.  */
      if (j == i + 1
          && targetm.have_ctors_dtors)
        {
          i++;
          continue;
        }
      /* Find the next batch of constructors/destructors with the same
         initialization priority.  */
      for (; i < j; i++)
        {
          tree call;
          fn = cdtors[i];
          call = build_call_expr (fn, 0);
          if (ctor_p)
            DECL_STATIC_CONSTRUCTOR (fn) = 0;
          else
            DECL_STATIC_DESTRUCTOR (fn) = 0;
          /* We do not want to optimize away pure/const calls here.
             When optimizing, these should be already removed, when not
             optimizing, we want user to be able to breakpoint in them.  */
          TREE_SIDE_EFFECTS (call) = 1;
          append_to_statement_list (call, &body);
        }
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
         priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
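
/* A concrete illustration (hypothetical priorities): for sorted ctors with
   priorities {100, 100, 65535}, build_cdtor emits one 'I' function calling
   both priority-100 ctors and, unless the target has native ctor support,
   a second one for the lone priority-65535 ctor.  */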

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in backward order
       to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *) p1;
  f2 = *(const tree *) p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (void)
{
  if (!static_ctors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_ctors.qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!static_dtors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_dtors.qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}

/* Look for constructors and destructors and produce functions calling them.
   This is needed for targets not supporting ctors or dtors, but we perform
   the transformation also at link time to merge possibly numerous
   constructors/destructors into a single function to improve code locality
   and reduce size.  */

static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
        || DECL_STATIC_DESTRUCTOR (node->decl))
      record_cdtor_fn (node);
  build_cdtor_fns ();
  static_ctors.release ();
  static_dtors.release ();
  return 0;
}

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into a single
     function.  */
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}

/* Invalid pointer representing BOTTOM for single user dataflow.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)

/* Meet operation for single user dataflow.
   Here we want to associate each variable with the single function that may
   access it.

   FUNCTION is the current single user of a variable; VAR is a variable that
   it uses.  The lattice is stored in SINGLE_USER_MAP.

   We represent:
     - TOP by no entry in SINGLE_USER_MAP
     - BOTTOM by BOTTOM in the AUX pointer (to save lookups)
     - a known single user by a cgraph pointer in SINGLE_USER_MAP.  */

cgraph_node *
meet (cgraph_node *function, varpool_node *var,
      hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  struct cgraph_node *user, **f;

  if (var->aux == BOTTOM)
    return BOTTOM;

  f = single_user_map.get (var);
  if (!f)
    return function;
  user = *f;
  if (!function)
    return user;
  else if (function != user)
    return BOTTOM;
  else
    return function;
}
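
/* The lattice operation above, in summary (F and G are distinct functions):
     TOP    meet F = F
     F      meet F = F
     F      meet G = BOTTOM
     BOTTOM meet anything = BOTTOM  */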

/* Propagation step of single-use dataflow.

   Check all uses of VNODE and see if they are all used by the single
   function FUNCTION.  SINGLE_USER_MAP represents the dataflow lattice.  */

cgraph_node *
propagate_single_user (varpool_node *vnode, cgraph_node *function,
                       hash_map<varpool_node *, cgraph_node *> &single_user_map)
{
  int i;
  struct ipa_ref *ref;

  gcc_assert (!vnode->externally_visible);

  /* If node is an alias, first meet with its target.  */
  if (vnode->alias)
    function = meet (function, vnode->get_alias_target (), single_user_map);

  /* Check all users and see if they correspond to a single function.  */
  for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
    {
      struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
      if (cnode)
        {
          if (cnode->global.inlined_to)
            cnode = cnode->global.inlined_to;
          if (!function)
            function = cnode;
          else if (function != cnode)
            function = BOTTOM;
        }
      else
        function = meet (function, dyn_cast <varpool_node *> (ref->referring),
                         single_user_map);
    }
  return function;
}

/* Pass setting the used_by_single_function flag.
   This flag is set on a variable when there is only one function that may
   possibly refer to it.  */

static unsigned int
ipa_single_use (void)
{
  varpool_node *first = (varpool_node *) (void *) 1;
  varpool_node *var;
  hash_map<varpool_node *, cgraph_node *> single_user_map;

  FOR_EACH_DEFINED_VARIABLE (var)
    if (!var->all_refs_explicit_p ())
      var->aux = BOTTOM;
    else
      {
        /* Enqueue symbol for dataflow.  */
        var->aux = first;
        first = var;
      }

  /* The actual dataflow.  */

  while (first != (void *) 1)
    {
      cgraph_node *user, *orig_user, **f;

      var = first;
      first = (varpool_node *) first->aux;

      f = single_user_map.get (var);
      if (f)
        orig_user = *f;
      else
        orig_user = NULL;
      user = propagate_single_user (var, orig_user, single_user_map);

      gcc_checking_assert (var->aux != BOTTOM);

      /* If user differs, enqueue all references.  */
      if (user != orig_user)
        {
          unsigned int i;
          ipa_ref *ref;

          single_user_map.put (var, user);

          /* Enqueue all aliases for re-processing.  */
          for (i = 0; var->iterate_direct_aliases (i, ref); i++)
            if (!ref->referring->aux)
              {
                ref->referring->aux = first;
                first = dyn_cast <varpool_node *> (ref->referring);
              }
          /* Enqueue all users for re-processing.  */
          for (i = 0; var->iterate_reference (i, ref); i++)
            if (!ref->referred->aux
                && ref->referred->definition
                && is_a <varpool_node *> (ref->referred))
              {
                ref->referred->aux = first;
                first = dyn_cast <varpool_node *> (ref->referred);
              }

          /* If user is BOTTOM, just punt on this var.  */
          if (user == BOTTOM)
            var->aux = BOTTOM;
          else
            var->aux = NULL;
        }
      else
        var->aux = NULL;
    }

  FOR_EACH_DEFINED_VARIABLE (var)
    {
      if (var->aux != BOTTOM)
        {
#ifdef ENABLE_CHECKING
          /* Not having the single user known means that the VAR is
             unreachable.  Either someone forgot to remove unreachable
             variables or the reachability here is wrong.  */

          gcc_assert (single_user_map.get (var));
#endif
          if (dump_file)
            {
              fprintf (dump_file, "Variable %s/%i is used by single function\n",
                       var->name (), var->order);
            }
          var->used_by_single_function = true;
        }
      var->aux = NULL;
    }
  return 0;
}
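
/* To illustrate (hypothetical symbols): a file-local variable referenced
   only from f () converges to f and has used_by_single_function set, while
   one referenced from both f () and g () meets to BOTTOM and keeps the
   flag clear.  */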

namespace {

const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
                      NULL, /* generate_summary */
                      NULL, /* write_summary */
                      NULL, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

bool
pass_ipa_single_use::gate (function *)
{
  return optimize;
}

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_single_use (gcc::context *ctxt)
{
  return new pass_ipa_single_use (ctxt);
}