65c1a668 | 1 | /* Basic IPA optimizations and utilities. |
3aea1f79 | 2 | Copyright (C) 2003-2014 Free Software Foundation, Inc. |
65c1a668 | 3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify it under | |
7 | the terms of the GNU General Public License as published by the Free | |
8c4c00c1 | 8 | Software Foundation; either version 3, or (at your option) any later |
65c1a668 | 9 | version. |
10 | ||
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
65c1a668 | 19 | |
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
23 | #include "tm.h" | |
41a8aa41 | 24 | #include "tree.h" |
9ed99284 | 25 | #include "calls.h" |
26 | #include "stringpool.h" | |
65c1a668 | 27 | #include "cgraph.h" |
f37a5008 | 28 | #include "tree-pass.h" |
bc61cadb | 29 | #include "pointer-set.h" |
30 | #include "gimple-expr.h" | |
a8783bee | 31 | #include "gimplify.h" |
8dfbf71d | 32 | #include "flags.h" |
a53e7471 | 33 | #include "target.h" |
34 | #include "tree-iterator.h" | |
7771d558 | 35 | #include "ipa-utils.h" |
91f0ab48 | 36 | #include "ipa-inline.h" |
9e179a64 | 37 | #include "tree-inline.h" |
38 | #include "profile.h" | |
39 | #include "params.h" | |
ceb49bba | 40 | #include "internal-fn.h" |
41 | #include "tree-ssa-alias.h" | |
42 | #include "gimple.h" | |
43 | #include "dbgcnt.h" | |
65c1a668 | 44 | |
15ca8f90 | 45 | |
46 | /* Return true when NODE has ADDR reference. */ | |
47 | ||
48 | static bool | |
49 | has_addr_references_p (struct cgraph_node *node, | |
50 | void *data ATTRIBUTE_UNUSED) | |
51 | { | |
52 | int i; | |
53 | struct ipa_ref *ref; | |
54 | ||
02774f2d | 55 | for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list, |
15ca8f90 | 56 | i, ref); i++) |
57 | if (ref->use == IPA_REF_ADDR) | |
58 | return true; | |
59 | return false; | |
60 | } | |
61 | ||
21f41380 | 62 | /* Look for all functions inlined to NODE and update their inlined_to pointers |
63 | to INLINED_TO. */ | |
64 | ||
65 | static void | |
66 | update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to) | |
67 | { | |
68 | struct cgraph_edge *e; | |
69 | for (e = node->callees; e; e = e->next_callee) | |
70 | if (e->callee->global.inlined_to) | |
71 | { | |
72 | e->callee->global.inlined_to = inlined_to; | |
73 | update_inlined_to_pointer (e->callee, inlined_to); | |
74 | } | |
75 | } | |
76 | ||
91f0ab48 | 77 | /* Add symtab NODE to queue starting at FIRST. |
9da87cb8 | 78 | |
79 | The queue is linked via AUX pointers and terminated by pointer to 1. | |
80 | We enqueue nodes at two occasions: when we find them reachable or when we find | |
81 | their bodies needed for further clonning. In the second case we mark them | |
82 | by pointer to 2 after processing so they are re-queue when they become | |
83 | reachable. */ | |
6f932b06 | 84 | |
85 | static void | |
452659af | 86 | enqueue_node (symtab_node *node, symtab_node **first, |
91f0ab48 | 87 | struct pointer_set_t *reachable) |
6f932b06 | 88 | { |
9da87cb8 | 89 | /* Node is still in queue; do nothing. */ |
02774f2d | 90 | if (node->aux && node->aux != (void *) 2) |
9da87cb8 | 91 | return; |
92 | /* Node was already processed as unreachable, re-enqueue | |
93 | only if it became reachable now. */ | |
02774f2d | 94 | if (node->aux == (void *)2 && !pointer_set_contains (reachable, node)) |
9da87cb8 | 95 | return; |
02774f2d | 96 | node->aux = *first; |
6f932b06 | 97 | *first = node; |
98 | } | |
99 | ||
/* Process the reference LIST of a symbol: enqueue every referred symbol
   onto the queue at FIRST (so it at least stays in the boundary), and
   mark it fully REACHABLE when its definition must be kept.

   BEFORE_INLINING_P is true when running prior to inlining; in that case
   bodies of extern inline/always_inline candidates must be preserved so
   they can still be inlined later.  */

static void
process_references (struct ipa_ref_list *list,
		    symtab_node **first,
		    bool before_inlining_p,
		    struct pointer_set_t *reachable)
{
  int i;
  struct ipa_ref *ref;
  for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
    {
      symtab_node *node = ref->referred;

      /* Keep the definition reachable when the symbol is defined here and:
	 - it is non-external (or an alias), or
	 - we are before inlining and its body may still be needed
	   (always_inline bodies are only kept before early opts), or
	 - it is a variable whose constructor is useful for folding.  */
      if (node->definition && !node->in_other_partition
	  && ((!DECL_EXTERNAL (node->decl) || node->alias)
	      || (((before_inlining_p
		    && (cgraph_state < CGRAPH_STATE_IPA_SSA
			|| !lookup_attribute ("always_inline",
					      DECL_ATTRIBUTES (node->decl)))))
		  /* We use variable constructors during late compilation for
		     constant folding.  Keep references alive so partitioning
		     knows about potential references.  */
		  || (TREE_CODE (node->decl) == VAR_DECL
		      && flag_wpa
		      && ctor_for_folding (node->decl)
			 != error_mark_node))))
	pointer_set_insert (reachable, node);
      /* Unconditionally enqueue: even symbols not proven reachable must be
	 walked as part of the boundary.  */
      enqueue_node (node, first, reachable);
    }
}
131 | ||
/* EDGE is a polymorphic call.  If BEFORE_INLINING_P is set, mark
   all its potential targets as reachable to permit later inlining if
   devirtualization happens.  After inlining still keep their declarations
   around, so we can devirtualize to a direct call.

   Also try to make trivial devirtualization when no or only one target is
   possible.  */

static void
walk_polymorphic_call_targets (pointer_set_t *reachable_call_targets,
			       struct cgraph_edge *edge,
			       symtab_node **first,
			       pointer_set_t *reachable, bool before_inlining_p)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	 (edge, &final, &cache_token);

  /* CACHE_TOKEN identifies the target list; pointer_set_insert returns
     nonzero when already present, so we walk each list only once.  */
  if (!pointer_set_insert (reachable_call_targets,
			   cache_token))
    {
      for (i = 0; i < targets.length (); i++)
	{
	  struct cgraph_node *n = targets[i];

	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE
	      && type_in_anonymous_namespace_p
		   (method_class_type (TREE_TYPE (n->decl))))
	    continue;

	  /* Prior inlining, keep alive bodies of possible targets for
	     devirtualization.  */
	  if (n->definition
	      && (before_inlining_p
		  && (cgraph_state < CGRAPH_STATE_IPA_SSA
		      || !lookup_attribute ("always_inline",
					    DECL_ATTRIBUTES (n->decl)))))
	    pointer_set_insert (reachable, n);

	  /* Even after inlining we want to keep the possible targets in the
	     boundary, so late passes can still produce direct call even if
	     the chance for inlining is lost.  */
	  enqueue_node (n, first, reachable);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target, *node = edge->caller;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    /* No possible target at all: redirect to __builtin_unreachable
	       so the call can be eliminated.  */
	    target = cgraph_get_create_node
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (dump_enabled_p ())
	    {
	      location_t locus = gimple_location (edge->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s/%i to %s/%i\n",
			       edge->caller->name (), edge->caller->order,
			       target->name (),
			       target->order);
	    }
	  edge = cgraph_make_edge_direct (edge, target);
	  /* With inline summaries computed, update the caller's summary;
	     otherwise redirect the call statement right away.  */
	  if (inline_summary_vec)
	    inline_update_overall_summary (node);
	  else if (edge->call_stmt)
	    cgraph_redirect_edge_call_stmt_to_callee (edge);
	}
    }
}
36a32361 | 216 | |
/* Perform reachability analysis and reclaim all unreachable nodes.

   The algorithm is basically mark&sweep but with some extra refinements:

     - reachable extern inline functions needs special handling; the bodies needs
       to stay in memory until inlining in hope that they will be inlined.
       After inlining we release their bodies and turn them into unanalyzed
       nodes even when they are reachable.

       BEFORE_INLINING_P specify whether we are before or after inlining.

     - virtual functions are kept in callgraph even if they seem unreachable in
       hope calls to them will be devirtualized.

       Again we remove them after inlining.  In late optimization some
       devirtualization may happen, but it is not important since we won't inline
       the call.  In theory early opts and IPA should work out all important cases.

     - virtual clones needs bodies of their origins for later materialization;
       this means that we want to keep the body even if the origin is unreachable
       otherwise.  To avoid origin from sitting in the callgraph and being
       walked by IPA passes, we turn them into unanalyzed nodes with body
       defined.

       We maintain set of function declaration where body needs to stay in
       body_needed_for_clonning

       Inline clones represent special case: their declaration match the
       declaration of origin and cgraph_remove_node already knows how to
       reshape callgraph and preserve body when offline copy of function or
       inline clone is being removed.

     - C++ virtual tables keyed to other unit are represented as DECL_EXTERNAL
       variables with DECL_INITIAL set.  We finalize these and keep reachable
       ones around for constant folding purposes.  After inlining we however
       stop walking their references to let everything static referenced by them
       to be removed when it is otherwise unreachable.

   We maintain queue of both reachable symbols (i.e. defined symbols that needs
   to stay) and symbols that are in boundary (i.e. external symbols referenced
   by reachable symbols or origins of clones).  The queue is represented
   as linked list by AUX pointer terminated by 1.

   At the end we keep all reachable symbols.  For symbols in boundary we always
   turn definition into a declaration, but we may keep function body around
   based on body_needed_for_clonning

   All symbols that enter the queue have AUX pointer non-zero and are in the
   boundary.  Pointer set REACHABLE is used to track reachable symbols.

   Every symbol can be visited twice - once as part of boundary and once
   as real reachable symbol.  enqueue_node needs to decide whether the
   node needs to be re-queued for second processing.  For this purpose
   we set AUX pointer of processed symbols in the boundary to constant 2.

   Returns true when anything was removed or changed; dumps progress to
   FILE when non-NULL.  */

bool
symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  /* Queue terminator is the constant 1; see enqueue_node.  */
  symtab_node *first = (symtab_node *) (void *) 1;
  struct cgraph_node *node, *next;
  varpool_node *vnode, *vnext;
  bool changed = false;
  struct pointer_set_t *reachable = pointer_set_create ();
  struct pointer_set_t *body_needed_for_clonning = pointer_set_create ();
  struct pointer_set_t *reachable_call_targets = pointer_set_create ();

  timevar_push (TV_IPA_UNREACHABLE);
#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif
  if (optimize && flag_devirtualize)
    build_type_inheritance_graph ();
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* AUX pointers must be clean on entry; they encode queue membership.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->aux);
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);
#endif
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  FOR_EACH_FUNCTION (node)
    {
      node->used_as_abstract_origin = false;
      if (node->definition
	  && !node->global.inlined_to
	  && !node->in_other_partition
	  && !cgraph_can_remove_if_no_direct_calls_and_refs_p (node))
	{
	  gcc_assert (!node->global.inlined_to);
	  pointer_set_insert (reachable, node);
	  enqueue_node (node, &first, reachable);
	}
      else
	gcc_assert (!node->aux);
    }

  /* Mark variables that are obviously needed.  */
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (!varpool_can_remove_if_no_refs (vnode)
	&& !vnode->in_other_partition)
      {
	pointer_set_insert (reachable, vnode);
	enqueue_node (vnode, &first, reachable);
      }

  /* Perform reachability analysis.  */
  while (first != (symtab_node *) (void *) 1)
    {
      /* A queued node not in REACHABLE is only in the boundary.  */
      bool in_boundary_p = !pointer_set_contains (reachable, first);
      symtab_node *node = first;

      first = (symtab_node *)first->aux;

      /* If we are processing symbol in boundary, mark its AUX pointer for
	 possible later re-processing in enqueue_node.  */
      if (in_boundary_p)
	node->aux = (void *)2;
      else
	{
	  /* Keep the abstract origin (needed for debug info) alive in the
	     boundary.  */
	  if (TREE_CODE (node->decl) == FUNCTION_DECL
	      && DECL_ABSTRACT_ORIGIN (node->decl))
	    {
	      struct cgraph_node *origin_node
	      = cgraph_get_create_node (DECL_ABSTRACT_ORIGIN (node->decl));
	      origin_node->used_as_abstract_origin = true;
	      enqueue_node (origin_node, &first, reachable);
	    }
	  /* If any symbol in a comdat group is reachable, force
	     all externally visible symbols in the same comdat
	     group to be reachable as well.  Comdat-local symbols
	     can be discarded if all uses were inlined.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!symtab_comdat_local_p (next)
		    && !pointer_set_insert (reachable, next))
		  enqueue_node (next, &first, reachable);
	    }
	  /* Mark references as reachable.  */
	  process_references (&node->ref_list, &first,
			      before_inlining_p, reachable);
	}

      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  /* Mark the callees reachable unless they are direct calls to extern
	     inline functions we decided to not inline.  */
	  if (!in_boundary_p)
	    {
	      struct cgraph_edge *e;
	      /* Keep alive possible targets for devirtualization.  */
	      if (optimize && flag_devirtualize)
		{
		  struct cgraph_edge *next;
		  for (e = cnode->indirect_calls; e; e = next)
		    {
		      /* Fetch NEXT first: walk_polymorphic_call_targets may
			 turn E into a direct edge.  */
		      next = e->next_callee;
		      if (e->indirect_info->polymorphic)
			walk_polymorphic_call_targets (reachable_call_targets,
						       e, &first, reachable,
						       before_inlining_p);
		    }
		}
	      for (e = cnode->callees; e; e = e->next_callee)
		{
		  if (e->callee->definition
		      && !e->callee->in_other_partition
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || e->callee->alias
			  || before_inlining_p))
		    {
		      /* Be sure that we will not optimize out alias target
			 body.  */
		      if (DECL_EXTERNAL (e->callee->decl)
			  && e->callee->alias
			  && before_inlining_p)
			{
			  pointer_set_insert (reachable,
					      cgraph_function_node (e->callee));
			}
		      pointer_set_insert (reachable, e->callee);
		    }
		  enqueue_node (e->callee, &first, reachable);
		}

	      /* When inline clone exists, mark body to be preserved so when removing
		 offline copy of the function we don't kill it.  */
	      if (cnode->global.inlined_to)
		pointer_set_insert (body_needed_for_clonning, cnode->decl);

	      /* For non-inline clones, force their origins to the boundary and ensure
		 that body is not removed.  */
	      while (cnode->clone_of)
		{
		  /* Inline clones share the decl of the origin; only other
		     clones need the origin's body preserved.  */
		  bool noninline = cnode->clone_of->decl != cnode->decl;
		  cnode = cnode->clone_of;
		  if (noninline)
		    {
		      pointer_set_insert (body_needed_for_clonning, cnode->decl);
		      enqueue_node (cnode, &first, reachable);
		    }
		}

	    }
	  /* If any reachable function has simd clones, mark them as
	     reachable as well.  */
	  if (cnode->simd_clones)
	    {
	      cgraph_node *next;
	      for (next = cnode->simd_clones;
		   next;
		   next = next->simdclone->next_clone)
		if (in_boundary_p
		    || !pointer_set_insert (reachable, next))
		  enqueue_node (next, &first, reachable);
	    }
	}
      /* When we see constructor of external variable, keep referred nodes in the
	 boundary.  This will also hold initializers of the external vars NODE
	 refers to.  */
      varpool_node *vnode = dyn_cast <varpool_node *> (node);
      if (vnode
	  && DECL_EXTERNAL (node->decl)
	  && !vnode->alias
	  && in_boundary_p)
	{
	  struct ipa_ref *ref;
	  for (int i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
	    enqueue_node (ref->referred, &first, reachable);
	}
    }

  /* Remove unreachable functions.   */
  for (node = cgraph_first_function (); node; node = next)
    {
      /* Fetch NEXT before possibly removing NODE.  */
      next = cgraph_next_function (node);

      /* If node is not needed at all, remove it.  */
      if (!node->aux)
	{
	  if (file)
	    fprintf (file, " %s/%i", node->name (), node->order);
	  cgraph_remove_node (node);
	  changed = true;
	}
      /* If node is unreachable, remove its body.  */
      else if (!pointer_set_contains (reachable, node))
	{
	  if (!pointer_set_contains (body_needed_for_clonning, node->decl))
	    cgraph_release_function_body (node);
	  else if (!node->clone_of)
	    gcc_assert (in_lto_p || DECL_RESULT (node->decl));
	  if (node->definition)
	    {
	      if (file)
		fprintf (file, " %s/%i", node->name (), node->order);
	      /* Turn the definition into a mere declaration kept in the
		 boundary.  */
	      node->body_removed = true;
	      node->analyzed = false;
	      node->definition = false;
	      node->cpp_implicit_alias = false;
	      node->alias = false;
	      node->thunk.thunk_p = false;
	      node->weakref = false;
	      /* After early inlining we drop always_inline attributes on
		 bodies of functions that are still referenced (have their
		 address taken).  */
	      DECL_ATTRIBUTES (node->decl)
		= remove_attribute ("always_inline",
				    DECL_ATTRIBUTES (node->decl));
	      if (!node->in_other_partition)
		node->local.local = false;
	      cgraph_node_remove_callees (node);
	      symtab_remove_from_same_comdat_group (node);
	      ipa_remove_all_references (&node->ref_list);
	      changed = true;
	    }
	}
      else
	gcc_assert (node->clone_of || !cgraph_function_with_gimple_body_p (node)
		    || in_lto_p || DECL_RESULT (node->decl));
    }

  /* Inline clones might be kept around so their materializing allows further
     cloning.  If the function the clone is inlined into is removed, we need
     to turn it into normal clone.  */
  FOR_EACH_FUNCTION (node)
    {
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      /* Clear the queue markers for the next invocation.  */
      node->aux = NULL;
    }

  /* Remove unreachable variables.  */
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = varpool_first_variable (); vnode; vnode = vnext)
    {
      vnext = varpool_next_variable (vnode);
      if (!vnode->aux
	  /* For can_refer_decl_in_current_unit_p we want to track for
	     all external variables if they are defined in other partition
	     or not.  */
	  && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl)))
	{
	  if (file)
	    fprintf (file, " %s/%i", vnode->name (), vnode->order);
	  varpool_remove_node (vnode);
	  changed = true;
	}
      else if (!pointer_set_contains (reachable, vnode))
	{
	  tree init;
	  if (vnode->definition)
	    {
	      if (file)
		fprintf (file, " %s", vnode->name ());
	      changed = true;
	    }
	  vnode->body_removed = true;
	  vnode->definition = false;
	  vnode->analyzed = false;
	  vnode->aux = NULL;

	  symtab_remove_from_same_comdat_group (vnode);

	  /* Keep body if it may be useful for constant folding.  */
	  if ((init = ctor_for_folding (vnode->decl)) == error_mark_node)
	    varpool_remove_initializer (vnode);
	  else
	    DECL_INITIAL (vnode->decl) = init;
	  ipa_remove_all_references (&vnode->ref_list);
	}
      else
	vnode->aux = NULL;
    }

  pointer_set_destroy (reachable);
  pointer_set_destroy (body_needed_for_clonning);
  pointer_set_destroy (reachable_call_targets);

  /* Now update address_taken flags and try to promote functions to be local.  */
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->address_taken
	&& !node->used_from_other_partition)
      {
	if (!cgraph_for_node_and_aliases (node, has_addr_references_p, NULL, true))
	  {
	    if (file)
	      fprintf (file, " %s", node->name ());
	    node->address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif

  /* If we removed something, perhaps profile could be improved.  */
  if (changed && optimize && inline_edge_summary_vec.exists ())
    FOR_EACH_DEFINED_FUNCTION (node)
      ipa_propagate_frequency (node);

  timevar_pop (TV_IPA_UNREACHABLE);
  return changed;
}
f37a5008 | 606 | |
/* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ
   as needed, also clear EXPLICIT_REFS if the references to given variable
   do not need to be explicit.

   All out-parameters are accumulated (only ever set to true), so callers
   must initialize them before the first call; the walk recurses through
   aliases via IPA_REF_ALIAS.  */

void
process_references (varpool_node *vnode,
		    bool *written, bool *address_taken,
		    bool *read, bool *explicit_refs)
{
  int i;
  struct ipa_ref *ref;

  if (!varpool_all_refs_explicit_p (vnode)
      || TREE_THIS_VOLATILE (vnode->decl))
    *explicit_refs = false;

  /* Stop early once implicit refs are found or all three flags are set;
     nothing further can change the outcome.  */
  for (i = 0; ipa_ref_list_referring_iterate (&vnode->ref_list,
					      i, ref)
	      && *explicit_refs && (!*written || !*address_taken || !*read); i++)
    switch (ref->use)
      {
      case IPA_REF_ADDR:
	*address_taken = true;
	break;
      case IPA_REF_LOAD:
	*read = true;
	break;
      case IPA_REF_STORE:
	*written = true;
	break;
      case IPA_REF_ALIAS:
	/* An alias reference: whatever happens to the alias happens to
	   VNODE, so fold in the alias' references too.  */
	process_references (varpool (ref->referring), written, address_taken,
			    read, explicit_refs);
	break;
      }
}
643 | ||
644 | /* Set TREE_READONLY bit. */ | |
645 | ||
646 | bool | |
647 | set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED) | |
648 | { | |
649 | TREE_READONLY (vnode->decl) = true; | |
650 | return false; | |
651 | } | |
652 | ||
653 | /* Set writeonly bit and clear the initalizer, since it will not be needed. */ | |
654 | ||
655 | bool | |
656 | set_writeonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED) | |
657 | { | |
658 | vnode->writeonly = true; | |
659 | if (optimize) | |
660 | { | |
661 | DECL_INITIAL (vnode->decl) = NULL; | |
662 | if (!vnode->alias) | |
663 | ipa_remove_all_references (&vnode->ref_list); | |
664 | } | |
665 | return false; | |
666 | } | |
667 | ||
668 | /* Clear addressale bit of VNODE. */ | |
669 | ||
670 | bool | |
671 | clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED) | |
672 | { | |
673 | vnode->address_taken = false; | |
674 | TREE_ADDRESSABLE (vnode->decl) = 0; | |
675 | return false; | |
676 | } | |
677 | ||
/* Discover variables that have no longer address taken or that are read only
   and update their flags.

   FIXME: This can not be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.  */

void
ipa_discover_readonly_nonaddressable_vars (void)
{
  varpool_node *vnode;
  if (dump_file)
    fprintf (dump_file, "Clearing variable flags:");
  /* Only consider non-alias variables for which at least one of the three
     flags could still be improved.  */
  FOR_EACH_VARIABLE (vnode)
    if (!vnode->alias
	&& (TREE_ADDRESSABLE (vnode->decl)
	    || !vnode->writeonly
	    || !TREE_READONLY (vnode->decl)))
      {
	bool written = false;
	bool address_taken = false;
	bool read = false;
	bool explicit_refs = true;

	process_references (vnode, &written, &address_taken, &read, &explicit_refs);
	/* If some references are implicit (or the decl is volatile), the
	   summary is not trustworthy; leave the flags alone.  */
	if (!explicit_refs)
	  continue;
	if (!address_taken)
	  {
	    if (TREE_ADDRESSABLE (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (non-addressable)", vnode->name ());
	    varpool_for_node_and_aliases (vnode, clear_addressable_bit, NULL, true);
	  }
	if (!address_taken && !written
	    /* Making variable in explicit section readonly can cause section
	       type conflict.
	       See e.g. gcc.c-torture/compile/pr23237.c */
	    && DECL_SECTION_NAME (vnode->decl) == NULL)
	  {
	    if (!TREE_READONLY (vnode->decl) && dump_file)
	      fprintf (dump_file, " %s (read-only)", vnode->name ());
	    varpool_for_node_and_aliases (vnode, set_readonly_bit, NULL, true);
	  }
	/* Written but never read nor address-taken: the stores are dead.  */
	if (!vnode->writeonly && !read && !address_taken && written)
	  {
	    if (dump_file)
	      fprintf (dump_file, " %s (write-only)", vnode->name ());
	    varpool_for_node_and_aliases (vnode, set_writeonly_bit, NULL, true);
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n");
}
733 | ||
/* Simple IPA pass that frees the inline summary once it is no longer
   needed, releasing the memory it holds.  */

namespace {

/* Pass metadata; fields are positional, keep order in sync with
   struct pass_data.  */
const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "*free_inline_summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      inline_free_summary ();
      return 0;
    }

}; // class pass_ipa_free_inline_summary

} // anon namespace

/* Factory used by the pass manager to instantiate the pass in CTXT.  */

simple_ipa_opt_pass *
make_pass_ipa_free_inline_summary (gcc::context *ctxt)
{
  return new pass_ipa_free_inline_summary (ctxt);
}
775 | ||
a53e7471 | 776 | /* Generate and emit a static constructor or destructor. WHICH must |
bbc26dcc | 777 | be one of 'I' (for a constructor) or 'D' (for a destructor). BODY |
778 | is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the | |
779 | initialization priority for this constructor or destructor. | |
a53e7471 | 780 | |
62510893 | 781 | FINAL specify whether the externally visible name for collect2 should |
782 | be produced. */ | |
783 | ||
784 | static void | |
785 | cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final) | |
a53e7471 | 786 | { |
787 | static int counter = 0; | |
788 | char which_buf[16]; | |
789 | tree decl, name, resdecl; | |
790 | ||
791 | /* The priority is encoded in the constructor or destructor name. | |
792 | collect2 will sort the names and arrange that they are called at | |
793 | program startup. */ | |
62510893 | 794 | if (final) |
795 | sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++); | |
796 | else | |
797 | /* Proudce sane name but one not recognizable by collect2, just for the | |
798 | case we fail to inline the function. */ | |
799 | sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++); | |
a53e7471 | 800 | name = get_file_function_name (which_buf); |
801 | ||
802 | decl = build_decl (input_location, FUNCTION_DECL, name, | |
803 | build_function_type_list (void_type_node, NULL_TREE)); | |
804 | current_function_decl = decl; | |
805 | ||
806 | resdecl = build_decl (input_location, | |
807 | RESULT_DECL, NULL_TREE, void_type_node); | |
808 | DECL_ARTIFICIAL (resdecl) = 1; | |
809 | DECL_RESULT (decl) = resdecl; | |
810 | DECL_CONTEXT (resdecl) = decl; | |
811 | ||
812 | allocate_struct_function (decl, false); | |
813 | ||
814 | TREE_STATIC (decl) = 1; | |
815 | TREE_USED (decl) = 1; | |
816 | DECL_ARTIFICIAL (decl) = 1; | |
817 | DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1; | |
818 | DECL_SAVED_TREE (decl) = body; | |
62510893 | 819 | if (!targetm.have_ctors_dtors && final) |
a53e7471 | 820 | { |
821 | TREE_PUBLIC (decl) = 1; | |
822 | DECL_PRESERVE_P (decl) = 1; | |
823 | } | |
824 | DECL_UNINLINABLE (decl) = 1; | |
825 | ||
826 | DECL_INITIAL (decl) = make_node (BLOCK); | |
827 | TREE_USED (DECL_INITIAL (decl)) = 1; | |
828 | ||
829 | DECL_SOURCE_LOCATION (decl) = input_location; | |
830 | cfun->function_end_locus = input_location; | |
831 | ||
832 | switch (which) | |
833 | { | |
834 | case 'I': | |
835 | DECL_STATIC_CONSTRUCTOR (decl) = 1; | |
836 | decl_init_priority_insert (decl, priority); | |
837 | break; | |
838 | case 'D': | |
839 | DECL_STATIC_DESTRUCTOR (decl) = 1; | |
840 | decl_fini_priority_insert (decl, priority); | |
841 | break; | |
842 | default: | |
843 | gcc_unreachable (); | |
844 | } | |
845 | ||
846 | gimplify_function_tree (decl); | |
847 | ||
848 | cgraph_add_new_function (decl, false); | |
849 | ||
850 | set_cfun (NULL); | |
851 | current_function_decl = NULL; | |
852 | } | |
853 | ||
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.

   This public entry point always passes FINAL == false, i.e. it emits
   the "sub_" spelled name that collect2 does not recognize.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}
a53e7471 | 864 | |
/* A vector of FUNCTION_DECLs declared as static constructors.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static vec<tree> static_dtors;

/* When target does not have ctors and dtors, we call all constructor
   and destructor by special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node)
{
  /* Remember the decl in the appropriate worklist(s).  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    static_ctors.safe_push (node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    static_dtors.safe_push (node->decl);
  node = cgraph_get_node (node->decl);
  /* Lift inline limits — presumably so the recorded bodies can always be
     inlined into the merged wrapper; confirm against build_cdtor.  */
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}
887 | ||
/* Define global constructors/destructor functions for the CDTORS, of
   which they are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      /* Find the next batch [i, j) of constructors/destructors sharing the
	 same initialization priority; PRIORITY is that of entry I.  */
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Build a statement list calling every cdtor in the batch, clearing
	 their cdtor flags so they become ordinary functions.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
952 | ||
953 | /* Comparison function for qsort. P1 and P2 are actually of type | |
954 | "tree *" and point to static constructors. DECL_INIT_PRIORITY is | |
955 | used to determine the sort order. */ | |
956 | ||
957 | static int | |
958 | compare_ctor (const void *p1, const void *p2) | |
959 | { | |
960 | tree f1; | |
961 | tree f2; | |
962 | int priority1; | |
963 | int priority2; | |
964 | ||
965 | f1 = *(const tree *)p1; | |
966 | f2 = *(const tree *)p2; | |
967 | priority1 = DECL_INIT_PRIORITY (f1); | |
968 | priority2 = DECL_INIT_PRIORITY (f2); | |
969 | ||
970 | if (priority1 < priority2) | |
971 | return -1; | |
972 | else if (priority1 > priority2) | |
973 | return 1; | |
974 | else | |
975 | /* Ensure a stable sort. Constructors are executed in backwarding | |
976 | order to make LTO initialize braries first. */ | |
977 | return DECL_UID (f2) - DECL_UID (f1); | |
978 | } | |
979 | ||
980 | /* Comparison function for qsort. P1 and P2 are actually of type | |
981 | "tree *" and point to static destructors. DECL_FINI_PRIORITY is | |
982 | used to determine the sort order. */ | |
983 | ||
984 | static int | |
985 | compare_dtor (const void *p1, const void *p2) | |
986 | { | |
987 | tree f1; | |
988 | tree f2; | |
989 | int priority1; | |
990 | int priority2; | |
991 | ||
992 | f1 = *(const tree *)p1; | |
993 | f2 = *(const tree *)p2; | |
994 | priority1 = DECL_FINI_PRIORITY (f1); | |
995 | priority2 = DECL_FINI_PRIORITY (f2); | |
996 | ||
997 | if (priority1 < priority2) | |
998 | return -1; | |
999 | else if (priority1 > priority2) | |
1000 | return 1; | |
1001 | else | |
1002 | /* Ensure a stable sort. */ | |
1003 | return DECL_UID (f1) - DECL_UID (f2); | |
1004 | } | |
1005 | ||
/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (void)
{
  if (!static_ctors.is_empty ())
    {
      /* Only reachable when the target lacks ctor/dtor support or when
	 merging at LTO time.  */
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_ctors.qsort (compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!static_dtors.is_empty ())
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      static_dtors.qsort (compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}
1027 | ||
/* Look for constructors and destructors and produce function calling them.
   This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at linktime to merge possibly numerous
   constructors/destructors into single function to improve code locality and
   reduce size.  Always returns 0 (no extra TODO flags).  */

static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  /* Collect every defined function flagged as a static cdtor.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    if (DECL_STATIC_CONSTRUCTOR (node->decl)
	|| DECL_STATIC_DESTRUCTOR (node->decl))
      record_cdtor_fn (node);
  /* Emit the merged wrappers, then drop the temporary worklists.  */
  build_cdtor_fns ();
  static_ctors.release ();
  static_dtors.release ();
  return 0;
}
1047 | ||
/* IPA pass wrapper around ipa_cdtor_merge.  */

namespace {

const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into single
     function.  */
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

ipa_opt_pass_d *
make_pass_ipa_cdtor_merge (gcc::context *ctxt)
{
  return new pass_ipa_cdtor_merge (ctxt);
}