]>
Commit | Line | Data |
---|---|---|
65c1a668 | 1 | /* Basic IPA optimizations and utilities. |
3aea1f79 | 2 | Copyright (C) 2003-2014 Free Software Foundation, Inc. |
65c1a668 | 3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify it under | |
7 | the terms of the GNU General Public License as published by the Free | |
8c4c00c1 | 8 | Software Foundation; either version 3, or (at your option) any later |
65c1a668 | 9 | version. |
10 | ||
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
65c1a668 | 19 | |
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
23 | #include "tm.h" | |
41a8aa41 | 24 | #include "tree.h" |
9ed99284 | 25 | #include "calls.h" |
26 | #include "stringpool.h" | |
65c1a668 | 27 | #include "cgraph.h" |
f37a5008 | 28 | #include "tree-pass.h" |
d62dd039 | 29 | #include "hash-map.h" |
bc61cadb | 30 | #include "pointer-set.h" |
31 | #include "gimple-expr.h" | |
a8783bee | 32 | #include "gimplify.h" |
8dfbf71d | 33 | #include "flags.h" |
a53e7471 | 34 | #include "target.h" |
35 | #include "tree-iterator.h" | |
7771d558 | 36 | #include "ipa-utils.h" |
91f0ab48 | 37 | #include "ipa-inline.h" |
9e179a64 | 38 | #include "tree-inline.h" |
39 | #include "profile.h" | |
40 | #include "params.h" | |
ceb49bba | 41 | #include "internal-fn.h" |
42 | #include "tree-ssa-alias.h" | |
43 | #include "gimple.h" | |
44 | #include "dbgcnt.h" | |
65c1a668 | 45 | |
15ca8f90 | 46 | |
47 | /* Return true when NODE has ADDR reference. */ | |
48 | ||
49 | static bool | |
50 | has_addr_references_p (struct cgraph_node *node, | |
51 | void *data ATTRIBUTE_UNUSED) | |
52 | { | |
53 | int i; | |
51ce5652 | 54 | struct ipa_ref *ref = NULL; |
15ca8f90 | 55 | |
51ce5652 | 56 | for (i = 0; node->iterate_referring (i, ref); i++) |
15ca8f90 | 57 | if (ref->use == IPA_REF_ADDR) |
58 | return true; | |
59 | return false; | |
60 | } | |
61 | ||
21f41380 | 62 | /* Look for all functions inlined to NODE and update their inlined_to pointers |
63 | to INLINED_TO. */ | |
64 | ||
65 | static void | |
66 | update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to) | |
67 | { | |
68 | struct cgraph_edge *e; | |
69 | for (e = node->callees; e; e = e->next_callee) | |
70 | if (e->callee->global.inlined_to) | |
71 | { | |
72 | e->callee->global.inlined_to = inlined_to; | |
73 | update_inlined_to_pointer (e->callee, inlined_to); | |
74 | } | |
75 | } | |
76 | ||
91f0ab48 | 77 | /* Add symtab NODE to queue starting at FIRST. |
9da87cb8 | 78 | |
79 | The queue is linked via AUX pointers and terminated by pointer to 1. | |
80 | We enqueue nodes at two occasions: when we find them reachable or when we find | |
81 | their bodies needed for further clonning. In the second case we mark them | |
82 | by pointer to 2 after processing so they are re-queue when they become | |
83 | reachable. */ | |
6f932b06 | 84 | |
85 | static void | |
452659af | 86 | enqueue_node (symtab_node *node, symtab_node **first, |
91f0ab48 | 87 | struct pointer_set_t *reachable) |
6f932b06 | 88 | { |
9da87cb8 | 89 | /* Node is still in queue; do nothing. */ |
02774f2d | 90 | if (node->aux && node->aux != (void *) 2) |
9da87cb8 | 91 | return; |
92 | /* Node was already processed as unreachable, re-enqueue | |
93 | only if it became reachable now. */ | |
02774f2d | 94 | if (node->aux == (void *)2 && !pointer_set_contains (reachable, node)) |
9da87cb8 | 95 | return; |
02774f2d | 96 | node->aux = *first; |
6f932b06 | 97 | *first = node; |
98 | } | |
99 | ||
6f932b06 | 100 | /* Process references. */ |
101 | ||
102 | static void | |
51ce5652 | 103 | process_references (symtab_node *snode, |
452659af | 104 | symtab_node **first, |
da751785 | 105 | bool before_inlining_p, |
106 | struct pointer_set_t *reachable) | |
6f932b06 | 107 | { |
108 | int i; | |
51ce5652 | 109 | struct ipa_ref *ref = NULL; |
110 | for (i = 0; snode->iterate_reference (i, ref); i++) | |
6f932b06 | 111 | { |
452659af | 112 | symtab_node *node = ref->referred; |
15ca8f90 | 113 | |
02774f2d | 114 | if (node->definition && !node->in_other_partition |
115 | && ((!DECL_EXTERNAL (node->decl) || node->alias) | |
f0d26d57 | 116 | || (((before_inlining_p |
117 | && (cgraph_state < CGRAPH_STATE_IPA_SSA | |
118 | || !lookup_attribute ("always_inline", | |
119 | DECL_ATTRIBUTES (node->decl))))) | |
15ca8f90 | 120 | /* We use variable constructors during late complation for |
121 | constant folding. Keep references alive so partitioning | |
122 | knows about potential references. */ | |
02774f2d | 123 | || (TREE_CODE (node->decl) == VAR_DECL |
df8d3e89 | 124 | && flag_wpa |
02774f2d | 125 | && ctor_for_folding (node->decl) |
df8d3e89 | 126 | != error_mark_node)))) |
15ca8f90 | 127 | pointer_set_insert (reachable, node); |
02774f2d | 128 | enqueue_node (node, first, reachable); |
6f932b06 | 129 | } |
130 | } | |
131 | ||
e2fa5d74 | 132 | /* EDGE is an polymorphic call. If BEFORE_INLINING_P is set, mark |
133 | all its potential targets as reachable to permit later inlining if | |
134 | devirtualization happens. After inlining still keep their declarations | |
135 | around, so we can devirtualize to a direct call. | |
136 | ||
137 | Also try to make trivial devirutalization when no or only one target is | |
138 | possible. */ | |
139 | ||
140 | static void | |
141 | walk_polymorphic_call_targets (pointer_set_t *reachable_call_targets, | |
142 | struct cgraph_edge *edge, | |
452659af | 143 | symtab_node **first, |
e2fa5d74 | 144 | pointer_set_t *reachable, bool before_inlining_p) |
145 | { | |
146 | unsigned int i; | |
147 | void *cache_token; | |
148 | bool final; | |
149 | vec <cgraph_node *>targets | |
150 | = possible_polymorphic_call_targets | |
151 | (edge, &final, &cache_token); | |
152 | ||
153 | if (!pointer_set_insert (reachable_call_targets, | |
154 | cache_token)) | |
155 | { | |
9af5ce0c | 156 | for (i = 0; i < targets.length (); i++) |
e2fa5d74 | 157 | { |
158 | struct cgraph_node *n = targets[i]; | |
159 | ||
160 | /* Do not bother to mark virtual methods in anonymous namespace; | |
161 | either we will find use of virtual table defining it, or it is | |
162 | unused. */ | |
02774f2d | 163 | if (TREE_CODE (TREE_TYPE (n->decl)) == METHOD_TYPE |
e2fa5d74 | 164 | && type_in_anonymous_namespace_p |
02774f2d | 165 | (method_class_type (TREE_TYPE (n->decl)))) |
e2fa5d74 | 166 | continue; |
167 | ||
168 | /* Prior inlining, keep alive bodies of possible targets for | |
169 | devirtualization. */ | |
02774f2d | 170 | if (n->definition |
f0d26d57 | 171 | && (before_inlining_p |
172 | && (cgraph_state < CGRAPH_STATE_IPA_SSA | |
173 | || !lookup_attribute ("always_inline", | |
174 | DECL_ATTRIBUTES (n->decl))))) | |
e2fa5d74 | 175 | pointer_set_insert (reachable, n); |
176 | ||
177 | /* Even after inlining we want to keep the possible targets in the | |
178 | boundary, so late passes can still produce direct call even if | |
179 | the chance for inlining is lost. */ | |
02774f2d | 180 | enqueue_node (n, first, reachable); |
e2fa5d74 | 181 | } |
182 | } | |
183 | ||
184 | /* Very trivial devirtualization; when the type is | |
185 | final or anonymous (so we know all its derivation) | |
186 | and there is only one possible virtual call target, | |
187 | make the edge direct. */ | |
188 | if (final) | |
189 | { | |
ceb49bba | 190 | if (targets.length () <= 1 && dbg_cnt (devirt)) |
e2fa5d74 | 191 | { |
749c5b03 | 192 | cgraph_node *target, *node = edge->caller; |
e2fa5d74 | 193 | if (targets.length () == 1) |
194 | target = targets[0]; | |
195 | else | |
196 | target = cgraph_get_create_node | |
197 | (builtin_decl_implicit (BUILT_IN_UNREACHABLE)); | |
198 | ||
ceb49bba | 199 | if (dump_enabled_p ()) |
200 | { | |
4c8041d7 | 201 | location_t locus = gimple_location_safe (edge->call_stmt); |
ceb49bba | 202 | dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus, |
203 | "devirtualizing call in %s/%i to %s/%i\n", | |
204 | edge->caller->name (), edge->caller->order, | |
205 | target->name (), | |
206 | target->order); | |
207 | } | |
e2fa5d74 | 208 | edge = cgraph_make_edge_direct (edge, target); |
6469adde | 209 | if (inline_summary_vec) |
749c5b03 | 210 | inline_update_overall_summary (node); |
6469adde | 211 | else if (edge->call_stmt) |
212 | cgraph_redirect_edge_call_stmt_to_callee (edge); | |
e2fa5d74 | 213 | } |
214 | } | |
215 | } | |
36a32361 | 216 | |
65c1a668 | 217 | /* Perform reachability analysis and reclaim all unreachable nodes. |
91f0ab48 | 218 | |
219 | The algorithm is basically mark&sweep but with some extra refinements: | |
220 | ||
221 | - reachable extern inline functions needs special handling; the bodies needs | |
222 | to stay in memory until inlining in hope that they will be inlined. | |
223 | After inlining we release their bodies and turn them into unanalyzed | |
224 | nodes even when they are reachable. | |
225 | ||
226 | BEFORE_INLINING_P specify whether we are before or after inlining. | |
227 | ||
228 | - virtual functions are kept in callgraph even if they seem unreachable in | |
229 | hope calls to them will be devirtualized. | |
230 | ||
231 | Again we remove them after inlining. In late optimization some | |
6bcfabf2 | 232 | devirtualization may happen, but it is not important since we won't inline |
91f0ab48 | 233 | the call. In theory early opts and IPA should work out all important cases. |
234 | ||
235 | - virtual clones needs bodies of their origins for later materialization; | |
236 | this means that we want to keep the body even if the origin is unreachable | |
237 | otherwise. To avoid origin from sitting in the callgraph and being | |
238 | walked by IPA passes, we turn them into unanalyzed nodes with body | |
239 | defined. | |
240 | ||
241 | We maintain set of function declaration where body needs to stay in | |
242 | body_needed_for_clonning | |
243 | ||
244 | Inline clones represent special case: their declaration match the | |
245 | declaration of origin and cgraph_remove_node already knows how to | |
246 | reshape callgraph and preserve body when offline copy of function or | |
247 | inline clone is being removed. | |
248 | ||
aa419a52 | 249 | - C++ virtual tables keyed to other unit are represented as DECL_EXTERNAL |
250 | variables with DECL_INITIAL set. We finalize these and keep reachable | |
251 | ones around for constant folding purposes. After inlining we however | |
252 | stop walking their references to let everything static referneced by them | |
253 | to be removed when it is otherwise unreachable. | |
254 | ||
91f0ab48 | 255 | We maintain queue of both reachable symbols (i.e. defined symbols that needs |
256 | to stay) and symbols that are in boundary (i.e. external symbols referenced | |
257 | by reachable symbols or origins of clones). The queue is represented | |
258 | as linked list by AUX pointer terminated by 1. | |
259 | ||
6bcfabf2 | 260 | At the end we keep all reachable symbols. For symbols in boundary we always |
91f0ab48 | 261 | turn definition into a declaration, but we may keep function body around |
262 | based on body_needed_for_clonning | |
263 | ||
264 | All symbols that enter the queue have AUX pointer non-zero and are in the | |
265 | boundary. Pointer set REACHABLE is used to track reachable symbols. | |
266 | ||
267 | Every symbol can be visited twice - once as part of boundary and once | |
268 | as real reachable symbol. enqueue_node needs to decide whether the | |
269 | node needs to be re-queued for second processing. For this purpose | |
270 | we set AUX pointer of processed symbols in the boundary to constant 2. */ | |
65c1a668 | 271 | |
272 | bool | |
91f0ab48 | 273 | symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file) |
65c1a668 | 274 | { |
452659af | 275 | symtab_node *first = (symtab_node *) (void *) 1; |
f4ec5ce1 | 276 | struct cgraph_node *node, *next; |
098f44bc | 277 | varpool_node *vnode, *vnext; |
65c1a668 | 278 | bool changed = false; |
da751785 | 279 | struct pointer_set_t *reachable = pointer_set_create (); |
91f0ab48 | 280 | struct pointer_set_t *body_needed_for_clonning = pointer_set_create (); |
e2fa5d74 | 281 | struct pointer_set_t *reachable_call_targets = pointer_set_create (); |
65c1a668 | 282 | |
e2fa5d74 | 283 | timevar_push (TV_IPA_UNREACHABLE); |
4befb9f4 | 284 | if (optimize && flag_devirtualize) |
285 | build_type_inheritance_graph (); | |
3f5be5f4 | 286 | if (file) |
287 | fprintf (file, "\nReclaiming functions:"); | |
65c1a668 | 288 | #ifdef ENABLE_CHECKING |
7c455d87 | 289 | FOR_EACH_FUNCTION (node) |
02774f2d | 290 | gcc_assert (!node->aux); |
7c455d87 | 291 | FOR_EACH_VARIABLE (vnode) |
02774f2d | 292 | gcc_assert (!vnode->aux); |
65c1a668 | 293 | #endif |
7f74ac6b | 294 | /* Mark functions whose bodies are obviously needed. |
295 | This is mostly when they can be referenced externally. Inline clones | |
296 | are special since their declarations are shared with master clone and thus | |
297 | cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them. */ | |
abb1a237 | 298 | FOR_EACH_FUNCTION (node) |
299 | { | |
300 | node->used_as_abstract_origin = false; | |
02774f2d | 301 | if (node->definition |
abb1a237 | 302 | && !node->global.inlined_to |
02774f2d | 303 | && !node->in_other_partition |
e2fa5d74 | 304 | && !cgraph_can_remove_if_no_direct_calls_and_refs_p (node)) |
abb1a237 | 305 | { |
306 | gcc_assert (!node->global.inlined_to); | |
307 | pointer_set_insert (reachable, node); | |
02774f2d | 308 | enqueue_node (node, &first, reachable); |
abb1a237 | 309 | } |
310 | else | |
02774f2d | 311 | gcc_assert (!node->aux); |
abb1a237 | 312 | } |
7f74ac6b | 313 | |
314 | /* Mark variables that are obviously needed. */ | |
91f0ab48 | 315 | FOR_EACH_DEFINED_VARIABLE (vnode) |
b9b49047 | 316 | if (!varpool_can_remove_if_no_refs (vnode) |
02774f2d | 317 | && !vnode->in_other_partition) |
91f0ab48 | 318 | { |
319 | pointer_set_insert (reachable, vnode); | |
02774f2d | 320 | enqueue_node (vnode, &first, reachable); |
91f0ab48 | 321 | } |
322 | ||
323 | /* Perform reachability analysis. */ | |
452659af | 324 | while (first != (symtab_node *) (void *) 1) |
6f932b06 | 325 | { |
91f0ab48 | 326 | bool in_boundary_p = !pointer_set_contains (reachable, first); |
452659af | 327 | symtab_node *node = first; |
65c1a668 | 328 | |
452659af | 329 | first = (symtab_node *)first->aux; |
9da87cb8 | 330 | |
91f0ab48 | 331 | /* If we are processing symbol in boundary, mark its AUX pointer for |
332 | possible later re-processing in enqueue_node. */ | |
333 | if (in_boundary_p) | |
02774f2d | 334 | node->aux = (void *)2; |
91f0ab48 | 335 | else |
336 | { | |
9f0b7378 | 337 | if (TREE_CODE (node->decl) == FUNCTION_DECL |
338 | && DECL_ABSTRACT_ORIGIN (node->decl)) | |
abb1a237 | 339 | { |
340 | struct cgraph_node *origin_node | |
593ce529 | 341 | = cgraph_get_create_node (DECL_ABSTRACT_ORIGIN (node->decl)); |
abb1a237 | 342 | origin_node->used_as_abstract_origin = true; |
02774f2d | 343 | enqueue_node (origin_node, &first, reachable); |
abb1a237 | 344 | } |
91f0ab48 | 345 | /* If any symbol in a comdat group is reachable, force |
468088ac | 346 | all externally visible symbols in the same comdat |
347 | group to be reachable as well. Comdat-local symbols | |
348 | can be discarded if all uses were inlined. */ | |
02774f2d | 349 | if (node->same_comdat_group) |
91f0ab48 | 350 | { |
452659af | 351 | symtab_node *next; |
02774f2d | 352 | for (next = node->same_comdat_group; |
91f0ab48 | 353 | next != node; |
02774f2d | 354 | next = next->same_comdat_group) |
468088ac | 355 | if (!symtab_comdat_local_p (next) |
356 | && !pointer_set_insert (reachable, next)) | |
02774f2d | 357 | enqueue_node (next, &first, reachable); |
91f0ab48 | 358 | } |
359 | /* Mark references as reachable. */ | |
51ce5652 | 360 | process_references (node, &first, before_inlining_p, reachable); |
91f0ab48 | 361 | } |
9da87cb8 | 362 | |
13cbeaac | 363 | if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node)) |
6f932b06 | 364 | { |
91f0ab48 | 365 | /* Mark the callees reachable unless they are direct calls to extern |
366 | inline functions we decided to not inline. */ | |
367 | if (!in_boundary_p) | |
e12f85b7 | 368 | { |
91f0ab48 | 369 | struct cgraph_edge *e; |
e2fa5d74 | 370 | /* Keep alive possible targets for devirtualization. */ |
371 | if (optimize && flag_devirtualize) | |
372 | { | |
373 | struct cgraph_edge *next; | |
374 | for (e = cnode->indirect_calls; e; e = next) | |
375 | { | |
376 | next = e->next_callee; | |
377 | if (e->indirect_info->polymorphic) | |
378 | walk_polymorphic_call_targets (reachable_call_targets, | |
379 | e, &first, reachable, | |
380 | before_inlining_p); | |
381 | } | |
382 | } | |
91f0ab48 | 383 | for (e = cnode->callees; e; e = e->next_callee) |
71ca01ff | 384 | { |
02774f2d | 385 | if (e->callee->definition |
386 | && !e->callee->in_other_partition | |
71ca01ff | 387 | && (!e->inline_failed |
02774f2d | 388 | || !DECL_EXTERNAL (e->callee->decl) |
389 | || e->callee->alias | |
71ca01ff | 390 | || before_inlining_p)) |
89ae81e0 | 391 | { |
392 | /* Be sure that we will not optimize out alias target | |
393 | body. */ | |
394 | if (DECL_EXTERNAL (e->callee->decl) | |
395 | && e->callee->alias | |
396 | && before_inlining_p) | |
397 | { | |
398 | pointer_set_insert (reachable, | |
399 | cgraph_function_node (e->callee)); | |
400 | } | |
401 | pointer_set_insert (reachable, e->callee); | |
402 | } | |
02774f2d | 403 | enqueue_node (e->callee, &first, reachable); |
da751785 | 404 | } |
91f0ab48 | 405 | |
406 | /* When inline clone exists, mark body to be preserved so when removing | |
407 | offline copy of the function we don't kill it. */ | |
b9b49047 | 408 | if (cnode->global.inlined_to) |
02774f2d | 409 | pointer_set_insert (body_needed_for_clonning, cnode->decl); |
61c2c7b1 | 410 | |
b9b49047 | 411 | /* For non-inline clones, force their origins to the boundary and ensure |
412 | that body is not removed. */ | |
413 | while (cnode->clone_of) | |
414 | { | |
02774f2d | 415 | bool noninline = cnode->clone_of->decl != cnode->decl; |
b9b49047 | 416 | cnode = cnode->clone_of; |
417 | if (noninline) | |
418 | { | |
02774f2d | 419 | pointer_set_insert (body_needed_for_clonning, cnode->decl); |
420 | enqueue_node (cnode, &first, reachable); | |
b9b49047 | 421 | } |
6f932b06 | 422 | } |
d09768a4 | 423 | |
424 | } | |
425 | /* If any reachable function has simd clones, mark them as | |
426 | reachable as well. */ | |
427 | if (cnode->simd_clones) | |
428 | { | |
429 | cgraph_node *next; | |
430 | for (next = cnode->simd_clones; | |
431 | next; | |
432 | next = next->simdclone->next_clone) | |
433 | if (in_boundary_p | |
434 | || !pointer_set_insert (reachable, next)) | |
435 | enqueue_node (next, &first, reachable); | |
ee3f5fc0 | 436 | } |
6f932b06 | 437 | } |
aa419a52 | 438 | /* When we see constructor of external variable, keep referred nodes in the |
2dc9831f | 439 | boundary. This will also hold initializers of the external vars NODE |
440 | refers to. */ | |
13cbeaac | 441 | varpool_node *vnode = dyn_cast <varpool_node *> (node); |
2dc9831f | 442 | if (vnode |
02774f2d | 443 | && DECL_EXTERNAL (node->decl) |
444 | && !vnode->alias | |
aa419a52 | 445 | && in_boundary_p) |
2dc9831f | 446 | { |
51ce5652 | 447 | struct ipa_ref *ref = NULL; |
448 | for (int i = 0; node->iterate_reference (i, ref); i++) | |
aa419a52 | 449 | enqueue_node (ref->referred, &first, reachable); |
2dc9831f | 450 | } |
65c1a668 | 451 | } |
452 | ||
91f0ab48 | 453 | /* Remove unreachable functions. */ |
0704fb2e | 454 | for (node = cgraph_first_function (); node; node = next) |
65c1a668 | 455 | { |
0704fb2e | 456 | next = cgraph_next_function (node); |
15ca8f90 | 457 | |
458 | /* If node is not needed at all, remove it. */ | |
02774f2d | 459 | if (!node->aux) |
65c1a668 | 460 | { |
3f5be5f4 | 461 | if (file) |
3083a0b3 | 462 | fprintf (file, " %s/%i", node->name (), node->order); |
91f0ab48 | 463 | cgraph_remove_node (node); |
464 | changed = true; | |
465 | } | |
15ca8f90 | 466 | /* If node is unreachable, remove its body. */ |
91f0ab48 | 467 | else if (!pointer_set_contains (reachable, node)) |
468 | { | |
02774f2d | 469 | if (!pointer_set_contains (body_needed_for_clonning, node->decl)) |
15ca8f90 | 470 | cgraph_release_function_body (node); |
b9b49047 | 471 | else if (!node->clone_of) |
02774f2d | 472 | gcc_assert (in_lto_p || DECL_RESULT (node->decl)); |
473 | if (node->definition) | |
7fb046a4 | 474 | { |
91f0ab48 | 475 | if (file) |
3083a0b3 | 476 | fprintf (file, " %s/%i", node->name (), node->order); |
fa4052b3 | 477 | node->body_removed = true; |
02774f2d | 478 | node->analyzed = false; |
479 | node->definition = false; | |
480 | node->cpp_implicit_alias = false; | |
481 | node->alias = false; | |
95d0bdb9 | 482 | node->thunk.thunk_p = false; |
02774f2d | 483 | node->weakref = false; |
f0d26d57 | 484 | /* After early inlining we drop always_inline attributes on |
485 | bodies of functions that are still referenced (have their | |
486 | address taken). */ | |
487 | DECL_ATTRIBUTES (node->decl) | |
488 | = remove_attribute ("always_inline", | |
489 | DECL_ATTRIBUTES (node->decl)); | |
02774f2d | 490 | if (!node->in_other_partition) |
281dea26 | 491 | node->local.local = false; |
492 | cgraph_node_remove_callees (node); | |
04f65f92 | 493 | symtab_remove_from_same_comdat_group (node); |
51ce5652 | 494 | node->remove_all_references (); |
7fb046a4 | 495 | changed = true; |
496 | } | |
65c1a668 | 497 | } |
b9b49047 | 498 | else |
499 | gcc_assert (node->clone_of || !cgraph_function_with_gimple_body_p (node) | |
02774f2d | 500 | || in_lto_p || DECL_RESULT (node->decl)); |
65c1a668 | 501 | } |
91f0ab48 | 502 | |
503 | /* Inline clones might be kept around so their materializing allows further | |
504 | cloning. If the function the clone is inlined into is removed, we need | |
505 | to turn it into normal cone. */ | |
7c455d87 | 506 | FOR_EACH_FUNCTION (node) |
ccf4ab6b | 507 | { |
ccf4ab6b | 508 | if (node->global.inlined_to |
509 | && !node->callers) | |
510 | { | |
511 | gcc_assert (node->clones); | |
21f41380 | 512 | node->global.inlined_to = NULL; |
513 | update_inlined_to_pointer (node, node); | |
ccf4ab6b | 514 | } |
02774f2d | 515 | node->aux = NULL; |
ccf4ab6b | 516 | } |
8dfbf71d | 517 | |
91f0ab48 | 518 | /* Remove unreachable variables. */ |
8dfbf71d | 519 | if (file) |
91f0ab48 | 520 | fprintf (file, "\nReclaiming variables:"); |
0704fb2e | 521 | for (vnode = varpool_first_variable (); vnode; vnode = vnext) |
6f932b06 | 522 | { |
0704fb2e | 523 | vnext = varpool_next_variable (vnode); |
02774f2d | 524 | if (!vnode->aux |
f1a7feee | 525 | /* For can_refer_decl_in_current_unit_p we want to track for |
526 | all external variables if they are defined in other partition | |
527 | or not. */ | |
02774f2d | 528 | && (!flag_ltrans || !DECL_EXTERNAL (vnode->decl))) |
91f0ab48 | 529 | { |
8dfbf71d | 530 | if (file) |
3083a0b3 | 531 | fprintf (file, " %s/%i", vnode->name (), vnode->order); |
8dfbf71d | 532 | varpool_remove_node (vnode); |
533 | changed = true; | |
6f932b06 | 534 | } |
91f0ab48 | 535 | else if (!pointer_set_contains (reachable, vnode)) |
536 | { | |
df8d3e89 | 537 | tree init; |
02774f2d | 538 | if (vnode->definition) |
91f0ab48 | 539 | { |
540 | if (file) | |
f1c8b4d7 | 541 | fprintf (file, " %s", vnode->name ()); |
91f0ab48 | 542 | changed = true; |
543 | } | |
fa4052b3 | 544 | vnode->body_removed = true; |
02774f2d | 545 | vnode->definition = false; |
546 | vnode->analyzed = false; | |
547 | vnode->aux = NULL; | |
15ca8f90 | 548 | |
04f65f92 | 549 | symtab_remove_from_same_comdat_group (vnode); |
550 | ||
15ca8f90 | 551 | /* Keep body if it may be useful for constant folding. */ |
02774f2d | 552 | if ((init = ctor_for_folding (vnode->decl)) == error_mark_node) |
15ca8f90 | 553 | varpool_remove_initializer (vnode); |
df8d3e89 | 554 | else |
02774f2d | 555 | DECL_INITIAL (vnode->decl) = init; |
51ce5652 | 556 | vnode->remove_all_references (); |
91f0ab48 | 557 | } |
558 | else | |
02774f2d | 559 | vnode->aux = NULL; |
6f932b06 | 560 | } |
8dfbf71d | 561 | |
91f0ab48 | 562 | pointer_set_destroy (reachable); |
563 | pointer_set_destroy (body_needed_for_clonning); | |
e2fa5d74 | 564 | pointer_set_destroy (reachable_call_targets); |
8dfbf71d | 565 | |
91f0ab48 | 566 | /* Now update address_taken flags and try to promote functions to be local. */ |
cdedc740 | 567 | if (file) |
568 | fprintf (file, "\nClearing address taken flags:"); | |
7c455d87 | 569 | FOR_EACH_DEFINED_FUNCTION (node) |
02774f2d | 570 | if (node->address_taken |
571 | && !node->used_from_other_partition) | |
cdedc740 | 572 | { |
36a32361 | 573 | if (!cgraph_for_node_and_aliases (node, has_addr_references_p, NULL, true)) |
cdedc740 | 574 | { |
575 | if (file) | |
f1c8b4d7 | 576 | fprintf (file, " %s", node->name ()); |
02774f2d | 577 | node->address_taken = false; |
8dfbf71d | 578 | changed = true; |
579 | if (cgraph_local_node_p (node)) | |
580 | { | |
581 | node->local.local = true; | |
582 | if (file) | |
583 | fprintf (file, " (local)"); | |
584 | } | |
cdedc740 | 585 | } |
586 | } | |
c7b2cc59 | 587 | if (file) |
588 | fprintf (file, "\n"); | |
6f932b06 | 589 | |
09a2e412 | 590 | #ifdef ENABLE_CHECKING |
3e7775f6 | 591 | verify_symtab (); |
09a2e412 | 592 | #endif |
34e5cced | 593 | |
f8bfd7f7 | 594 | /* If we removed something, perhaps profile could be improved. */ |
f1f41a6c | 595 | if (changed && optimize && inline_edge_summary_vec.exists ()) |
f8bfd7f7 | 596 | FOR_EACH_DEFINED_FUNCTION (node) |
6eaf903b | 597 | ipa_propagate_frequency (node); |
f8bfd7f7 | 598 | |
e2fa5d74 | 599 | timevar_pop (TV_IPA_UNREACHABLE); |
65c1a668 | 600 | return changed; |
601 | } | |
f37a5008 | 602 | |
703ad42c | 603 | /* Process references to VNODE and set flags WRITTEN, ADDRESS_TAKEN, READ |
604 | as needed, also clear EXPLICIT_REFS if the references to given variable | |
605 | do not need to be explicit. */ | |
606 | ||
607 | void | |
608 | process_references (varpool_node *vnode, | |
609 | bool *written, bool *address_taken, | |
610 | bool *read, bool *explicit_refs) | |
611 | { | |
612 | int i; | |
613 | struct ipa_ref *ref; | |
614 | ||
615 | if (!varpool_all_refs_explicit_p (vnode) | |
616 | || TREE_THIS_VOLATILE (vnode->decl)) | |
617 | *explicit_refs = false; | |
618 | ||
51ce5652 | 619 | for (i = 0; vnode->iterate_referring (i, ref) |
703ad42c | 620 | && *explicit_refs && (!*written || !*address_taken || !*read); i++) |
621 | switch (ref->use) | |
622 | { | |
623 | case IPA_REF_ADDR: | |
624 | *address_taken = true; | |
625 | break; | |
626 | case IPA_REF_LOAD: | |
627 | *read = true; | |
628 | break; | |
629 | case IPA_REF_STORE: | |
630 | *written = true; | |
631 | break; | |
632 | case IPA_REF_ALIAS: | |
633 | process_references (varpool (ref->referring), written, address_taken, | |
634 | read, explicit_refs); | |
635 | break; | |
636 | } | |
637 | } | |
638 | ||
639 | /* Set TREE_READONLY bit. */ | |
640 | ||
641 | bool | |
642 | set_readonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED) | |
643 | { | |
644 | TREE_READONLY (vnode->decl) = true; | |
645 | return false; | |
646 | } | |
647 | ||
648 | /* Set writeonly bit and clear the initalizer, since it will not be needed. */ | |
649 | ||
650 | bool | |
651 | set_writeonly_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED) | |
652 | { | |
653 | vnode->writeonly = true; | |
654 | if (optimize) | |
655 | { | |
656 | DECL_INITIAL (vnode->decl) = NULL; | |
657 | if (!vnode->alias) | |
51ce5652 | 658 | vnode->remove_all_references (); |
703ad42c | 659 | } |
660 | return false; | |
661 | } | |
662 | ||
663 | /* Clear addressale bit of VNODE. */ | |
664 | ||
665 | bool | |
666 | clear_addressable_bit (varpool_node *vnode, void *data ATTRIBUTE_UNUSED) | |
667 | { | |
668 | vnode->address_taken = false; | |
669 | TREE_ADDRESSABLE (vnode->decl) = 0; | |
670 | return false; | |
671 | } | |
672 | ||
8dfbf71d | 673 | /* Discover variables that have no longer address taken or that are read only |
674 | and update their flags. | |
675 | ||
676 | FIXME: This can not be done in between gimplify and omp_expand since | |
677 | readonly flag plays role on what is shared and what is not. Currently we do | |
023a28e1 | 678 | this transformation as part of whole program visibility and re-do at |
679 | ipa-reference pass (to take into account clonning), but it would | |
680 | make sense to do it before early optimizations. */ | |
8dfbf71d | 681 | |
682 | void | |
683 | ipa_discover_readonly_nonaddressable_vars (void) | |
684 | { | |
098f44bc | 685 | varpool_node *vnode; |
8dfbf71d | 686 | if (dump_file) |
687 | fprintf (dump_file, "Clearing variable flags:"); | |
7c455d87 | 688 | FOR_EACH_VARIABLE (vnode) |
703ad42c | 689 | if (!vnode->alias |
02774f2d | 690 | && (TREE_ADDRESSABLE (vnode->decl) |
703ad42c | 691 | || !vnode->writeonly |
02774f2d | 692 | || !TREE_READONLY (vnode->decl))) |
8dfbf71d | 693 | { |
694 | bool written = false; | |
695 | bool address_taken = false; | |
703ad42c | 696 | bool read = false; |
697 | bool explicit_refs = true; | |
698 | ||
699 | process_references (vnode, &written, &address_taken, &read, &explicit_refs); | |
700 | if (!explicit_refs) | |
701 | continue; | |
702 | if (!address_taken) | |
8dfbf71d | 703 | { |
703ad42c | 704 | if (TREE_ADDRESSABLE (vnode->decl) && dump_file) |
4206bfac | 705 | fprintf (dump_file, " %s (non-addressable)", vnode->name ()); |
703ad42c | 706 | varpool_for_node_and_aliases (vnode, clear_addressable_bit, NULL, true); |
8dfbf71d | 707 | } |
703ad42c | 708 | if (!address_taken && !written |
8dfbf71d | 709 | /* Making variable in explicit section readonly can cause section |
710 | type conflict. | |
711 | See e.g. gcc.c-torture/compile/pr23237.c */ | |
71e19e54 | 712 | && vnode->get_section () == NULL) |
8dfbf71d | 713 | { |
703ad42c | 714 | if (!TREE_READONLY (vnode->decl) && dump_file) |
f1c8b4d7 | 715 | fprintf (dump_file, " %s (read-only)", vnode->name ()); |
703ad42c | 716 | varpool_for_node_and_aliases (vnode, set_readonly_bit, NULL, true); |
717 | } | |
4206bfac | 718 | if (!vnode->writeonly && !read && !address_taken && written) |
703ad42c | 719 | { |
720 | if (dump_file) | |
721 | fprintf (dump_file, " %s (write-only)", vnode->name ()); | |
722 | varpool_for_node_and_aliases (vnode, set_writeonly_bit, NULL, true); | |
8dfbf71d | 723 | } |
724 | } | |
725 | if (dump_file) | |
726 | fprintf (dump_file, "\n"); | |
727 | } | |
728 | ||
/* Free inline summary.  */

namespace {

/* Pass descriptor for the inline-summary-freeing pass below.  */
const pass_data pass_data_ipa_free_inline_summary =
{
  SIMPLE_IPA_PASS, /* type */
  "*free_inline_summary", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_INLINE_SUMMARY, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Simple IPA pass releasing the inline summaries once they are no
   longer needed, reclaiming their memory.  */
class pass_ipa_free_inline_summary : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_inline_summary (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_inline_summary, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
  {
    inline_free_summary ();
    return 0;
  }

}; // class pass_ipa_free_inline_summary

} // anon namespace
763 | ||
764 | simple_ipa_opt_pass * | |
765 | make_pass_ipa_free_inline_summary (gcc::context *ctxt) | |
766 | { | |
767 | return new pass_ipa_free_inline_summary (ctxt); | |
768 | } | |
769 | ||
a53e7471 | 770 | /* Generate and emit a static constructor or destructor. WHICH must |
bbc26dcc | 771 | be one of 'I' (for a constructor) or 'D' (for a destructor). BODY |
772 | is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the | |
773 | initialization priority for this constructor or destructor. | |
a53e7471 | 774 | |
62510893 | 775 | FINAL specify whether the externally visible name for collect2 should |
776 | be produced. */ | |
777 | ||
778 | static void | |
779 | cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final) | |
a53e7471 | 780 | { |
781 | static int counter = 0; | |
782 | char which_buf[16]; | |
783 | tree decl, name, resdecl; | |
784 | ||
785 | /* The priority is encoded in the constructor or destructor name. | |
786 | collect2 will sort the names and arrange that they are called at | |
787 | program startup. */ | |
62510893 | 788 | if (final) |
789 | sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++); | |
790 | else | |
791 | /* Proudce sane name but one not recognizable by collect2, just for the | |
792 | case we fail to inline the function. */ | |
793 | sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++); | |
a53e7471 | 794 | name = get_file_function_name (which_buf); |
795 | ||
796 | decl = build_decl (input_location, FUNCTION_DECL, name, | |
797 | build_function_type_list (void_type_node, NULL_TREE)); | |
798 | current_function_decl = decl; | |
799 | ||
800 | resdecl = build_decl (input_location, | |
801 | RESULT_DECL, NULL_TREE, void_type_node); | |
802 | DECL_ARTIFICIAL (resdecl) = 1; | |
803 | DECL_RESULT (decl) = resdecl; | |
804 | DECL_CONTEXT (resdecl) = decl; | |
805 | ||
806 | allocate_struct_function (decl, false); | |
807 | ||
808 | TREE_STATIC (decl) = 1; | |
809 | TREE_USED (decl) = 1; | |
810 | DECL_ARTIFICIAL (decl) = 1; | |
811 | DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1; | |
812 | DECL_SAVED_TREE (decl) = body; | |
62510893 | 813 | if (!targetm.have_ctors_dtors && final) |
a53e7471 | 814 | { |
815 | TREE_PUBLIC (decl) = 1; | |
816 | DECL_PRESERVE_P (decl) = 1; | |
817 | } | |
818 | DECL_UNINLINABLE (decl) = 1; | |
819 | ||
820 | DECL_INITIAL (decl) = make_node (BLOCK); | |
821 | TREE_USED (DECL_INITIAL (decl)) = 1; | |
822 | ||
823 | DECL_SOURCE_LOCATION (decl) = input_location; | |
824 | cfun->function_end_locus = input_location; | |
825 | ||
826 | switch (which) | |
827 | { | |
828 | case 'I': | |
829 | DECL_STATIC_CONSTRUCTOR (decl) = 1; | |
830 | decl_init_priority_insert (decl, priority); | |
831 | break; | |
832 | case 'D': | |
833 | DECL_STATIC_DESTRUCTOR (decl) = 1; | |
834 | decl_fini_priority_insert (decl, priority); | |
835 | break; | |
836 | default: | |
837 | gcc_unreachable (); | |
838 | } | |
839 | ||
840 | gimplify_function_tree (decl); | |
841 | ||
842 | cgraph_add_new_function (decl, false); | |
843 | ||
844 | set_cfun (NULL); | |
845 | current_function_decl = NULL; | |
846 | } | |
847 | ||
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.

   Convenience wrapper around cgraph_build_static_cdtor_1 that does not
   produce an externally visible (collect2-recognizable) name.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}
a53e7471 | 858 | |
/* A vector of FUNCTION_DECLs declared as static constructors.
   Collected by record_cdtor_fn, consumed by build_cdtor_fns.  */
static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static vec<tree> static_dtors;
a53e7471 | 863 | |
864 | /* When target does not have ctors and dtors, we call all constructor | |
865 | and destructor by special initialization/destruction function | |
866 | recognized by collect2. | |
867 | ||
868 | When we are going to build this function, collect all constructors and | |
869 | destructors and turn them into normal functions. */ | |
870 | ||
871 | static void | |
872 | record_cdtor_fn (struct cgraph_node *node) | |
873 | { | |
02774f2d | 874 | if (DECL_STATIC_CONSTRUCTOR (node->decl)) |
875 | static_ctors.safe_push (node->decl); | |
876 | if (DECL_STATIC_DESTRUCTOR (node->decl)) | |
877 | static_dtors.safe_push (node->decl); | |
878 | node = cgraph_get_node (node->decl); | |
879 | DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1; | |
a53e7471 | 880 | } |
881 | ||
/* Define global constructors/destructor functions for the CDTORS, of
   which they are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, vec<tree> cdtors)
{
  size_t i,j;
  size_t len = cdtors.length ();

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      /* Advance J past the run [I, J) of cdtors that share the same
	 priority; CDTORS is already sorted by priority.  */
      do
	{
	  priority_type p;
	  fn = cdtors[j];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing.  */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = cdtors[i];
	  /* Each member of the batch becomes a plain function called
	     from the merged cdtor built below.  */
	  call = build_call_expr (fn, 0);
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}
946 | ||
947 | /* Comparison function for qsort. P1 and P2 are actually of type | |
948 | "tree *" and point to static constructors. DECL_INIT_PRIORITY is | |
949 | used to determine the sort order. */ | |
950 | ||
951 | static int | |
952 | compare_ctor (const void *p1, const void *p2) | |
953 | { | |
954 | tree f1; | |
955 | tree f2; | |
956 | int priority1; | |
957 | int priority2; | |
958 | ||
959 | f1 = *(const tree *)p1; | |
960 | f2 = *(const tree *)p2; | |
961 | priority1 = DECL_INIT_PRIORITY (f1); | |
962 | priority2 = DECL_INIT_PRIORITY (f2); | |
963 | ||
964 | if (priority1 < priority2) | |
965 | return -1; | |
966 | else if (priority1 > priority2) | |
967 | return 1; | |
968 | else | |
969 | /* Ensure a stable sort. Constructors are executed in backwarding | |
970 | order to make LTO initialize braries first. */ | |
971 | return DECL_UID (f2) - DECL_UID (f1); | |
972 | } | |
973 | ||
974 | /* Comparison function for qsort. P1 and P2 are actually of type | |
975 | "tree *" and point to static destructors. DECL_FINI_PRIORITY is | |
976 | used to determine the sort order. */ | |
977 | ||
978 | static int | |
979 | compare_dtor (const void *p1, const void *p2) | |
980 | { | |
981 | tree f1; | |
982 | tree f2; | |
983 | int priority1; | |
984 | int priority2; | |
985 | ||
986 | f1 = *(const tree *)p1; | |
987 | f2 = *(const tree *)p2; | |
988 | priority1 = DECL_FINI_PRIORITY (f1); | |
989 | priority2 = DECL_FINI_PRIORITY (f2); | |
990 | ||
991 | if (priority1 < priority2) | |
992 | return -1; | |
993 | else if (priority1 > priority2) | |
994 | return 1; | |
995 | else | |
996 | /* Ensure a stable sort. */ | |
997 | return DECL_UID (f1) - DECL_UID (f2); | |
998 | } | |
999 | ||
1000 | /* Generate functions to call static constructors and destructors | |
1001 | for targets that do not support .ctors/.dtors sections. These | |
1002 | functions have magic names which are detected by collect2. */ | |
1003 | ||
1004 | static void | |
1005 | build_cdtor_fns (void) | |
1006 | { | |
f1f41a6c | 1007 | if (!static_ctors.is_empty ()) |
a53e7471 | 1008 | { |
1009 | gcc_assert (!targetm.have_ctors_dtors || in_lto_p); | |
f1f41a6c | 1010 | static_ctors.qsort (compare_ctor); |
d2435fb0 | 1011 | build_cdtor (/*ctor_p=*/true, static_ctors); |
a53e7471 | 1012 | } |
1013 | ||
f1f41a6c | 1014 | if (!static_dtors.is_empty ()) |
a53e7471 | 1015 | { |
1016 | gcc_assert (!targetm.have_ctors_dtors || in_lto_p); | |
f1f41a6c | 1017 | static_dtors.qsort (compare_dtor); |
d2435fb0 | 1018 | build_cdtor (/*ctor_p=*/false, static_dtors); |
a53e7471 | 1019 | } |
1020 | } | |
1021 | ||
1022 | /* Look for constructors and destructors and produce function calling them. | |
1023 | This is needed for targets not supporting ctors or dtors, but we perform the | |
9d75589a | 1024 | transformation also at linktime to merge possibly numerous |
a53e7471 | 1025 | constructors/destructors into single function to improve code locality and |
1026 | reduce size. */ | |
1027 | ||
1028 | static unsigned int | |
1029 | ipa_cdtor_merge (void) | |
1030 | { | |
1031 | struct cgraph_node *node; | |
7c455d87 | 1032 | FOR_EACH_DEFINED_FUNCTION (node) |
02774f2d | 1033 | if (DECL_STATIC_CONSTRUCTOR (node->decl) |
1034 | || DECL_STATIC_DESTRUCTOR (node->decl)) | |
a53e7471 | 1035 | record_cdtor_fn (node); |
1036 | build_cdtor_fns (); | |
f1f41a6c | 1037 | static_ctors.release (); |
1038 | static_dtors.release (); | |
a53e7471 | 1039 | return 0; |
1040 | } | |
1041 | ||
namespace {

/* Pass descriptor for the ctor/dtor merging pass below.  */
const pass_data pass_data_ipa_cdtor_merge =
{
  IPA_PASS, /* type */
  "cdtor", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass merging static constructors/destructors; see ipa_cdtor_merge
   for the actual transformation.  No summaries are streamed.  */
class pass_ipa_cdtor_merge : public ipa_opt_pass_d
{
public:
  pass_ipa_cdtor_merge (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_cdtor_merge, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_cdtor_merge (); }

}; // class pass_ipa_cdtor_merge

bool
pass_ipa_cdtor_merge::gate (function *)
{
  /* Perform the pass when we have no ctors/dtors support
     or at LTO time to merge multiple constructors into single
     function.  */
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}

} // anon namespace
1089 | ||
1090 | ipa_opt_pass_d * | |
1091 | make_pass_ipa_cdtor_merge (gcc::context *ctxt) | |
1092 | { | |
1093 | return new pass_ipa_cdtor_merge (ctxt); | |
1094 | } | |
3f1f2be0 | 1095 | |
/* Invalid pointer representing BOTTOM for single user dataflow.  The
   constant 2 can never collide with a real cgraph_node address.  */
#define BOTTOM ((cgraph_node *)(size_t) 2)
1098 | ||
1099 | /* Meet operation for single user dataflow. | |
1100 | Here we want to associate variables with sigle function that may access it. | |
1101 | ||
1102 | FUNCTION is current single user of a variable, VAR is variable that uses it. | |
1103 | Latttice is stored in SINGLE_USER_MAP. | |
1104 | ||
1105 | We represent: | |
1106 | - TOP by no entry in SIGNLE_USER_MAP | |
1107 | - BOTTOM by BOTTOM in AUX pointer (to save lookups) | |
1108 | - known single user by cgraph pointer in SINGLE_USER_MAP. */ | |
1109 | ||
1110 | cgraph_node * | |
1111 | meet (cgraph_node *function, varpool_node *var, | |
d62dd039 | 1112 | hash_map<varpool_node *, cgraph_node *> &single_user_map) |
3f1f2be0 | 1113 | { |
1114 | struct cgraph_node *user, **f; | |
1115 | ||
1116 | if (var->aux == BOTTOM) | |
1117 | return BOTTOM; | |
1118 | ||
d62dd039 | 1119 | f = single_user_map.get (var); |
3f1f2be0 | 1120 | if (!f) |
1121 | return function; | |
1122 | user = *f; | |
1123 | if (!function) | |
1124 | return user; | |
1125 | else if (function != user) | |
1126 | return BOTTOM; | |
1127 | else | |
1128 | return function; | |
1129 | } | |
1130 | ||
1131 | /* Propagation step of single-use dataflow. | |
1132 | ||
1133 | Check all uses of VNODE and see if they are used by single function FUNCTION. | |
1134 | SINGLE_USER_MAP represents the dataflow lattice. */ | |
1135 | ||
1136 | cgraph_node * | |
1137 | propagate_single_user (varpool_node *vnode, cgraph_node *function, | |
d62dd039 | 1138 | hash_map<varpool_node *, cgraph_node *> &single_user_map) |
3f1f2be0 | 1139 | { |
1140 | int i; | |
1141 | struct ipa_ref *ref; | |
1142 | ||
1143 | gcc_assert (!vnode->externally_visible); | |
1144 | ||
1145 | /* If node is an alias, first meet with its target. */ | |
1146 | if (vnode->alias) | |
1147 | function = meet (function, varpool_alias_target (vnode), single_user_map); | |
1148 | ||
1149 | /* Check all users and see if they correspond to a single function. */ | |
1150 | for (i = 0; | |
51ce5652 | 1151 | vnode->iterate_referring (i, ref) |
3f1f2be0 | 1152 | && function != BOTTOM; i++) |
1153 | { | |
1154 | struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring); | |
1155 | if (cnode) | |
1156 | { | |
1157 | if (cnode->global.inlined_to) | |
1158 | cnode = cnode->global.inlined_to; | |
1159 | if (!function) | |
1160 | function = cnode; | |
1161 | else if (function != cnode) | |
1162 | function = BOTTOM; | |
1163 | } | |
1164 | else | |
1165 | function = meet (function, dyn_cast <varpool_node *> (ref->referring), single_user_map); | |
1166 | } | |
1167 | return function; | |
1168 | } | |
1169 | ||
1170 | /* Pass setting used_by_single_function flag. | |
1171 | This flag is set on variable when there is only one function that may possibly | |
1172 | referr to it. */ | |
1173 | ||
1174 | static unsigned int | |
1175 | ipa_single_use (void) | |
1176 | { | |
1177 | varpool_node *first = (varpool_node *) (void *) 1; | |
1178 | varpool_node *var; | |
d62dd039 | 1179 | hash_map<varpool_node *, cgraph_node *> single_user_map; |
3f1f2be0 | 1180 | |
1181 | FOR_EACH_DEFINED_VARIABLE (var) | |
1182 | if (!varpool_all_refs_explicit_p (var)) | |
1183 | var->aux = BOTTOM; | |
1184 | else | |
1185 | { | |
1186 | /* Enqueue symbol for dataflow. */ | |
1187 | var->aux = first; | |
1188 | first = var; | |
1189 | } | |
1190 | ||
1191 | /* The actual dataflow. */ | |
1192 | ||
1193 | while (first != (void *) 1) | |
1194 | { | |
1195 | cgraph_node *user, *orig_user, **f; | |
1196 | ||
1197 | var = first; | |
1198 | first = (varpool_node *)first->aux; | |
1199 | ||
d62dd039 | 1200 | f = single_user_map.get (var); |
3f1f2be0 | 1201 | if (f) |
1202 | orig_user = *f; | |
1203 | else | |
1204 | orig_user = NULL; | |
1205 | user = propagate_single_user (var, orig_user, single_user_map); | |
1206 | ||
1207 | gcc_checking_assert (var->aux != BOTTOM); | |
1208 | ||
1209 | /* If user differs, enqueue all references. */ | |
1210 | if (user != orig_user) | |
1211 | { | |
1212 | unsigned int i; | |
1213 | ipa_ref *ref; | |
1214 | ||
d62dd039 | 1215 | single_user_map.put (var, user); |
3f1f2be0 | 1216 | |
1217 | /* Enqueue all aliases for re-processing. */ | |
1218 | for (i = 0; | |
51ce5652 | 1219 | var->iterate_referring (i, ref); i++) |
3f1f2be0 | 1220 | if (ref->use == IPA_REF_ALIAS |
1221 | && !ref->referring->aux) | |
1222 | { | |
1223 | ref->referring->aux = first; | |
1224 | first = dyn_cast <varpool_node *> (ref->referring); | |
1225 | } | |
1226 | /* Enqueue all users for re-processing. */ | |
1227 | for (i = 0; | |
51ce5652 | 1228 | var->iterate_reference (i, ref); i++) |
3f1f2be0 | 1229 | if (!ref->referred->aux |
1230 | && ref->referred->definition | |
1231 | && is_a <varpool_node *> (ref->referred)) | |
1232 | { | |
1233 | ref->referred->aux = first; | |
1234 | first = dyn_cast <varpool_node *> (ref->referred); | |
1235 | } | |
1236 | ||
1237 | /* If user is BOTTOM, just punt on this var. */ | |
1238 | if (user == BOTTOM) | |
1239 | var->aux = BOTTOM; | |
1240 | else | |
1241 | var->aux = NULL; | |
1242 | } | |
1243 | else | |
1244 | var->aux = NULL; | |
1245 | } | |
1246 | ||
1247 | FOR_EACH_DEFINED_VARIABLE (var) | |
1248 | { | |
1249 | if (var->aux != BOTTOM) | |
1250 | { | |
1251 | #ifdef ENABLE_CHECKING | |
d62dd039 | 1252 | if (!single_user_map.get (var)) |
1253 | gcc_assert (single_user_map.get (var)); | |
3f1f2be0 | 1254 | #endif |
1255 | if (dump_file) | |
1256 | { | |
1257 | fprintf (dump_file, "Variable %s/%i is used by single function\n", | |
1258 | var->name (), var->order); | |
1259 | } | |
1260 | var->used_by_single_function = true; | |
1261 | } | |
1262 | var->aux = NULL; | |
1263 | } | |
1264 | return 0; | |
1265 | } | |
1266 | ||
namespace {

/* Pass descriptor for the single-use discovery pass below.  */
const pass_data pass_data_ipa_single_use =
{
  IPA_PASS, /* type */
  "single-use", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CGRAPHOPT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* IPA pass computing the used_by_single_function flag; see
   ipa_single_use for the dataflow.  No summaries are streamed.  */
class pass_ipa_single_use : public ipa_opt_pass_d
{
public:
  pass_ipa_single_use (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_single_use, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *) { return ipa_single_use (); }

}; // class pass_ipa_single_use

bool
pass_ipa_single_use::gate (function *)
{
  /* The flag is only useful to the optimizers.  */
  return optimize;
}

} // anon namespace
1311 | ||
1312 | ipa_opt_pass_d * | |
1313 | make_pass_ipa_single_use (gcc::context *ctxt) | |
1314 | { | |
1315 | return new pass_ipa_single_use (ctxt); | |
1316 | } |