]>
Commit | Line | Data |
---|---|---|
f7d118a9 | 1 | /* Utilities for ipa analysis. |
cfaf579d | 2 | Copyright (C) 2005, 2007, 2008 Free Software Foundation, Inc. |
f7d118a9 | 3 | Contributed by Kenneth Zadeck <zadeck@naturalbridge.com> |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it under | |
8 | the terms of the GNU General Public License as published by the Free | |
8c4c00c1 | 9 | Software Foundation; either version 3, or (at your option) any later |
f7d118a9 | 10 | version. |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ | |
f7d118a9 | 20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "tm.h" | |
25 | #include "tree.h" | |
26 | #include "tree-flow.h" | |
27 | #include "tree-inline.h" | |
b9ed1410 | 28 | #include "dumpfile.h" |
f7d118a9 | 29 | #include "langhooks.h" |
30 | #include "pointer-set.h" | |
5863771b | 31 | #include "splay-tree.h" |
f7d118a9 | 32 | #include "ggc.h" |
33 | #include "ipa-utils.h" | |
34 | #include "ipa-reference.h" | |
75a70cf9 | 35 | #include "gimple.h" |
f7d118a9 | 36 | #include "cgraph.h" |
f7d118a9 | 37 | #include "flags.h" |
f7d118a9 | 38 | #include "diagnostic.h" |
39 | #include "langhooks.h" | |
40 | ||
/* Debugging function for postorder and inorder code.  NOTE is a string
   that is printed before the nodes are printed.  ORDER is an array of
   cgraph_nodes that has COUNT useful nodes in it.  */

void
ipa_print_order (FILE* out,
		 const char * note,
		 struct cgraph_node** order,
		 int count)
{
  int i;
  fprintf (out, "\n\n ordered call graph: %s\n", note);

  for (i = count - 1; i >= 0; i--)
    /* Dump each node to OUT, the stream the caller asked for — not the
       global dump_file, which may be a different stream or NULL.  */
    dump_cgraph_node (out, order[i]);
  fprintf (out, "\n");
  fflush (out);
}
59 | ||
60 | \f | |
/* Shared state threaded through the recursive invocations of searchc,
   Tarjan's strongly connected component finder below.  It stands in for
   the nested-function closure C does not have.  */
struct searchc_env {
  struct cgraph_node **stack;        /* DFS stack of nodes currently being visited.  */
  int stack_size;                    /* Number of live entries in STACK.  */
  struct cgraph_node **result;       /* Output array filled in postorder.  */
  int order_pos;                     /* Next free slot in RESULT.  */
  splay_tree nodes_marked_new;       /* Not-yet-visited nodes, keyed by uid.  */
  bool reduce;                       /* If true, emit one representative per SCC.  */
  bool allow_overwritable;           /* Also walk AVAIL_OVERWRITABLE nodes.  */
  int count;                         /* Monotonic DFS visit counter (dfn numbers).  */
};
71 | ||
/* This is an implementation of Tarjan's strongly connected region
   finder as reprinted in Aho Hopcraft and Ullman's The Design and
   Analysis of Computer Programs (1975) pages 192-193.  This version
   has been customized for cgraph_nodes.  The env parameter is because
   it is recursive and there are no nested functions here.  This
   function should only be called from itself or
   ipa_reduced_postorder.  ENV is a stack env and would be
   unnecessary if C had nested functions.  V is the node to start
   searching from.  IGNORE_EDGE, if non-NULL, identifies call edges
   that the walk should skip.  */

static void
searchc (struct searchc_env* env, struct cgraph_node *v,
	 bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_edge *edge;
  struct ipa_dfs_info *v_info = (struct ipa_dfs_info *) v->symbol.aux;

  /* mark node as old */
  v_info->new_node = false;
  splay_tree_remove (env->nodes_marked_new, v->uid);

  /* Assign V its DFS number; LOW_LINK starts equal and is lowered as
     back edges are discovered.  */
  v_info->dfn_number = env->count;
  v_info->low_link = env->count;
  env->count++;
  env->stack[(env->stack_size)++] = v;
  v_info->on_stack = true;

  for (edge = v->callees; edge; edge = edge->next_callee)
    {
      struct ipa_dfs_info * w_info;
      enum availability avail;
      struct cgraph_node *w = cgraph_function_or_thunk_node (edge->callee, &avail);

      if (!w || (ignore_edge && ignore_edge (edge)))
        continue;

      /* Only walk into callees that were seeded with dfs info by
	 ipa_reduced_postorder (aux non-NULL) and are available enough.  */
      if (w->symbol.aux
	  && (avail > AVAIL_OVERWRITABLE
	      || (env->allow_overwritable && avail == AVAIL_OVERWRITABLE)))
	{
	  w_info = (struct ipa_dfs_info *) w->symbol.aux;
	  if (w_info->new_node)
	    {
	      /* Tree edge: recurse, then pull W's low link up into V's.  */
	      searchc (env, w, ignore_edge);
	      v_info->low_link =
		(v_info->low_link < w_info->low_link) ?
		v_info->low_link : w_info->low_link;
	    }
	  else
	    /* Back/cross edge into the current stack lowers V's low link
	       to W's dfn number.  */
	    if ((w_info->dfn_number < v_info->dfn_number)
		&& (w_info->on_stack))
	      v_info->low_link =
		(w_info->dfn_number < v_info->low_link) ?
		w_info->dfn_number : v_info->low_link;
	}
    }


  /* V is the root of an SCC exactly when its low link never dropped
     below its own dfn number; pop the whole component off the stack.  */
  if (v_info->low_link == v_info->dfn_number)
    {
      struct cgraph_node *last = NULL;
      struct cgraph_node *x;
      struct ipa_dfs_info *x_info;
      do {
	x = env->stack[--(env->stack_size)];
	x_info = (struct ipa_dfs_info *) x->symbol.aux;
	x_info->on_stack = false;
	x_info->scc_no = v_info->dfn_number;

	if (env->reduce)
	  {
	    /* Chain the component members through NEXT_CYCLE so
	       ipa_get_nodes_in_cycle can recover them later.  */
	    x_info->next_cycle = last;
	    last = x;
	  }
	else
	  env->result[env->order_pos++] = x;
      }
      while (v != x);
      if (env->reduce)
	env->result[env->order_pos++] = v;
    }
}
154 | ||
/* Topsort the call graph by caller relation.  Put the result in ORDER.

   The REDUCE flag is true if you want the cycles reduced to single nodes.
   You can use ipa_get_nodes_in_cycle to obtain a vector containing all real
   call graph nodes in a reduced node.

   Set ALLOW_OVERWRITABLE if nodes with such availability should be included.
   IGNORE_EDGE, if non-NULL is a hook that may make some edges insignificant
   for the topological sort.

   Returns the number of entries written to ORDER.  On return each
   processed node's aux field points to a heap-allocated ipa_dfs_info;
   call ipa_free_postorder_info to release them.  */

int
ipa_reduced_postorder (struct cgraph_node **order,
		       bool reduce, bool allow_overwritable,
		       bool (*ignore_edge) (struct cgraph_edge *))
{
  struct cgraph_node *node;
  struct searchc_env env;
  splay_tree_node result;
  env.stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  env.stack_size = 0;
  env.result = order;
  env.order_pos = 0;
  env.nodes_marked_new = splay_tree_new (splay_tree_compare_ints, 0, 0);
  env.count = 1;
  env.reduce = reduce;
  env.allow_overwritable = allow_overwritable;

  /* Seed every eligible node with a fresh (or recycled) dfs info record
     and register it in the "new" splay tree keyed by uid.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    {
      enum availability avail = cgraph_function_body_availability (node);

      if (avail > AVAIL_OVERWRITABLE
	  || (allow_overwritable
	      && (avail == AVAIL_OVERWRITABLE)))
	{
	  /* Reuse the info if it is already there.  */
	  struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->symbol.aux;
	  if (!info)
	    info = XCNEW (struct ipa_dfs_info);
	  info->new_node = true;
	  info->on_stack = false;
	  info->next_cycle = NULL;
	  node->symbol.aux = info;

	  splay_tree_insert (env.nodes_marked_new,
			     (splay_tree_key)node->uid,
			     (splay_tree_value)node);
	}
      else
	node->symbol.aux = NULL;
    }
  /* Repeatedly start a DFS from the unvisited node with the smallest uid;
     searchc removes nodes from the splay tree as it reaches them.  */
  result = splay_tree_min (env.nodes_marked_new);
  while (result)
    {
      node = (struct cgraph_node *)result->value;
      searchc (&env, node, ignore_edge);
      result = splay_tree_min (env.nodes_marked_new);
    }
  splay_tree_delete (env.nodes_marked_new);
  free (env.stack);

  return env.order_pos;
}
218 | ||
7771d558 | 219 | /* Deallocate all ipa_dfs_info structures pointed to by the aux pointer of call |
220 | graph nodes. */ | |
221 | ||
222 | void | |
223 | ipa_free_postorder_info (void) | |
224 | { | |
225 | struct cgraph_node *node; | |
7c455d87 | 226 | FOR_EACH_DEFINED_FUNCTION (node) |
7771d558 | 227 | { |
228 | /* Get rid of the aux information. */ | |
7d0d0ce1 | 229 | if (node->symbol.aux) |
7771d558 | 230 | { |
7d0d0ce1 | 231 | free (node->symbol.aux); |
232 | node->symbol.aux = NULL; | |
7771d558 | 233 | } |
234 | } | |
235 | } | |
236 | ||
9631926a | 237 | /* Get the set of nodes for the cycle in the reduced call graph starting |
238 | from NODE. */ | |
239 | ||
240 | VEC (cgraph_node_p, heap) * | |
241 | ipa_get_nodes_in_cycle (struct cgraph_node *node) | |
242 | { | |
243 | VEC (cgraph_node_p, heap) *v = NULL; | |
244 | struct ipa_dfs_info *node_dfs_info; | |
245 | while (node) | |
246 | { | |
247 | VEC_safe_push (cgraph_node_p, heap, v, node); | |
248 | node_dfs_info = (struct ipa_dfs_info *) node->symbol.aux; | |
249 | node = node_dfs_info->next_cycle; | |
250 | } | |
251 | return v; | |
252 | } | |
253 | ||
/* One frame of the explicit DFS stack used by ipa_reverse_postorder.  */
struct postorder_stack
{
  struct cgraph_node *node;    /* Node this frame is visiting.  */
  struct cgraph_edge *edge;    /* Next caller edge to examine.  */
  int ref;                     /* Next index into the node's referring list.  */
};
260 | ||
/* Fill array order with all nodes with output flag set in the reverse
   topological order.  Return the number of elements in the array.
   FIXME: While walking, consider aliases, too.

   Uses an explicit stack (no recursion); node->symbol.aux is borrowed
   as a "visited" mark and cleared again before returning.  */

int
ipa_reverse_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge;
  int pass;
  struct ipa_ref *ref;

  struct postorder_stack *stack =
    XCNEWVEC (struct postorder_stack, cgraph_n_nodes);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  FOR_EACH_FUNCTION (node)
    node->symbol.aux = NULL;
  /* Pass 0 starts DFS only from "root" nodes (not address-taken, not
     inlined, not an alias/thunk, not only called directly); pass 1 then
     sweeps up everything still unvisited, e.g. nodes only reachable
     through cycles.  */
  for (pass = 0; pass < 2; pass++)
    FOR_EACH_FUNCTION (node)
      if (!node->symbol.aux
	  && (pass
	      || (!node->symbol.address_taken
		  && !node->global.inlined_to
		  && !node->alias && !node->thunk.thunk_p
		  && !cgraph_only_called_directly_p (node))))
	{
	  stack_size = 0;
          stack[stack_size].node = node;
	  stack[stack_size].edge = node->callers;
	  stack[stack_size].ref = 0;
	  node->symbol.aux = (void *)(size_t)1;
	  while (stack_size >= 0)
	    {
	      while (true)
		{
		  /* Find the next unprocessed successor: first caller
		     edges, then IPA_REF_ALIAS references.  */
		  node2 = NULL;
		  while (stack[stack_size].edge && !node2)
		    {
		      edge = stack[stack_size].edge;
		      node2 = edge->caller;
		      stack[stack_size].edge = edge->next_caller;
		      /* Break possible cycles involving always-inline
			 functions by ignoring edges from always-inline
			 functions to non-always-inline functions.  */
		      if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->symbol.decl)
			  && !DECL_DISREGARD_INLINE_LIMITS
			      (cgraph_function_node (edge->callee, NULL)->symbol.decl))
			node2 = NULL;
		    }
		  for (;ipa_ref_list_referring_iterate (&stack[stack_size].node->symbol.ref_list,
						       stack[stack_size].ref,
						       ref) && !node2;
		       stack[stack_size].ref++)
		    {
		      if (ref->use == IPA_REF_ALIAS)
			node2 = ipa_ref_referring_node (ref);
		    }
		  if (!node2)
		    break;
		  /* Descend into NODE2 if it has not been visited yet.  */
		  if (!node2->symbol.aux)
		    {
		      stack[++stack_size].node = node2;
		      stack[stack_size].edge = node2->callers;
		      stack[stack_size].ref = 0;
		      node2->symbol.aux = (void *)(size_t)1;
		    }
		}
	      /* All successors done: emit the node and pop the frame.  */
	      order[order_pos++] = stack[stack_size--].node;
	    }
	}
  free (stack);
  FOR_EACH_FUNCTION (node)
    node->symbol.aux = NULL;
  return order_pos;
}
342 | ||
343 | ||
f7d118a9 | 344 | |
345 | /* Given a memory reference T, will return the variable at the bottom | |
9d75589a | 346 | of the access. Unlike get_base_address, this will recurse through |
f7d118a9 | 347 | INDIRECT_REFS. */ |
348 | ||
349 | tree | |
350 | get_base_var (tree t) | |
351 | { | |
48e1416a | 352 | while (!SSA_VAR_P (t) |
f7d118a9 | 353 | && (!CONSTANT_CLASS_P (t)) |
354 | && TREE_CODE (t) != LABEL_DECL | |
355 | && TREE_CODE (t) != FUNCTION_DECL | |
9ed5b1f5 | 356 | && TREE_CODE (t) != CONST_DECL |
357 | && TREE_CODE (t) != CONSTRUCTOR) | |
f7d118a9 | 358 | { |
359 | t = TREE_OPERAND (t, 0); | |
360 | } | |
361 | return t; | |
48e1416a | 362 | } |
f7d118a9 | 363 | |
19ad01f7 | 364 | |
365 | /* Create a new cgraph node set. */ | |
366 | ||
367 | cgraph_node_set | |
368 | cgraph_node_set_new (void) | |
369 | { | |
370 | cgraph_node_set new_node_set; | |
371 | ||
372 | new_node_set = XCNEW (struct cgraph_node_set_def); | |
373 | new_node_set->map = pointer_map_create (); | |
374 | new_node_set->nodes = NULL; | |
375 | return new_node_set; | |
376 | } | |
377 | ||
378 | ||
379 | /* Add cgraph_node NODE to cgraph_node_set SET. */ | |
380 | ||
381 | void | |
382 | cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node) | |
383 | { | |
384 | void **slot; | |
385 | ||
386 | slot = pointer_map_insert (set->map, node); | |
387 | ||
388 | if (*slot) | |
389 | { | |
390 | int index = (size_t) *slot - 1; | |
391 | gcc_checking_assert ((VEC_index (cgraph_node_ptr, set->nodes, index) | |
392 | == node)); | |
393 | return; | |
394 | } | |
395 | ||
396 | *slot = (void *)(size_t) (VEC_length (cgraph_node_ptr, set->nodes) + 1); | |
397 | ||
398 | /* Insert into node vector. */ | |
399 | VEC_safe_push (cgraph_node_ptr, heap, set->nodes, node); | |
400 | } | |
401 | ||
402 | ||
403 | /* Remove cgraph_node NODE from cgraph_node_set SET. */ | |
404 | ||
405 | void | |
406 | cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node) | |
407 | { | |
408 | void **slot, **last_slot; | |
409 | int index; | |
410 | struct cgraph_node *last_node; | |
411 | ||
412 | slot = pointer_map_contains (set->map, node); | |
413 | if (slot == NULL || !*slot) | |
414 | return; | |
415 | ||
416 | index = (size_t) *slot - 1; | |
417 | gcc_checking_assert (VEC_index (cgraph_node_ptr, set->nodes, index) | |
418 | == node); | |
419 | ||
420 | /* Remove from vector. We do this by swapping node with the last element | |
421 | of the vector. */ | |
422 | last_node = VEC_pop (cgraph_node_ptr, set->nodes); | |
423 | if (last_node != node) | |
424 | { | |
425 | last_slot = pointer_map_contains (set->map, last_node); | |
426 | gcc_checking_assert (last_slot && *last_slot); | |
427 | *last_slot = (void *)(size_t) (index + 1); | |
428 | ||
429 | /* Move the last element to the original spot of NODE. */ | |
430 | VEC_replace (cgraph_node_ptr, set->nodes, index, last_node); | |
431 | } | |
432 | ||
433 | /* Remove element from hash table. */ | |
434 | *slot = NULL; | |
435 | } | |
436 | ||
437 | ||
438 | /* Find NODE in SET and return an iterator to it if found. A null iterator | |
439 | is returned if NODE is not in SET. */ | |
440 | ||
441 | cgraph_node_set_iterator | |
442 | cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node) | |
443 | { | |
444 | void **slot; | |
445 | cgraph_node_set_iterator csi; | |
446 | ||
447 | slot = pointer_map_contains (set->map, node); | |
448 | if (slot == NULL || !*slot) | |
449 | csi.index = (unsigned) ~0; | |
450 | else | |
451 | csi.index = (size_t)*slot - 1; | |
452 | csi.set = set; | |
453 | ||
454 | return csi; | |
455 | } | |
456 | ||
457 | ||
458 | /* Dump content of SET to file F. */ | |
459 | ||
460 | void | |
461 | dump_cgraph_node_set (FILE *f, cgraph_node_set set) | |
462 | { | |
463 | cgraph_node_set_iterator iter; | |
464 | ||
465 | for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter)) | |
466 | { | |
467 | struct cgraph_node *node = csi_node (iter); | |
468 | fprintf (f, " %s/%i", cgraph_node_name (node), node->uid); | |
469 | } | |
470 | fprintf (f, "\n"); | |
471 | } | |
472 | ||
473 | ||
474 | /* Dump content of SET to stderr. */ | |
475 | ||
476 | DEBUG_FUNCTION void | |
477 | debug_cgraph_node_set (cgraph_node_set set) | |
478 | { | |
479 | dump_cgraph_node_set (stderr, set); | |
480 | } | |
481 | ||
482 | ||
483 | /* Free varpool node set. */ | |
484 | ||
485 | void | |
486 | free_cgraph_node_set (cgraph_node_set set) | |
487 | { | |
488 | VEC_free (cgraph_node_ptr, heap, set->nodes); | |
489 | pointer_map_destroy (set->map); | |
490 | free (set); | |
491 | } | |
492 | ||
493 | ||
494 | /* Create a new varpool node set. */ | |
495 | ||
496 | varpool_node_set | |
497 | varpool_node_set_new (void) | |
498 | { | |
499 | varpool_node_set new_node_set; | |
500 | ||
501 | new_node_set = XCNEW (struct varpool_node_set_def); | |
502 | new_node_set->map = pointer_map_create (); | |
503 | new_node_set->nodes = NULL; | |
504 | return new_node_set; | |
505 | } | |
506 | ||
507 | ||
508 | /* Add varpool_node NODE to varpool_node_set SET. */ | |
509 | ||
510 | void | |
511 | varpool_node_set_add (varpool_node_set set, struct varpool_node *node) | |
512 | { | |
513 | void **slot; | |
514 | ||
515 | slot = pointer_map_insert (set->map, node); | |
516 | ||
517 | if (*slot) | |
518 | { | |
519 | int index = (size_t) *slot - 1; | |
520 | gcc_checking_assert ((VEC_index (varpool_node_ptr, set->nodes, index) | |
521 | == node)); | |
522 | return; | |
523 | } | |
524 | ||
525 | *slot = (void *)(size_t) (VEC_length (varpool_node_ptr, set->nodes) + 1); | |
526 | ||
527 | /* Insert into node vector. */ | |
528 | VEC_safe_push (varpool_node_ptr, heap, set->nodes, node); | |
529 | } | |
530 | ||
531 | ||
532 | /* Remove varpool_node NODE from varpool_node_set SET. */ | |
533 | ||
534 | void | |
535 | varpool_node_set_remove (varpool_node_set set, struct varpool_node *node) | |
536 | { | |
537 | void **slot, **last_slot; | |
538 | int index; | |
539 | struct varpool_node *last_node; | |
540 | ||
541 | slot = pointer_map_contains (set->map, node); | |
542 | if (slot == NULL || !*slot) | |
543 | return; | |
544 | ||
545 | index = (size_t) *slot - 1; | |
546 | gcc_checking_assert (VEC_index (varpool_node_ptr, set->nodes, index) | |
547 | == node); | |
548 | ||
549 | /* Remove from vector. We do this by swapping node with the last element | |
550 | of the vector. */ | |
551 | last_node = VEC_pop (varpool_node_ptr, set->nodes); | |
552 | if (last_node != node) | |
553 | { | |
554 | last_slot = pointer_map_contains (set->map, last_node); | |
555 | gcc_checking_assert (last_slot && *last_slot); | |
556 | *last_slot = (void *)(size_t) (index + 1); | |
557 | ||
558 | /* Move the last element to the original spot of NODE. */ | |
559 | VEC_replace (varpool_node_ptr, set->nodes, index, last_node); | |
560 | } | |
561 | ||
562 | /* Remove element from hash table. */ | |
563 | *slot = NULL; | |
564 | } | |
565 | ||
566 | ||
567 | /* Find NODE in SET and return an iterator to it if found. A null iterator | |
568 | is returned if NODE is not in SET. */ | |
569 | ||
570 | varpool_node_set_iterator | |
571 | varpool_node_set_find (varpool_node_set set, struct varpool_node *node) | |
572 | { | |
573 | void **slot; | |
574 | varpool_node_set_iterator vsi; | |
575 | ||
576 | slot = pointer_map_contains (set->map, node); | |
577 | if (slot == NULL || !*slot) | |
578 | vsi.index = (unsigned) ~0; | |
579 | else | |
580 | vsi.index = (size_t)*slot - 1; | |
581 | vsi.set = set; | |
582 | ||
583 | return vsi; | |
584 | } | |
585 | ||
586 | ||
587 | /* Dump content of SET to file F. */ | |
588 | ||
589 | void | |
590 | dump_varpool_node_set (FILE *f, varpool_node_set set) | |
591 | { | |
592 | varpool_node_set_iterator iter; | |
593 | ||
594 | for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter)) | |
595 | { | |
596 | struct varpool_node *node = vsi_node (iter); | |
597 | fprintf (f, " %s", varpool_node_name (node)); | |
598 | } | |
599 | fprintf (f, "\n"); | |
600 | } | |
601 | ||
602 | ||
603 | /* Free varpool node set. */ | |
604 | ||
605 | void | |
606 | free_varpool_node_set (varpool_node_set set) | |
607 | { | |
608 | VEC_free (varpool_node_ptr, heap, set->nodes); | |
609 | pointer_map_destroy (set->map); | |
610 | free (set); | |
611 | } | |
612 | ||
613 | ||
614 | /* Dump content of SET to stderr. */ | |
615 | ||
616 | DEBUG_FUNCTION void | |
617 | debug_varpool_node_set (varpool_node_set set) | |
618 | { | |
619 | dump_varpool_node_set (stderr, set); | |
620 | } |