gcc/tree-phinodes.c
/* Generic routines for manipulating PHIs
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "tree-ssa.h"

/* Rewriting a function into SSA form can create a huge number of PHIs,
   many of which may be thrown away shortly after their creation if jumps
   were threaded through PHI nodes.

   While our garbage collection mechanisms will handle this situation, it
   is extremely wasteful to create nodes and throw them away, especially
   when the nodes can be reused.

   For PR 8361, we can significantly reduce the number of nodes allocated
   and thus the total amount of memory allocated by managing PHIs a
   little.  This additionally helps reduce the amount of work done by the
   garbage collector.  Similar results have been seen on a wider variety
   of tests (such as the compiler itself).

   PHI nodes have different sizes, so we can't have a single list of all
   the PHI nodes as it would be too expensive to walk down that list to
   find a PHI of a suitable size.

   Instead we have an array of lists of free PHI nodes.  The array is
   indexed by the number of PHI alternatives that PHI node can hold,
   except for the last array member, which holds all remaining PHI
   nodes.

   So to find a free PHI node, we compute its index into the free PHI
   node array and see if there are any elements with an exact match.
   If so, then we are done.  Otherwise, we test the next larger size
   up and continue until we are in the last array element.

   We do not actually walk members of the last array element.  While it
   might allow us to pick up a few reusable PHI nodes, it could potentially
   be very expensive if the program has released a bunch of large PHI nodes,
   but keeps asking for even larger PHI nodes.  Experiments have shown that
   walking the elements of the last array entry would result in finding less
   than .1% additional reusable PHI nodes.

   Note that we can never have fewer than two PHI argument slots.  Thus,
   the -2 on all the calculations below.  */
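
/* Editorial worked example (not part of the original sources), using the
   constants defined just below: with NUM_BUCKETS == 10 the free list
   array has 10 - 2 == 8 buckets, indexed 0 through 7.  A released PHI
   node with capacity 4 is pushed onto free_phinodes[4 - 2], i.e. bucket 2,
   while any node with capacity 9 or more lands in the last bucket, 7.
   A later request for 4 argument slots scans buckets 2, 3, ..., 7 and
   pops the first non-empty bucket, re-checking that node's capacity
   because the last bucket mixes sizes.  */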

#define NUM_BUCKETS 10
static GTY ((deletable (""))) vec<gimple *, va_gc> *free_phinodes[NUM_BUCKETS - 2];
static unsigned long free_phinode_count;

static int ideal_phi_node_len (int);

unsigned int phi_nodes_reused;
unsigned int phi_nodes_created;

/* Dump some simple statistics regarding the re-use of PHI nodes.  */

void
phinodes_print_statistics (void)
{
  fprintf (stderr, "PHI nodes allocated: %u\n", phi_nodes_created);
  fprintf (stderr, "PHI nodes reused: %u\n", phi_nodes_reused);
}

/* Allocate a PHI node with at least LEN arguments.  If the free list
   happens to contain a PHI node with LEN arguments or more, return
   that one.  */

static inline gphi *
allocate_phi_node (size_t len)
{
  gphi *phi;
  size_t bucket = NUM_BUCKETS - 2;
  size_t size = sizeof (struct gphi)
                + (len - 1) * sizeof (struct phi_arg_d);

  if (free_phinode_count)
    for (bucket = len - 2; bucket < NUM_BUCKETS - 2; bucket++)
      if (free_phinodes[bucket])
        break;

  /* If our free list has an element, then use it.  */
  if (bucket < NUM_BUCKETS - 2
      && gimple_phi_capacity ((*free_phinodes[bucket])[0]) >= len)
    {
      free_phinode_count--;
      phi = as_a <gphi *> (free_phinodes[bucket]->pop ());
      if (free_phinodes[bucket]->is_empty ())
        vec_free (free_phinodes[bucket]);
      if (GATHER_STATISTICS)
        phi_nodes_reused++;
    }
  else
    {
      phi = static_cast <gphi *> (ggc_internal_alloc (size));
      if (GATHER_STATISTICS)
        {
          enum gimple_alloc_kind kind = gimple_alloc_kind (GIMPLE_PHI);
          phi_nodes_created++;
          gimple_alloc_counts[(int) kind]++;
          gimple_alloc_sizes[(int) kind] += size;
        }
    }

  return phi;
}

/* Given LEN, the original number of requested PHI arguments, return
   a new, "ideal" length for the PHI node.  The "ideal" length rounds
   the total size of the PHI node up to the next power of two bytes.

   Rounding up will not result in wasting any memory since the size request
   will be rounded up by the GC system anyway.  [ Note this is not entirely
   true since the original length might have fit on one of the special
   GC pages. ]  By rounding up, we may avoid the need to reallocate the
   PHI node later if we increase the number of arguments for the PHI.  */

static int
ideal_phi_node_len (int len)
{
  size_t size, new_size;
  int log2, new_len;

  /* We do not support allocations of less than two PHI argument slots.  */
  if (len < 2)
    len = 2;

  /* Compute the number of bytes of the original request.  */
  size = sizeof (struct gphi)
         + (len - 1) * sizeof (struct phi_arg_d);

  /* Round it up to the next power of two.  */
  log2 = ceil_log2 (size);
  new_size = 1 << log2;

  /* Now compute and return the number of PHI argument slots given an
     ideal size allocation.  */
  new_len = len + (new_size - size) / sizeof (struct phi_arg_d);
  return new_len;
}
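
/* Editorial worked example (not part of the original sources), with
   purely illustrative structure sizes: assume sizeof (struct gphi) == 120
   and sizeof (struct phi_arg_d) == 24.  A request for LEN == 2 gives
   SIZE == 120 + 1 * 24 == 144 bytes, which rounds up to NEW_SIZE == 256.
   The slack buys (256 - 144) / 24 == 4 extra slots, so the "ideal"
   length is 6 argument slots for the same 256-byte allocation.  */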

/* Return a PHI node with LEN argument slots for variable VAR.  */

static gphi *
make_phi_node (tree var, int len)
{
  gphi *phi;
  int capacity, i;

  capacity = ideal_phi_node_len (len);

  phi = allocate_phi_node (capacity);

  /* We need to clear the entire PHI node, including the argument
     portion, because we represent a "missing PHI argument" by placing
     NULL_TREE in PHI_ARG_DEF.  */
  memset (phi, 0, (sizeof (struct gphi)
                   - sizeof (struct phi_arg_d)
                   + sizeof (struct phi_arg_d) * len));
  phi->code = GIMPLE_PHI;
  gimple_init_singleton (phi);
  phi->nargs = len;
  phi->capacity = capacity;
  if (!var)
    ;
  else if (TREE_CODE (var) == SSA_NAME)
    gimple_phi_set_result (phi, var);
  else
    gimple_phi_set_result (phi, make_ssa_name (var, phi));

  for (i = 0; i < capacity; i++)
    {
      use_operand_p imm;

      gimple_phi_arg_set_location (phi, i, UNKNOWN_LOCATION);
      imm = gimple_phi_arg_imm_use_ptr (phi, i);
      imm->use = gimple_phi_arg_def_ptr (phi, i);
      imm->prev = NULL;
      imm->next = NULL;
      imm->loc.stmt = phi;
    }

  return phi;
}

/* We no longer need PHI, release it so that it may be reused.  */

void
release_phi_node (gimple *phi)
{
  size_t bucket;
  size_t len = gimple_phi_capacity (phi);
  size_t x;

  for (x = 0; x < gimple_phi_num_args (phi); x++)
    {
      use_operand_p imm;
      imm = gimple_phi_arg_imm_use_ptr (phi, x);
      delink_imm_use (imm);
    }

  bucket = len > NUM_BUCKETS - 1 ? NUM_BUCKETS - 1 : len;
  bucket -= 2;
  vec_safe_push (free_phinodes[bucket], phi);
  free_phinode_count++;
}

/* Resize an existing PHI node.  The only way is up.  Return the
   possibly relocated phi.  */

static gphi *
resize_phi_node (gphi *phi, size_t len)
{
  size_t old_size, i;
  gphi *new_phi;

  gcc_assert (len > gimple_phi_capacity (phi));

  /* The garbage collector will not look at the PHI node beyond the
     first PHI_NUM_ARGS elements.  Therefore, all we have to copy is a
     portion of the PHI node currently in use.  */
  old_size = sizeof (struct gphi)
             + (gimple_phi_num_args (phi) - 1) * sizeof (struct phi_arg_d);

  new_phi = allocate_phi_node (len);

  memcpy (new_phi, phi, old_size);

  for (i = 0; i < gimple_phi_num_args (new_phi); i++)
    {
      use_operand_p imm, old_imm;
      imm = gimple_phi_arg_imm_use_ptr (new_phi, i);
      old_imm = gimple_phi_arg_imm_use_ptr (phi, i);
      imm->use = gimple_phi_arg_def_ptr (new_phi, i);
      relink_imm_use_stmt (imm, old_imm, new_phi);
    }

  new_phi->capacity = len;

  for (i = gimple_phi_num_args (new_phi); i < len; i++)
    {
      use_operand_p imm;

      gimple_phi_arg_set_location (new_phi, i, UNKNOWN_LOCATION);
      imm = gimple_phi_arg_imm_use_ptr (new_phi, i);
      imm->use = gimple_phi_arg_def_ptr (new_phi, i);
      imm->prev = NULL;
      imm->next = NULL;
      imm->loc.stmt = new_phi;
    }

  return new_phi;
}

/* Reserve PHI arguments for a new edge to basic block BB.  */

void
reserve_phi_args_for_new_edge (basic_block bb)
{
  size_t len = EDGE_COUNT (bb->preds);
  size_t cap = ideal_phi_node_len (len + 4);
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *stmt = gsi.phi ();

      if (len > gimple_phi_capacity (stmt))
        {
          gphi *new_phi = resize_phi_node (stmt, cap);

          /* The result of the PHI is defined by this PHI node.  */
          SSA_NAME_DEF_STMT (gimple_phi_result (new_phi)) = new_phi;
          gsi_set_stmt (&gsi, new_phi);

          release_phi_node (stmt);
          stmt = new_phi;
        }

      /* We represent a "missing PHI argument" by placing NULL_TREE in
         the corresponding slot.  If PHI arguments were added
         immediately after an edge is created, this zeroing would not
         be necessary, but unfortunately this is not the case.  For
         example, the loop optimizer duplicates several basic blocks,
         redirects edges, and then fixes up PHI arguments later in
         batch.  */
      SET_PHI_ARG_DEF (stmt, len - 1, NULL_TREE);
      gimple_phi_arg_set_location (stmt, len - 1, UNKNOWN_LOCATION);

      stmt->nargs++;
    }
}
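
/* Editorial example (not part of the original sources): if BB had two
   predecessors and gains a third edge, a PHI such as
       x_1 = PHI <a_2 (E0), b_3 (E1)>
   grows to
       x_1 = PHI <a_2 (E0), b_3 (E1), NULL (E2)>
   and stays in that state until the pass that created or redirected the
   edge fills in the last argument, e.g. via add_phi_arg below.  */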

/* Adds PHI to BB.  */

void
add_phi_node_to_bb (gphi *phi, basic_block bb)
{
  gimple_seq seq = phi_nodes (bb);
  /* Add the new PHI node to the list of PHI nodes for block BB.  */
  if (seq == NULL)
    set_phi_nodes (bb, gimple_seq_alloc_with_stmt (phi));
  else
    {
      gimple_seq_add_stmt (&seq, phi);
      gcc_assert (seq == phi_nodes (bb));
    }

  /* Associate BB to the PHI node.  */
  gimple_set_bb (phi, bb);
}

/* Create a new PHI node for variable VAR at basic block BB.  */

gphi *
create_phi_node (tree var, basic_block bb)
{
  gphi *phi = make_phi_node (var, EDGE_COUNT (bb->preds));

  add_phi_node_to_bb (phi, bb);
  return phi;
}

/* Add a new argument to PHI node PHI.  DEF is the incoming reaching
   definition and E is the edge through which DEF reaches PHI.  The
   argument is stored in the slot reserved for E (E->dest_idx); room for
   that slot was set aside when the edge was created, so PHI is never
   resized here.  */

void
add_phi_arg (gphi *phi, tree def, edge e, source_location locus)
{
  basic_block bb = e->dest;

  gcc_assert (bb == gimple_bb (phi));

  /* We resize PHI nodes upon edge creation.  We should always have
     enough room at this point, both in the argument vector and at
     the index selected by E->dest_idx.  */
  gcc_assert (gimple_phi_num_args (phi) <= gimple_phi_capacity (phi));
  gcc_assert (e->dest_idx < gimple_phi_num_args (phi));

  /* Copy propagation needs to know what objects occur in abnormal
     PHI nodes.  This is a convenient place to record such information.  */
  if (e->flags & EDGE_ABNORMAL)
    {
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def) = 1;
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)) = 1;
    }

  SET_PHI_ARG_DEF (phi, e->dest_idx, def);
  gimple_phi_arg_set_location (phi, e->dest_idx, locus);
}
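
/* Editorial usage sketch (not part of the original sources): a minimal
   illustration of the create_phi_node / add_phi_arg API above.  The
   helper name build_phi_for_block and the ARGS_FOR_EDGE callback are
   hypothetical; the sketch is kept under "#if 0" because it is purely
   illustrative.  */
#if 0
static gphi *
build_phi_for_block (tree var, basic_block bb, tree (*args_for_edge) (edge))
{
  /* Make a PHI with one argument slot per predecessor of BB.  */
  gphi *phi = create_phi_node (var, bb);
  edge e;
  edge_iterator ei;

  /* Fill the slot reserved for each incoming edge.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    add_phi_arg (phi, args_for_edge (e), e, UNKNOWN_LOCATION);

  return phi;
}
#endif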

/* Remove the Ith argument from PHI's argument list.  This routine
   implements removal by swapping the last alternative with the
   alternative we want to delete and then shrinking the vector, which
   is consistent with how we remove an edge from the edge vector.  */

static void
remove_phi_arg_num (gphi *phi, int i)
{
  int num_elem = gimple_phi_num_args (phi);

  gcc_assert (i < num_elem);

  /* Delink the item which is being removed.  */
  delink_imm_use (gimple_phi_arg_imm_use_ptr (phi, i));

  /* If it is not the last element, move the last element
     to the element we want to delete, resetting all the links.  */
  if (i != num_elem - 1)
    {
      use_operand_p old_p, new_p;
      old_p = gimple_phi_arg_imm_use_ptr (phi, num_elem - 1);
      new_p = gimple_phi_arg_imm_use_ptr (phi, i);
      /* Set use on new node, and link into last element's place.  */
      *(new_p->use) = *(old_p->use);
      relink_imm_use (new_p, old_p);
      /* Move the location as well.  */
      gimple_phi_arg_set_location (phi, i,
                                   gimple_phi_arg_location (phi, num_elem - 1));
    }

  /* Shrink the vector and return.  Note that we do not have to clear
     PHI_ARG_DEF because the garbage collector will not look at those
     elements beyond the first PHI_NUM_ARGS elements of the array.  */
  phi->nargs--;
}
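
/* Editorial worked example (not part of the original sources): removing
   argument 1 of a three-argument PHI copies argument 2 (the last one)
   into slot 1 and then decrements nargs to 2.  Because removing an edge
   from the edge vector performs the same swap-with-last and updates the
   surviving edge's dest_idx, argument I keeps corresponding to the edge
   whose dest_idx is I.  */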

/* Remove all PHI arguments associated with edge E.  */

void
remove_phi_args (edge e)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    remove_phi_arg_num (gsi.phi (), e->dest_idx);
}

/* Remove the PHI node pointed-to by iterator GSI from basic block BB.  After
   removal, iterator GSI is updated to point to the next PHI node in the
   sequence.  If RELEASE_LHS_P is true, the LHS of this PHI node is released
   into the free pool of SSA names.  */

void
remove_phi_node (gimple_stmt_iterator *gsi, bool release_lhs_p)
{
  gimple *phi = gsi_stmt (*gsi);

  if (release_lhs_p)
    insert_debug_temps_for_defs (gsi);

  gsi_remove (gsi, false);

  /* If we are deleting the PHI node, then we should release the
     SSA_NAME node so that it can be reused.  */
  release_phi_node (phi);
  if (release_lhs_p)
    release_ssa_name (gimple_phi_result (phi));
}

/* Remove all the phi nodes from BB.  */

void
remove_phi_nodes (basic_block bb)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
    remove_phi_node (&gsi, true);

  set_phi_nodes (bb, NULL);
}

/* Given PHI, return its RHS if the PHI is degenerate, otherwise return
   NULL.  */

tree
degenerate_phi_result (gphi *phi)
{
  tree lhs = gimple_phi_result (phi);
  tree val = NULL;
  size_t i;

  /* Ignoring arguments which are the same as LHS, if all the remaining
     arguments are the same, then the PHI is degenerate and has the
     value of that common argument.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (arg == lhs)
        continue;
      else if (!arg)
        break;
      else if (!val)
        val = arg;
      else if (arg == val)
        continue;
      /* We open-code part of operand_equal_p here, not only to speed
         things up, but also to avoid crashing when dereferencing the
         type of a released SSA name.  */
      else if (TREE_CODE (val) != TREE_CODE (arg)
               || TREE_CODE (val) == SSA_NAME
               || !operand_equal_p (arg, val, 0))
        break;
    }
  return (i == gimple_phi_num_args (phi) ? val : NULL);
}
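
/* Editorial usage sketch (not part of the original sources): for example,
   x_3 = PHI <y_2, y_2, x_3> is degenerate with value y_2.  The helper
   below shows how a caller might fold such a PHI away; its name is
   hypothetical, and replace_uses_by (declared in tree-cfg.h, which this
   file does not include) is used purely for illustration, so the sketch
   is kept under "#if 0".  */
#if 0
static bool
try_fold_degenerate_phi (gphi_iterator *gsi)
{
  gphi *phi = gsi->phi ();
  tree rhs = degenerate_phi_result (phi);

  if (!rhs || TREE_CODE (rhs) != SSA_NAME)
    return false;

  /* Rewrite all uses of the PHI result to the single incoming value,
     then delete the PHI and release its result.  */
  replace_uses_by (gimple_phi_result (phi), rhs);
  remove_phi_node (gsi, true);
  return true;
}
#endif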

/* Set PHI nodes of a basic block BB to SEQ.  */

void
set_phi_nodes (basic_block bb, gimple_seq seq)
{
  gimple_stmt_iterator i;

  gcc_checking_assert (!(bb->flags & BB_RTL));
  bb->il.gimple.phi_nodes = seq;
  if (seq)
    for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
      gimple_set_bb (gsi_stmt (i), bb);
}

#include "gt-tree-phinodes.h"