gcc/tree-ssa.c
1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "flags.h"
26 #include "tm_p.h"
27 #include "target.h"
28 #include "ggc.h"
29 #include "langhooks.h"
30 #include "basic-block.h"
31 #include "function.h"
32 #include "gimple-pretty-print.h"
33 #include "pointer-set.h"
34 #include "gimple.h"
35 #include "gimple-ssa.h"
36 #include "tree-phinodes.h"
37 #include "ssa-iterators.h"
38 #include "tree-ssanames.h"
39 #include "tree-ssa-loop-manip.h"
40 #include "tree-into-ssa.h"
41 #include "tree-ssa.h"
42 #include "tree-inline.h"
43 #include "hashtab.h"
44 #include "tree-pass.h"
45 #include "diagnostic-core.h"
46 #include "cfgloop.h"
47
48 /* Pointer map of variable mappings, keyed by edge. */
49 static struct pointer_map_t *edge_var_maps;
50
51
52 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
53
54 void
55 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
56 {
57 void **slot;
58 edge_var_map_vector *head;
59 edge_var_map new_node;
60
61 if (edge_var_maps == NULL)
62 edge_var_maps = pointer_map_create ();
63
64 slot = pointer_map_insert (edge_var_maps, e);
65 head = (edge_var_map_vector *) *slot;
66 if (!head)
67 vec_safe_reserve (head, 5);
68 new_node.def = def;
69 new_node.result = result;
70 new_node.locus = locus;
71
72 vec_safe_push (head, new_node);
73 *slot = head;
74 }
75
76
77 /* Clear the var mappings in edge E. */
78
79 void
80 redirect_edge_var_map_clear (edge e)
81 {
82 void **slot;
83 edge_var_map_vector *head;
84
85 if (!edge_var_maps)
86 return;
87
88 slot = pointer_map_contains (edge_var_maps, e);
89
90 if (slot)
91 {
92 head = (edge_var_map_vector *) *slot;
93 vec_free (head);
94 *slot = NULL;
95 }
96 }
97
98
99 /* Duplicate the redirected var mappings in OLDE in NEWE.
100
101 Since we can't remove a mapping, let's just duplicate it. This assumes a
102 pointer_map can have multiple edges mapping to the same var_map (many to
103 one mapping), since we don't remove the previous mappings. */
104
105 void
106 redirect_edge_var_map_dup (edge newe, edge olde)
107 {
108 void **new_slot, **old_slot;
109 edge_var_map_vector *head;
110
111 if (!edge_var_maps)
112 return;
113
114 new_slot = pointer_map_insert (edge_var_maps, newe);
115 old_slot = pointer_map_contains (edge_var_maps, olde);
116 if (!old_slot)
117 return;
118 head = (edge_var_map_vector *) *old_slot;
119
120 edge_var_map_vector *new_head = NULL;
121 if (head)
122 new_head = vec_safe_copy (head);
123 else
124 vec_safe_reserve (new_head, 5);
125 *new_slot = new_head;
126 }
127
128
129 /* Return the variable mappings for a given edge. If there is none, return
130 NULL. */
131
132 edge_var_map_vector *
133 redirect_edge_var_map_vector (edge e)
134 {
135 void **slot;
136
137 /* Hey, what kind of idiot would... you'd be surprised. */
138 if (!edge_var_maps)
139 return NULL;
140
141 slot = pointer_map_contains (edge_var_maps, e);
142 if (!slot)
143 return NULL;
144
145 return (edge_var_map_vector *) *slot;
146 }
147
148 /* Used by redirect_edge_var_map_destroy to free all memory. */
149
150 static bool
151 free_var_map_entry (const void *key ATTRIBUTE_UNUSED,
152 void **value,
153 void *data ATTRIBUTE_UNUSED)
154 {
155 edge_var_map_vector *head = (edge_var_map_vector *) *value;
156 vec_free (head);
157 return true;
158 }
159
160 /* Clear the edge variable mappings. */
161
162 void
163 redirect_edge_var_map_destroy (void)
164 {
165 if (edge_var_maps)
166 {
167 pointer_map_traverse (edge_var_maps, free_var_map_entry, NULL);
168 pointer_map_destroy (edge_var_maps);
169 edge_var_maps = NULL;
170 }
171 }
172
173
174 /* Remove the corresponding arguments from the PHI nodes in E's
175 destination block and redirect it to DEST. Return redirected edge.
176 The list of removed arguments is stored in a vector accessed
177 through edge_var_maps. */
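/* A typical caller pattern, as a sketch (the surrounding code is
   illustrative, not lifted from any particular pass): redirect the
   edge first, then recreate the saved PHI arguments once the new
   destination's PHI nodes are in place:

     e = ssa_redirect_edge (e, new_dest);
     ... create or adjust PHI nodes in new_dest ...
     flush_pending_stmts (e);  */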
178
179 edge
180 ssa_redirect_edge (edge e, basic_block dest)
181 {
182 gimple_stmt_iterator gsi;
183 gimple phi;
184
185 redirect_edge_var_map_clear (e);
186
187 /* Remove the appropriate PHI arguments in E's destination block. */
188 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
189 {
190 tree def;
 191 source_location locus;
192
193 phi = gsi_stmt (gsi);
194 def = gimple_phi_arg_def (phi, e->dest_idx);
195 locus = gimple_phi_arg_location (phi, e->dest_idx);
196
197 if (def == NULL_TREE)
198 continue;
199
200 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
201 }
202
203 e = redirect_edge_succ_nodup (e, dest);
204
205 return e;
206 }
207
208
 209 /* Add the PHI arguments queued in the PENDING_STMT list on edge E to the
 210 PHI nodes in E->dest. */
211
212 void
213 flush_pending_stmts (edge e)
214 {
215 gimple phi;
216 edge_var_map_vector *v;
217 edge_var_map *vm;
218 int i;
219 gimple_stmt_iterator gsi;
220
221 v = redirect_edge_var_map_vector (e);
222 if (!v)
223 return;
224
225 for (gsi = gsi_start_phis (e->dest), i = 0;
226 !gsi_end_p (gsi) && v->iterate (i, &vm);
227 gsi_next (&gsi), i++)
228 {
229 tree def;
230
231 phi = gsi_stmt (gsi);
232 def = redirect_edge_var_map_def (vm);
233 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
234 }
235
236 redirect_edge_var_map_clear (e);
237 }
238
239
240 /* Data structure used to count the number of dereferences to PTR
241 inside an expression. */
242 struct count_ptr_d
243 {
244 tree ptr;
245 unsigned num_stores;
246 unsigned num_loads;
247 };
248
249
250 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
 251 MEM_REF nodes that dereference the pointer passed in DATA. */
252
253 static tree
254 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
255 {
256 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
257 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
258
259 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
260 pointer 'ptr' is *not* dereferenced, it is simply used to compute
261 the address of 'fld' as 'ptr + offsetof(fld)'. */
262 if (TREE_CODE (*tp) == ADDR_EXPR)
263 {
264 *walk_subtrees = 0;
265 return NULL_TREE;
266 }
267
268 if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
269 {
270 if (wi_p->is_lhs)
271 count_p->num_stores++;
272 else
273 count_p->num_loads++;
274 }
275
276 return NULL_TREE;
277 }
278
279
280 /* Count the number of direct and indirect uses for pointer PTR in
281 statement STMT. The number of direct uses is stored in
282 *NUM_USES_P. Indirect references are counted separately depending
283 on whether they are store or load operations. The counts are
284 stored in *NUM_STORES_P and *NUM_LOADS_P. */
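/* As a worked example (taken from the comment in the body below): for
   the statement '*x_1 = foo (x_1, *x_1)' and PTR == x_1, this computes
   *NUM_USES_P == 3 (every occurrence of x_1), *NUM_LOADS_P == 1 (the
   '*x_1' argument) and *NUM_STORES_P == 1 (the '*x_1' store on the
   LHS).  */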
285
286 void
287 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
288 unsigned *num_loads_p, unsigned *num_stores_p)
289 {
290 ssa_op_iter i;
291 tree use;
292
293 *num_uses_p = 0;
294 *num_loads_p = 0;
295 *num_stores_p = 0;
296
297 /* Find out the total number of uses of PTR in STMT. */
298 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
299 if (use == ptr)
300 (*num_uses_p)++;
301
302 /* Now count the number of indirect references to PTR. This is
303 truly awful, but we don't have much choice. There are no parent
304 pointers inside INDIRECT_REFs, so an expression like
305 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
306 find all the indirect and direct uses of x_1 inside. The only
307 shortcut we can take is the fact that GIMPLE only allows
308 INDIRECT_REFs inside the expressions below. */
309 if (is_gimple_assign (stmt)
310 || gimple_code (stmt) == GIMPLE_RETURN
311 || gimple_code (stmt) == GIMPLE_ASM
312 || is_gimple_call (stmt))
313 {
314 struct walk_stmt_info wi;
315 struct count_ptr_d count;
316
317 count.ptr = ptr;
318 count.num_stores = 0;
319 count.num_loads = 0;
320
321 memset (&wi, 0, sizeof (wi));
322 wi.info = &count;
323 walk_gimple_op (stmt, count_ptr_derefs, &wi);
324
325 *num_stores_p = count.num_stores;
326 *num_loads_p = count.num_loads;
327 }
328
329 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
330 }
331
332
333 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
334 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
335 expression with a different value.
336
337 This will update any annotations (say debug bind stmts) referring
338 to the original LHS, so that they use the RHS instead. This is
339 done even if NLHS and LHS are the same, for it is understood that
340 the RHS will be modified afterwards, and NLHS will not be assigned
341 an equivalent value.
342
343 Adjusting any non-annotation uses of the LHS, if needed, is a
344 responsibility of the caller.
345
346 The effect of this call should be pretty much the same as that of
347 inserting a copy of STMT before STMT, and then removing the
 348 original stmt, at which time gsi_remove() would have updated the
349 annotations, but using this function saves all the inserting,
350 copying and removing. */
351
352 void
353 gimple_replace_ssa_lhs (gimple stmt, tree nlhs)
354 {
355 if (MAY_HAVE_DEBUG_STMTS)
356 {
357 tree lhs = gimple_get_lhs (stmt);
358
359 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
360
361 insert_debug_temp_for_var_def (NULL, lhs);
362 }
363
364 gimple_set_lhs (stmt, nlhs);
365 }
366
367
368 /* Given a tree for an expression for which we might want to emit
369 locations or values in debug information (generally a variable, but
370 we might deal with other kinds of trees in the future), return the
371 tree that should be used as the variable of a DEBUG_BIND STMT or
372 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
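/* For instance, for an SSA name x_3 whose SSA_NAME_VAR is the user
   variable 'x', this returns 'x'; for an anonymous SSA name, a
   DECL_IGNORED_P variable or a variable of non-register type it
   returns NULL_TREE.  */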
373
374 tree
375 target_for_debug_bind (tree var)
376 {
377 if (!MAY_HAVE_DEBUG_STMTS)
378 return NULL_TREE;
379
380 if (TREE_CODE (var) == SSA_NAME)
381 {
382 var = SSA_NAME_VAR (var);
383 if (var == NULL_TREE)
384 return NULL_TREE;
385 }
386
387 if ((TREE_CODE (var) != VAR_DECL
388 || VAR_DECL_IS_VIRTUAL_OPERAND (var))
389 && TREE_CODE (var) != PARM_DECL)
390 return NULL_TREE;
391
392 if (DECL_HAS_VALUE_EXPR_P (var))
393 return target_for_debug_bind (DECL_VALUE_EXPR (var));
394
395 if (DECL_IGNORED_P (var))
396 return NULL_TREE;
397
398 /* var-tracking only tracks registers. */
399 if (!is_gimple_reg_type (TREE_TYPE (var)))
400 return NULL_TREE;
401
402 return var;
403 }
404
405 /* Called via walk_tree, look for SSA_NAMEs that have already been
406 released. */
407
408 static tree
409 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
410 {
411 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
412
413 if (wi && wi->is_lhs)
414 return NULL_TREE;
415
416 if (TREE_CODE (*tp) == SSA_NAME)
417 {
418 if (SSA_NAME_IN_FREE_LIST (*tp))
419 return *tp;
420
421 *walk_subtrees = 0;
422 }
423 else if (IS_TYPE_OR_DECL_P (*tp))
424 *walk_subtrees = 0;
425
426 return NULL_TREE;
427 }
428
429 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
430 by other DEBUG stmts, and replace uses of the DEF with the
431 newly-created debug temp. */
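/* A sketch of the effect (names are illustrative): when the definition

     x_3 = a_1 + b_2;

   is about to be removed while a debug stmt still refers to x_3, as in

     # DEBUG x => x_3

   this either propagates the RHS directly, yielding
   '# DEBUG x => a_1 + b_2', or, when sharing the RHS would be unsafe or
   there are several debug uses, introduces a debug temporary:

     # DEBUG D#1 => a_1 + b_2
     # DEBUG x => D#1  */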
432
433 void
434 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
435 {
436 imm_use_iterator imm_iter;
437 use_operand_p use_p;
438 gimple stmt;
439 gimple def_stmt = NULL;
440 int usecount = 0;
441 tree value = NULL;
442
443 if (!MAY_HAVE_DEBUG_STMTS)
444 return;
445
446 /* If this name has already been registered for replacement, do nothing
447 as anything that uses this name isn't in SSA form. */
448 if (name_registered_for_update_p (var))
449 return;
450
451 /* Check whether there are debug stmts that reference this variable and,
452 if there are, decide whether we should use a debug temp. */
453 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
454 {
455 stmt = USE_STMT (use_p);
456
457 if (!gimple_debug_bind_p (stmt))
458 continue;
459
460 if (usecount++)
461 break;
462
463 if (gimple_debug_bind_get_value (stmt) != var)
464 {
465 /* Count this as an additional use, so as to make sure we
466 use a temp unless VAR's definition has a SINGLE_RHS that
467 can be shared. */
468 usecount++;
469 break;
470 }
471 }
472
473 if (!usecount)
474 return;
475
476 if (gsi)
477 def_stmt = gsi_stmt (*gsi);
478 else
479 def_stmt = SSA_NAME_DEF_STMT (var);
480
481 /* If we didn't get an insertion point, and the stmt has already
482 been removed, we won't be able to insert the debug bind stmt, so
483 we'll have to drop debug information. */
484 if (gimple_code (def_stmt) == GIMPLE_PHI)
485 {
486 value = degenerate_phi_result (def_stmt);
487 if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
488 value = NULL;
489 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
490 to. */
491 else if (value == error_mark_node)
492 value = NULL;
493 }
494 else if (is_gimple_assign (def_stmt))
495 {
496 bool no_value = false;
497
498 if (!dom_info_available_p (CDI_DOMINATORS))
499 {
500 struct walk_stmt_info wi;
501
502 memset (&wi, 0, sizeof (wi));
503
504 /* When removing blocks without following reverse dominance
505 order, we may sometimes encounter SSA_NAMEs that have
506 already been released, referenced in other SSA_DEFs that
507 we're about to release. Consider:
508
509 <bb X>:
510 v_1 = foo;
511
512 <bb Y>:
513 w_2 = v_1 + bar;
514 # DEBUG w => w_2
515
516 If we deleted BB X first, propagating the value of w_2
 517 won't do us any good. It's too late to recover the
518 original definition of v_1: when it was deleted, it was
519 only referenced in other DEFs, it couldn't possibly know
520 it should have been retained, and propagating every
521 single DEF just in case it might have to be propagated
522 into a DEBUG STMT would probably be too wasteful.
523
524 When dominator information is not readily available, we
525 check for and accept some loss of debug information. But
526 if it is available, there's no excuse for us to remove
527 blocks in the wrong order, so we don't even check for
528 dead SSA NAMEs. SSA verification shall catch any
529 errors. */
530 if ((!gsi && !gimple_bb (def_stmt))
531 || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
532 no_value = true;
533 }
534
535 if (!no_value)
536 value = gimple_assign_rhs_to_tree (def_stmt);
537 }
538
539 if (value)
540 {
541 /* If there's a single use of VAR, and VAR is the entire debug
542 expression (usecount would have been incremented again
543 otherwise), and the definition involves only constants and
544 SSA names, then we can propagate VALUE into this single use,
545 avoiding the temp.
546
547 We can also avoid using a temp if VALUE can be shared and
548 propagated into all uses, without generating expressions that
549 wouldn't be valid gimple RHSs.
550
551 Other cases that would require unsharing or non-gimple RHSs
552 are deferred to a debug temp, although we could avoid temps
553 at the expense of duplication of expressions. */
554
555 if (CONSTANT_CLASS_P (value)
556 || gimple_code (def_stmt) == GIMPLE_PHI
557 || (usecount == 1
558 && (!gimple_assign_single_p (def_stmt)
559 || is_gimple_min_invariant (value)))
560 || is_gimple_reg (value))
561 ;
562 else
563 {
564 gimple def_temp;
565 tree vexpr = make_node (DEBUG_EXPR_DECL);
566
567 def_temp = gimple_build_debug_bind (vexpr,
568 unshare_expr (value),
569 def_stmt);
570
571 DECL_ARTIFICIAL (vexpr) = 1;
572 TREE_TYPE (vexpr) = TREE_TYPE (value);
573 if (DECL_P (value))
574 DECL_MODE (vexpr) = DECL_MODE (value);
575 else
576 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));
577
578 if (gsi)
579 gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
580 else
581 {
582 gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
583 gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
584 }
585
586 value = vexpr;
587 }
588 }
589
590 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
591 {
592 if (!gimple_debug_bind_p (stmt))
593 continue;
594
595 if (value)
596 {
597 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
598 /* unshare_expr is not needed here. vexpr is either a
599 SINGLE_RHS, that can be safely shared, some other RHS
600 that was unshared when we found it had a single debug
601 use, or a DEBUG_EXPR_DECL, that can be safely
602 shared. */
603 SET_USE (use_p, unshare_expr (value));
604 /* If we didn't replace uses with a debug decl fold the
605 resulting expression. Otherwise we end up with invalid IL. */
606 if (TREE_CODE (value) != DEBUG_EXPR_DECL)
607 {
608 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
609 fold_stmt_inplace (&gsi);
610 }
611 }
612 else
613 gimple_debug_bind_reset_value (stmt);
614
615 update_stmt (stmt);
616 }
617 }
618
619
620 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
621 other DEBUG stmts, and replace uses of the DEF with the
622 newly-created debug temp. */
623
624 void
625 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
626 {
627 gimple stmt;
628 ssa_op_iter op_iter;
629 def_operand_p def_p;
630
631 if (!MAY_HAVE_DEBUG_STMTS)
632 return;
633
634 stmt = gsi_stmt (*gsi);
635
636 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
637 {
638 tree var = DEF_FROM_PTR (def_p);
639
640 if (TREE_CODE (var) != SSA_NAME)
641 continue;
642
643 insert_debug_temp_for_var_def (gsi, var);
644 }
645 }
646
647 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
648
649 void
650 reset_debug_uses (gimple stmt)
651 {
652 ssa_op_iter op_iter;
653 def_operand_p def_p;
654 imm_use_iterator imm_iter;
655 gimple use_stmt;
656
657 if (!MAY_HAVE_DEBUG_STMTS)
658 return;
659
660 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
661 {
662 tree var = DEF_FROM_PTR (def_p);
663
664 if (TREE_CODE (var) != SSA_NAME)
665 continue;
666
667 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
668 {
669 if (!gimple_debug_bind_p (use_stmt))
670 continue;
671
672 gimple_debug_bind_reset_value (use_stmt);
673 update_stmt (use_stmt);
674 }
675 }
676 }
677
678 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
679 dominated stmts before their dominators, so that release_ssa_defs
680 stands a chance of propagating DEFs into debug bind stmts. */
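/* For example (illustrative), if both a_1 and b_2 are in TOREMOVE in

     a_1 = ...;
     b_2 = a_1 + 1;

   then b_2 is released first, so its RHS can still be propagated into
   debug stmts that mention b_2 while a_1's definition exists; a_1 is
   then removed on a later iteration of the loop below.  */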
681
682 void
683 release_defs_bitset (bitmap toremove)
684 {
685 unsigned j;
686 bitmap_iterator bi;
687
 688 /* Performing a topological sort is probably overkill; this will
689 most likely run in slightly superlinear time, rather than the
690 pathological quadratic worst case. */
691 while (!bitmap_empty_p (toremove))
692 EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
693 {
694 bool remove_now = true;
695 tree var = ssa_name (j);
696 gimple stmt;
697 imm_use_iterator uit;
698
699 FOR_EACH_IMM_USE_STMT (stmt, uit, var)
700 {
701 ssa_op_iter dit;
702 def_operand_p def_p;
703
704 /* We can't propagate PHI nodes into debug stmts. */
705 if (gimple_code (stmt) == GIMPLE_PHI
706 || is_gimple_debug (stmt))
707 continue;
708
709 /* If we find another definition to remove that uses
710 the one we're looking at, defer the removal of this
711 one, so that it can be propagated into debug stmts
712 after the other is. */
713 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
714 {
715 tree odef = DEF_FROM_PTR (def_p);
716
717 if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
718 {
719 remove_now = false;
720 break;
721 }
722 }
723
724 if (!remove_now)
725 BREAK_FROM_IMM_USE_STMT (uit);
726 }
727
728 if (remove_now)
729 {
730 gimple def = SSA_NAME_DEF_STMT (var);
731 gimple_stmt_iterator gsi = gsi_for_stmt (def);
732
733 if (gimple_code (def) == GIMPLE_PHI)
734 remove_phi_node (&gsi, true);
735 else
736 {
737 gsi_remove (&gsi, true);
738 release_defs (def);
739 }
740
741 bitmap_clear_bit (toremove, j);
742 }
743 }
744 }
745
746 /* Return true if SSA_NAME is malformed and mark it visited.
747
748 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
749 operand. */
750
751 static bool
752 verify_ssa_name (tree ssa_name, bool is_virtual)
753 {
754 if (TREE_CODE (ssa_name) != SSA_NAME)
755 {
756 error ("expected an SSA_NAME object");
757 return true;
758 }
759
760 if (SSA_NAME_IN_FREE_LIST (ssa_name))
761 {
762 error ("found an SSA_NAME that had been released into the free pool");
763 return true;
764 }
765
766 if (SSA_NAME_VAR (ssa_name) != NULL_TREE
767 && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
768 {
769 error ("type mismatch between an SSA_NAME and its symbol");
770 return true;
771 }
772
773 if (is_virtual && !virtual_operand_p (ssa_name))
774 {
775 error ("found a virtual definition for a GIMPLE register");
776 return true;
777 }
778
779 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
780 {
781 error ("virtual SSA name for non-VOP decl");
782 return true;
783 }
784
785 if (!is_virtual && virtual_operand_p (ssa_name))
786 {
787 error ("found a real definition for a non-register");
788 return true;
789 }
790
791 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
792 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
793 {
794 error ("found a default name with a non-empty defining statement");
795 return true;
796 }
797
798 return false;
799 }
800
801
802 /* Return true if the definition of SSA_NAME at block BB is malformed.
803
804 STMT is the statement where SSA_NAME is created.
805
806 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
807 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
808 it means that the block in that array slot contains the
809 definition of SSA_NAME.
810
811 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
812
813 static bool
814 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
815 gimple stmt, bool is_virtual)
816 {
817 if (verify_ssa_name (ssa_name, is_virtual))
818 goto err;
819
820 if (SSA_NAME_VAR (ssa_name)
821 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
822 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
823 {
824 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
825 goto err;
826 }
827
828 if (definition_block[SSA_NAME_VERSION (ssa_name)])
829 {
830 error ("SSA_NAME created in two different blocks %i and %i",
831 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
832 goto err;
833 }
834
835 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
836
837 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
838 {
839 error ("SSA_NAME_DEF_STMT is wrong");
840 fprintf (stderr, "Expected definition statement:\n");
841 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
842 fprintf (stderr, "\nActual definition statement:\n");
843 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
844 goto err;
845 }
846
847 return false;
848
849 err:
850 fprintf (stderr, "while verifying SSA_NAME ");
851 print_generic_expr (stderr, ssa_name, 0);
852 fprintf (stderr, " in statement\n");
853 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
854
855 return true;
856 }
857
858
859 /* Return true if the use of SSA_NAME at statement STMT in block BB is
860 malformed.
861
862 DEF_BB is the block where SSA_NAME was found to be created.
863
864 IDOM contains immediate dominator information for the flowgraph.
865
866 CHECK_ABNORMAL is true if the caller wants to check whether this use
867 is flowing through an abnormal edge (only used when checking PHI
868 arguments).
869
870 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
871 that are defined before STMT in basic block BB. */
872
873 static bool
874 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
875 gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
876 {
877 bool err = false;
878 tree ssa_name = USE_FROM_PTR (use_p);
879
880 if (!TREE_VISITED (ssa_name))
881 if (verify_imm_links (stderr, ssa_name))
882 err = true;
883
884 TREE_VISITED (ssa_name) = 1;
885
886 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
887 && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
888 ; /* Default definitions have empty statements. Nothing to do. */
889 else if (!def_bb)
890 {
891 error ("missing definition");
892 err = true;
893 }
894 else if (bb != def_bb
895 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
896 {
897 error ("definition in block %i does not dominate use in block %i",
898 def_bb->index, bb->index);
899 err = true;
900 }
901 else if (bb == def_bb
902 && names_defined_in_bb != NULL
903 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
904 {
905 error ("definition in block %i follows the use", def_bb->index);
906 err = true;
907 }
908
909 if (check_abnormal
910 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
911 {
912 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
913 err = true;
914 }
915
 916 /* Make sure the use is on an appropriate immediate-use list by checking
 917 that the previous element refers to the same SSA name. */
918 if (use_p->prev == NULL)
919 {
920 error ("no immediate_use list");
921 err = true;
922 }
923 else
924 {
925 tree listvar;
926 if (use_p->prev->use == NULL)
927 listvar = use_p->prev->loc.ssa_name;
928 else
929 listvar = USE_FROM_PTR (use_p->prev);
930 if (listvar != ssa_name)
931 {
932 error ("wrong immediate use list");
933 err = true;
934 }
935 }
936
937 if (err)
938 {
939 fprintf (stderr, "for SSA_NAME: ");
940 print_generic_expr (stderr, ssa_name, TDF_VOPS);
941 fprintf (stderr, " in statement:\n");
942 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
943 }
944
945 return err;
946 }
947
948
949 /* Return true if any of the arguments for PHI node PHI at block BB is
950 malformed.
951
952 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
953 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
954 it means that the block in that array slot contains the
955 definition of SSA_NAME. */
956
957 static bool
958 verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
959 {
960 edge e;
961 bool err = false;
962 size_t i, phi_num_args = gimple_phi_num_args (phi);
963
964 if (EDGE_COUNT (bb->preds) != phi_num_args)
965 {
966 error ("incoming edge count does not match number of PHI arguments");
967 err = true;
968 goto error;
969 }
970
971 for (i = 0; i < phi_num_args; i++)
972 {
973 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
974 tree op = USE_FROM_PTR (op_p);
975
976 e = EDGE_PRED (bb, i);
977
978 if (op == NULL_TREE)
979 {
980 error ("PHI argument is missing for edge %d->%d",
981 e->src->index,
982 e->dest->index);
983 err = true;
984 goto error;
985 }
986
987 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
988 {
989 error ("PHI argument is not SSA_NAME, or invariant");
990 err = true;
991 }
992
993 if (TREE_CODE (op) == SSA_NAME)
994 {
995 err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
996 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
997 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
998 }
999
1000 if (TREE_CODE (op) == ADDR_EXPR)
1001 {
1002 tree base = TREE_OPERAND (op, 0);
1003 while (handled_component_p (base))
1004 base = TREE_OPERAND (base, 0);
1005 if ((TREE_CODE (base) == VAR_DECL
1006 || TREE_CODE (base) == PARM_DECL
1007 || TREE_CODE (base) == RESULT_DECL)
1008 && !TREE_ADDRESSABLE (base))
1009 {
1010 error ("address taken, but ADDRESSABLE bit not set");
1011 err = true;
1012 }
1013 }
1014
1015 if (e->dest != bb)
1016 {
1017 error ("wrong edge %d->%d for PHI argument",
1018 e->src->index, e->dest->index);
1019 err = true;
1020 }
1021
1022 if (err)
1023 {
1024 fprintf (stderr, "PHI argument\n");
1025 print_generic_stmt (stderr, op, TDF_VOPS);
1026 goto error;
1027 }
1028 }
1029
1030 error:
1031 if (err)
1032 {
1033 fprintf (stderr, "for PHI node\n");
1034 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
1035 }
1036
1037
1038 return err;
1039 }
1040
1041
1042 /* Verify common invariants in the SSA web.
1043 TODO: verify the variable annotations. */
1044
1045 DEBUG_FUNCTION void
1046 verify_ssa (bool check_modified_stmt)
1047 {
1048 size_t i;
1049 basic_block bb;
1050 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
1051 ssa_op_iter iter;
1052 tree op;
1053 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
1054 bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
1055
1056 gcc_assert (!need_ssa_update_p (cfun));
1057
1058 timevar_push (TV_TREE_SSA_VERIFY);
1059
1060 /* Keep track of SSA names present in the IL. */
1061 for (i = 1; i < num_ssa_names; i++)
1062 {
1063 tree name = ssa_name (i);
1064 if (name)
1065 {
1066 gimple stmt;
1067 TREE_VISITED (name) = 0;
1068
1069 verify_ssa_name (name, virtual_operand_p (name));
1070
1071 stmt = SSA_NAME_DEF_STMT (name);
1072 if (!gimple_nop_p (stmt))
1073 {
1074 basic_block bb = gimple_bb (stmt);
1075 verify_def (bb, definition_block,
1076 name, stmt, virtual_operand_p (name));
1077
1078 }
1079 }
1080 }
1081
1082 calculate_dominance_info (CDI_DOMINATORS);
1083
1084 /* Now verify all the uses and make sure they agree with the definitions
1085 found in the previous pass. */
1086 FOR_EACH_BB (bb)
1087 {
1088 edge e;
1089 gimple phi;
1090 edge_iterator ei;
1091 gimple_stmt_iterator gsi;
1092
1093 /* Make sure that all edges have a clear 'aux' field. */
1094 FOR_EACH_EDGE (e, ei, bb->preds)
1095 {
1096 if (e->aux)
1097 {
1098 error ("AUX pointer initialized for edge %d->%d", e->src->index,
1099 e->dest->index);
1100 goto err;
1101 }
1102 }
1103
1104 /* Verify the arguments for every PHI node in the block. */
1105 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1106 {
1107 phi = gsi_stmt (gsi);
1108 if (verify_phi_args (phi, bb, definition_block))
1109 goto err;
1110
1111 bitmap_set_bit (names_defined_in_bb,
1112 SSA_NAME_VERSION (gimple_phi_result (phi)));
1113 }
1114
1115 /* Now verify all the uses and vuses in every statement of the block. */
1116 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1117 {
1118 gimple stmt = gsi_stmt (gsi);
1119 use_operand_p use_p;
1120
1121 if (check_modified_stmt && gimple_modified_p (stmt))
1122 {
1123 error ("stmt (%p) marked modified after optimization pass: ",
1124 (void *)stmt);
1125 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1126 goto err;
1127 }
1128
1129 if (verify_ssa_operands (stmt))
1130 {
1131 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1132 goto err;
1133 }
1134
1135 if (gimple_debug_bind_p (stmt)
1136 && !gimple_debug_bind_has_value_p (stmt))
1137 continue;
1138
1139 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
1140 {
1141 op = USE_FROM_PTR (use_p);
1142 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1143 use_p, stmt, false, names_defined_in_bb))
1144 goto err;
1145 }
1146
1147 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1148 {
1149 if (SSA_NAME_DEF_STMT (op) != stmt)
1150 {
1151 error ("SSA_NAME_DEF_STMT is wrong");
1152 fprintf (stderr, "Expected definition statement:\n");
1153 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1154 fprintf (stderr, "\nActual definition statement:\n");
1155 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1156 4, TDF_VOPS);
1157 goto err;
1158 }
1159 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1160 }
1161 }
1162
1163 bitmap_clear (names_defined_in_bb);
1164 }
1165
1166 free (definition_block);
1167
1168 /* Restore the dominance information to its prior known state, so
1169 that we do not perturb the compiler's subsequent behavior. */
1170 if (orig_dom_state == DOM_NONE)
1171 free_dominance_info (CDI_DOMINATORS);
1172 else
1173 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1174
1175 BITMAP_FREE (names_defined_in_bb);
1176 timevar_pop (TV_TREE_SSA_VERIFY);
1177 return;
1178
1179 err:
1180 internal_error ("verify_ssa failed");
1181 }
1182
 1183 /* Return true if the underlying variables of both SSA names have equal DECL_UIDs. */
1184
1185 static int
1186 uid_ssaname_map_eq (const void *va, const void *vb)
1187 {
1188 const_tree a = (const_tree) va;
1189 const_tree b = (const_tree) vb;
1190 return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid);
1191 }
1192
 1193 /* Hash an SSA_NAME by the DECL_UID of its underlying variable. */
1194
1195 static unsigned int
1196 uid_ssaname_map_hash (const void *item)
1197 {
1198 return ((const_tree)item)->ssa_name.var->decl_minimal.uid;
1199 }
1200
1201
1202 /* Initialize global DFA and SSA structures. */
1203
1204 void
1205 init_tree_ssa (struct function *fn)
1206 {
1207 fn->gimple_df = ggc_alloc_cleared_gimple_df ();
1208 fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
1209 uid_ssaname_map_eq, NULL);
1210 pt_solution_reset (&fn->gimple_df->escaped);
1211 init_ssanames (fn, 0);
1212 }
1213
1214 /* Do the actions required to initialize internal data structures used
1215 in tree-ssa optimization passes. */
1216
1217 static unsigned int
1218 execute_init_datastructures (void)
1219 {
1220 /* Allocate hash tables, arrays and other structures. */
1221 gcc_assert (!cfun->gimple_df);
1222 init_tree_ssa (cfun);
1223 return 0;
1224 }
1225
 1226 /* Gate for the init_datastructures pass. */
1227
1228 static bool
1229 gate_init_datastructures (void)
1230 {
 1231 /* Do nothing for functions that were already produced in SSA form. */
1232 return !(cfun->curr_properties & PROP_ssa);
1233 }
1234
1235 namespace {
1236
1237 const pass_data pass_data_init_datastructures =
1238 {
1239 GIMPLE_PASS, /* type */
1240 "*init_datastructures", /* name */
1241 OPTGROUP_NONE, /* optinfo_flags */
1242 true, /* has_gate */
1243 true, /* has_execute */
1244 TV_NONE, /* tv_id */
1245 PROP_cfg, /* properties_required */
1246 0, /* properties_provided */
1247 0, /* properties_destroyed */
1248 0, /* todo_flags_start */
1249 0, /* todo_flags_finish */
1250 };
1251
1252 class pass_init_datastructures : public gimple_opt_pass
1253 {
1254 public:
1255 pass_init_datastructures (gcc::context *ctxt)
1256 : gimple_opt_pass (pass_data_init_datastructures, ctxt)
1257 {}
1258
1259 /* opt_pass methods: */
1260 bool gate () { return gate_init_datastructures (); }
1261 unsigned int execute () { return execute_init_datastructures (); }
1262
1263 }; // class pass_init_datastructures
1264
1265 } // anon namespace
1266
1267 gimple_opt_pass *
1268 make_pass_init_datastructures (gcc::context *ctxt)
1269 {
1270 return new pass_init_datastructures (ctxt);
1271 }
1272
 1273 /* Deallocate memory associated with SSA data structures for the current function. */
1274
1275 void
1276 delete_tree_ssa (void)
1277 {
1278 fini_ssanames ();
1279
1280 /* We no longer maintain the SSA operand cache at this point. */
1281 if (ssa_operands_active (cfun))
1282 fini_ssa_operands ();
1283
1284 htab_delete (cfun->gimple_df->default_defs);
1285 cfun->gimple_df->default_defs = NULL;
1286 pt_solution_reset (&cfun->gimple_df->escaped);
1287 if (cfun->gimple_df->decls_to_pointers != NULL)
1288 pointer_map_destroy (cfun->gimple_df->decls_to_pointers);
1289 cfun->gimple_df->decls_to_pointers = NULL;
1290 cfun->gimple_df->modified_noreturn_calls = NULL;
1291 cfun->gimple_df = NULL;
1292
1293 /* We no longer need the edge variable maps. */
1294 redirect_edge_var_map_destroy ();
1295 }
1296
1297 /* Return true if EXPR is a useless type conversion, otherwise return
1298 false. */
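/* For example (a rough illustration; the precise rules live in
   useless_type_conversion_p): converting between two integer types of
   the same precision and signedness is typically useless, whereas a
   widening conversion from 'short' to 'int' is not.  */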
1299
1300 bool
1301 tree_ssa_useless_type_conversion (tree expr)
1302 {
1303 /* If we have an assignment that merely uses a NOP_EXPR to change
1304 the top of the RHS to the type of the LHS and the type conversion
1305 is "safe", then strip away the type conversion so that we can
1306 enter LHS = RHS into the const_and_copies table. */
1307 if (CONVERT_EXPR_P (expr)
1308 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1309 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1310 return useless_type_conversion_p
1311 (TREE_TYPE (expr),
1312 TREE_TYPE (TREE_OPERAND (expr, 0)));
1313
1314 return false;
1315 }
1316
1317 /* Strip conversions from EXP according to
1318 tree_ssa_useless_type_conversion and return the resulting
1319 expression. */
1320
1321 tree
1322 tree_ssa_strip_useless_type_conversions (tree exp)
1323 {
1324 while (tree_ssa_useless_type_conversion (exp))
1325 exp = TREE_OPERAND (exp, 0);
1326 return exp;
1327 }
1328
1329
1330 /* Return true if T, an SSA_NAME, has an undefined value. */
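/* E.g. for 'int i; use (i);' the default definition i_1(D) has an
   empty defining statement and is therefore undefined, whereas the
   default definition of a PARM_DECL or of a DECL_BY_REFERENCE result
   is not.  */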
1331
1332 bool
1333 ssa_undefined_value_p (tree t)
1334 {
1335 tree var = SSA_NAME_VAR (t);
1336
1337 if (!var)
1338 ;
1339 /* Parameters get their initial value from the function entry. */
1340 else if (TREE_CODE (var) == PARM_DECL)
1341 return false;
1342 /* When returning by reference the return address is actually a hidden
1343 parameter. */
1344 else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1345 return false;
1346 /* Hard register variables get their initial value from the ether. */
1347 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1348 return false;
1349
1350 /* The value is undefined iff its definition statement is empty. */
1351 return gimple_nop_p (SSA_NAME_DEF_STMT (t));
1352 }
1353
1354
1355 /* If necessary, rewrite the base of the reference tree *TP from
1356 a MEM_REF to a plain or converted symbol. */
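/* Illustrative examples of the rewrites performed below: a MEM_REF of
   &sym at offset zero with a type matching 'sym' becomes plain 'sym',
   and with a mismatched type it becomes a VIEW_CONVERT_EXPR of 'sym';
   suitably aligned element accesses into a vector 'sym' become
   BIT_FIELD_REFs, and accesses into a _Complex 'sym' become
   REALPART_EXPR or IMAGPART_EXPR.  */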
1357
1358 static void
1359 maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
1360 {
1361 tree sym;
1362
1363 while (handled_component_p (*tp))
1364 tp = &TREE_OPERAND (*tp, 0);
1365 if (TREE_CODE (*tp) == MEM_REF
1366 && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
1367 && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1368 && DECL_P (sym)
1369 && !TREE_ADDRESSABLE (sym)
1370 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1371 {
1372 if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1373 && useless_type_conversion_p (TREE_TYPE (*tp),
1374 TREE_TYPE (TREE_TYPE (sym)))
1375 && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1376 TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1377 {
1378 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1379 TYPE_SIZE (TREE_TYPE (*tp)),
1380 int_const_binop (MULT_EXPR,
1381 bitsize_int (BITS_PER_UNIT),
1382 TREE_OPERAND (*tp, 1)));
1383 }
1384 else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1385 && useless_type_conversion_p (TREE_TYPE (*tp),
1386 TREE_TYPE (TREE_TYPE (sym))))
1387 {
1388 *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1389 ? REALPART_EXPR : IMAGPART_EXPR,
1390 TREE_TYPE (*tp), sym);
1391 }
1392 else if (integer_zerop (TREE_OPERAND (*tp, 1)))
1393 {
1394 if (!useless_type_conversion_p (TREE_TYPE (*tp),
1395 TREE_TYPE (sym)))
1396 *tp = build1 (VIEW_CONVERT_EXPR,
1397 TREE_TYPE (*tp), sym);
1398 else
1399 *tp = sym;
1400 }
1401 }
1402 }
1403
1404 /* For a tree REF return its base if it is the base of a MEM_REF
1405 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1406
1407 static tree
1408 non_rewritable_mem_ref_base (tree ref)
1409 {
1410 tree base = ref;
1411
 1412 /* A plain decl is never a non-rewritable MEM_REF base. */
1413 if (DECL_P (ref))
1414 return NULL_TREE;
1415
1416 while (handled_component_p (base))
1417 base = TREE_OPERAND (base, 0);
1418
1419 /* But watch out for MEM_REFs we cannot lower to a
1420 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1421 if (TREE_CODE (base) == MEM_REF
1422 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1423 {
1424 tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1425 if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1426 || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
1427 && useless_type_conversion_p (TREE_TYPE (base),
1428 TREE_TYPE (TREE_TYPE (decl)))
1429 && wi::fits_uhwi_p (mem_ref_offset (base))
1430 && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1431 mem_ref_offset (base))
1432 && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1433 TYPE_SIZE_UNIT (TREE_TYPE (base))))
1434 return NULL_TREE;
1435 if (DECL_P (decl)
1436 && (!integer_zerop (TREE_OPERAND (base, 1))
1437 || (DECL_SIZE (decl)
1438 != TYPE_SIZE (TREE_TYPE (base)))
1439 || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
1440 return decl;
1441 }
1442
1443 return NULL_TREE;
1444 }
1445
 1446 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
 1447 Otherwise return false. */
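/* For instance (illustrative GIMPLE), a store 'MEM[(T *)&d, 0] = x_1'
   that covers all of 'd' with matching volatility is still rewritable,
   while a store at a non-zero offset or of a smaller size prevents 'd'
   from being rewritten into SSA form.  */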
1448
1449 static bool
1450 non_rewritable_lvalue_p (tree lhs)
1451 {
1452 /* A plain decl is always rewritable. */
1453 if (DECL_P (lhs))
1454 return false;
1455
 1456 /* A decl that is wrapped inside a MEM_REF that covers
 1457 it in full is also rewritable.
1458 ??? The following could be relaxed allowing component
1459 references that do not change the access size. */
1460 if (TREE_CODE (lhs) == MEM_REF
1461 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1462 && integer_zerop (TREE_OPERAND (lhs, 1)))
1463 {
1464 tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1465 if (DECL_P (decl)
1466 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1467 && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1468 return false;
1469 }
1470
1471 return true;
1472 }
1473
 1474 /* When possible, clear the TREE_ADDRESSABLE bit or set the DECL_GIMPLE_REG_P
 1475 bit of VAR and record it in SUITABLE_FOR_RENAMING so that it can later be
 1476 rewritten into SSA form. */
1477
1478 static void
1479 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
1480 bitmap suitable_for_renaming)
1481 {
 1482 /* Global variables, result decls and decls whose address is taken cannot be changed. */
1483 if (is_global_var (var)
1484 || TREE_CODE (var) == RESULT_DECL
1485 || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1486 return;
1487
1488 if (TREE_ADDRESSABLE (var)
1489 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1490 a non-register. Otherwise we are confused and forget to
1491 add virtual operands for it. */
1492 && (!is_gimple_reg_type (TREE_TYPE (var))
1493 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1494 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1495 || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1496 {
1497 TREE_ADDRESSABLE (var) = 0;
1498 if (is_gimple_reg (var))
1499 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1500 if (dump_file)
1501 {
1502 fprintf (dump_file, "No longer having address taken: ");
1503 print_generic_expr (dump_file, var, 0);
1504 fprintf (dump_file, "\n");
1505 }
1506 }
1507
1508 if (!DECL_GIMPLE_REG_P (var)
1509 && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1510 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1511 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1512 && !TREE_THIS_VOLATILE (var)
1513 && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
1514 {
1515 DECL_GIMPLE_REG_P (var) = 1;
1516 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1517 if (dump_file)
1518 {
1519 fprintf (dump_file, "Now a gimple register: ");
1520 print_generic_expr (dump_file, var, 0);
1521 fprintf (dump_file, "\n");
1522 }
1523 }
1524 }
1525
1526 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
1527
1528 void
1529 execute_update_addresses_taken (void)
1530 {
1531 gimple_stmt_iterator gsi;
1532 basic_block bb;
1533 bitmap addresses_taken = BITMAP_ALLOC (NULL);
1534 bitmap not_reg_needs = BITMAP_ALLOC (NULL);
1535 bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
1536 tree var;
1537 unsigned i;
1538
1539 timevar_push (TV_ADDRESS_TAKEN);
1540
1541 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1542 the function body. */
1543 FOR_EACH_BB (bb)
1544 {
1545 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1546 {
1547 gimple stmt = gsi_stmt (gsi);
1548 enum gimple_code code = gimple_code (stmt);
1549 tree decl;
1550
1551 /* Note all addresses taken by the stmt. */
1552 gimple_ior_addresses_taken (addresses_taken, stmt);
1553
1554 /* If we have a call or an assignment, see if the lhs contains
 1555 a local decl that must not become a gimple register. */
1556 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1557 {
1558 tree lhs = gimple_get_lhs (stmt);
1559 if (lhs
1560 && TREE_CODE (lhs) != SSA_NAME
1561 && non_rewritable_lvalue_p (lhs))
1562 {
1563 decl = get_base_address (lhs);
1564 if (DECL_P (decl))
1565 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1566 }
1567 }
1568
1569 if (gimple_assign_single_p (stmt))
1570 {
1571 tree rhs = gimple_assign_rhs1 (stmt);
1572 if ((decl = non_rewritable_mem_ref_base (rhs)))
1573 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1574 }
1575
1576 else if (code == GIMPLE_CALL)
1577 {
1578 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1579 {
1580 tree arg = gimple_call_arg (stmt, i);
1581 if ((decl = non_rewritable_mem_ref_base (arg)))
1582 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1583 }
1584 }
1585
1586 else if (code == GIMPLE_ASM)
1587 {
1588 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1589 {
1590 tree link = gimple_asm_output_op (stmt, i);
1591 tree lhs = TREE_VALUE (link);
1592 if (TREE_CODE (lhs) != SSA_NAME)
1593 {
1594 decl = get_base_address (lhs);
1595 if (DECL_P (decl)
1596 && (non_rewritable_lvalue_p (lhs)
1597 /* We cannot move required conversions from
1598 the lhs to the rhs in asm statements, so
1599 require we do not need any. */
1600 || !useless_type_conversion_p
1601 (TREE_TYPE (lhs), TREE_TYPE (decl))))
1602 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1603 }
1604 }
1605 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1606 {
1607 tree link = gimple_asm_input_op (stmt, i);
1608 if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1609 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1610 }
1611 }
1612 }
1613
1614 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1615 {
1616 size_t i;
1617 gimple phi = gsi_stmt (gsi);
1618
1619 for (i = 0; i < gimple_phi_num_args (phi); i++)
1620 {
1621 tree op = PHI_ARG_DEF (phi, i), var;
1622 if (TREE_CODE (op) == ADDR_EXPR
1623 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1624 && DECL_P (var))
1625 bitmap_set_bit (addresses_taken, DECL_UID (var));
1626 }
1627 }
1628 }
1629
1630 /* We cannot iterate over all referenced vars because that can contain
1631 unused vars from BLOCK trees, which causes code generation differences
1632 for -g vs. -g0. */
1633 for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1634 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1635 suitable_for_renaming);
1636
1637 FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1638 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1639 suitable_for_renaming);
1640
1641 /* Operand caches need to be recomputed for operands referencing the updated
1642 variables and operands need to be rewritten to expose bare symbols. */
1643 if (!bitmap_empty_p (suitable_for_renaming))
1644 {
1645 FOR_EACH_BB (bb)
1646 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1647 {
1648 gimple stmt = gsi_stmt (gsi);
1649
 1650 /* Re-write MEM_REFs of symbols we want to
1651 rewrite into SSA form. */
1652 if (gimple_assign_single_p (stmt))
1653 {
1654 tree lhs = gimple_assign_lhs (stmt);
1655 tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1656 tree sym;
1657
1658 /* We shouldn't have any fancy wrapping of
1659 component-refs on the LHS, but look through
1660 VIEW_CONVERT_EXPRs as that is easy. */
1661 while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1662 lhs = TREE_OPERAND (lhs, 0);
1663 if (TREE_CODE (lhs) == MEM_REF
1664 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1665 && integer_zerop (TREE_OPERAND (lhs, 1))
1666 && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1667 && DECL_P (sym)
1668 && !TREE_ADDRESSABLE (sym)
1669 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1670 lhs = sym;
1671 else
1672 lhs = gimple_assign_lhs (stmt);
1673
1674 /* Rewrite the RHS and make sure the resulting assignment
1675 is validly typed. */
1676 maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1677 rhs = gimple_assign_rhs1 (stmt);
1678 if (gimple_assign_lhs (stmt) != lhs
1679 && !useless_type_conversion_p (TREE_TYPE (lhs),
1680 TREE_TYPE (rhs)))
1681 rhs = fold_build1 (VIEW_CONVERT_EXPR,
1682 TREE_TYPE (lhs), rhs);
1683
1684 if (gimple_assign_lhs (stmt) != lhs)
1685 gimple_assign_set_lhs (stmt, lhs);
1686
1687 /* For var ={v} {CLOBBER}; where var lost
1688 TREE_ADDRESSABLE just remove the stmt. */
1689 if (DECL_P (lhs)
1690 && TREE_CLOBBER_P (rhs)
1691 && bitmap_bit_p (suitable_for_renaming, DECL_UID (lhs)))
1692 {
1693 unlink_stmt_vdef (stmt);
1694 gsi_remove (&gsi, true);
1695 release_defs (stmt);
1696 continue;
1697 }
1698
1699 if (gimple_assign_rhs1 (stmt) != rhs)
1700 {
1701 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1702 gimple_assign_set_rhs_from_tree (&gsi, rhs);
1703 }
1704 }
1705
1706 else if (gimple_code (stmt) == GIMPLE_CALL)
1707 {
1708 unsigned i;
1709 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1710 {
1711 tree *argp = gimple_call_arg_ptr (stmt, i);
1712 maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
1713 }
1714 }
1715
1716 else if (gimple_code (stmt) == GIMPLE_ASM)
1717 {
1718 unsigned i;
1719 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1720 {
1721 tree link = gimple_asm_output_op (stmt, i);
1722 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1723 suitable_for_renaming);
1724 }
1725 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1726 {
1727 tree link = gimple_asm_input_op (stmt, i);
1728 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1729 suitable_for_renaming);
1730 }
1731 }
1732
1733 else if (gimple_debug_bind_p (stmt)
1734 && gimple_debug_bind_has_value_p (stmt))
1735 {
1736 tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
1737 tree decl;
1738 maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
1739 decl = non_rewritable_mem_ref_base (*valuep);
1740 if (decl
1741 && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
1742 gimple_debug_bind_reset_value (stmt);
1743 }
1744
1745 if (gimple_references_memory_p (stmt)
1746 || is_gimple_debug (stmt))
1747 update_stmt (stmt);
1748
1749 gsi_next (&gsi);
1750 }
1751
 1752 /* Update SSA form here; we are also called outside of any pass. */
1753 if (number_of_loops (cfun) > 1
1754 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
1755 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
1756 else
1757 update_ssa (TODO_update_ssa);
1758 }
1759
1760 BITMAP_FREE (not_reg_needs);
1761 BITMAP_FREE (addresses_taken);
1762 BITMAP_FREE (suitable_for_renaming);
1763 timevar_pop (TV_ADDRESS_TAKEN);
1764 }
1765
1766 namespace {
1767
1768 const pass_data pass_data_update_address_taken =
1769 {
1770 GIMPLE_PASS, /* type */
1771 "addressables", /* name */
1772 OPTGROUP_NONE, /* optinfo_flags */
1773 false, /* has_gate */
1774 false, /* has_execute */
1775 TV_ADDRESS_TAKEN, /* tv_id */
1776 PROP_ssa, /* properties_required */
1777 0, /* properties_provided */
1778 0, /* properties_destroyed */
1779 0, /* todo_flags_start */
1780 TODO_update_address_taken, /* todo_flags_finish */
1781 };
1782
1783 class pass_update_address_taken : public gimple_opt_pass
1784 {
1785 public:
1786 pass_update_address_taken (gcc::context *ctxt)
1787 : gimple_opt_pass (pass_data_update_address_taken, ctxt)
1788 {}
1789
1790 /* opt_pass methods: */
1791
1792 }; // class pass_update_address_taken
1793
1794 } // anon namespace
1795
1796 gimple_opt_pass *
1797 make_pass_update_address_taken (gcc::context *ctxt)
1798 {
1799 return new pass_update_address_taken (ctxt);
1800 }