/* gcc/tree-ssa.c — merge from trunk (thirdparty/gcc.git).  */
1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stor-layout.h"
26 #include "flags.h"
27 #include "tm_p.h"
28 #include "target.h"
29 #include "ggc.h"
30 #include "langhooks.h"
31 #include "basic-block.h"
32 #include "function.h"
33 #include "gimple-pretty-print.h"
34 #include "pointer-set.h"
35 #include "gimple.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "gimple-ssa.h"
40 #include "tree-phinodes.h"
41 #include "ssa-iterators.h"
42 #include "stringpool.h"
43 #include "tree-ssanames.h"
44 #include "tree-ssa-loop-manip.h"
45 #include "tree-into-ssa.h"
46 #include "tree-ssa.h"
47 #include "tree-inline.h"
48 #include "hashtab.h"
49 #include "tree-pass.h"
50 #include "diagnostic-core.h"
51 #include "cfgloop.h"
52 #include "cfgexpand.h"
53
/* Pointer map of variable mappings, keyed by edge.  Created lazily by
   redirect_edge_var_map_add and torn down by
   redirect_edge_var_map_destroy.  */
static struct pointer_map_t *edge_var_maps;
56
57
/* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */

void
redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
{
  void **slot;
  edge_var_map_vector *head;
  edge_var_map new_node;

  /* Create the per-edge map lazily on first use.  */
  if (edge_var_maps == NULL)
    edge_var_maps = pointer_map_create ();

  slot = pointer_map_insert (edge_var_maps, e);
  head = (edge_var_map_vector *) *slot;
  if (!head)
    /* First mapping for E: allocate a small initial vector.  */
    vec_safe_reserve (head, 5);
  new_node.def = def;
  new_node.result = result;
  new_node.locus = locus;

  vec_safe_push (head, new_node);
  /* vec_safe_reserve/vec_safe_push may have (re)allocated HEAD, so store
     the possibly-updated pointer back into the map slot.  */
  *slot = head;
}
81
82
83 /* Clear the var mappings in edge E. */
84
85 void
86 redirect_edge_var_map_clear (edge e)
87 {
88 void **slot;
89 edge_var_map_vector *head;
90
91 if (!edge_var_maps)
92 return;
93
94 slot = pointer_map_contains (edge_var_maps, e);
95
96 if (slot)
97 {
98 head = (edge_var_map_vector *) *slot;
99 vec_free (head);
100 *slot = NULL;
101 }
102 }
103
104
/* Duplicate the redirected var mappings in OLDE in NEWE.

   Since we can't remove a mapping, let's just duplicate it.  This assumes a
   pointer_map can have multiple edges mapping to the same var_map (many to
   one mapping), since we don't remove the previous mappings.  */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  void **new_slot, **old_slot;
  edge_var_map_vector *head;

  if (!edge_var_maps)
    return;

  /* NEWE is inserted before OLDE is looked up — presumably so that any
     internal reallocation done by pointer_map_insert cannot invalidate
     OLD_SLOT; keep this ordering.  */
  new_slot = pointer_map_insert (edge_var_maps, newe);
  old_slot = pointer_map_contains (edge_var_maps, olde);
  if (!old_slot)
    return;
  head = (edge_var_map_vector *) *old_slot;

  edge_var_map_vector *new_head = NULL;
  if (head)
    new_head = vec_safe_copy (head);
  else
    /* OLDE had a slot with an empty vector; mirror that state for NEWE
       with a freshly reserved vector.  */
    vec_safe_reserve (new_head, 5);
  *new_slot = new_head;
}
133
134
135 /* Return the variable mappings for a given edge. If there is none, return
136 NULL. */
137
138 edge_var_map_vector *
139 redirect_edge_var_map_vector (edge e)
140 {
141 void **slot;
142
143 /* Hey, what kind of idiot would... you'd be surprised. */
144 if (!edge_var_maps)
145 return NULL;
146
147 slot = pointer_map_contains (edge_var_maps, e);
148 if (!slot)
149 return NULL;
150
151 return (edge_var_map_vector *) *slot;
152 }
153
154 /* Used by redirect_edge_var_map_destroy to free all memory. */
155
156 static bool
157 free_var_map_entry (const void *key ATTRIBUTE_UNUSED,
158 void **value,
159 void *data ATTRIBUTE_UNUSED)
160 {
161 edge_var_map_vector *head = (edge_var_map_vector *) *value;
162 vec_free (head);
163 return true;
164 }
165
166 /* Clear the edge variable mappings. */
167
168 void
169 redirect_edge_var_map_destroy (void)
170 {
171 if (edge_var_maps)
172 {
173 pointer_map_traverse (edge_var_maps, free_var_map_entry, NULL);
174 pointer_map_destroy (edge_var_maps);
175 edge_var_maps = NULL;
176 }
177 }
178
179
180 /* Remove the corresponding arguments from the PHI nodes in E's
181 destination block and redirect it to DEST. Return redirected edge.
182 The list of removed arguments is stored in a vector accessed
183 through edge_var_maps. */
184
185 edge
186 ssa_redirect_edge (edge e, basic_block dest)
187 {
188 gimple_stmt_iterator gsi;
189 gimple phi;
190
191 redirect_edge_var_map_clear (e);
192
193 /* Remove the appropriate PHI arguments in E's destination block. */
194 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
195 {
196 tree def;
197 source_location locus ;
198
199 phi = gsi_stmt (gsi);
200 def = gimple_phi_arg_def (phi, e->dest_idx);
201 locus = gimple_phi_arg_location (phi, e->dest_idx);
202
203 if (def == NULL_TREE)
204 continue;
205
206 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
207 }
208
209 e = redirect_edge_succ_nodup (e, dest);
210
211 return e;
212 }
213
214
215 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
216 E->dest. */
217
218 void
219 flush_pending_stmts (edge e)
220 {
221 gimple phi;
222 edge_var_map_vector *v;
223 edge_var_map *vm;
224 int i;
225 gimple_stmt_iterator gsi;
226
227 v = redirect_edge_var_map_vector (e);
228 if (!v)
229 return;
230
231 for (gsi = gsi_start_phis (e->dest), i = 0;
232 !gsi_end_p (gsi) && v->iterate (i, &vm);
233 gsi_next (&gsi), i++)
234 {
235 tree def;
236
237 phi = gsi_stmt (gsi);
238 def = redirect_edge_var_map_def (vm);
239 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
240 }
241
242 redirect_edge_var_map_clear (e);
243 }
244
/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_ssa_lhs (gimple stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      /* STMT must be the defining statement of its own LHS.  */
      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      /* Preserve the old value for debug stmts before it changes.  */
      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}
278
279
280 /* Given a tree for an expression for which we might want to emit
281 locations or values in debug information (generally a variable, but
282 we might deal with other kinds of trees in the future), return the
283 tree that should be used as the variable of a DEBUG_BIND STMT or
284 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
285
286 tree
287 target_for_debug_bind (tree var)
288 {
289 if (!MAY_HAVE_DEBUG_STMTS)
290 return NULL_TREE;
291
292 if (TREE_CODE (var) == SSA_NAME)
293 {
294 var = SSA_NAME_VAR (var);
295 if (var == NULL_TREE)
296 return NULL_TREE;
297 }
298
299 if ((TREE_CODE (var) != VAR_DECL
300 || VAR_DECL_IS_VIRTUAL_OPERAND (var))
301 && TREE_CODE (var) != PARM_DECL)
302 return NULL_TREE;
303
304 if (DECL_HAS_VALUE_EXPR_P (var))
305 return target_for_debug_bind (DECL_VALUE_EXPR (var));
306
307 if (DECL_IGNORED_P (var))
308 return NULL_TREE;
309
310 /* var-tracking only tracks registers. */
311 if (!is_gimple_reg_type (TREE_TYPE (var)))
312 return NULL_TREE;
313
314 return var;
315 }
316
317 /* Called via walk_tree, look for SSA_NAMEs that have already been
318 released. */
319
320 static tree
321 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
322 {
323 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
324
325 if (wi && wi->is_lhs)
326 return NULL_TREE;
327
328 if (TREE_CODE (*tp) == SSA_NAME)
329 {
330 if (SSA_NAME_IN_FREE_LIST (*tp))
331 return *tp;
332
333 *walk_subtrees = 0;
334 }
335 else if (IS_TYPE_OR_DECL_P (*tp))
336 *walk_subtrees = 0;
337
338 return NULL_TREE;
339 }
340
/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  GSI, if non-NULL, is the insertion point
   and identifies VAR's defining statement; if NULL, the definition is
   found via SSA_NAME_DEF_STMT.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple stmt;
  gimple def_stmt = NULL;
  int usecount = 0;
  tree value = NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
	continue;

      /* More than one debug use: a temp will be needed, stop counting.  */
      if (usecount++)
	break;

      if (gimple_debug_bind_get_value (stmt) != var)
	{
	  /* Count this as an additional use, so as to make sure we
	     use a temp unless VAR's definition has a SINGLE_RHS that
	     can be shared.  */
	  usecount++;
	  break;
	}
    }

  /* No debug stmts mention VAR; nothing to preserve.  */
  if (!usecount)
    return;

  /* Locate VAR's defining statement, either from the supplied iterator
     or from the SSA web.  */
  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      value = degenerate_phi_result (def_stmt);
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
	value = NULL;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
	 to.  */
      else if (value == error_mark_node)
	value = NULL;
    }
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
	{
	  struct walk_stmt_info wi;

	  memset (&wi, 0, sizeof (wi));

	  /* When removing blocks without following reverse dominance
	     order, we may sometimes encounter SSA_NAMEs that have
	     already been released, referenced in other SSA_DEFs that
	     we're about to release.  Consider:

	     <bb X>:
	     v_1 = foo;

	     <bb Y>:
	     w_2 = v_1 + bar;
	     # DEBUG w => w_2

	     If we deleted BB X first, propagating the value of w_2
	     won't do us any good.  It's too late to recover their
	     original definition of v_1: when it was deleted, it was
	     only referenced in other DEFs, it couldn't possibly know
	     it should have been retained, and propagating every
	     single DEF just in case it might have to be propagated
	     into a DEBUG STMT would probably be too wasteful.

	     When dominator information is not readily available, we
	     check for and accept some loss of debug information.  But
	     if it is available, there's no excuse for us to remove
	     blocks in the wrong order, so we don't even check for
	     dead SSA NAMEs.  SSA verification shall catch any
	     errors.  */
	  if ((!gsi && !gimple_bb (def_stmt))
	      || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
	    no_value = true;
	}

      if (!no_value)
	value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
	 expression (usecount would have been incremented again
	 otherwise), and the definition involves only constants and
	 SSA names, then we can propagate VALUE into this single use,
	 avoiding the temp.

	 We can also avoid using a temp if VALUE can be shared and
	 propagated into all uses, without generating expressions that
	 wouldn't be valid gimple RHSs.

	 Other cases that would require unsharing or non-gimple RHSs
	 are deferred to a debug temp, although we could avoid temps
	 at the expense of duplication of expressions.  */

      if (CONSTANT_CLASS_P (value)
	  || gimple_code (def_stmt) == GIMPLE_PHI
	  || (usecount == 1
	      && (!gimple_assign_single_p (def_stmt)
		  || is_gimple_min_invariant (value)))
	  || is_gimple_reg (value))
	;
      else
	{
	  /* Bind VALUE to an artificial DEBUG_EXPR_DECL and use that
	     decl in the debug uses instead.  */
	  gimple def_temp;
	  tree vexpr = make_node (DEBUG_EXPR_DECL);

	  def_temp = gimple_build_debug_bind (vexpr,
					      unshare_expr (value),
					      def_stmt);

	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (value);
	  if (DECL_P (value))
	    DECL_MODE (vexpr) = DECL_MODE (value);
	  else
	    DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));

	  /* Insert the bind right before the definition it captures.  */
	  if (gsi)
	    gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
	  else
	    {
	      gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
	      gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
	    }

	  value = vexpr;
	}
    }

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
	continue;

      if (value)
	{
	  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
	    /* unshare_expr keeps the debug uses independent of each
	       other; for a DEBUG_EXPR_DECL (or other decl) it should
	       return the decl unchanged, so sharable values are
	       presumably not copied needlessly — confirm against
	       unshare_expr's handling of decls.  */
	    SET_USE (use_p, unshare_expr (value));
	  /* If we didn't replace uses with a debug decl fold the
	     resulting expression.  Otherwise we end up with invalid IL.  */
	  if (TREE_CODE (value) != DEBUG_EXPR_DECL)
	    {
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	      fold_stmt_inplace (&gsi);
	    }
	}
      else
	/* No usable value: detach the debug bind instead of lying.  */
	gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}
530
531
532 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
533 other DEBUG stmts, and replace uses of the DEF with the
534 newly-created debug temp. */
535
536 void
537 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
538 {
539 gimple stmt;
540 ssa_op_iter op_iter;
541 def_operand_p def_p;
542
543 if (!MAY_HAVE_DEBUG_STMTS)
544 return;
545
546 stmt = gsi_stmt (*gsi);
547
548 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
549 {
550 tree var = DEF_FROM_PTR (def_p);
551
552 if (TREE_CODE (var) != SSA_NAME)
553 continue;
554
555 insert_debug_temp_for_var_def (gsi, var);
556 }
557 }
558
559 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
560
561 void
562 reset_debug_uses (gimple stmt)
563 {
564 ssa_op_iter op_iter;
565 def_operand_p def_p;
566 imm_use_iterator imm_iter;
567 gimple use_stmt;
568
569 if (!MAY_HAVE_DEBUG_STMTS)
570 return;
571
572 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
573 {
574 tree var = DEF_FROM_PTR (def_p);
575
576 if (TREE_CODE (var) != SSA_NAME)
577 continue;
578
579 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
580 {
581 if (!gimple_debug_bind_p (use_stmt))
582 continue;
583
584 gimple_debug_bind_reset_value (use_stmt);
585 update_stmt (use_stmt);
586 }
587 }
588 }
589
/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill, this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.  */
  while (!bitmap_empty_p (toremove))
    EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
      {
	bool remove_now = true;
	tree var = ssa_name (j);
	gimple stmt;
	imm_use_iterator uit;

	FOR_EACH_IMM_USE_STMT (stmt, uit, var)
	  {
	    ssa_op_iter dit;
	    def_operand_p def_p;

	    /* We can't propagate PHI nodes into debug stmts.  */
	    if (gimple_code (stmt) == GIMPLE_PHI
		|| is_gimple_debug (stmt))
	      continue;

	    /* If we find another definition to remove that uses
	       the one we're looking at, defer the removal of this
	       one, so that it can be propagated into debug stmts
	       after the other is.  */
	    FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
	      {
		tree odef = DEF_FROM_PTR (def_p);

		if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
		  {
		    remove_now = false;
		    break;
		  }
	      }

	    /* Terminate the immediate-use walk early; the required
	       BREAK_FROM_IMM_USE_STMT restores iterator state.  */
	    if (!remove_now)
	      BREAK_FROM_IMM_USE_STMT (uit);
	  }

	/* No other to-be-removed DEF uses VAR, so its defining stmt
	   can be deleted now.  */
	if (remove_now)
	  {
	    gimple def = SSA_NAME_DEF_STMT (var);
	    gimple_stmt_iterator gsi = gsi_for_stmt (def);

	    if (gimple_code (def) == GIMPLE_PHI)
	      remove_phi_node (&gsi, true);
	    else
	      {
		gsi_remove (&gsi, true);
		release_defs (def);
	      }

	    bitmap_clear_bit (toremove, j);
	  }
      }
}
657
/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
   operand.  Each check below emits its diagnostic via error () and
   returns immediately, so at most one problem is reported per name.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  /* A name with an underlying symbol must share that symbol's type.  */
  if (SSA_NAME_VAR (ssa_name) != NULL_TREE
      && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (is_virtual && !virtual_operand_p (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  /* Virtual operands all hang off the function's single VOP decl.  */
  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  if (!is_virtual && virtual_operand_p (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  /* Default definitions are not defined by any real statement.  */
  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}
712
713
714 /* Return true if the definition of SSA_NAME at block BB is malformed.
715
716 STMT is the statement where SSA_NAME is created.
717
718 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
719 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
720 it means that the block in that array slot contains the
721 definition of SSA_NAME.
722
723 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
724
725 static bool
726 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
727 gimple stmt, bool is_virtual)
728 {
729 if (verify_ssa_name (ssa_name, is_virtual))
730 goto err;
731
732 if (SSA_NAME_VAR (ssa_name)
733 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
734 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
735 {
736 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
737 goto err;
738 }
739
740 if (definition_block[SSA_NAME_VERSION (ssa_name)])
741 {
742 error ("SSA_NAME created in two different blocks %i and %i",
743 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
744 goto err;
745 }
746
747 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
748
749 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
750 {
751 error ("SSA_NAME_DEF_STMT is wrong");
752 fprintf (stderr, "Expected definition statement:\n");
753 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
754 fprintf (stderr, "\nActual definition statement:\n");
755 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
756 goto err;
757 }
758
759 return false;
760
761 err:
762 fprintf (stderr, "while verifying SSA_NAME ");
763 print_generic_expr (stderr, ssa_name, 0);
764 fprintf (stderr, " in statement\n");
765 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
766
767 return true;
768 }
769
770
/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
   is flowing through an abnormal edge (only used when checking PHI
   arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
   that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
	    gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  /* Verify the immediate-use chain only once per name; TREE_VISITED
     is cleared for all names at the start of verify_ssa.  */
  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do. */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
	     def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
	   && names_defined_in_bb != NULL
	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      /* A prev node with a NULL use pointer is the list head, which
	 stores the SSA name itself rather than a use.  */
      if (use_p->prev->use == NULL)
	listvar = use_p->prev->loc.ssa_name;
      else
	listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
	{
	  error ("wrong immediate use list");
	  err = true;
	}
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}
859
860
861 /* Return true if any of the arguments for PHI node PHI at block BB is
862 malformed.
863
864 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
865 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
866 it means that the block in that array slot contains the
867 definition of SSA_NAME. */
868
869 static bool
870 verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
871 {
872 edge e;
873 bool err = false;
874 size_t i, phi_num_args = gimple_phi_num_args (phi);
875
876 if (EDGE_COUNT (bb->preds) != phi_num_args)
877 {
878 error ("incoming edge count does not match number of PHI arguments");
879 err = true;
880 goto error;
881 }
882
883 for (i = 0; i < phi_num_args; i++)
884 {
885 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
886 tree op = USE_FROM_PTR (op_p);
887
888 e = EDGE_PRED (bb, i);
889
890 if (op == NULL_TREE)
891 {
892 error ("PHI argument is missing for edge %d->%d",
893 e->src->index,
894 e->dest->index);
895 err = true;
896 goto error;
897 }
898
899 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
900 {
901 error ("PHI argument is not SSA_NAME, or invariant");
902 err = true;
903 }
904
905 if (TREE_CODE (op) == SSA_NAME)
906 {
907 err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
908 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
909 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
910 }
911
912 if (TREE_CODE (op) == ADDR_EXPR)
913 {
914 tree base = TREE_OPERAND (op, 0);
915 while (handled_component_p (base))
916 base = TREE_OPERAND (base, 0);
917 if ((TREE_CODE (base) == VAR_DECL
918 || TREE_CODE (base) == PARM_DECL
919 || TREE_CODE (base) == RESULT_DECL)
920 && !TREE_ADDRESSABLE (base))
921 {
922 error ("address taken, but ADDRESSABLE bit not set");
923 err = true;
924 }
925 }
926
927 if (e->dest != bb)
928 {
929 error ("wrong edge %d->%d for PHI argument",
930 e->src->index, e->dest->index);
931 err = true;
932 }
933
934 if (err)
935 {
936 fprintf (stderr, "PHI argument\n");
937 print_generic_stmt (stderr, op, TDF_VOPS);
938 goto error;
939 }
940 }
941
942 error:
943 if (err)
944 {
945 fprintf (stderr, "for PHI node\n");
946 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
947 }
948
949
950 return err;
951 }
952
953
/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.

   If CHECK_MODIFIED_STMT, additionally complain about statements still
   marked modified after the pass that just ran.  Aborts via
   internal_error on the first structural failure.  */

DEBUG_FUNCTION void
verify_ssa (bool check_modified_stmt)
{
  size_t i;
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);

  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

  /* Keep track of SSA names present in the IL.  TREE_VISITED is reset
     here so verify_use can check each name's immediate-use links once.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  gimple stmt;
	  TREE_VISITED (name) = 0;

	  verify_ssa_name (name, virtual_operand_p (name));

	  stmt = SSA_NAME_DEF_STMT (name);
	  if (!gimple_nop_p (stmt))
	    {
	      basic_block bb = gimple_bb (stmt);
	      verify_def (bb, definition_block,
			  name, stmt, virtual_operand_p (name));

	    }
	}
    }

  /* Dominance is needed to validate def-before-use across blocks.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      gimple phi;
      edge_iterator ei;
      gimple_stmt_iterator gsi;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->aux)
	    {
	      error ("AUX pointer initialized for edge %d->%d", e->src->index,
		      e->dest->index);
	      goto err;
	    }
	}

      /* Verify the arguments for every PHI node in the block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  if (verify_phi_args (phi, bb, definition_block))
	    goto err;

	  bitmap_set_bit (names_defined_in_bb,
			  SSA_NAME_VERSION (gimple_phi_result (phi)));
	}

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  use_operand_p use_p;

	  if (check_modified_stmt && gimple_modified_p (stmt))
	    {
	      error ("stmt (%p) marked modified after optimization pass: ",
		     (void *)stmt);
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  if (verify_ssa_operands (stmt))
	    {
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  /* Valueless debug binds carry no uses worth checking.  */
	  if (gimple_debug_bind_p (stmt)
	      && !gimple_debug_bind_has_value_p (stmt))
	    continue;

	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
	    {
	      op = USE_FROM_PTR (use_p);
	      if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
			      use_p, stmt, false, names_defined_in_bb))
		goto err;
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
	    {
	      if (SSA_NAME_DEF_STMT (op) != stmt)
		{
		  error ("SSA_NAME_DEF_STMT is wrong");
		  fprintf (stderr, "Expected definition statement:\n");
		  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
		  fprintf (stderr, "\nActual definition statement:\n");
		  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
				     4, TDF_VOPS);
		  goto err;
		}
	      bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
	    }
	}

      /* The defined-in-this-block bitmap is per-block; reset it.  */
      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  BITMAP_FREE (names_defined_in_bb);
  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}
1094
1095 /* Return true if the DECL_UID in both trees are equal. */
1096
1097 static int
1098 uid_ssaname_map_eq (const void *va, const void *vb)
1099 {
1100 const_tree a = (const_tree) va;
1101 const_tree b = (const_tree) vb;
1102 return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid);
1103 }
1104
1105 /* Hash a tree in a uid_decl_map. */
1106
1107 static unsigned int
1108 uid_ssaname_map_hash (const void *item)
1109 {
1110 return ((const_tree)item)->ssa_name.var->decl_minimal.uid;
1111 }
1112
1113
/* Initialize global DFA and SSA structures for function FN.  */

void
init_tree_ssa (struct function *fn)
{
  fn->gimple_df = ggc_alloc_cleared_gimple_df ();
  /* Default definitions are keyed/hashed by the DECL_UID of the
     underlying symbol (see uid_ssaname_map_hash/_eq).  */
  fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
					         uid_ssaname_map_eq, NULL);
  pt_solution_reset (&fn->gimple_df->escaped);
  init_ssanames (fn, 0);
}
1125
/* Do the actions required to initialize internal data structures used
   in tree-ssa optimization passes.  Returns 0 (no TODO flags).  */

static unsigned int
execute_init_datastructures (void)
{
  /* Allocate hash tables, arrays and other structures.  */
  gcc_assert (!cfun->gimple_df);
  init_tree_ssa (cfun);
  return 0;
}
1137
/* Gate for the init_datastructures pass.  */

static bool
gate_init_datastructures (void)
{
  /* Do nothing for functions that were produced already in SSA form.  */
  return !(cfun->curr_properties & PROP_ssa);
}
1146
namespace {

/* Pass metadata for the *init_datastructures helper pass; the work is
   done by execute_init_datastructures above.  */
const pass_data pass_data_init_datastructures =
{
  GIMPLE_PASS, /* type */
  "*init_datastructures", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_init_datastructures : public gimple_opt_pass
{
public:
  pass_init_datastructures (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_init_datastructures, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_init_datastructures (); }
  unsigned int execute () { return execute_init_datastructures (); }

}; // class pass_init_datastructures

} // anon namespace

/* Factory: allocate a fresh pass_init_datastructures for CTXT.  */

gimple_opt_pass *
make_pass_init_datastructures (gcc::context *ctxt)
{
  return new pass_init_datastructures (ctxt);
}
1184
/* Deallocate memory associated with SSA data structures of the current
   function CFUN.  After this returns cfun->gimple_df is NULL and the
   function is no longer in SSA form.  */

void
delete_tree_ssa (void)
{
  fini_ssanames ();

  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active (cfun))
    fini_ssa_operands ();

  /* Tear down gimple_df fields before dropping gimple_df itself.  */
  htab_delete (cfun->gimple_df->default_defs);
  cfun->gimple_df->default_defs = NULL;
  pt_solution_reset (&cfun->gimple_df->escaped);
  if (cfun->gimple_df->decls_to_pointers != NULL)
    pointer_map_destroy (cfun->gimple_df->decls_to_pointers);
  cfun->gimple_df->decls_to_pointers = NULL;
  /* The vector itself is GC-allocated; just drop the reference.  */
  cfun->gimple_df->modified_noreturn_calls = NULL;
  cfun->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_destroy ();
}
1208
1209 /* Return true if EXPR is a useless type conversion, otherwise return
1210 false. */
1211
1212 bool
1213 tree_ssa_useless_type_conversion (tree expr)
1214 {
1215 /* If we have an assignment that merely uses a NOP_EXPR to change
1216 the top of the RHS to the type of the LHS and the type conversion
1217 is "safe", then strip away the type conversion so that we can
1218 enter LHS = RHS into the const_and_copies table. */
1219 if (CONVERT_EXPR_P (expr)
1220 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1221 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1222 return useless_type_conversion_p
1223 (TREE_TYPE (expr),
1224 TREE_TYPE (TREE_OPERAND (expr, 0)));
1225
1226 return false;
1227 }
1228
1229 /* Strip conversions from EXP according to
1230 tree_ssa_useless_type_conversion and return the resulting
1231 expression. */
1232
1233 tree
1234 tree_ssa_strip_useless_type_conversions (tree exp)
1235 {
1236 while (tree_ssa_useless_type_conversion (exp))
1237 exp = TREE_OPERAND (exp, 0);
1238 return exp;
1239 }
1240
1241
1242 /* Return true if T, an SSA_NAME, has an undefined value. */
1243
1244 bool
1245 ssa_undefined_value_p (tree t)
1246 {
1247 tree var = SSA_NAME_VAR (t);
1248
1249 if (!var)
1250 ;
1251 /* Parameters get their initial value from the function entry. */
1252 else if (TREE_CODE (var) == PARM_DECL)
1253 return false;
1254 /* When returning by reference the return address is actually a hidden
1255 parameter. */
1256 else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1257 return false;
1258 /* Hard register variables get their initial value from the ether. */
1259 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1260 return false;
1261
1262 /* The value is undefined iff its definition statement is empty. */
1263 return gimple_nop_p (SSA_NAME_DEF_STMT (t));
1264 }
1265
1266
/* If necessary, rewrite the base of the reference tree *TP from
   a MEM_REF to a plain or converted symbol.  Only symbols whose UID is
   set in SUITABLE_FOR_RENAMING are rewritten.  */

static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  /* Walk down component references to the innermost base.  */
  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  /* Only MEM [&SYM + OFF] where SYM is a non-addressable decl marked
     suitable for renaming is rewritten.  */
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
    {
      /* An element-aligned access into a vector decl becomes a
         BIT_FIELD_REF of the whole vector; the bit position is the
         byte offset scaled by BITS_PER_UNIT.  */
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
          && useless_type_conversion_p (TREE_TYPE (*tp),
                                        TREE_TYPE (TREE_TYPE (sym)))
          && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
        {
          *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
                        TYPE_SIZE (TREE_TYPE (*tp)),
                        int_const_binop (MULT_EXPR,
                                         bitsize_int (BITS_PER_UNIT),
                                         TREE_OPERAND (*tp, 1)));
        }
      /* A component access of a complex decl becomes REALPART_EXPR for
         offset zero, IMAGPART_EXPR otherwise.  */
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
               && useless_type_conversion_p (TREE_TYPE (*tp),
                                             TREE_TYPE (TREE_TYPE (sym))))
        {
          *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
                        ? REALPART_EXPR : IMAGPART_EXPR,
                        TREE_TYPE (*tp), sym);
        }
      /* A zero-offset access of the whole decl is the symbol itself,
         wrapped in a VIEW_CONVERT_EXPR if the types differ.  */
      else if (integer_zerop (TREE_OPERAND (*tp, 1)))
        {
          if (!useless_type_conversion_p (TREE_TYPE (*tp),
                                          TREE_TYPE (sym)))
            *tp = build1 (VIEW_CONVERT_EXPR,
                          TREE_TYPE (*tp), sym);
          else
            *tp = sym;
        }
    }
}
1315
/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base = ref;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  /* Walk down component references to the innermost base.  */
  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      /* An in-bounds, element-aligned access of a vector or complex decl
         can be lowered by maybe_rewrite_mem_ref_base, so it does not
         block renaming.  The wi:: checks ensure the constant offset is
         non-negative and strictly inside the decl.  */
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
           || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
          && useless_type_conversion_p (TREE_TYPE (base),
                                        TREE_TYPE (TREE_TYPE (decl)))
          && wi::fits_uhwi_p (mem_ref_offset (base))
          && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
                        mem_ref_offset (base))
          && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (base))))
        return NULL_TREE;
      /* Otherwise an offsetted, size-changing or volatility-changing
         access of DECL prevents rewriting it into SSA form.
         Note the DECL_SIZE/TYPE_SIZE pointer compare relies on tree
         node sharing for equal constant sizes.  */
      if (DECL_P (decl)
          && (!integer_zerop (TREE_OPERAND (base, 1))
              || (DECL_SIZE (decl)
                  != TYPE_SIZE (TREE_TYPE (base)))
              || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
        return decl;
    }

  return NULL_TREE;
}
1357
/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */

static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* A decl that is wrapped inside a MEM-REF that covers
     it full is also rewritable.
     ??? The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (lhs, 1)))
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
      /* The access must cover the whole decl and preserve volatility;
         the size compare relies on tree node sharing.  */
      if (DECL_P (decl)
          && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
          && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
        return false;
    }

  return true;
}
1385
/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
   mark the variable VAR for conversion into SSA.  Variables that become
   eligible for renaming are recorded in SUITABLE_FOR_RENAMING.
   ADDRESSES_TAKEN holds UIDs of decls whose address is taken;
   NOT_REG_NEEDS holds UIDs of decls that must stay non-registers.  */

static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
                    bitmap suitable_for_renaming)
{
  /* Global Variables, result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  if (TREE_ADDRESSABLE (var)
      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
         a non-register. Otherwise we are confused and forget to
         add virtual operands for it.  */
      && (!is_gimple_reg_type (TREE_TYPE (var))
          || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
          || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
          || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
    {
      TREE_ADDRESSABLE (var) = 0;
      if (is_gimple_reg (var))
        bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
        {
          fprintf (dump_file, "No longer having address taken: ");
          print_generic_expr (dump_file, var, 0);
          fprintf (dump_file, "\n");
        }
    }

  /* Vector and complex decls can additionally be promoted to gimple
     registers via DECL_GIMPLE_REG_P when nothing accesses their parts
     in a non-register way.  */
  if (!DECL_GIMPLE_REG_P (var)
      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
          || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (var)
      && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
    {
      DECL_GIMPLE_REG_P (var) = 1;
      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
        {
          fprintf (dump_file, "Now a gimple register: ");
          print_generic_expr (dump_file, var, 0);
          fprintf (dump_file, "\n");
        }
    }
}
1437
/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.
   Phase 1 scans all stmts and PHIs collecting which decls have their
   address taken and which must stay non-registers; phase 2 rewrites
   stmts referencing the decls that became renamable and updates SSA
   form.  */

void
execute_update_addresses_taken (void)
{
  gimple_stmt_iterator gsi;
  basic_block bb;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
  bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
  tree var;
  unsigned i;

  timevar_push (TV_ADDRESS_TAKEN);

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          enum gimple_code code = gimple_code (stmt);
          tree decl;

          /* Note all addresses taken by the stmt.  */
          gimple_ior_addresses_taken (addresses_taken, stmt);

          /* If we have a call or an assignment, see if the lhs contains
             a local decl that requires not to be a gimple register.  */
          if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
            {
              tree lhs = gimple_get_lhs (stmt);
              if (lhs
                  && TREE_CODE (lhs) != SSA_NAME
                  && non_rewritable_lvalue_p (lhs))
                {
                  decl = get_base_address (lhs);
                  if (DECL_P (decl))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }

          /* A single-rhs assignment may read through a MEM_REF that
             cannot be lowered; its base must stay a non-register.  */
          if (gimple_assign_single_p (stmt))
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              if ((decl = non_rewritable_mem_ref_base (rhs)))
                bitmap_set_bit (not_reg_needs, DECL_UID (decl));
            }

          else if (code == GIMPLE_CALL)
            {
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if ((decl = non_rewritable_mem_ref_base (arg)))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }

          else if (code == GIMPLE_ASM)
            {
              for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
                {
                  tree link = gimple_asm_output_op (stmt, i);
                  tree lhs = TREE_VALUE (link);
                  if (TREE_CODE (lhs) != SSA_NAME)
                    {
                      decl = get_base_address (lhs);
                      if (DECL_P (decl)
                          && (non_rewritable_lvalue_p (lhs)
                              /* We cannot move required conversions from
                                 the lhs to the rhs in asm statements, so
                                 require we do not need any.  */
                              || !useless_type_conversion_p
                                    (TREE_TYPE (lhs), TREE_TYPE (decl))))
                        bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                    }
                }
              for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
                {
                  tree link = gimple_asm_input_op (stmt, i);
                  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }
        }

      /* ADDR_EXPRs can also appear as PHI arguments; record their
         bases as address-taken too.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          size_t i;
          gimple phi = gsi_stmt (gsi);

          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree op = PHI_ARG_DEF (phi, i), var;
              if (TREE_CODE (op) == ADDR_EXPR
                  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
                  && DECL_P (var))
                bitmap_set_bit (addresses_taken, DECL_UID (var));
            }
        }
    }

  /* We cannot iterate over all referenced vars because that can contain
     unused vars from BLOCK trees, which causes code generation differences
     for -g vs. -g0.  */
  for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
                        suitable_for_renaming);

  FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
                        suitable_for_renaming);

  /* Operand caches need to be recomputed for operands referencing the updated
     variables and operands need to be rewritten to expose bare symbols.  */
  if (!bitmap_empty_p (suitable_for_renaming))
    {
      FOR_EACH_BB (bb)
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
          {
            gimple stmt = gsi_stmt (gsi);

            /* Re-write TARGET_MEM_REFs of symbols we want to
               rewrite into SSA form.  */
            if (gimple_assign_single_p (stmt))
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
                tree sym;

                /* We shouldn't have any fancy wrapping of
                   component-refs on the LHS, but look through
                   VIEW_CONVERT_EXPRs as that is easy.  */
                while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
                  lhs = TREE_OPERAND (lhs, 0);
                if (TREE_CODE (lhs) == MEM_REF
                    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
                    && integer_zerop (TREE_OPERAND (lhs, 1))
                    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
                    && DECL_P (sym)
                    && !TREE_ADDRESSABLE (sym)
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
                  lhs = sym;
                else
                  lhs = gimple_assign_lhs (stmt);

                /* Rewrite the RHS and make sure the resulting assignment
                   is validly typed.  */
                maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
                rhs = gimple_assign_rhs1 (stmt);
                if (gimple_assign_lhs (stmt) != lhs
                    && !useless_type_conversion_p (TREE_TYPE (lhs),
                                                   TREE_TYPE (rhs)))
                  rhs = fold_build1 (VIEW_CONVERT_EXPR,
                                     TREE_TYPE (lhs), rhs);

                if (gimple_assign_lhs (stmt) != lhs)
                  gimple_assign_set_lhs (stmt, lhs);

                /* For var ={v} {CLOBBER}; where var lost
                   TREE_ADDRESSABLE just remove the stmt.  */
                if (DECL_P (lhs)
                    && TREE_CLOBBER_P (rhs)
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (lhs)))
                  {
                    unlink_stmt_vdef (stmt);
                    gsi_remove (&gsi, true);
                    release_defs (stmt);
                    /* gsi_remove already advanced the iterator; skip
                       the gsi_next at the loop tail.  */
                    continue;
                  }

                if (gimple_assign_rhs1 (stmt) != rhs)
                  {
                    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
                    gimple_assign_set_rhs_from_tree (&gsi, rhs);
                  }
              }

            else if (gimple_code (stmt) == GIMPLE_CALL)
              {
                unsigned i;
                for (i = 0; i < gimple_call_num_args (stmt); ++i)
                  {
                    tree *argp = gimple_call_arg_ptr (stmt, i);
                    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
                  }
              }

            else if (gimple_code (stmt) == GIMPLE_ASM)
              {
                unsigned i;
                for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
                  {
                    tree link = gimple_asm_output_op (stmt, i);
                    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
                                                suitable_for_renaming);
                  }
                for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
                  {
                    tree link = gimple_asm_input_op (stmt, i);
                    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
                                                suitable_for_renaming);
                  }
              }

            /* Debug binds whose value still references a non-renamable
               base must be reset rather than left referencing it.  */
            else if (gimple_debug_bind_p (stmt)
                     && gimple_debug_bind_has_value_p (stmt))
              {
                tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
                tree decl;
                maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
                decl = non_rewritable_mem_ref_base (*valuep);
                if (decl
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
                  gimple_debug_bind_reset_value (stmt);
              }

            if (gimple_references_memory_p (stmt)
                || is_gimple_debug (stmt))
              update_stmt (stmt);

            gsi_next (&gsi);
          }

      /* Update SSA form here, we are called as non-pass as well.  */
      if (number_of_loops (cfun) > 1
          && loops_state_satisfies_p (LOOP_CLOSED_SSA))
        rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
      else
        update_ssa (TODO_update_ssa);
    }

  BITMAP_FREE (not_reg_needs);
  BITMAP_FREE (addresses_taken);
  BITMAP_FREE (suitable_for_renaming);
  timevar_pop (TV_ADDRESS_TAKEN);
}
1677
namespace {

/* Pass descriptor for recomputing TREE_ADDRESSABLE/DECL_GIMPLE_REG_P.
   has_gate and has_execute are false: the real work runs through the
   TODO_update_address_taken finish flag, which invokes
   execute_update_addresses_taken directly.  */

const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  false, /* has_execute */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

/* Pass wrapper; intentionally defines no gate or execute methods.  */

class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */

}; // class pass_update_address_taken

} // anon namespace
1707
1708 gimple_opt_pass *
1709 make_pass_update_address_taken (gcc::context *ctxt)
1710 {
1711 return new pass_update_address_taken (ctxt);
1712 }