gcc/tree-ssa.c
1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stor-layout.h"
26 #include "flags.h"
27 #include "tm_p.h"
28 #include "target.h"
29 #include "langhooks.h"
30 #include "predict.h"
31 #include "vec.h"
32 #include "hashtab.h"
33 #include "hash-set.h"
34 #include "machmode.h"
35 #include "hard-reg-set.h"
36 #include "input.h"
37 #include "function.h"
38 #include "dominance.h"
39 #include "cfg.h"
40 #include "basic-block.h"
41 #include "gimple-pretty-print.h"
42 #include "tree-ssa-alias.h"
43 #include "internal-fn.h"
44 #include "gimple-fold.h"
45 #include "gimple-expr.h"
46 #include "is-a.h"
47 #include "gimple.h"
48 #include "gimplify.h"
49 #include "gimple-iterator.h"
50 #include "gimple-walk.h"
51 #include "gimple-ssa.h"
52 #include "tree-phinodes.h"
53 #include "ssa-iterators.h"
54 #include "stringpool.h"
55 #include "tree-ssanames.h"
56 #include "tree-ssa-loop-manip.h"
57 #include "tree-into-ssa.h"
58 #include "tree-ssa.h"
59 #include "tree-inline.h"
60 #include "hash-map.h"
61 #include "tree-pass.h"
62 #include "diagnostic-core.h"
63 #include "cfgloop.h"
64 #include "cfgexpand.h"
65
66 /* Hash map of variable mappings, keyed by edge.  */
67 static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
68
69
70 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
71
72 void
73 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
74 {
75 edge_var_map new_node;
76
77 if (edge_var_maps == NULL)
78 edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
79
80 auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
81 new_node.def = def;
82 new_node.result = result;
83 new_node.locus = locus;
84
85 slot.safe_push (new_node);
86 }
87
88
89 /* Clear the var mappings in edge E. */
90
91 void
92 redirect_edge_var_map_clear (edge e)
93 {
94 if (!edge_var_maps)
95 return;
96
97 auto_vec<edge_var_map> *head = edge_var_maps->get (e);
98
99 if (head)
100 head->release ();
101 }
102
103
104 /* Duplicate the redirected var mappings from OLDE into NEWE.
105
106 This assumes a hash_map can have multiple edges mapping to the same
107 var_map (many to one mapping), since we don't remove the previous mappings.
108 */
109
110 void
111 redirect_edge_var_map_dup (edge newe, edge olde)
112 {
113 if (!edge_var_maps)
114 return;
115
116 auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
117 auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
118 if (!old_head)
119 return;
120
121 new_head->safe_splice (*old_head);
122 }
123
124
125 /* Return the variable mappings for a given edge. If there is none, return
126 NULL. */
127
128 vec<edge_var_map> *
129 redirect_edge_var_map_vector (edge e)
130 {
131 /* Hey, what kind of idiot would... you'd be surprised. */
132 if (!edge_var_maps)
133 return NULL;
134
135 auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
136 if (!slot)
137 return NULL;
138
139 return slot;
140 }
141
142 /* Clear the edge variable mappings. */
143
144 void
145 redirect_edge_var_map_destroy (void)
146 {
147 delete edge_var_maps;
148 edge_var_maps = NULL;
149 }
150
151
152 /* Remove the corresponding arguments from the PHI nodes in E's
153 destination block and redirect it to DEST. Return redirected edge.
154 The list of removed arguments is stored in a vector accessed
155 through edge_var_maps. */
156
157 edge
158 ssa_redirect_edge (edge e, basic_block dest)
159 {
160 gphi_iterator gsi;
161 gphi *phi;
162
163 redirect_edge_var_map_clear (e);
164
165 /* Remove the appropriate PHI arguments in E's destination block. */
166 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
167 {
168 tree def;
169 source_location locus;
170
171 phi = gsi.phi ();
172 def = gimple_phi_arg_def (phi, e->dest_idx);
173 locus = gimple_phi_arg_location (phi, e->dest_idx);
174
175 if (def == NULL_TREE)
176 continue;
177
178 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
179 }
180
181 e = redirect_edge_succ_nodup (e, dest);
182
183 return e;
184 }
185
186
187 /* Add the PHI arguments queued on edge E (by ssa_redirect_edge) to the
188 PHI nodes in E->dest.  */
189
190 void
191 flush_pending_stmts (edge e)
192 {
193 gphi *phi;
194 edge_var_map *vm;
195 int i;
196 gphi_iterator gsi;
197
198 vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
199 if (!v)
200 return;
201
202 for (gsi = gsi_start_phis (e->dest), i = 0;
203 !gsi_end_p (gsi) && v->iterate (i, &vm);
204 gsi_next (&gsi), i++)
205 {
206 tree def;
207
208 phi = gsi.phi ();
209 def = redirect_edge_var_map_def (vm);
210 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
211 }
212
213 redirect_edge_var_map_clear (e);
214 }
215
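/* A hedged usage sketch (not taken from this file): a CFG-transforming
   pass typically pairs the two routines above.  ssa_redirect_edge saves
   the PHI arguments it removes, the pass then creates the PHI nodes it
   needs in the new destination, and flush_pending_stmts replays the
   saved (result, def, locus) triples against those PHIs positionally:

       edge e2 = ssa_redirect_edge (e, new_dest);
       ... create the PHI nodes of new_dest in the original order ...
       flush_pending_stmts (e2);

   The names E, NEW_DEST and E2 are hypothetical.  */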
216 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
217 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
218 expression with a different value.
219
220 This will update any annotations (say debug bind stmts) referring
221 to the original LHS, so that they use the RHS instead. This is
222 done even if NLHS and LHS are the same, for it is understood that
223 the RHS will be modified afterwards, and NLHS will not be assigned
224 an equivalent value.
225
226 Adjusting any non-annotation uses of the LHS, if needed, is a
227 responsibility of the caller.
228
229 The effect of this call should be pretty much the same as that of
230 inserting a copy of STMT before STMT, and then removing the
231 original stmt, at which time gsi_remove() would have updated the
232 annotations, but using this function saves all the inserting,
233 copying and removing. */
234
235 void
236 gimple_replace_ssa_lhs (gimple stmt, tree nlhs)
237 {
238 if (MAY_HAVE_DEBUG_STMTS)
239 {
240 tree lhs = gimple_get_lhs (stmt);
241
242 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
243
244 insert_debug_temp_for_var_def (NULL, lhs);
245 }
246
247 gimple_set_lhs (stmt, nlhs);
248 }
249
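/* Hypothetical caller sketch: a pass that is about to give the statement
   x_1 = a_2 + b_3 a different value would first preserve the current
   value for debug binds, then rewrite the RHS.  NEW_RHS and the SSA
   names are illustrative only.

       gimple_replace_ssa_lhs (stmt, gimple_get_lhs (stmt));
       gimple_assign_set_rhs_from_tree (&gsi, new_rhs);
       update_stmt (stmt);  */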
250
251 /* Given a tree for an expression for which we might want to emit
252 locations or values in debug information (generally a variable, but
253 we might deal with other kinds of trees in the future), return the
254 tree that should be used as the variable of a DEBUG_BIND STMT or
255 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
256
257 tree
258 target_for_debug_bind (tree var)
259 {
260 if (!MAY_HAVE_DEBUG_STMTS)
261 return NULL_TREE;
262
263 if (TREE_CODE (var) == SSA_NAME)
264 {
265 var = SSA_NAME_VAR (var);
266 if (var == NULL_TREE)
267 return NULL_TREE;
268 }
269
270 if ((TREE_CODE (var) != VAR_DECL
271 || VAR_DECL_IS_VIRTUAL_OPERAND (var))
272 && TREE_CODE (var) != PARM_DECL)
273 return NULL_TREE;
274
275 if (DECL_HAS_VALUE_EXPR_P (var))
276 return target_for_debug_bind (DECL_VALUE_EXPR (var));
277
278 if (DECL_IGNORED_P (var))
279 return NULL_TREE;
280
281 /* var-tracking only tracks registers. */
282 if (!is_gimple_reg_type (TREE_TYPE (var)))
283 return NULL_TREE;
284
285 return var;
286 }
287
288 /* Called via walk_tree, look for SSA_NAMEs that have already been
289 released. */
290
291 static tree
292 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
293 {
294 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
295
296 if (wi && wi->is_lhs)
297 return NULL_TREE;
298
299 if (TREE_CODE (*tp) == SSA_NAME)
300 {
301 if (SSA_NAME_IN_FREE_LIST (*tp))
302 return *tp;
303
304 *walk_subtrees = 0;
305 }
306 else if (IS_TYPE_OR_DECL_P (*tp))
307 *walk_subtrees = 0;
308
309 return NULL_TREE;
310 }
311
312 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
313 by other DEBUG stmts, and replace uses of the DEF with the
314 newly-created debug temp. */
315
316 void
317 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
318 {
319 imm_use_iterator imm_iter;
320 use_operand_p use_p;
321 gimple stmt;
322 gimple def_stmt = NULL;
323 int usecount = 0;
324 tree value = NULL;
325
326 if (!MAY_HAVE_DEBUG_STMTS)
327 return;
328
329 /* If this name has already been registered for replacement, do nothing
330 as anything that uses this name isn't in SSA form. */
331 if (name_registered_for_update_p (var))
332 return;
333
334 /* Check whether there are debug stmts that reference this variable and,
335 if there are, decide whether we should use a debug temp. */
336 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
337 {
338 stmt = USE_STMT (use_p);
339
340 if (!gimple_debug_bind_p (stmt))
341 continue;
342
343 if (usecount++)
344 break;
345
346 if (gimple_debug_bind_get_value (stmt) != var)
347 {
348 /* Count this as an additional use, so as to make sure we
349 use a temp unless VAR's definition has a SINGLE_RHS that
350 can be shared. */
351 usecount++;
352 break;
353 }
354 }
355
356 if (!usecount)
357 return;
358
359 if (gsi)
360 def_stmt = gsi_stmt (*gsi);
361 else
362 def_stmt = SSA_NAME_DEF_STMT (var);
363
364 /* If we didn't get an insertion point, and the stmt has already
365 been removed, we won't be able to insert the debug bind stmt, so
366 we'll have to drop debug information. */
367 if (gimple_code (def_stmt) == GIMPLE_PHI)
368 {
369 value = degenerate_phi_result (as_a <gphi *> (def_stmt));
370 if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
371 value = NULL;
372 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
373 to. */
374 else if (value == error_mark_node)
375 value = NULL;
376 }
377 else if (is_gimple_assign (def_stmt))
378 {
379 bool no_value = false;
380
381 if (!dom_info_available_p (CDI_DOMINATORS))
382 {
383 struct walk_stmt_info wi;
384
385 memset (&wi, 0, sizeof (wi));
386
387 /* When removing blocks without following reverse dominance
388 order, we may sometimes encounter SSA_NAMEs that have
389 already been released, referenced in other SSA_DEFs that
390 we're about to release. Consider:
391
392 <bb X>:
393 v_1 = foo;
394
395 <bb Y>:
396 w_2 = v_1 + bar;
397 # DEBUG w => w_2
398
399 If we deleted BB X first, propagating the value of w_2
400 won't do us any good. It's too late to recover the
401 original definition of v_1: when it was deleted, it was
402 only referenced in other DEFs, it couldn't possibly know
403 it should have been retained, and propagating every
404 single DEF just in case it might have to be propagated
405 into a DEBUG STMT would probably be too wasteful.
406
407 When dominator information is not readily available, we
408 check for and accept some loss of debug information. But
409 if it is available, there's no excuse for us to remove
410 blocks in the wrong order, so we don't even check for
411 dead SSA NAMEs. SSA verification shall catch any
412 errors. */
413 if ((!gsi && !gimple_bb (def_stmt))
414 || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
415 no_value = true;
416 }
417
418 if (!no_value)
419 value = gimple_assign_rhs_to_tree (def_stmt);
420 }
421
422 if (value)
423 {
424 /* If there's a single use of VAR, and VAR is the entire debug
425 expression (usecount would have been incremented again
426 otherwise), and the definition involves only constants and
427 SSA names, then we can propagate VALUE into this single use,
428 avoiding the temp.
429
430 We can also avoid using a temp if VALUE can be shared and
431 propagated into all uses, without generating expressions that
432 wouldn't be valid gimple RHSs.
433
434 Other cases that would require unsharing or non-gimple RHSs
435 are deferred to a debug temp, although we could avoid temps
436 at the expense of duplication of expressions. */
437
438 if (CONSTANT_CLASS_P (value)
439 || gimple_code (def_stmt) == GIMPLE_PHI
440 || (usecount == 1
441 && (!gimple_assign_single_p (def_stmt)
442 || is_gimple_min_invariant (value)))
443 || is_gimple_reg (value))
444 ;
445 else
446 {
447 gdebug *def_temp;
448 tree vexpr = make_node (DEBUG_EXPR_DECL);
449
450 def_temp = gimple_build_debug_bind (vexpr,
451 unshare_expr (value),
452 def_stmt);
453
454 DECL_ARTIFICIAL (vexpr) = 1;
455 TREE_TYPE (vexpr) = TREE_TYPE (value);
456 if (DECL_P (value))
457 DECL_MODE (vexpr) = DECL_MODE (value);
458 else
459 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));
460
461 if (gsi)
462 gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
463 else
464 {
465 gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
466 gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
467 }
468
469 value = vexpr;
470 }
471 }
472
473 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
474 {
475 if (!gimple_debug_bind_p (stmt))
476 continue;
477
478 if (value)
479 {
480 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
481 /* unshare_expr is not needed here. vexpr is either a
482 SINGLE_RHS, that can be safely shared, some other RHS
483 that was unshared when we found it had a single debug
484 use, or a DEBUG_EXPR_DECL, that can be safely
485 shared. */
486 SET_USE (use_p, unshare_expr (value));
487 /* If we didn't replace uses with a debug decl, fold the
488 resulting expression. Otherwise we end up with invalid IL. */
489 if (TREE_CODE (value) != DEBUG_EXPR_DECL)
490 {
491 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
492 fold_stmt_inplace (&gsi);
493 }
494 }
495 else
496 gimple_debug_bind_reset_value (stmt);
497
498 update_stmt (stmt);
499 }
500 }
501
502
503 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
504 other DEBUG stmts, and replace uses of the DEF with the
505 newly-created debug temp. */
506
507 void
508 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
509 {
510 gimple stmt;
511 ssa_op_iter op_iter;
512 def_operand_p def_p;
513
514 if (!MAY_HAVE_DEBUG_STMTS)
515 return;
516
517 stmt = gsi_stmt (*gsi);
518
519 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
520 {
521 tree var = DEF_FROM_PTR (def_p);
522
523 if (TREE_CODE (var) != SSA_NAME)
524 continue;
525
526 insert_debug_temp_for_var_def (gsi, var);
527 }
528 }
529
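/* This is usually reached indirectly: the statement-removal paths
   (e.g. gsi_remove when a statement is deleted for good) are expected
   to call it so that dead definitions are captured in debug temps.  A
   pass that keeps a statement but gives its DEF a new meaning could
   call it directly; a hypothetical sketch, with NEW_RHS1 standing in
   for the replacement operand:

       insert_debug_temps_for_defs (&gsi);
       gimple_assign_set_rhs1 (stmt, new_rhs1);
       update_stmt (stmt);  */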
530 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
531
532 void
533 reset_debug_uses (gimple stmt)
534 {
535 ssa_op_iter op_iter;
536 def_operand_p def_p;
537 imm_use_iterator imm_iter;
538 gimple use_stmt;
539
540 if (!MAY_HAVE_DEBUG_STMTS)
541 return;
542
543 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
544 {
545 tree var = DEF_FROM_PTR (def_p);
546
547 if (TREE_CODE (var) != SSA_NAME)
548 continue;
549
550 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
551 {
552 if (!gimple_debug_bind_p (use_stmt))
553 continue;
554
555 gimple_debug_bind_reset_value (use_stmt);
556 update_stmt (use_stmt);
557 }
558 }
559 }
560
561 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
562 dominated stmts before their dominators, so that release_ssa_defs
563 stands a chance of propagating DEFs into debug bind stmts. */
564
565 void
566 release_defs_bitset (bitmap toremove)
567 {
568 unsigned j;
569 bitmap_iterator bi;
570
571 /* Performing a topological sort is probably overkill; this will
572 most likely run in slightly superlinear time, rather than the
573 pathological quadratic worst case. */
574 while (!bitmap_empty_p (toremove))
575 EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
576 {
577 bool remove_now = true;
578 tree var = ssa_name (j);
579 gimple stmt;
580 imm_use_iterator uit;
581
582 FOR_EACH_IMM_USE_STMT (stmt, uit, var)
583 {
584 ssa_op_iter dit;
585 def_operand_p def_p;
586
587 /* We can't propagate PHI nodes into debug stmts. */
588 if (gimple_code (stmt) == GIMPLE_PHI
589 || is_gimple_debug (stmt))
590 continue;
591
592 /* If we find another definition to remove that uses
593 the one we're looking at, defer the removal of this
594 one, so that it can be propagated into debug stmts
595 after the other is. */
596 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
597 {
598 tree odef = DEF_FROM_PTR (def_p);
599
600 if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
601 {
602 remove_now = false;
603 break;
604 }
605 }
606
607 if (!remove_now)
608 BREAK_FROM_IMM_USE_STMT (uit);
609 }
610
611 if (remove_now)
612 {
613 gimple def = SSA_NAME_DEF_STMT (var);
614 gimple_stmt_iterator gsi = gsi_for_stmt (def);
615
616 if (gimple_code (def) == GIMPLE_PHI)
617 remove_phi_node (&gsi, true);
618 else
619 {
620 gsi_remove (&gsi, true);
621 release_defs (def);
622 }
623
624 bitmap_clear_bit (toremove, j);
625 }
626 }
627 }
628
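/* Worked illustration of the deferral logic above, with hypothetical
   SSA names.  Suppose TOREMOVE = { a_2, b_3 } for:

       a_2 = x_1 * 2;
       b_3 = a_2 + 1;
       # DEBUG d => b_3

   On the first sweep a_2 is deferred (its user b_3 is also scheduled
   for removal) while b_3 is removed, letting the removal machinery
   propagate a_2 + 1 into the debug bind.  The second sweep removes a_2
   and propagates x_1 * 2 in turn, possibly via a debug temp.  */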
629 /* Return true if SSA_NAME is malformed.
630
631 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
632 operand. */
633
634 static bool
635 verify_ssa_name (tree ssa_name, bool is_virtual)
636 {
637 if (TREE_CODE (ssa_name) != SSA_NAME)
638 {
639 error ("expected an SSA_NAME object");
640 return true;
641 }
642
643 if (SSA_NAME_IN_FREE_LIST (ssa_name))
644 {
645 error ("found an SSA_NAME that had been released into the free pool");
646 return true;
647 }
648
649 if (SSA_NAME_VAR (ssa_name) != NULL_TREE
650 && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
651 {
652 error ("type mismatch between an SSA_NAME and its symbol");
653 return true;
654 }
655
656 if (is_virtual && !virtual_operand_p (ssa_name))
657 {
658 error ("found a virtual definition for a GIMPLE register");
659 return true;
660 }
661
662 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
663 {
664 error ("virtual SSA name for non-VOP decl");
665 return true;
666 }
667
668 if (!is_virtual && virtual_operand_p (ssa_name))
669 {
670 error ("found a real definition for a non-register");
671 return true;
672 }
673
674 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
675 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
676 {
677 error ("found a default name with a non-empty defining statement");
678 return true;
679 }
680
681 return false;
682 }
683
684
685 /* Return true if the definition of SSA_NAME at block BB is malformed.
686
687 STMT is the statement where SSA_NAME is created.
688
689 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
690 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
691 it means that the block in that array slot contains the
692 definition of SSA_NAME.
693
694 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
695
696 static bool
697 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
698 gimple stmt, bool is_virtual)
699 {
700 if (verify_ssa_name (ssa_name, is_virtual))
701 goto err;
702
703 if (SSA_NAME_VAR (ssa_name)
704 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
705 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
706 {
707 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
708 goto err;
709 }
710
711 if (definition_block[SSA_NAME_VERSION (ssa_name)])
712 {
713 error ("SSA_NAME created in two different blocks %i and %i",
714 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
715 goto err;
716 }
717
718 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
719
720 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
721 {
722 error ("SSA_NAME_DEF_STMT is wrong");
723 fprintf (stderr, "Expected definition statement:\n");
724 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
725 fprintf (stderr, "\nActual definition statement:\n");
726 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
727 goto err;
728 }
729
730 return false;
731
732 err:
733 fprintf (stderr, "while verifying SSA_NAME ");
734 print_generic_expr (stderr, ssa_name, 0);
735 fprintf (stderr, " in statement\n");
736 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
737
738 return true;
739 }
740
741
742 /* Return true if the use of SSA_NAME at statement STMT in block BB is
743 malformed.
744
745 DEF_BB is the block where SSA_NAME was found to be created.
746
747 IDOM contains immediate dominator information for the flowgraph.
748
749 CHECK_ABNORMAL is true if the caller wants to check whether this use
750 is flowing through an abnormal edge (only used when checking PHI
751 arguments).
752
753 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
754 that are defined before STMT in basic block BB. */
755
756 static bool
757 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
758 gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
759 {
760 bool err = false;
761 tree ssa_name = USE_FROM_PTR (use_p);
762
763 if (!TREE_VISITED (ssa_name))
764 if (verify_imm_links (stderr, ssa_name))
765 err = true;
766
767 TREE_VISITED (ssa_name) = 1;
768
769 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
770 && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
771 ; /* Default definitions have empty statements. Nothing to do. */
772 else if (!def_bb)
773 {
774 error ("missing definition");
775 err = true;
776 }
777 else if (bb != def_bb
778 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
779 {
780 error ("definition in block %i does not dominate use in block %i",
781 def_bb->index, bb->index);
782 err = true;
783 }
784 else if (bb == def_bb
785 && names_defined_in_bb != NULL
786 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
787 {
788 error ("definition in block %i follows the use", def_bb->index);
789 err = true;
790 }
791
792 if (check_abnormal
793 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
794 {
795 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
796 err = true;
797 }
798
799 /* Make sure the use is on an appropriate list by checking that the
800 previous element refers to the same SSA name.  */
801 if (use_p->prev == NULL)
802 {
803 error ("no immediate_use list");
804 err = true;
805 }
806 else
807 {
808 tree listvar;
809 if (use_p->prev->use == NULL)
810 listvar = use_p->prev->loc.ssa_name;
811 else
812 listvar = USE_FROM_PTR (use_p->prev);
813 if (listvar != ssa_name)
814 {
815 error ("wrong immediate use list");
816 err = true;
817 }
818 }
819
820 if (err)
821 {
822 fprintf (stderr, "for SSA_NAME: ");
823 print_generic_expr (stderr, ssa_name, TDF_VOPS);
824 fprintf (stderr, " in statement:\n");
825 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
826 }
827
828 return err;
829 }
830
831
832 /* Return true if any of the arguments for PHI node PHI at block BB is
833 malformed.
834
835 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
836 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
837 it means that the block in that array slot contains the
838 definition of SSA_NAME. */
839
840 static bool
841 verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
842 {
843 edge e;
844 bool err = false;
845 size_t i, phi_num_args = gimple_phi_num_args (phi);
846
847 if (EDGE_COUNT (bb->preds) != phi_num_args)
848 {
849 error ("incoming edge count does not match number of PHI arguments");
850 err = true;
851 goto error;
852 }
853
854 for (i = 0; i < phi_num_args; i++)
855 {
856 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
857 tree op = USE_FROM_PTR (op_p);
858
859 e = EDGE_PRED (bb, i);
860
861 if (op == NULL_TREE)
862 {
863 error ("PHI argument is missing for edge %d->%d",
864 e->src->index,
865 e->dest->index);
866 err = true;
867 goto error;
868 }
869
870 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
871 {
872 error ("PHI argument is not SSA_NAME, or invariant");
873 err = true;
874 }
875
876 if (TREE_CODE (op) == SSA_NAME)
877 {
878 err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
879 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
880 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
881 }
882
883 if (TREE_CODE (op) == ADDR_EXPR)
884 {
885 tree base = TREE_OPERAND (op, 0);
886 while (handled_component_p (base))
887 base = TREE_OPERAND (base, 0);
888 if ((TREE_CODE (base) == VAR_DECL
889 || TREE_CODE (base) == PARM_DECL
890 || TREE_CODE (base) == RESULT_DECL)
891 && !TREE_ADDRESSABLE (base))
892 {
893 error ("address taken, but ADDRESSABLE bit not set");
894 err = true;
895 }
896 }
897
898 if (e->dest != bb)
899 {
900 error ("wrong edge %d->%d for PHI argument",
901 e->src->index, e->dest->index);
902 err = true;
903 }
904
905 if (err)
906 {
907 fprintf (stderr, "PHI argument\n");
908 print_generic_stmt (stderr, op, TDF_VOPS);
909 goto error;
910 }
911 }
912
913 error:
914 if (err)
915 {
916 fprintf (stderr, "for PHI node\n");
917 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
918 }
919
920
921 return err;
922 }
923
924
925 /* Verify common invariants in the SSA web.
926 TODO: verify the variable annotations. */
927
928 DEBUG_FUNCTION void
929 verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
930 {
931 size_t i;
932 basic_block bb;
933 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
934 ssa_op_iter iter;
935 tree op;
936 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
937 bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
938
939 gcc_assert (!need_ssa_update_p (cfun));
940
941 timevar_push (TV_TREE_SSA_VERIFY);
942
943 /* Keep track of SSA names present in the IL. */
944 for (i = 1; i < num_ssa_names; i++)
945 {
946 tree name = ssa_name (i);
947 if (name)
948 {
949 gimple stmt;
950 TREE_VISITED (name) = 0;
951
952 verify_ssa_name (name, virtual_operand_p (name));
953
954 stmt = SSA_NAME_DEF_STMT (name);
955 if (!gimple_nop_p (stmt))
956 {
957 basic_block bb = gimple_bb (stmt);
958 if (verify_def (bb, definition_block,
959 name, stmt, virtual_operand_p (name)))
960 goto err;
961 }
962 }
963 }
964
965 calculate_dominance_info (CDI_DOMINATORS);
966
967 /* Now verify all the uses and make sure they agree with the definitions
968 found in the previous pass. */
969 FOR_EACH_BB_FN (bb, cfun)
970 {
971 edge e;
972 edge_iterator ei;
973
974 /* Make sure that all edges have a clear 'aux' field. */
975 FOR_EACH_EDGE (e, ei, bb->preds)
976 {
977 if (e->aux)
978 {
979 error ("AUX pointer initialized for edge %d->%d", e->src->index,
980 e->dest->index);
981 goto err;
982 }
983 }
984
985 /* Verify the arguments for every PHI node in the block. */
986 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
987 {
988 gphi *phi = gsi.phi ();
989 if (verify_phi_args (phi, bb, definition_block))
990 goto err;
991
992 bitmap_set_bit (names_defined_in_bb,
993 SSA_NAME_VERSION (gimple_phi_result (phi)));
994 }
995
996 /* Now verify all the uses and vuses in every statement of the block. */
997 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
998 gsi_next (&gsi))
999 {
1000 gimple stmt = gsi_stmt (gsi);
1001 use_operand_p use_p;
1002
1003 if (check_modified_stmt && gimple_modified_p (stmt))
1004 {
1005 error ("stmt (%p) marked modified after optimization pass: ",
1006 (void *)stmt);
1007 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1008 goto err;
1009 }
1010
1011 if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
1012 {
1013 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1014 goto err;
1015 }
1016
1017 if (gimple_debug_bind_p (stmt)
1018 && !gimple_debug_bind_has_value_p (stmt))
1019 continue;
1020
1021 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
1022 {
1023 op = USE_FROM_PTR (use_p);
1024 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1025 use_p, stmt, false, names_defined_in_bb))
1026 goto err;
1027 }
1028
1029 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1030 {
1031 if (SSA_NAME_DEF_STMT (op) != stmt)
1032 {
1033 error ("SSA_NAME_DEF_STMT is wrong");
1034 fprintf (stderr, "Expected definition statement:\n");
1035 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1036 fprintf (stderr, "\nActual definition statement:\n");
1037 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1038 4, TDF_VOPS);
1039 goto err;
1040 }
1041 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1042 }
1043 }
1044
1045 bitmap_clear (names_defined_in_bb);
1046 }
1047
1048 free (definition_block);
1049
1050 /* Restore the dominance information to its prior known state, so
1051 that we do not perturb the compiler's subsequent behavior. */
1052 if (orig_dom_state == DOM_NONE)
1053 free_dominance_info (CDI_DOMINATORS);
1054 else
1055 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1056
1057 BITMAP_FREE (names_defined_in_bb);
1058 timevar_pop (TV_TREE_SSA_VERIFY);
1059 return;
1060
1061 err:
1062 internal_error ("verify_ssa failed");
1063 }
1064
1065
1066 /* Initialize global DFA and SSA structures. */
1067
1068 void
1069 init_tree_ssa (struct function *fn)
1070 {
1071 fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
1072 fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
1073 pt_solution_reset (&fn->gimple_df->escaped);
1074 init_ssanames (fn, 0);
1075 }
1076
1077 /* Do the actions required to initialize internal data structures used
1078 in tree-ssa optimization passes. */
1079
1080 static unsigned int
1081 execute_init_datastructures (void)
1082 {
1083 /* Allocate hash tables, arrays and other structures. */
1084 gcc_assert (!cfun->gimple_df);
1085 init_tree_ssa (cfun);
1086 return 0;
1087 }
1088
1089 namespace {
1090
1091 const pass_data pass_data_init_datastructures =
1092 {
1093 GIMPLE_PASS, /* type */
1094 "*init_datastructures", /* name */
1095 OPTGROUP_NONE, /* optinfo_flags */
1096 TV_NONE, /* tv_id */
1097 PROP_cfg, /* properties_required */
1098 0, /* properties_provided */
1099 0, /* properties_destroyed */
1100 0, /* todo_flags_start */
1101 0, /* todo_flags_finish */
1102 };
1103
1104 class pass_init_datastructures : public gimple_opt_pass
1105 {
1106 public:
1107 pass_init_datastructures (gcc::context *ctxt)
1108 : gimple_opt_pass (pass_data_init_datastructures, ctxt)
1109 {}
1110
1111 /* opt_pass methods: */
1112 virtual bool gate (function *fun)
1113 {
1114 /* Do nothing for functions that were already produced in SSA form.  */
1115 return !(fun->curr_properties & PROP_ssa);
1116 }
1117
1118 virtual unsigned int execute (function *)
1119 {
1120 return execute_init_datastructures ();
1121 }
1122
1123 }; // class pass_init_datastructures
1124
1125 } // anon namespace
1126
1127 gimple_opt_pass *
1128 make_pass_init_datastructures (gcc::context *ctxt)
1129 {
1130 return new pass_init_datastructures (ctxt);
1131 }
1132
1133 /* Deallocate memory associated with SSA data structures for the current function.  */
1134
1135 void
1136 delete_tree_ssa (void)
1137 {
1138 fini_ssanames ();
1139
1140 /* We no longer maintain the SSA operand cache at this point. */
1141 if (ssa_operands_active (cfun))
1142 fini_ssa_operands (cfun);
1143
1144 cfun->gimple_df->default_defs->empty ();
1145 cfun->gimple_df->default_defs = NULL;
1146 pt_solution_reset (&cfun->gimple_df->escaped);
1147 if (cfun->gimple_df->decls_to_pointers != NULL)
1148 delete cfun->gimple_df->decls_to_pointers;
1149 cfun->gimple_df->decls_to_pointers = NULL;
1150 cfun->gimple_df->modified_noreturn_calls = NULL;
1151 cfun->gimple_df = NULL;
1152
1153 /* We no longer need the edge variable maps. */
1154 redirect_edge_var_map_destroy ();
1155 }
1156
1157 /* Return true if EXPR is a useless type conversion, otherwise return
1158 false. */
1159
1160 bool
1161 tree_ssa_useless_type_conversion (tree expr)
1162 {
1163 /* If we have an assignment that merely uses a NOP_EXPR to change
1164 the top of the RHS to the type of the LHS and the type conversion
1165 is "safe", then strip away the type conversion so that we can
1166 enter LHS = RHS into the const_and_copies table. */
1167 if (CONVERT_EXPR_P (expr)
1168 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1169 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1170 return useless_type_conversion_p
1171 (TREE_TYPE (expr),
1172 TREE_TYPE (TREE_OPERAND (expr, 0)));
1173
1174 return false;
1175 }
1176
1177 /* Strip conversions from EXP according to
1178 tree_ssa_useless_type_conversion and return the resulting
1179 expression. */
1180
1181 tree
1182 tree_ssa_strip_useless_type_conversions (tree exp)
1183 {
1184 while (tree_ssa_useless_type_conversion (exp))
1185 exp = TREE_OPERAND (exp, 0);
1186 return exp;
1187 }
1188
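/* Example, with hypothetical types: on a target where 'int' and 'long'
   are both 32-bit signed integers, (long) i_1 with i_1 of type int is a
   useless conversion, so tree_ssa_strip_useless_type_conversions returns
   the bare i_1; a conversion that changes precision or signedness, such
   as (unsigned char) i_1, is kept.  */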
1189
1190 /* Return true if T, an SSA_NAME, has an undefined value. PARTIAL is what
1191 should be returned if the value is only partially undefined. */
1192
1193 bool
1194 ssa_undefined_value_p (tree t, bool partial)
1195 {
1196 gimple def_stmt;
1197 tree var = SSA_NAME_VAR (t);
1198
1199 if (!var)
1200 ;
1201 /* Parameters get their initial value from the function entry. */
1202 else if (TREE_CODE (var) == PARM_DECL)
1203 return false;
1204 /* When returning by reference the return address is actually a hidden
1205 parameter. */
1206 else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1207 return false;
1208 /* Hard register variables get their initial value from the ether. */
1209 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1210 return false;
1211
1212 /* The value is undefined iff its definition statement is empty. */
1213 def_stmt = SSA_NAME_DEF_STMT (t);
1214 if (gimple_nop_p (def_stmt))
1215 return true;
1216
1217 /* Check whether the complex number was only partially defined.  */
1218 if (partial && is_gimple_assign (def_stmt)
1219 && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1220 {
1221 tree rhs1, rhs2;
1222
1223 rhs1 = gimple_assign_rhs1 (def_stmt);
1224 rhs2 = gimple_assign_rhs2 (def_stmt);
1225 return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1226 || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1227 }
1228 return false;
1229 }
1230
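/* Example of the PARTIAL case, in rough (hypothetical) GIMPLE: for a
   complex variable whose imaginary part is never assigned, the reaching
   definition can end up as

       c_3 = COMPLEX_EXPR <x_1, c$imag_2(D)>;

   and ssa_undefined_value_p (c_3, true) returns true because one operand
   is a default definition, while ssa_undefined_value_p (c_3, false)
   returns false.  */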
1231
1232 /* If necessary, rewrite the base of the reference tree *TP from
1233 a MEM_REF to a plain or converted symbol. */
1234
1235 static void
1236 maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
1237 {
1238 tree sym;
1239
1240 while (handled_component_p (*tp))
1241 tp = &TREE_OPERAND (*tp, 0);
1242 if (TREE_CODE (*tp) == MEM_REF
1243 && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
1244 && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1245 && DECL_P (sym)
1246 && !TREE_ADDRESSABLE (sym)
1247 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1248 {
1249 if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1250 && useless_type_conversion_p (TREE_TYPE (*tp),
1251 TREE_TYPE (TREE_TYPE (sym)))
1252 && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1253 TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1254 {
1255 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1256 TYPE_SIZE (TREE_TYPE (*tp)),
1257 int_const_binop (MULT_EXPR,
1258 bitsize_int (BITS_PER_UNIT),
1259 TREE_OPERAND (*tp, 1)));
1260 }
1261 else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1262 && useless_type_conversion_p (TREE_TYPE (*tp),
1263 TREE_TYPE (TREE_TYPE (sym))))
1264 {
1265 *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1266 ? REALPART_EXPR : IMAGPART_EXPR,
1267 TREE_TYPE (*tp), sym);
1268 }
1269 else if (integer_zerop (TREE_OPERAND (*tp, 1)))
1270 {
1271 if (!useless_type_conversion_p (TREE_TYPE (*tp),
1272 TREE_TYPE (sym)))
1273 *tp = build1 (VIEW_CONVERT_EXPR,
1274 TREE_TYPE (*tp), sym);
1275 else
1276 *tp = sym;
1277 }
1278 }
1279 }
1280
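/* Rough before/after sketch of the rewrites above, assuming a
   non-addressable 'v4si v' (four 32-bit ints) and '_Complex double c'
   that are being renamed (hypothetical GIMPLE):

       MEM[(int *)&v + 4B]      ->  BIT_FIELD_REF <v, 32, 32>
       MEM[(double *)&c]        ->  REALPART_EXPR <c>
       MEM[(double *)&c + 8B]   ->  IMAGPART_EXPR <c>
       MEM[(T *)&x]             ->  x, or VIEW_CONVERT_EXPR <T> (x)
                                    when the conversion is not useless.  */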
1281 /* For a tree REF return its base if it is the base of a MEM_REF
1282 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1283
1284 static tree
1285 non_rewritable_mem_ref_base (tree ref)
1286 {
1287 tree base = ref;
1288
1289 /* A plain decl never needs to be marked as a non-register.  */
1290 if (DECL_P (ref))
1291 return NULL_TREE;
1292
1293 while (handled_component_p (base))
1294 base = TREE_OPERAND (base, 0);
1295
1296 /* But watch out for MEM_REFs we cannot lower to a
1297 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1298 if (TREE_CODE (base) == MEM_REF
1299 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1300 {
1301 tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1302 if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1303 || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
1304 && useless_type_conversion_p (TREE_TYPE (base),
1305 TREE_TYPE (TREE_TYPE (decl)))
1306 && wi::fits_uhwi_p (mem_ref_offset (base))
1307 && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1308 mem_ref_offset (base))
1309 && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1310 TYPE_SIZE_UNIT (TREE_TYPE (base))))
1311 return NULL_TREE;
1312 if (DECL_P (decl)
1313 && (!integer_zerop (TREE_OPERAND (base, 1))
1314 || (DECL_SIZE (decl)
1315 != TYPE_SIZE (TREE_TYPE (base)))
1316 || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
1317 return decl;
1318 }
1319
1320 return NULL_TREE;
1321 }
1322
1323 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1324 Otherwise return false.  */
1325
1326 static bool
1327 non_rewritable_lvalue_p (tree lhs)
1328 {
1329 /* A plain decl is always rewritable. */
1330 if (DECL_P (lhs))
1331 return false;
1332
1333 /* A decl that is wrapped inside a MEM_REF that covers
1334 it in full is also rewritable.
1335 ??? The following could be relaxed allowing component
1336 references that do not change the access size. */
1337 if (TREE_CODE (lhs) == MEM_REF
1338 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1339 && integer_zerop (TREE_OPERAND (lhs, 1)))
1340 {
1341 tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1342 if (DECL_P (decl)
1343 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1344 && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1345 return false;
1346 }
1347
1348 return true;
1349 }
1350
1351 /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
1352 mark the variable VAR for conversion into SSA, recording it in
1353 SUITABLE_FOR_RENAMING so the caller knows stmt updating is required.  */
1354
1355 static void
1356 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
1357 bitmap suitable_for_renaming)
1358 {
1359 /* Global variables and result decls cannot be changed.  */
1360 if (is_global_var (var)
1361 || TREE_CODE (var) == RESULT_DECL
1362 || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1363 return;
1364
1365 if (TREE_ADDRESSABLE (var)
1366 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1367 a non-register. Otherwise we are confused and forget to
1368 add virtual operands for it. */
1369 && (!is_gimple_reg_type (TREE_TYPE (var))
1370 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1371 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1372 || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1373 {
1374 TREE_ADDRESSABLE (var) = 0;
1375 if (is_gimple_reg (var))
1376 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1377 if (dump_file)
1378 {
1379 fprintf (dump_file, "No longer having address taken: ");
1380 print_generic_expr (dump_file, var, 0);
1381 fprintf (dump_file, "\n");
1382 }
1383 }
1384
1385 if (!DECL_GIMPLE_REG_P (var)
1386 && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1387 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1388 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1389 && !TREE_THIS_VOLATILE (var)
1390 && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
1391 {
1392 DECL_GIMPLE_REG_P (var) = 1;
1393 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1394 if (dump_file)
1395 {
1396 fprintf (dump_file, "Now a gimple register: ");
1397 print_generic_expr (dump_file, var, 0);
1398 fprintf (dump_file, "\n");
1399 }
1400 }
1401 }
1402
1403 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
1404
1405 void
1406 execute_update_addresses_taken (void)
1407 {
1408 basic_block bb;
1409 bitmap addresses_taken = BITMAP_ALLOC (NULL);
1410 bitmap not_reg_needs = BITMAP_ALLOC (NULL);
1411 bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
1412 tree var;
1413 unsigned i;
1414
1415 timevar_push (TV_ADDRESS_TAKEN);
1416
1417 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1418 the function body. */
1419 FOR_EACH_BB_FN (bb, cfun)
1420 {
1421 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1422 gsi_next (&gsi))
1423 {
1424 gimple stmt = gsi_stmt (gsi);
1425 enum gimple_code code = gimple_code (stmt);
1426 tree decl;
1427
1428 /* Note all addresses taken by the stmt. */
1429 gimple_ior_addresses_taken (addresses_taken, stmt);
1430
1431 /* If we have a call or an assignment, see if the lhs contains
1432 a local decl that must not become a gimple register.  */
1433 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1434 {
1435 tree lhs = gimple_get_lhs (stmt);
1436 if (lhs
1437 && TREE_CODE (lhs) != SSA_NAME
1438 && non_rewritable_lvalue_p (lhs))
1439 {
1440 decl = get_base_address (lhs);
1441 if (DECL_P (decl))
1442 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1443 }
1444 }
1445
1446 if (gimple_assign_single_p (stmt))
1447 {
1448 tree rhs = gimple_assign_rhs1 (stmt);
1449 if ((decl = non_rewritable_mem_ref_base (rhs)))
1450 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1451 }
1452
1453 else if (code == GIMPLE_CALL)
1454 {
1455 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1456 {
1457 tree arg = gimple_call_arg (stmt, i);
1458 if ((decl = non_rewritable_mem_ref_base (arg)))
1459 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1460 }
1461 }
1462
1463 else if (code == GIMPLE_ASM)
1464 {
1465 gasm *asm_stmt = as_a <gasm *> (stmt);
1466 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1467 {
1468 tree link = gimple_asm_output_op (asm_stmt, i);
1469 tree lhs = TREE_VALUE (link);
1470 if (TREE_CODE (lhs) != SSA_NAME)
1471 {
1472 decl = get_base_address (lhs);
1473 if (DECL_P (decl)
1474 && (non_rewritable_lvalue_p (lhs)
1475 /* We cannot move required conversions from
1476 the lhs to the rhs in asm statements, so
1477 require we do not need any. */
1478 || !useless_type_conversion_p
1479 (TREE_TYPE (lhs), TREE_TYPE (decl))))
1480 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1481 }
1482 }
1483 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1484 {
1485 tree link = gimple_asm_input_op (asm_stmt, i);
1486 if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1487 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1488 }
1489 }
1490 }
1491
1492 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1493 gsi_next (&gsi))
1494 {
1495 size_t i;
1496 gphi *phi = gsi.phi ();
1497
1498 for (i = 0; i < gimple_phi_num_args (phi); i++)
1499 {
1500 tree op = PHI_ARG_DEF (phi, i), var;
1501 if (TREE_CODE (op) == ADDR_EXPR
1502 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1503 && DECL_P (var))
1504 bitmap_set_bit (addresses_taken, DECL_UID (var));
1505 }
1506 }
1507 }
1508
1509 /* We cannot iterate over all referenced vars because that set can contain
1510 unused vars from BLOCK trees, which causes code generation differences
1511 for -g vs. -g0. */
1512 for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1513 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1514 suitable_for_renaming);
1515
1516 FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1517 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1518 suitable_for_renaming);
1519
1520 /* Operand caches need to be recomputed for operands referencing the updated
1521 variables and operands need to be rewritten to expose bare symbols. */
1522 if (!bitmap_empty_p (suitable_for_renaming))
1523 {
1524 FOR_EACH_BB_FN (bb, cfun)
1525 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1526 {
1527 gimple stmt = gsi_stmt (gsi);
1528
1529 /* Re-write MEM_REFs of symbols we want to
1530 rewrite into SSA form.  */
1531 if (gimple_assign_single_p (stmt))
1532 {
1533 tree lhs = gimple_assign_lhs (stmt);
1534 tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1535 tree sym;
1536
1537 /* We shouldn't have any fancy wrapping of
1538 component-refs on the LHS, but look through
1539 VIEW_CONVERT_EXPRs as that is easy. */
1540 while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1541 lhs = TREE_OPERAND (lhs, 0);
1542 if (TREE_CODE (lhs) == MEM_REF
1543 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1544 && integer_zerop (TREE_OPERAND (lhs, 1))
1545 && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1546 && DECL_P (sym)
1547 && !TREE_ADDRESSABLE (sym)
1548 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1549 lhs = sym;
1550 else
1551 lhs = gimple_assign_lhs (stmt);
1552
1553 /* Rewrite the RHS and make sure the resulting assignment
1554 is validly typed. */
1555 maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1556 rhs = gimple_assign_rhs1 (stmt);
1557 if (gimple_assign_lhs (stmt) != lhs
1558 && !useless_type_conversion_p (TREE_TYPE (lhs),
1559 TREE_TYPE (rhs)))
1560 rhs = fold_build1 (VIEW_CONVERT_EXPR,
1561 TREE_TYPE (lhs), rhs);
1562
1563 if (gimple_assign_lhs (stmt) != lhs)
1564 gimple_assign_set_lhs (stmt, lhs);
1565
1566 if (gimple_assign_rhs1 (stmt) != rhs)
1567 {
1568 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1569 gimple_assign_set_rhs_from_tree (&gsi, rhs);
1570 }
1571 }
1572
1573 else if (gimple_code (stmt) == GIMPLE_CALL)
1574 {
1575 unsigned i;
1576 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1577 {
1578 tree *argp = gimple_call_arg_ptr (stmt, i);
1579 maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
1580 }
1581 }
1582
1583 else if (gimple_code (stmt) == GIMPLE_ASM)
1584 {
1585 gasm *asm_stmt = as_a <gasm *> (stmt);
1586 unsigned i;
1587 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1588 {
1589 tree link = gimple_asm_output_op (asm_stmt, i);
1590 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1591 suitable_for_renaming);
1592 }
1593 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1594 {
1595 tree link = gimple_asm_input_op (asm_stmt, i);
1596 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1597 suitable_for_renaming);
1598 }
1599 }
1600
1601 else if (gimple_debug_bind_p (stmt)
1602 && gimple_debug_bind_has_value_p (stmt))
1603 {
1604 tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
1605 tree decl;
1606 maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
1607 decl = non_rewritable_mem_ref_base (*valuep);
1608 if (decl
1609 && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
1610 gimple_debug_bind_reset_value (stmt);
1611 }
1612
1613 if (gimple_references_memory_p (stmt)
1614 || is_gimple_debug (stmt))
1615 update_stmt (stmt);
1616
1617 gsi_next (&gsi);
1618 }
1619
1620 /* Update SSA form here; we may also be called outside of a pass.  */
1621 if (number_of_loops (cfun) > 1
1622 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
1623 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
1624 else
1625 update_ssa (TODO_update_ssa);
1626 }
1627
1628 BITMAP_FREE (not_reg_needs);
1629 BITMAP_FREE (addresses_taken);
1630 BITMAP_FREE (suitable_for_renaming);
1631 timevar_pop (TV_ADDRESS_TAKEN);
1632 }
1633
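/* Net effect, sketched on a hypothetical function: if a local 'int i'
   only had its address taken by statements that have since been
   removed, ADDRESSES_TAKEN no longer mentions it, maybe_optimize_var
   clears TREE_ADDRESSABLE, accesses such as MEM[(int *)&i] are rewritten
   back to plain 'i' above, and the final update_ssa brings i into SSA
   form again.  */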
1634 namespace {
1635
1636 const pass_data pass_data_update_address_taken =
1637 {
1638 GIMPLE_PASS, /* type */
1639 "addressables", /* name */
1640 OPTGROUP_NONE, /* optinfo_flags */
1641 TV_ADDRESS_TAKEN, /* tv_id */
1642 PROP_ssa, /* properties_required */
1643 0, /* properties_provided */
1644 0, /* properties_destroyed */
1645 0, /* todo_flags_start */
1646 TODO_update_address_taken, /* todo_flags_finish */
1647 };
1648
1649 class pass_update_address_taken : public gimple_opt_pass
1650 {
1651 public:
1652 pass_update_address_taken (gcc::context *ctxt)
1653 : gimple_opt_pass (pass_data_update_address_taken, ctxt)
1654 {}
1655
1656 /* opt_pass methods: */
1657
1658 }; // class pass_update_address_taken
1659
1660 } // anon namespace
1661
1662 gimple_opt_pass *
1663 make_pass_update_address_taken (gcc::context *ctxt)
1664 {
1665 return new pass_update_address_taken (ctxt);
1666 }