gcc/tree-ssa.c
1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stor-layout.h"
26 #include "flags.h"
27 #include "tm_p.h"
28 #include "target.h"
29 #include "langhooks.h"
30 #include "basic-block.h"
31 #include "hashtab.h"
32 #include "hash-set.h"
33 #include "vec.h"
34 #include "machmode.h"
35 #include "hard-reg-set.h"
36 #include "input.h"
37 #include "function.h"
38 #include "gimple-pretty-print.h"
39 #include "tree-ssa-alias.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "gimple-expr.h"
43 #include "is-a.h"
44 #include "gimple.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "gimple-walk.h"
48 #include "gimple-ssa.h"
49 #include "tree-phinodes.h"
50 #include "ssa-iterators.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
53 #include "tree-ssa-loop-manip.h"
54 #include "tree-into-ssa.h"
55 #include "tree-ssa.h"
56 #include "tree-inline.h"
57 #include "hash-map.h"
58 #include "tree-pass.h"
59 #include "diagnostic-core.h"
60 #include "cfgloop.h"
61 #include "cfgexpand.h"
62
 63 /* Hash map of variable mappings, keyed by edge.  */
64 static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
65
66
67 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
68
69 void
70 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
71 {
72 edge_var_map new_node;
73
74 if (edge_var_maps == NULL)
75 edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
76
77 auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
78 new_node.def = def;
79 new_node.result = result;
80 new_node.locus = locus;
81
82 slot.safe_push (new_node);
83 }
84
85
86 /* Clear the var mappings in edge E. */
87
88 void
89 redirect_edge_var_map_clear (edge e)
90 {
91 if (!edge_var_maps)
92 return;
93
94 auto_vec<edge_var_map> *head = edge_var_maps->get (e);
95
96 if (head)
97 head->release ();
98 }
99
100
101 /* Duplicate the redirected var mappings in OLDE in NEWE.
102
103 This assumes a hash_map can have multiple edges mapping to the same
104 var_map (many to one mapping), since we don't remove the previous mappings.
105 */
106
107 void
108 redirect_edge_var_map_dup (edge newe, edge olde)
109 {
110 if (!edge_var_maps)
111 return;
112
113 auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
114 auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
115 if (!old_head)
116 return;
117
118 new_head->safe_splice (*old_head);
119 }
120
121
122 /* Return the variable mappings for a given edge. If there is none, return
123 NULL. */
124
125 vec<edge_var_map> *
126 redirect_edge_var_map_vector (edge e)
127 {
128 /* Hey, what kind of idiot would... you'd be surprised. */
129 if (!edge_var_maps)
130 return NULL;
131
132 auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
133 if (!slot)
134 return NULL;
135
136 return slot;
137 }
138
139 /* Clear the edge variable mappings. */
140
141 void
142 redirect_edge_var_map_destroy (void)
143 {
144 delete edge_var_maps;
145 edge_var_maps = NULL;
146 }
147
148
149 /* Remove the corresponding arguments from the PHI nodes in E's
150 destination block and redirect it to DEST. Return redirected edge.
151 The list of removed arguments is stored in a vector accessed
152 through edge_var_maps. */
153
154 edge
155 ssa_redirect_edge (edge e, basic_block dest)
156 {
157 gimple_stmt_iterator gsi;
158 gimple phi;
159
160 redirect_edge_var_map_clear (e);
161
162 /* Remove the appropriate PHI arguments in E's destination block. */
163 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
164 {
165 tree def;
 166           source_location locus;
167
168 phi = gsi_stmt (gsi);
169 def = gimple_phi_arg_def (phi, e->dest_idx);
170 locus = gimple_phi_arg_location (phi, e->dest_idx);
171
172 if (def == NULL_TREE)
173 continue;
174
175 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
176 }
177
178 e = redirect_edge_succ_nodup (e, dest);
179
180 return e;
181 }
182
183
184 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
185 E->dest. */
186
187 void
188 flush_pending_stmts (edge e)
189 {
190 gimple phi;
191 edge_var_map *vm;
192 int i;
193 gimple_stmt_iterator gsi;
194
195 vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
196 if (!v)
197 return;
198
199 for (gsi = gsi_start_phis (e->dest), i = 0;
200 !gsi_end_p (gsi) && v->iterate (i, &vm);
201 gsi_next (&gsi), i++)
202 {
203 tree def;
204
205 phi = gsi_stmt (gsi);
206 def = redirect_edge_var_map_def (vm);
207 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
208 }
209
210 redirect_edge_var_map_clear (e);
 211 }
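
/* Usage sketch (illustrative, not part of GCC): a CFG transformation that
   redirects an edge typically pairs ssa_redirect_edge with
   flush_pending_stmts once the PHI nodes in the new destination exist.
   Only the two calls below are the APIs defined above; the surrounding
   control flow is hypothetical.

     edge e2 = ssa_redirect_edge (e, new_dest);
     ... create or adjust the PHI nodes in new_dest ...
     flush_pending_stmts (e2);

   ssa_redirect_edge queues the removed PHI arguments in the edge_var_maps
   table; flush_pending_stmts then re-attaches them, in order, to the PHI
   nodes of the new destination.  */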
212
213 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
214 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
215 expression with a different value.
216
217 This will update any annotations (say debug bind stmts) referring
218 to the original LHS, so that they use the RHS instead. This is
219 done even if NLHS and LHS are the same, for it is understood that
220 the RHS will be modified afterwards, and NLHS will not be assigned
221 an equivalent value.
222
223 Adjusting any non-annotation uses of the LHS, if needed, is a
224 responsibility of the caller.
225
226 The effect of this call should be pretty much the same as that of
227 inserting a copy of STMT before STMT, and then removing the
 228    original stmt, at which time gsi_remove() would have updated the
229 annotations, but using this function saves all the inserting,
230 copying and removing. */
231
232 void
233 gimple_replace_ssa_lhs (gimple stmt, tree nlhs)
234 {
235 if (MAY_HAVE_DEBUG_STMTS)
236 {
237 tree lhs = gimple_get_lhs (stmt);
238
239 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
240
241 insert_debug_temp_for_var_def (NULL, lhs);
242 }
243
244 gimple_set_lhs (stmt, nlhs);
245 }
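
/* Usage sketch (illustrative, not part of GCC): a pass that is about to
   give STMT's RHS a different value calls gimple_replace_ssa_lhs first,
   so the old value is preserved in a debug temp, and only then rewrites
   the RHS.  The calls shown are real GIMPLE APIs used in this file; the
   surrounding context is hypothetical.

     gimple_replace_ssa_lhs (stmt, gimple_get_lhs (stmt));
     ... now rewrite the RHS of STMT to its new value ...
     update_stmt (stmt);

   Passing the existing LHS as NLHS is fine, as documented above.  */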
246
247
248 /* Given a tree for an expression for which we might want to emit
249 locations or values in debug information (generally a variable, but
250 we might deal with other kinds of trees in the future), return the
251 tree that should be used as the variable of a DEBUG_BIND STMT or
252 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
253
254 tree
255 target_for_debug_bind (tree var)
256 {
257 if (!MAY_HAVE_DEBUG_STMTS)
258 return NULL_TREE;
259
260 if (TREE_CODE (var) == SSA_NAME)
261 {
262 var = SSA_NAME_VAR (var);
263 if (var == NULL_TREE)
264 return NULL_TREE;
265 }
266
267 if ((TREE_CODE (var) != VAR_DECL
268 || VAR_DECL_IS_VIRTUAL_OPERAND (var))
269 && TREE_CODE (var) != PARM_DECL)
270 return NULL_TREE;
271
272 if (DECL_HAS_VALUE_EXPR_P (var))
273 return target_for_debug_bind (DECL_VALUE_EXPR (var));
274
275 if (DECL_IGNORED_P (var))
276 return NULL_TREE;
277
278 /* var-tracking only tracks registers. */
279 if (!is_gimple_reg_type (TREE_TYPE (var)))
280 return NULL_TREE;
281
282 return var;
283 }
284
285 /* Called via walk_tree, look for SSA_NAMEs that have already been
286 released. */
287
288 static tree
289 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
290 {
291 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
292
293 if (wi && wi->is_lhs)
294 return NULL_TREE;
295
296 if (TREE_CODE (*tp) == SSA_NAME)
297 {
298 if (SSA_NAME_IN_FREE_LIST (*tp))
299 return *tp;
300
301 *walk_subtrees = 0;
302 }
303 else if (IS_TYPE_OR_DECL_P (*tp))
304 *walk_subtrees = 0;
305
306 return NULL_TREE;
307 }
308
309 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
310 by other DEBUG stmts, and replace uses of the DEF with the
311 newly-created debug temp. */
312
313 void
314 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
315 {
316 imm_use_iterator imm_iter;
317 use_operand_p use_p;
318 gimple stmt;
319 gimple def_stmt = NULL;
320 int usecount = 0;
321 tree value = NULL;
322
323 if (!MAY_HAVE_DEBUG_STMTS)
324 return;
325
326 /* If this name has already been registered for replacement, do nothing
327 as anything that uses this name isn't in SSA form. */
328 if (name_registered_for_update_p (var))
329 return;
330
331 /* Check whether there are debug stmts that reference this variable and,
332 if there are, decide whether we should use a debug temp. */
333 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
334 {
335 stmt = USE_STMT (use_p);
336
337 if (!gimple_debug_bind_p (stmt))
338 continue;
339
340 if (usecount++)
341 break;
342
343 if (gimple_debug_bind_get_value (stmt) != var)
344 {
345 /* Count this as an additional use, so as to make sure we
346 use a temp unless VAR's definition has a SINGLE_RHS that
347 can be shared. */
348 usecount++;
349 break;
350 }
351 }
352
353 if (!usecount)
354 return;
355
356 if (gsi)
357 def_stmt = gsi_stmt (*gsi);
358 else
359 def_stmt = SSA_NAME_DEF_STMT (var);
360
361 /* If we didn't get an insertion point, and the stmt has already
362 been removed, we won't be able to insert the debug bind stmt, so
363 we'll have to drop debug information. */
364 if (gimple_code (def_stmt) == GIMPLE_PHI)
365 {
366 value = degenerate_phi_result (def_stmt);
367 if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
368 value = NULL;
369 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
370 to. */
371 else if (value == error_mark_node)
372 value = NULL;
373 }
374 else if (is_gimple_assign (def_stmt))
375 {
376 bool no_value = false;
377
378 if (!dom_info_available_p (CDI_DOMINATORS))
379 {
380 struct walk_stmt_info wi;
381
382 memset (&wi, 0, sizeof (wi));
383
384 /* When removing blocks without following reverse dominance
385 order, we may sometimes encounter SSA_NAMEs that have
386 already been released, referenced in other SSA_DEFs that
387 we're about to release. Consider:
388
389 <bb X>:
390 v_1 = foo;
391
392 <bb Y>:
393 w_2 = v_1 + bar;
394 # DEBUG w => w_2
395
396 If we deleted BB X first, propagating the value of w_2
 397              won't do us any good.  It's too late to recover the
398 original definition of v_1: when it was deleted, it was
399 only referenced in other DEFs, it couldn't possibly know
400 it should have been retained, and propagating every
401 single DEF just in case it might have to be propagated
402 into a DEBUG STMT would probably be too wasteful.
403
404 When dominator information is not readily available, we
405 check for and accept some loss of debug information. But
406 if it is available, there's no excuse for us to remove
407 blocks in the wrong order, so we don't even check for
408 dead SSA NAMEs. SSA verification shall catch any
409 errors. */
410 if ((!gsi && !gimple_bb (def_stmt))
411 || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
412 no_value = true;
413 }
414
415 if (!no_value)
416 value = gimple_assign_rhs_to_tree (def_stmt);
417 }
418
419 if (value)
420 {
421 /* If there's a single use of VAR, and VAR is the entire debug
422 expression (usecount would have been incremented again
423 otherwise), and the definition involves only constants and
424 SSA names, then we can propagate VALUE into this single use,
425 avoiding the temp.
426
427 We can also avoid using a temp if VALUE can be shared and
428 propagated into all uses, without generating expressions that
429 wouldn't be valid gimple RHSs.
430
431 Other cases that would require unsharing or non-gimple RHSs
432 are deferred to a debug temp, although we could avoid temps
433 at the expense of duplication of expressions. */
434
435 if (CONSTANT_CLASS_P (value)
436 || gimple_code (def_stmt) == GIMPLE_PHI
437 || (usecount == 1
438 && (!gimple_assign_single_p (def_stmt)
439 || is_gimple_min_invariant (value)))
440 || is_gimple_reg (value))
441 ;
442 else
443 {
444 gimple def_temp;
445 tree vexpr = make_node (DEBUG_EXPR_DECL);
446
447 def_temp = gimple_build_debug_bind (vexpr,
448 unshare_expr (value),
449 def_stmt);
450
451 DECL_ARTIFICIAL (vexpr) = 1;
452 TREE_TYPE (vexpr) = TREE_TYPE (value);
453 if (DECL_P (value))
454 DECL_MODE (vexpr) = DECL_MODE (value);
455 else
456 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));
457
458 if (gsi)
459 gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
460 else
461 {
462 gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
463 gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
464 }
465
466 value = vexpr;
467 }
468 }
469
470 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
471 {
472 if (!gimple_debug_bind_p (stmt))
473 continue;
474
475 if (value)
476 {
477 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
478 /* unshare_expr is not needed here. vexpr is either a
479 SINGLE_RHS, that can be safely shared, some other RHS
480 that was unshared when we found it had a single debug
481 use, or a DEBUG_EXPR_DECL, that can be safely
482 shared. */
483 SET_USE (use_p, unshare_expr (value));
 484           /* If we didn't replace uses with a debug decl, fold the
485 resulting expression. Otherwise we end up with invalid IL. */
486 if (TREE_CODE (value) != DEBUG_EXPR_DECL)
487 {
488 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
489 fold_stmt_inplace (&gsi);
490 }
491 }
492 else
493 gimple_debug_bind_reset_value (stmt);
494
495 update_stmt (stmt);
496 }
497 }
498
499
500 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
501 other DEBUG stmts, and replace uses of the DEF with the
502 newly-created debug temp. */
503
504 void
505 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
506 {
507 gimple stmt;
508 ssa_op_iter op_iter;
509 def_operand_p def_p;
510
511 if (!MAY_HAVE_DEBUG_STMTS)
512 return;
513
514 stmt = gsi_stmt (*gsi);
515
516 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
517 {
518 tree var = DEF_FROM_PTR (def_p);
519
520 if (TREE_CODE (var) != SSA_NAME)
521 continue;
522
523 insert_debug_temp_for_var_def (gsi, var);
524 }
525 }
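
/* Usage sketch (illustrative, not part of GCC): before a pass rewrites or
   deletes a statement whose SSA defs are referenced by debug binds, it
   can do

     gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     insert_debug_temps_for_defs (&gsi);

   so that those debug binds are redirected to a debug temp (or to the
   propagated value) before the defs disappear.  The variable names are
   hypothetical; gsi_for_stmt is the real iterator API used elsewhere in
   this file.  */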
526
527 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
528
529 void
530 reset_debug_uses (gimple stmt)
531 {
532 ssa_op_iter op_iter;
533 def_operand_p def_p;
534 imm_use_iterator imm_iter;
535 gimple use_stmt;
536
537 if (!MAY_HAVE_DEBUG_STMTS)
538 return;
539
540 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
541 {
542 tree var = DEF_FROM_PTR (def_p);
543
544 if (TREE_CODE (var) != SSA_NAME)
545 continue;
546
547 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
548 {
549 if (!gimple_debug_bind_p (use_stmt))
550 continue;
551
552 gimple_debug_bind_reset_value (use_stmt);
553 update_stmt (use_stmt);
554 }
555 }
556 }
557
558 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
559 dominated stmts before their dominators, so that release_ssa_defs
560 stands a chance of propagating DEFs into debug bind stmts. */
561
562 void
563 release_defs_bitset (bitmap toremove)
564 {
565 unsigned j;
566 bitmap_iterator bi;
567
 568   /* Performing a topological sort is probably overkill; this will
569 most likely run in slightly superlinear time, rather than the
570 pathological quadratic worst case. */
571 while (!bitmap_empty_p (toremove))
572 EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
573 {
574 bool remove_now = true;
575 tree var = ssa_name (j);
576 gimple stmt;
577 imm_use_iterator uit;
578
579 FOR_EACH_IMM_USE_STMT (stmt, uit, var)
580 {
581 ssa_op_iter dit;
582 def_operand_p def_p;
583
584 /* We can't propagate PHI nodes into debug stmts. */
585 if (gimple_code (stmt) == GIMPLE_PHI
586 || is_gimple_debug (stmt))
587 continue;
588
589 /* If we find another definition to remove that uses
590 the one we're looking at, defer the removal of this
591 one, so that it can be propagated into debug stmts
592 after the other is. */
593 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
594 {
595 tree odef = DEF_FROM_PTR (def_p);
596
597 if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
598 {
599 remove_now = false;
600 break;
601 }
602 }
603
604 if (!remove_now)
605 BREAK_FROM_IMM_USE_STMT (uit);
606 }
607
608 if (remove_now)
609 {
610 gimple def = SSA_NAME_DEF_STMT (var);
611 gimple_stmt_iterator gsi = gsi_for_stmt (def);
612
613 if (gimple_code (def) == GIMPLE_PHI)
614 remove_phi_node (&gsi, true);
615 else
616 {
617 gsi_remove (&gsi, true);
618 release_defs (def);
619 }
620
621 bitmap_clear_bit (toremove, j);
622 }
623 }
624 }
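
/* Usage sketch (illustrative, not part of GCC): a pass that has collected
   dead SSA versions can release all of their definitions in one call:

     bitmap toremove = BITMAP_ALLOC (NULL);
     bitmap_set_bit (toremove, SSA_NAME_VERSION (name1));
     bitmap_set_bit (toremove, SSA_NAME_VERSION (name2));
     release_defs_bitset (toremove);
     BITMAP_FREE (toremove);

   name1 and name2 are hypothetical SSA names; the bitmap macros are the
   ones already used in this file.  */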
625
626 /* Return true if SSA_NAME is malformed and mark it visited.
627
628 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
629 operand. */
630
631 static bool
632 verify_ssa_name (tree ssa_name, bool is_virtual)
633 {
634 if (TREE_CODE (ssa_name) != SSA_NAME)
635 {
636 error ("expected an SSA_NAME object");
637 return true;
638 }
639
640 if (SSA_NAME_IN_FREE_LIST (ssa_name))
641 {
642 error ("found an SSA_NAME that had been released into the free pool");
643 return true;
644 }
645
646 if (SSA_NAME_VAR (ssa_name) != NULL_TREE
647 && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
648 {
649 error ("type mismatch between an SSA_NAME and its symbol");
650 return true;
651 }
652
653 if (is_virtual && !virtual_operand_p (ssa_name))
654 {
655 error ("found a virtual definition for a GIMPLE register");
656 return true;
657 }
658
659 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
660 {
661 error ("virtual SSA name for non-VOP decl");
662 return true;
663 }
664
665 if (!is_virtual && virtual_operand_p (ssa_name))
666 {
667 error ("found a real definition for a non-register");
668 return true;
669 }
670
671 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
672 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
673 {
674 error ("found a default name with a non-empty defining statement");
675 return true;
676 }
677
678 return false;
679 }
680
681
682 /* Return true if the definition of SSA_NAME at block BB is malformed.
683
684 STMT is the statement where SSA_NAME is created.
685
686 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
687 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
688 it means that the block in that array slot contains the
689 definition of SSA_NAME.
690
691 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
692
693 static bool
694 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
695 gimple stmt, bool is_virtual)
696 {
697 if (verify_ssa_name (ssa_name, is_virtual))
698 goto err;
699
700 if (SSA_NAME_VAR (ssa_name)
701 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
702 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
703 {
704 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
705 goto err;
706 }
707
708 if (definition_block[SSA_NAME_VERSION (ssa_name)])
709 {
710 error ("SSA_NAME created in two different blocks %i and %i",
711 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
712 goto err;
713 }
714
715 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
716
717 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
718 {
719 error ("SSA_NAME_DEF_STMT is wrong");
720 fprintf (stderr, "Expected definition statement:\n");
721 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
722 fprintf (stderr, "\nActual definition statement:\n");
723 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
724 goto err;
725 }
726
727 return false;
728
729 err:
730 fprintf (stderr, "while verifying SSA_NAME ");
731 print_generic_expr (stderr, ssa_name, 0);
732 fprintf (stderr, " in statement\n");
733 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
734
735 return true;
736 }
737
738
739 /* Return true if the use of SSA_NAME at statement STMT in block BB is
740 malformed.
741
742 DEF_BB is the block where SSA_NAME was found to be created.
743
744 IDOM contains immediate dominator information for the flowgraph.
745
746 CHECK_ABNORMAL is true if the caller wants to check whether this use
747 is flowing through an abnormal edge (only used when checking PHI
748 arguments).
749
750 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
751 that are defined before STMT in basic block BB. */
752
753 static bool
754 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
755 gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
756 {
757 bool err = false;
758 tree ssa_name = USE_FROM_PTR (use_p);
759
760 if (!TREE_VISITED (ssa_name))
761 if (verify_imm_links (stderr, ssa_name))
762 err = true;
763
764 TREE_VISITED (ssa_name) = 1;
765
766 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
767 && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
768 ; /* Default definitions have empty statements. Nothing to do. */
769 else if (!def_bb)
770 {
771 error ("missing definition");
772 err = true;
773 }
774 else if (bb != def_bb
775 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
776 {
777 error ("definition in block %i does not dominate use in block %i",
778 def_bb->index, bb->index);
779 err = true;
780 }
781 else if (bb == def_bb
782 && names_defined_in_bb != NULL
783 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
784 {
785 error ("definition in block %i follows the use", def_bb->index);
786 err = true;
787 }
788
789 if (check_abnormal
790 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
791 {
792 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
793 err = true;
794 }
795
 796   /* Make sure the use is in the correct immediate-use list by checking
 797      that the previous element refers to the same SSA name.  */
798 if (use_p->prev == NULL)
799 {
800 error ("no immediate_use list");
801 err = true;
802 }
803 else
804 {
805 tree listvar;
806 if (use_p->prev->use == NULL)
807 listvar = use_p->prev->loc.ssa_name;
808 else
809 listvar = USE_FROM_PTR (use_p->prev);
810 if (listvar != ssa_name)
811 {
812 error ("wrong immediate use list");
813 err = true;
814 }
815 }
816
817 if (err)
818 {
819 fprintf (stderr, "for SSA_NAME: ");
820 print_generic_expr (stderr, ssa_name, TDF_VOPS);
821 fprintf (stderr, " in statement:\n");
822 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
823 }
824
825 return err;
826 }
827
828
829 /* Return true if any of the arguments for PHI node PHI at block BB is
830 malformed.
831
832 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
833 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
834 it means that the block in that array slot contains the
835 definition of SSA_NAME. */
836
837 static bool
838 verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
839 {
840 edge e;
841 bool err = false;
842 size_t i, phi_num_args = gimple_phi_num_args (phi);
843
844 if (EDGE_COUNT (bb->preds) != phi_num_args)
845 {
846 error ("incoming edge count does not match number of PHI arguments");
847 err = true;
848 goto error;
849 }
850
851 for (i = 0; i < phi_num_args; i++)
852 {
853 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
854 tree op = USE_FROM_PTR (op_p);
855
856 e = EDGE_PRED (bb, i);
857
858 if (op == NULL_TREE)
859 {
860 error ("PHI argument is missing for edge %d->%d",
861 e->src->index,
862 e->dest->index);
863 err = true;
864 goto error;
865 }
866
867 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
868 {
869 error ("PHI argument is not SSA_NAME, or invariant");
870 err = true;
871 }
872
873 if (TREE_CODE (op) == SSA_NAME)
874 {
875 err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
876 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
877 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
878 }
879
880 if (TREE_CODE (op) == ADDR_EXPR)
881 {
882 tree base = TREE_OPERAND (op, 0);
883 while (handled_component_p (base))
884 base = TREE_OPERAND (base, 0);
885 if ((TREE_CODE (base) == VAR_DECL
886 || TREE_CODE (base) == PARM_DECL
887 || TREE_CODE (base) == RESULT_DECL)
888 && !TREE_ADDRESSABLE (base))
889 {
890 error ("address taken, but ADDRESSABLE bit not set");
891 err = true;
892 }
893 }
894
895 if (e->dest != bb)
896 {
897 error ("wrong edge %d->%d for PHI argument",
898 e->src->index, e->dest->index);
899 err = true;
900 }
901
902 if (err)
903 {
904 fprintf (stderr, "PHI argument\n");
905 print_generic_stmt (stderr, op, TDF_VOPS);
906 goto error;
907 }
908 }
909
910 error:
911 if (err)
912 {
913 fprintf (stderr, "for PHI node\n");
914 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
915 }
916
917
918 return err;
919 }
920
921
922 /* Verify common invariants in the SSA web.
923 TODO: verify the variable annotations. */
924
925 DEBUG_FUNCTION void
926 verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
927 {
928 size_t i;
929 basic_block bb;
930 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
931 ssa_op_iter iter;
932 tree op;
933 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
934 bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
935
936 gcc_assert (!need_ssa_update_p (cfun));
937
938 timevar_push (TV_TREE_SSA_VERIFY);
939
940 /* Keep track of SSA names present in the IL. */
941 for (i = 1; i < num_ssa_names; i++)
942 {
943 tree name = ssa_name (i);
944 if (name)
945 {
946 gimple stmt;
947 TREE_VISITED (name) = 0;
948
949 verify_ssa_name (name, virtual_operand_p (name));
950
951 stmt = SSA_NAME_DEF_STMT (name);
952 if (!gimple_nop_p (stmt))
953 {
954 basic_block bb = gimple_bb (stmt);
955 if (verify_def (bb, definition_block,
956 name, stmt, virtual_operand_p (name)))
957 goto err;
958 }
959 }
960 }
961
962 calculate_dominance_info (CDI_DOMINATORS);
963
964 /* Now verify all the uses and make sure they agree with the definitions
965 found in the previous pass. */
966 FOR_EACH_BB_FN (bb, cfun)
967 {
968 edge e;
969 gimple phi;
970 edge_iterator ei;
971 gimple_stmt_iterator gsi;
972
973 /* Make sure that all edges have a clear 'aux' field. */
974 FOR_EACH_EDGE (e, ei, bb->preds)
975 {
976 if (e->aux)
977 {
978 error ("AUX pointer initialized for edge %d->%d", e->src->index,
979 e->dest->index);
980 goto err;
981 }
982 }
983
984 /* Verify the arguments for every PHI node in the block. */
985 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
986 {
987 phi = gsi_stmt (gsi);
988 if (verify_phi_args (phi, bb, definition_block))
989 goto err;
990
991 bitmap_set_bit (names_defined_in_bb,
992 SSA_NAME_VERSION (gimple_phi_result (phi)));
993 }
994
995 /* Now verify all the uses and vuses in every statement of the block. */
996 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
997 {
998 gimple stmt = gsi_stmt (gsi);
999 use_operand_p use_p;
1000
1001 if (check_modified_stmt && gimple_modified_p (stmt))
1002 {
1003 error ("stmt (%p) marked modified after optimization pass: ",
1004 (void *)stmt);
1005 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1006 goto err;
1007 }
1008
1009 if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
1010 {
1011 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1012 goto err;
1013 }
1014
1015 if (gimple_debug_bind_p (stmt)
1016 && !gimple_debug_bind_has_value_p (stmt))
1017 continue;
1018
1019 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
1020 {
1021 op = USE_FROM_PTR (use_p);
1022 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1023 use_p, stmt, false, names_defined_in_bb))
1024 goto err;
1025 }
1026
1027 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1028 {
1029 if (SSA_NAME_DEF_STMT (op) != stmt)
1030 {
1031 error ("SSA_NAME_DEF_STMT is wrong");
1032 fprintf (stderr, "Expected definition statement:\n");
1033 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1034 fprintf (stderr, "\nActual definition statement:\n");
1035 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1036 4, TDF_VOPS);
1037 goto err;
1038 }
1039 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1040 }
1041 }
1042
1043 bitmap_clear (names_defined_in_bb);
1044 }
1045
1046 free (definition_block);
1047
1048 /* Restore the dominance information to its prior known state, so
1049 that we do not perturb the compiler's subsequent behavior. */
1050 if (orig_dom_state == DOM_NONE)
1051 free_dominance_info (CDI_DOMINATORS);
1052 else
1053 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1054
1055 BITMAP_FREE (names_defined_in_bb);
1056 timevar_pop (TV_TREE_SSA_VERIFY);
1057 return;
1058
1059 err:
1060 internal_error ("verify_ssa failed");
1061 }
1062
1063
1064 /* Initialize global DFA and SSA structures. */
1065
1066 void
1067 init_tree_ssa (struct function *fn)
1068 {
1069 fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
1070 fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
1071 pt_solution_reset (&fn->gimple_df->escaped);
1072 init_ssanames (fn, 0);
1073 }
1074
1075 /* Do the actions required to initialize internal data structures used
1076 in tree-ssa optimization passes. */
1077
1078 static unsigned int
1079 execute_init_datastructures (void)
1080 {
1081 /* Allocate hash tables, arrays and other structures. */
1082 gcc_assert (!cfun->gimple_df);
1083 init_tree_ssa (cfun);
1084 return 0;
1085 }
1086
1087 namespace {
1088
1089 const pass_data pass_data_init_datastructures =
1090 {
1091 GIMPLE_PASS, /* type */
1092 "*init_datastructures", /* name */
1093 OPTGROUP_NONE, /* optinfo_flags */
1094 TV_NONE, /* tv_id */
1095 PROP_cfg, /* properties_required */
1096 0, /* properties_provided */
1097 0, /* properties_destroyed */
1098 0, /* todo_flags_start */
1099 0, /* todo_flags_finish */
1100 };
1101
1102 class pass_init_datastructures : public gimple_opt_pass
1103 {
1104 public:
1105 pass_init_datastructures (gcc::context *ctxt)
1106 : gimple_opt_pass (pass_data_init_datastructures, ctxt)
1107 {}
1108
1109 /* opt_pass methods: */
1110 virtual bool gate (function *fun)
1111 {
1112       /* Do nothing for functions that were already produced in SSA form.  */
1113 return !(fun->curr_properties & PROP_ssa);
1114 }
1115
1116 virtual unsigned int execute (function *)
1117 {
1118 return execute_init_datastructures ();
1119 }
1120
1121 }; // class pass_init_datastructures
1122
1123 } // anon namespace
1124
1125 gimple_opt_pass *
1126 make_pass_init_datastructures (gcc::context *ctxt)
1127 {
1128 return new pass_init_datastructures (ctxt);
1129 }
1130
1131 /* Deallocate memory associated with SSA data structures for the current function.  */
1132
1133 void
1134 delete_tree_ssa (void)
1135 {
1136 fini_ssanames ();
1137
1138 /* We no longer maintain the SSA operand cache at this point. */
1139 if (ssa_operands_active (cfun))
1140 fini_ssa_operands (cfun);
1141
1142 cfun->gimple_df->default_defs->empty ();
1143 cfun->gimple_df->default_defs = NULL;
1144 pt_solution_reset (&cfun->gimple_df->escaped);
1145 if (cfun->gimple_df->decls_to_pointers != NULL)
1146 delete cfun->gimple_df->decls_to_pointers;
1147 cfun->gimple_df->decls_to_pointers = NULL;
1148 cfun->gimple_df->modified_noreturn_calls = NULL;
1149 cfun->gimple_df = NULL;
1150
1151 /* We no longer need the edge variable maps. */
1152 redirect_edge_var_map_destroy ();
1153 }
1154
1155 /* Return true if EXPR is a useless type conversion, otherwise return
1156 false. */
1157
1158 bool
1159 tree_ssa_useless_type_conversion (tree expr)
1160 {
1161 /* If we have an assignment that merely uses a NOP_EXPR to change
1162 the top of the RHS to the type of the LHS and the type conversion
1163 is "safe", then strip away the type conversion so that we can
1164 enter LHS = RHS into the const_and_copies table. */
1165 if (CONVERT_EXPR_P (expr)
1166 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1167 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1168 return useless_type_conversion_p
1169 (TREE_TYPE (expr),
1170 TREE_TYPE (TREE_OPERAND (expr, 0)));
1171
1172 return false;
1173 }
1174
1175 /* Strip conversions from EXP according to
1176 tree_ssa_useless_type_conversion and return the resulting
1177 expression. */
1178
1179 tree
1180 tree_ssa_strip_useless_type_conversions (tree exp)
1181 {
1182 while (tree_ssa_useless_type_conversion (exp))
1183 exp = TREE_OPERAND (exp, 0);
1184 return exp;
1185 }
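
/* Example (illustrative, not part of GCC): a NOP_EXPR that converts an
   expression of type int to type int is useless, so

     tree conv = build1 (NOP_EXPR, integer_type_node, int_expr);
     tree_ssa_useless_type_conversion (conv);          -> true
     tree_ssa_strip_useless_type_conversions (conv);   -> int_expr

   int_expr is a hypothetical tree whose type is integer_type_node;
   build1 and NOP_EXPR are the real tree-building APIs.  */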
1186
1187
1188 /* Return true if T, an SSA_NAME, has an undefined value. */
1189
1190 bool
1191 ssa_undefined_value_p (tree t)
1192 {
1193 gimple def_stmt;
1194 tree var = SSA_NAME_VAR (t);
1195
1196 if (!var)
1197 ;
1198 /* Parameters get their initial value from the function entry. */
1199 else if (TREE_CODE (var) == PARM_DECL)
1200 return false;
1201   /* When returning by reference, the address of the return slot is actually
1202      a hidden parameter.  */
1203 else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1204 return false;
1205 /* Hard register variables get their initial value from the ether. */
1206 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1207 return false;
1208
1209 /* The value is undefined iff its definition statement is empty. */
1210 def_stmt = SSA_NAME_DEF_STMT (t);
1211 if (gimple_nop_p (def_stmt))
1212 return true;
1213
1214 /* Check if the complex was not only partially defined. */
1215 if (is_gimple_assign (def_stmt)
1216 && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1217 {
1218 tree rhs1, rhs2;
1219
1220 rhs1 = gimple_assign_rhs1 (def_stmt);
1221 rhs2 = gimple_assign_rhs2 (def_stmt);
1222 return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1223 || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1224 }
1225 return false;
1226 }
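
/* Example (illustrative, not part of GCC): in

     int foo (int p)
     {
       int x;
       return x + p;
     }

   the default definition x_1(D) of the uninitialized local X has an empty
   defining statement, so ssa_undefined_value_p returns true for it, while
   the default definition p_2(D) of the PARM_DECL P receives its value at
   function entry and yields false.  */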
1227
1228
1229 /* If necessary, rewrite the base of the reference tree *TP from
1230 a MEM_REF to a plain or converted symbol. */
1231
1232 static void
1233 maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
1234 {
1235 tree sym;
1236
1237 while (handled_component_p (*tp))
1238 tp = &TREE_OPERAND (*tp, 0);
1239 if (TREE_CODE (*tp) == MEM_REF
1240 && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
1241 && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1242 && DECL_P (sym)
1243 && !TREE_ADDRESSABLE (sym)
1244 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1245 {
1246 if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1247 && useless_type_conversion_p (TREE_TYPE (*tp),
1248 TREE_TYPE (TREE_TYPE (sym)))
1249 && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1250 TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1251 {
1252 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1253 TYPE_SIZE (TREE_TYPE (*tp)),
1254 int_const_binop (MULT_EXPR,
1255 bitsize_int (BITS_PER_UNIT),
1256 TREE_OPERAND (*tp, 1)));
1257 }
1258 else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1259 && useless_type_conversion_p (TREE_TYPE (*tp),
1260 TREE_TYPE (TREE_TYPE (sym))))
1261 {
1262 *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1263 ? REALPART_EXPR : IMAGPART_EXPR,
1264 TREE_TYPE (*tp), sym);
1265 }
1266 else if (integer_zerop (TREE_OPERAND (*tp, 1)))
1267 {
1268 if (!useless_type_conversion_p (TREE_TYPE (*tp),
1269 TREE_TYPE (sym)))
1270 *tp = build1 (VIEW_CONVERT_EXPR,
1271 TREE_TYPE (*tp), sym);
1272 else
1273 *tp = sym;
1274 }
1275 }
1276 }
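
/* Example (illustrative, not part of GCC): once a local "_Complex float c"
   loses TREE_ADDRESSABLE and is marked suitable for renaming, a load such
   as

     tmp_1 = MEM[(float *)&c];

   has its base rewritten here to

     tmp_1 = REALPART_EXPR <c>;

   (IMAGPART_EXPR for a non-zero offset), and a single-element access into
   a vector at an element-aligned offset becomes a BIT_FIELD_REF, so C can
   later be rewritten into SSA form.  */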
1277
1278 /* For a tree REF return its base if it is the base of a MEM_REF
1279 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1280
1281 static tree
1282 non_rewritable_mem_ref_base (tree ref)
1283 {
1284 tree base = ref;
1285
1286   /* A plain decl never needs to be reported as a non-rewritable base.  */
1287 if (DECL_P (ref))
1288 return NULL_TREE;
1289
1290 while (handled_component_p (base))
1291 base = TREE_OPERAND (base, 0);
1292
1293 /* But watch out for MEM_REFs we cannot lower to a
1294 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1295 if (TREE_CODE (base) == MEM_REF
1296 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1297 {
1298 tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1299 if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1300 || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
1301 && useless_type_conversion_p (TREE_TYPE (base),
1302 TREE_TYPE (TREE_TYPE (decl)))
1303 && wi::fits_uhwi_p (mem_ref_offset (base))
1304 && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1305 mem_ref_offset (base))
1306 && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1307 TYPE_SIZE_UNIT (TREE_TYPE (base))))
1308 return NULL_TREE;
1309 if (DECL_P (decl)
1310 && (!integer_zerop (TREE_OPERAND (base, 1))
1311 || (DECL_SIZE (decl)
1312 != TYPE_SIZE (TREE_TYPE (base)))
1313 || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
1314 return decl;
1315 }
1316
1317 return NULL_TREE;
1318 }
1319
1320 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1321    Otherwise return false.  */
1322
1323 static bool
1324 non_rewritable_lvalue_p (tree lhs)
1325 {
1326 /* A plain decl is always rewritable. */
1327 if (DECL_P (lhs))
1328 return false;
1329
1330 /* A decl that is wrapped inside a MEM-REF that covers
1331      it in full is also rewritable.
1332 ??? The following could be relaxed allowing component
1333 references that do not change the access size. */
1334 if (TREE_CODE (lhs) == MEM_REF
1335 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1336 && integer_zerop (TREE_OPERAND (lhs, 1)))
1337 {
1338 tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1339 if (DECL_P (decl)
1340 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1341 && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1342 return false;
1343 }
1344
1345 return true;
1346 }
1347
1348 /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
1349    mark the variable VAR for conversion into SSA, recording it in
1350    SUITABLE_FOR_RENAMING when updating stmts is required.  */
1351
1352 static void
1353 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
1354 bitmap suitable_for_renaming)
1355 {
1356   /* Global variables and result decls cannot be changed.  */
1357 if (is_global_var (var)
1358 || TREE_CODE (var) == RESULT_DECL
1359 || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1360 return;
1361
1362 if (TREE_ADDRESSABLE (var)
1363 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1364 a non-register. Otherwise we are confused and forget to
1365 add virtual operands for it. */
1366 && (!is_gimple_reg_type (TREE_TYPE (var))
1367 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1368 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1369 || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1370 {
1371 TREE_ADDRESSABLE (var) = 0;
1372 if (is_gimple_reg (var))
1373 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1374 if (dump_file)
1375 {
1376 fprintf (dump_file, "No longer having address taken: ");
1377 print_generic_expr (dump_file, var, 0);
1378 fprintf (dump_file, "\n");
1379 }
1380 }
1381
1382 if (!DECL_GIMPLE_REG_P (var)
1383 && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1384 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1385 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1386 && !TREE_THIS_VOLATILE (var)
1387 && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
1388 {
1389 DECL_GIMPLE_REG_P (var) = 1;
1390 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1391 if (dump_file)
1392 {
1393 fprintf (dump_file, "Now a gimple register: ");
1394 print_generic_expr (dump_file, var, 0);
1395 fprintf (dump_file, "\n");
1396 }
1397 }
1398 }
1399
1400 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
1401
1402 void
1403 execute_update_addresses_taken (void)
1404 {
1405 gimple_stmt_iterator gsi;
1406 basic_block bb;
1407 bitmap addresses_taken = BITMAP_ALLOC (NULL);
1408 bitmap not_reg_needs = BITMAP_ALLOC (NULL);
1409 bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
1410 tree var;
1411 unsigned i;
1412
1413 timevar_push (TV_ADDRESS_TAKEN);
1414
1415 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1416 the function body. */
1417 FOR_EACH_BB_FN (bb, cfun)
1418 {
1419 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1420 {
1421 gimple stmt = gsi_stmt (gsi);
1422 enum gimple_code code = gimple_code (stmt);
1423 tree decl;
1424
1425 /* Note all addresses taken by the stmt. */
1426 gimple_ior_addresses_taken (addresses_taken, stmt);
1427
1428           /* If we have a call or an assignment, see if the lhs contains
1429              a local decl that must not become a gimple register.  */
1430 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1431 {
1432 tree lhs = gimple_get_lhs (stmt);
1433 if (lhs
1434 && TREE_CODE (lhs) != SSA_NAME
1435 && non_rewritable_lvalue_p (lhs))
1436 {
1437 decl = get_base_address (lhs);
1438 if (DECL_P (decl))
1439 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1440 }
1441 }
1442
1443 if (gimple_assign_single_p (stmt))
1444 {
1445 tree rhs = gimple_assign_rhs1 (stmt);
1446 if ((decl = non_rewritable_mem_ref_base (rhs)))
1447 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1448 }
1449
1450 else if (code == GIMPLE_CALL)
1451 {
1452 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1453 {
1454 tree arg = gimple_call_arg (stmt, i);
1455 if ((decl = non_rewritable_mem_ref_base (arg)))
1456 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1457 }
1458 }
1459
1460 else if (code == GIMPLE_ASM)
1461 {
1462 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1463 {
1464 tree link = gimple_asm_output_op (stmt, i);
1465 tree lhs = TREE_VALUE (link);
1466 if (TREE_CODE (lhs) != SSA_NAME)
1467 {
1468 decl = get_base_address (lhs);
1469 if (DECL_P (decl)
1470 && (non_rewritable_lvalue_p (lhs)
1471 /* We cannot move required conversions from
1472 the lhs to the rhs in asm statements, so
1473 require we do not need any. */
1474 || !useless_type_conversion_p
1475 (TREE_TYPE (lhs), TREE_TYPE (decl))))
1476 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1477 }
1478 }
1479 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1480 {
1481 tree link = gimple_asm_input_op (stmt, i);
1482 if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1483 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1484 }
1485 }
1486 }
1487
1488 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1489 {
1490 size_t i;
1491 gimple phi = gsi_stmt (gsi);
1492
1493 for (i = 0; i < gimple_phi_num_args (phi); i++)
1494 {
1495 tree op = PHI_ARG_DEF (phi, i), var;
1496 if (TREE_CODE (op) == ADDR_EXPR
1497 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1498 && DECL_P (var))
1499 bitmap_set_bit (addresses_taken, DECL_UID (var));
1500 }
1501 }
1502 }
1503
1504 /* We cannot iterate over all referenced vars because that can contain
1505 unused vars from BLOCK trees, which causes code generation differences
1506 for -g vs. -g0. */
1507 for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1508 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1509 suitable_for_renaming);
1510
1511 FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1512 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1513 suitable_for_renaming);
1514
1515 /* Operand caches need to be recomputed for operands referencing the updated
1516 variables and operands need to be rewritten to expose bare symbols. */
1517 if (!bitmap_empty_p (suitable_for_renaming))
1518 {
1519 FOR_EACH_BB_FN (bb, cfun)
1520 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1521 {
1522 gimple stmt = gsi_stmt (gsi);
1523
1524             /* Re-write MEM_REFs of symbols we want to
1525                rewrite into SSA form.  */
1526 if (gimple_assign_single_p (stmt))
1527 {
1528 tree lhs = gimple_assign_lhs (stmt);
1529 tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1530 tree sym;
1531
1532 /* We shouldn't have any fancy wrapping of
1533 component-refs on the LHS, but look through
1534 VIEW_CONVERT_EXPRs as that is easy. */
1535 while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1536 lhs = TREE_OPERAND (lhs, 0);
1537 if (TREE_CODE (lhs) == MEM_REF
1538 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1539 && integer_zerop (TREE_OPERAND (lhs, 1))
1540 && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1541 && DECL_P (sym)
1542 && !TREE_ADDRESSABLE (sym)
1543 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1544 lhs = sym;
1545 else
1546 lhs = gimple_assign_lhs (stmt);
1547
1548 /* Rewrite the RHS and make sure the resulting assignment
1549 is validly typed. */
1550 maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1551 rhs = gimple_assign_rhs1 (stmt);
1552 if (gimple_assign_lhs (stmt) != lhs
1553 && !useless_type_conversion_p (TREE_TYPE (lhs),
1554 TREE_TYPE (rhs)))
1555 rhs = fold_build1 (VIEW_CONVERT_EXPR,
1556 TREE_TYPE (lhs), rhs);
1557
1558 if (gimple_assign_lhs (stmt) != lhs)
1559 gimple_assign_set_lhs (stmt, lhs);
1560
1561 /* For var ={v} {CLOBBER}; where var lost
1562 TREE_ADDRESSABLE just remove the stmt. */
1563 if (DECL_P (lhs)
1564 && TREE_CLOBBER_P (rhs)
1565 && bitmap_bit_p (suitable_for_renaming, DECL_UID (lhs)))
1566 {
1567 unlink_stmt_vdef (stmt);
1568 gsi_remove (&gsi, true);
1569 release_defs (stmt);
1570 continue;
1571 }
1572
1573 if (gimple_assign_rhs1 (stmt) != rhs)
1574 {
1575 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1576 gimple_assign_set_rhs_from_tree (&gsi, rhs);
1577 }
1578 }
1579
1580 else if (gimple_code (stmt) == GIMPLE_CALL)
1581 {
1582 unsigned i;
1583 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1584 {
1585 tree *argp = gimple_call_arg_ptr (stmt, i);
1586 maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
1587 }
1588 }
1589
1590 else if (gimple_code (stmt) == GIMPLE_ASM)
1591 {
1592 unsigned i;
1593 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1594 {
1595 tree link = gimple_asm_output_op (stmt, i);
1596 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1597 suitable_for_renaming);
1598 }
1599 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1600 {
1601 tree link = gimple_asm_input_op (stmt, i);
1602 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1603 suitable_for_renaming);
1604 }
1605 }
1606
1607 else if (gimple_debug_bind_p (stmt)
1608 && gimple_debug_bind_has_value_p (stmt))
1609 {
1610 tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
1611 tree decl;
1612 maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
1613 decl = non_rewritable_mem_ref_base (*valuep);
1614 if (decl
1615 && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
1616 gimple_debug_bind_reset_value (stmt);
1617 }
1618
1619 if (gimple_references_memory_p (stmt)
1620 || is_gimple_debug (stmt))
1621 update_stmt (stmt);
1622
1623 gsi_next (&gsi);
1624 }
1625
1626       /* Update SSA form here; we are also called outside of a pass.  */
1627 if (number_of_loops (cfun) > 1
1628 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
1629 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
1630 else
1631 update_ssa (TODO_update_ssa);
1632 }
1633
1634 BITMAP_FREE (not_reg_needs);
1635 BITMAP_FREE (addresses_taken);
1636 BITMAP_FREE (suitable_for_renaming);
1637 timevar_pop (TV_ADDRESS_TAKEN);
1638 }
1639
1640 namespace {
1641
1642 const pass_data pass_data_update_address_taken =
1643 {
1644 GIMPLE_PASS, /* type */
1645 "addressables", /* name */
1646 OPTGROUP_NONE, /* optinfo_flags */
1647 TV_ADDRESS_TAKEN, /* tv_id */
1648 PROP_ssa, /* properties_required */
1649 0, /* properties_provided */
1650 0, /* properties_destroyed */
1651 0, /* todo_flags_start */
1652 TODO_update_address_taken, /* todo_flags_finish */
1653 };
1654
1655 class pass_update_address_taken : public gimple_opt_pass
1656 {
1657 public:
1658 pass_update_address_taken (gcc::context *ctxt)
1659 : gimple_opt_pass (pass_data_update_address_taken, ctxt)
1660 {}
1661
1662 /* opt_pass methods: */
1663
1664 }; // class pass_update_address_taken
1665
1666 } // anon namespace
1667
1668 gimple_opt_pass *
1669 make_pass_update_address_taken (gcc::context *ctxt)
1670 {
1671 return new pass_update_address_taken (ctxt);
1672 }