1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "cfghooks.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "hard-reg-set.h"
28 #include "ssa.h"
29 #include "alias.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "flags.h"
33 #include "tm_p.h"
34 #include "target.h"
35 #include "langhooks.h"
36 #include "gimple-pretty-print.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimple-walk.h"
42 #include "tree-ssa-loop-manip.h"
43 #include "tree-into-ssa.h"
44 #include "tree-ssa.h"
45 #include "tree-inline.h"
46 #include "tree-pass.h"
47 #include "diagnostic-core.h"
48 #include "cfgloop.h"
49 #include "cfgexpand.h"
50
51 /* Pointer map of variable mappings, keyed by edge. */
52 static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
53
54
55 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
56
57 void
58 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
59 {
60 edge_var_map new_node;
61
62 if (edge_var_maps == NULL)
63 edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
64
65 auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
66 new_node.def = def;
67 new_node.result = result;
68 new_node.locus = locus;
69
70 slot.safe_push (new_node);
71 }
72
73
74 /* Clear the var mappings in edge E. */
75
76 void
77 redirect_edge_var_map_clear (edge e)
78 {
79 if (!edge_var_maps)
80 return;
81
82 auto_vec<edge_var_map> *head = edge_var_maps->get (e);
83
84 if (head)
85 head->release ();
86 }
87
88
89 /* Duplicate the redirected var mappings from OLDE onto NEWE.
90
91 This assumes a hash_map can have multiple edges mapping to the same
92 var_map (many to one mapping), since we don't remove the previous mappings.
93 */
94
95 void
96 redirect_edge_var_map_dup (edge newe, edge olde)
97 {
98 if (!edge_var_maps)
99 return;
100
101 auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
102 auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
103 if (!old_head)
104 return;
105
106 new_head->safe_splice (*old_head);
107 }
108
109
110 /* Return the variable mappings for a given edge. If there is none, return
111 NULL. */
112
113 vec<edge_var_map> *
114 redirect_edge_var_map_vector (edge e)
115 {
116 /* Hey, what kind of idiot would... you'd be surprised. */
117 if (!edge_var_maps)
118 return NULL;
119
120 auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
121 if (!slot)
122 return NULL;
123
124 return slot;
125 }
126
127 /* Clear the edge variable mappings. */
128
129 void
130 redirect_edge_var_map_destroy (void)
131 {
132 delete edge_var_maps;
133 edge_var_maps = NULL;
134 }
135
136
137 /* Remove the corresponding arguments from the PHI nodes in E's
138 destination block and redirect it to DEST. Return redirected edge.
139 The list of removed arguments is stored in a vector accessed
140 through edge_var_maps. */
141
142 edge
143 ssa_redirect_edge (edge e, basic_block dest)
144 {
145 gphi_iterator gsi;
146 gphi *phi;
147
148 redirect_edge_var_map_clear (e);
149
150 /* Remove the appropriate PHI arguments in E's destination block. */
151 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
152 {
153 tree def;
154 source_location locus;
155
156 phi = gsi.phi ();
157 def = gimple_phi_arg_def (phi, e->dest_idx);
158 locus = gimple_phi_arg_location (phi, e->dest_idx);
159
160 if (def == NULL_TREE)
161 continue;
162
163 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
164 }
165
166 e = redirect_edge_succ_nodup (e, dest);
167
168 return e;
169 }
170
171
172 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
173 E->dest. */
174
175 void
176 flush_pending_stmts (edge e)
177 {
178 gphi *phi;
179 edge_var_map *vm;
180 int i;
181 gphi_iterator gsi;
182
183 vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
184 if (!v)
185 return;
186
187 for (gsi = gsi_start_phis (e->dest), i = 0;
188 !gsi_end_p (gsi) && v->iterate (i, &vm);
189 gsi_next (&gsi), i++)
190 {
191 tree def;
192
193 phi = gsi.phi ();
194 def = redirect_edge_var_map_def (vm);
195 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
196 }
197
198 redirect_edge_var_map_clear (e);
199 }
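/* A minimal usage sketch (hypothetical caller; E and NEW_DEST are made-up
   names): ssa_redirect_edge records the PHI arguments removed from the old
   destination in edge_var_maps, and flush_pending_stmts replays them as PHI
   arguments in the new destination block.

     edge e2 = ssa_redirect_edge (e, new_dest);
     flush_pending_stmts (e2);
*/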
200
201 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
202 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
203 expression with a different value.
204
205 This will update any annotations (say debug bind stmts) referring
206 to the original LHS, so that they use the RHS instead. This is
207 done even if NLHS and LHS are the same, for it is understood that
208 the RHS will be modified afterwards, and NLHS will not be assigned
209 an equivalent value.
210
211 Adjusting any non-annotation uses of the LHS, if needed, is a
212 responsibility of the caller.
213
214 The effect of this call should be pretty much the same as that of
215 inserting a copy of STMT before STMT, and then removing the
216 original stmt, at which time gsi_remove() would have updated
217 annotations, but using this function saves all the inserting,
218 copying and removing. */
219
220 void
221 gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
222 {
223 if (MAY_HAVE_DEBUG_STMTS)
224 {
225 tree lhs = gimple_get_lhs (stmt);
226
227 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
228
229 insert_debug_temp_for_var_def (NULL, lhs);
230 }
231
232 gimple_set_lhs (stmt, nlhs);
233 }
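/* A minimal sketch of the intended call pattern (STMT, NEW_LHS and NEW_RHS
   are hypothetical): the LHS is replaced first, so debug binds referring to
   the old LHS are rescued before the RHS changes value.

     gimple_replace_ssa_lhs (stmt, new_lhs);
     gimple_assign_set_rhs1 (stmt, new_rhs);
     update_stmt (stmt);

   This assumes STMT is a GIMPLE_ASSIGN whose RHS is about to be given a
   different value.  */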
234
235
236 /* Given a tree for an expression for which we might want to emit
237 locations or values in debug information (generally a variable, but
238 we might deal with other kinds of trees in the future), return the
239 tree that should be used as the variable of a DEBUG_BIND STMT or
240 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
241
242 tree
243 target_for_debug_bind (tree var)
244 {
245 if (!MAY_HAVE_DEBUG_STMTS)
246 return NULL_TREE;
247
248 if (TREE_CODE (var) == SSA_NAME)
249 {
250 var = SSA_NAME_VAR (var);
251 if (var == NULL_TREE)
252 return NULL_TREE;
253 }
254
255 if ((TREE_CODE (var) != VAR_DECL
256 || VAR_DECL_IS_VIRTUAL_OPERAND (var))
257 && TREE_CODE (var) != PARM_DECL)
258 return NULL_TREE;
259
260 if (DECL_HAS_VALUE_EXPR_P (var))
261 return target_for_debug_bind (DECL_VALUE_EXPR (var));
262
263 if (DECL_IGNORED_P (var))
264 return NULL_TREE;
265
266 /* var-tracking only tracks registers. */
267 if (!is_gimple_reg_type (TREE_TYPE (var)))
268 return NULL_TREE;
269
270 return var;
271 }
272
273 /* Called via walk_tree, look for SSA_NAMEs that have already been
274 released. */
275
276 static tree
277 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
278 {
279 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
280
281 if (wi && wi->is_lhs)
282 return NULL_TREE;
283
284 if (TREE_CODE (*tp) == SSA_NAME)
285 {
286 if (SSA_NAME_IN_FREE_LIST (*tp))
287 return *tp;
288
289 *walk_subtrees = 0;
290 }
291 else if (IS_TYPE_OR_DECL_P (*tp))
292 *walk_subtrees = 0;
293
294 return NULL_TREE;
295 }
296
297 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
298 by other DEBUG stmts, and replace uses of the DEF with the
299 newly-created debug temp. */
300
301 void
302 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
303 {
304 imm_use_iterator imm_iter;
305 use_operand_p use_p;
306 gimple *stmt;
307 gimple *def_stmt = NULL;
308 int usecount = 0;
309 tree value = NULL;
310
311 if (!MAY_HAVE_DEBUG_STMTS)
312 return;
313
314 /* If this name has already been registered for replacement, do nothing
315 as anything that uses this name isn't in SSA form. */
316 if (name_registered_for_update_p (var))
317 return;
318
319 /* Check whether there are debug stmts that reference this variable and,
320 if there are, decide whether we should use a debug temp. */
321 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
322 {
323 stmt = USE_STMT (use_p);
324
325 if (!gimple_debug_bind_p (stmt))
326 continue;
327
328 if (usecount++)
329 break;
330
331 if (gimple_debug_bind_get_value (stmt) != var)
332 {
333 /* Count this as an additional use, so as to make sure we
334 use a temp unless VAR's definition has a SINGLE_RHS that
335 can be shared. */
336 usecount++;
337 break;
338 }
339 }
340
341 if (!usecount)
342 return;
343
344 if (gsi)
345 def_stmt = gsi_stmt (*gsi);
346 else
347 def_stmt = SSA_NAME_DEF_STMT (var);
348
349 /* If we didn't get an insertion point, and the stmt has already
350 been removed, we won't be able to insert the debug bind stmt, so
351 we'll have to drop debug information. */
352 if (gimple_code (def_stmt) == GIMPLE_PHI)
353 {
354 value = degenerate_phi_result (as_a <gphi *> (def_stmt));
355 if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
356 value = NULL;
357 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
358 to. */
359 else if (value == error_mark_node)
360 value = NULL;
361 }
362 else if (is_gimple_assign (def_stmt))
363 {
364 bool no_value = false;
365
366 if (!dom_info_available_p (CDI_DOMINATORS))
367 {
368 struct walk_stmt_info wi;
369
370 memset (&wi, 0, sizeof (wi));
371
372 /* When removing blocks without following reverse dominance
373 order, we may sometimes encounter SSA_NAMEs that have
374 already been released, referenced in other SSA_DEFs that
375 we're about to release. Consider:
376
377 <bb X>:
378 v_1 = foo;
379
380 <bb Y>:
381 w_2 = v_1 + bar;
382 # DEBUG w => w_2
383
384 If we deleted BB X first, propagating the value of w_2
385 won't do us any good. It's too late to recover their
386 original definition of v_1: when it was deleted, it was
387 only referenced in other DEFs, it couldn't possibly know
388 it should have been retained, and propagating every
389 single DEF just in case it might have to be propagated
390 into a DEBUG STMT would probably be too wasteful.
391
392 When dominator information is not readily available, we
393 check for and accept some loss of debug information. But
394 if it is available, there's no excuse for us to remove
395 blocks in the wrong order, so we don't even check for
396 dead SSA NAMEs. SSA verification shall catch any
397 errors. */
398 if ((!gsi && !gimple_bb (def_stmt))
399 || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
400 no_value = true;
401 }
402
403 if (!no_value)
404 value = gimple_assign_rhs_to_tree (def_stmt);
405 }
406
407 if (value)
408 {
409 /* If there's a single use of VAR, and VAR is the entire debug
410 expression (usecount would have been incremented again
411 otherwise), and the definition involves only constants and
412 SSA names, then we can propagate VALUE into this single use,
413 avoiding the temp.
414
415 We can also avoid using a temp if VALUE can be shared and
416 propagated into all uses, without generating expressions that
417 wouldn't be valid gimple RHSs.
418
419 Other cases that would require unsharing or non-gimple RHSs
420 are deferred to a debug temp, although we could avoid temps
421 at the expense of duplication of expressions. */
422
423 if (CONSTANT_CLASS_P (value)
424 || gimple_code (def_stmt) == GIMPLE_PHI
425 || (usecount == 1
426 && (!gimple_assign_single_p (def_stmt)
427 || is_gimple_min_invariant (value)))
428 || is_gimple_reg (value))
429 ;
430 else
431 {
432 gdebug *def_temp;
433 tree vexpr = make_node (DEBUG_EXPR_DECL);
434
435 def_temp = gimple_build_debug_bind (vexpr,
436 unshare_expr (value),
437 def_stmt);
438
439 DECL_ARTIFICIAL (vexpr) = 1;
440 TREE_TYPE (vexpr) = TREE_TYPE (value);
441 if (DECL_P (value))
442 DECL_MODE (vexpr) = DECL_MODE (value);
443 else
444 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));
445
446 if (gsi)
447 gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
448 else
449 {
450 gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
451 gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
452 }
453
454 value = vexpr;
455 }
456 }
457
458 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
459 {
460 if (!gimple_debug_bind_p (stmt))
461 continue;
462
463 if (value)
464 {
465 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
466 /* unshare_expr is not needed here. vexpr is either a
467 SINGLE_RHS, that can be safely shared, some other RHS
468 that was unshared when we found it had a single debug
469 use, or a DEBUG_EXPR_DECL, that can be safely
470 shared. */
471 SET_USE (use_p, unshare_expr (value));
472 /* If we didn't replace uses with a debug decl fold the
473 resulting expression. Otherwise we end up with invalid IL. */
474 if (TREE_CODE (value) != DEBUG_EXPR_DECL)
475 {
476 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
477 fold_stmt_inplace (&gsi);
478 }
479 }
480 else
481 gimple_debug_bind_reset_value (stmt);
482
483 update_stmt (stmt);
484 }
485 }
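/* For illustration (a hypothetical GIMPLE fragment), given

     x_3 = a_1 + b_2;
     # DEBUG x => x_3
     # DEBUG y => x_3 + 1

   calling this function for x_3 with an iterator at its definition would
   emit a debug temp and rewrite the binds roughly as

     # DEBUG D#1 => a_1 + b_2
     x_3 = a_1 + b_2;
     # DEBUG x => D#1
     # DEBUG y => D#1 + 1

   after which x_3's definition can be removed without losing the debug
   locations.  */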
486
487
488 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
489 other DEBUG stmts, and replace uses of the DEF with the
490 newly-created debug temp. */
491
492 void
493 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
494 {
495 gimple *stmt;
496 ssa_op_iter op_iter;
497 def_operand_p def_p;
498
499 if (!MAY_HAVE_DEBUG_STMTS)
500 return;
501
502 stmt = gsi_stmt (*gsi);
503
504 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
505 {
506 tree var = DEF_FROM_PTR (def_p);
507
508 if (TREE_CODE (var) != SSA_NAME)
509 continue;
510
511 insert_debug_temp_for_var_def (gsi, var);
512 }
513 }
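/* A minimal sketch of a typical cleanup sequence (DEAD_STMT is a
   hypothetical statement known to be dead): debug temps are emitted while
   an insertion point still exists, then the statement and its defs are
   released.

     gimple_stmt_iterator gsi = gsi_for_stmt (dead_stmt);
     insert_debug_temps_for_defs (&gsi);
     gsi_remove (&gsi, true);
     release_defs (dead_stmt);
*/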
514
515 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
516
517 void
518 reset_debug_uses (gimple *stmt)
519 {
520 ssa_op_iter op_iter;
521 def_operand_p def_p;
522 imm_use_iterator imm_iter;
523 gimple *use_stmt;
524
525 if (!MAY_HAVE_DEBUG_STMTS)
526 return;
527
528 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
529 {
530 tree var = DEF_FROM_PTR (def_p);
531
532 if (TREE_CODE (var) != SSA_NAME)
533 continue;
534
535 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
536 {
537 if (!gimple_debug_bind_p (use_stmt))
538 continue;
539
540 gimple_debug_bind_reset_value (use_stmt);
541 update_stmt (use_stmt);
542 }
543 }
544 }
545
546 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
547 dominated stmts before their dominators, so that release_ssa_defs
548 stands a chance of propagating DEFs into debug bind stmts. */
549
550 void
551 release_defs_bitset (bitmap toremove)
552 {
553 unsigned j;
554 bitmap_iterator bi;
555
556 /* Performing a topological sort is probably overkill; this will
557 most likely run in slightly superlinear time, rather than the
558 pathological quadratic worst case. */
559 while (!bitmap_empty_p (toremove))
560 EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
561 {
562 bool remove_now = true;
563 tree var = ssa_name (j);
564 gimple *stmt;
565 imm_use_iterator uit;
566
567 FOR_EACH_IMM_USE_STMT (stmt, uit, var)
568 {
569 ssa_op_iter dit;
570 def_operand_p def_p;
571
572 /* We can't propagate PHI nodes into debug stmts. */
573 if (gimple_code (stmt) == GIMPLE_PHI
574 || is_gimple_debug (stmt))
575 continue;
576
577 /* If we find another definition to remove that uses
578 the one we're looking at, defer the removal of this
579 one, so that it can be propagated into debug stmts
580 after the other is. */
581 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
582 {
583 tree odef = DEF_FROM_PTR (def_p);
584
585 if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
586 {
587 remove_now = false;
588 break;
589 }
590 }
591
592 if (!remove_now)
593 BREAK_FROM_IMM_USE_STMT (uit);
594 }
595
596 if (remove_now)
597 {
598 gimple *def = SSA_NAME_DEF_STMT (var);
599 gimple_stmt_iterator gsi = gsi_for_stmt (def);
600
601 if (gimple_code (def) == GIMPLE_PHI)
602 remove_phi_node (&gsi, true);
603 else
604 {
605 gsi_remove (&gsi, true);
606 release_defs (def);
607 }
608
609 bitmap_clear_bit (toremove, j);
610 }
611 }
612 }
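/* A minimal sketch (NAME1 and NAME2 are hypothetical SSA names whose
   definitions became dead): collect their versions in a bitmap and let
   release_defs_bitset order the removals.

     bitmap toremove = BITMAP_ALLOC (NULL);
     bitmap_set_bit (toremove, SSA_NAME_VERSION (name1));
     bitmap_set_bit (toremove, SSA_NAME_VERSION (name2));
     release_defs_bitset (toremove);
     BITMAP_FREE (toremove);
*/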
613
614 /* Return true if SSA_NAME is malformed and mark it visited.
615
616 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
617 operand. */
618
619 static bool
620 verify_ssa_name (tree ssa_name, bool is_virtual)
621 {
622 if (TREE_CODE (ssa_name) != SSA_NAME)
623 {
624 error ("expected an SSA_NAME object");
625 return true;
626 }
627
628 if (SSA_NAME_IN_FREE_LIST (ssa_name))
629 {
630 error ("found an SSA_NAME that had been released into the free pool");
631 return true;
632 }
633
634 if (SSA_NAME_VAR (ssa_name) != NULL_TREE
635 && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
636 {
637 error ("type mismatch between an SSA_NAME and its symbol");
638 return true;
639 }
640
641 if (is_virtual && !virtual_operand_p (ssa_name))
642 {
643 error ("found a virtual definition for a GIMPLE register");
644 return true;
645 }
646
647 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
648 {
649 error ("virtual SSA name for non-VOP decl");
650 return true;
651 }
652
653 if (!is_virtual && virtual_operand_p (ssa_name))
654 {
655 error ("found a real definition for a non-register");
656 return true;
657 }
658
659 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
660 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
661 {
662 error ("found a default name with a non-empty defining statement");
663 return true;
664 }
665
666 return false;
667 }
668
669
670 /* Return true if the definition of SSA_NAME at block BB is malformed.
671
672 STMT is the statement where SSA_NAME is created.
673
674 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
675 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
676 it means that the block in that array slot contains the
677 definition of SSA_NAME.
678
679 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
680
681 static bool
682 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
683 gimple *stmt, bool is_virtual)
684 {
685 if (verify_ssa_name (ssa_name, is_virtual))
686 goto err;
687
688 if (SSA_NAME_VAR (ssa_name)
689 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
690 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
691 {
692 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
693 goto err;
694 }
695
696 if (definition_block[SSA_NAME_VERSION (ssa_name)])
697 {
698 error ("SSA_NAME created in two different blocks %i and %i",
699 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
700 goto err;
701 }
702
703 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
704
705 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
706 {
707 error ("SSA_NAME_DEF_STMT is wrong");
708 fprintf (stderr, "Expected definition statement:\n");
709 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
710 fprintf (stderr, "\nActual definition statement:\n");
711 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
712 goto err;
713 }
714
715 return false;
716
717 err:
718 fprintf (stderr, "while verifying SSA_NAME ");
719 print_generic_expr (stderr, ssa_name, 0);
720 fprintf (stderr, " in statement\n");
721 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
722
723 return true;
724 }
725
726
727 /* Return true if the use of SSA_NAME at statement STMT in block BB is
728 malformed.
729
730 DEF_BB is the block where SSA_NAME was found to be created.
731
732 IDOM contains immediate dominator information for the flowgraph.
733
734 CHECK_ABNORMAL is true if the caller wants to check whether this use
735 is flowing through an abnormal edge (only used when checking PHI
736 arguments).
737
738 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
739 that are defined before STMT in basic block BB. */
740
741 static bool
742 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
743 gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
744 {
745 bool err = false;
746 tree ssa_name = USE_FROM_PTR (use_p);
747
748 if (!TREE_VISITED (ssa_name))
749 if (verify_imm_links (stderr, ssa_name))
750 err = true;
751
752 TREE_VISITED (ssa_name) = 1;
753
754 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
755 && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
756 ; /* Default definitions have empty statements. Nothing to do. */
757 else if (!def_bb)
758 {
759 error ("missing definition");
760 err = true;
761 }
762 else if (bb != def_bb
763 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
764 {
765 error ("definition in block %i does not dominate use in block %i",
766 def_bb->index, bb->index);
767 err = true;
768 }
769 else if (bb == def_bb
770 && names_defined_in_bb != NULL
771 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
772 {
773 error ("definition in block %i follows the use", def_bb->index);
774 err = true;
775 }
776
777 if (check_abnormal
778 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
779 {
780 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
781 err = true;
782 }
783
784 /* Make sure the use is on an appropriate immediate-use list by checking
785 that the previous element refers to the same SSA name. */
786 if (use_p->prev == NULL)
787 {
788 error ("no immediate_use list");
789 err = true;
790 }
791 else
792 {
793 tree listvar;
794 if (use_p->prev->use == NULL)
795 listvar = use_p->prev->loc.ssa_name;
796 else
797 listvar = USE_FROM_PTR (use_p->prev);
798 if (listvar != ssa_name)
799 {
800 error ("wrong immediate use list");
801 err = true;
802 }
803 }
804
805 if (err)
806 {
807 fprintf (stderr, "for SSA_NAME: ");
808 print_generic_expr (stderr, ssa_name, TDF_VOPS);
809 fprintf (stderr, " in statement:\n");
810 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
811 }
812
813 return err;
814 }
815
816
817 /* Return true if any of the arguments for PHI node PHI at block BB is
818 malformed.
819
820 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
821 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
822 it means that the block in that array slot contains the
823 definition of SSA_NAME. */
824
825 static bool
826 verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
827 {
828 edge e;
829 bool err = false;
830 size_t i, phi_num_args = gimple_phi_num_args (phi);
831
832 if (EDGE_COUNT (bb->preds) != phi_num_args)
833 {
834 error ("incoming edge count does not match number of PHI arguments");
835 err = true;
836 goto error;
837 }
838
839 for (i = 0; i < phi_num_args; i++)
840 {
841 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
842 tree op = USE_FROM_PTR (op_p);
843
844 e = EDGE_PRED (bb, i);
845
846 if (op == NULL_TREE)
847 {
848 error ("PHI argument is missing for edge %d->%d",
849 e->src->index,
850 e->dest->index);
851 err = true;
852 goto error;
853 }
854
855 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
856 {
857 error ("PHI argument is not SSA_NAME, or invariant");
858 err = true;
859 }
860
861 if (TREE_CODE (op) == SSA_NAME)
862 {
863 err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
864 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
865 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
866 }
867
868 if (TREE_CODE (op) == ADDR_EXPR)
869 {
870 tree base = TREE_OPERAND (op, 0);
871 while (handled_component_p (base))
872 base = TREE_OPERAND (base, 0);
873 if ((TREE_CODE (base) == VAR_DECL
874 || TREE_CODE (base) == PARM_DECL
875 || TREE_CODE (base) == RESULT_DECL)
876 && !TREE_ADDRESSABLE (base))
877 {
878 error ("address taken, but ADDRESSABLE bit not set");
879 err = true;
880 }
881 }
882
883 if (e->dest != bb)
884 {
885 error ("wrong edge %d->%d for PHI argument",
886 e->src->index, e->dest->index);
887 err = true;
888 }
889
890 if (err)
891 {
892 fprintf (stderr, "PHI argument\n");
893 print_generic_stmt (stderr, op, TDF_VOPS);
894 goto error;
895 }
896 }
897
898 error:
899 if (err)
900 {
901 fprintf (stderr, "for PHI node\n");
902 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
903 }
904
905
906 return err;
907 }
908
909
910 /* Verify common invariants in the SSA web.
911 TODO: verify the variable annotations. */
912
913 DEBUG_FUNCTION void
914 verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
915 {
916 size_t i;
917 basic_block bb;
918 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
919 ssa_op_iter iter;
920 tree op;
921 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
922 bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
923
924 gcc_assert (!need_ssa_update_p (cfun));
925
926 timevar_push (TV_TREE_SSA_VERIFY);
927
928 /* Keep track of SSA names present in the IL. */
929 for (i = 1; i < num_ssa_names; i++)
930 {
931 tree name = ssa_name (i);
932 if (name)
933 {
934 gimple *stmt;
935 TREE_VISITED (name) = 0;
936
937 verify_ssa_name (name, virtual_operand_p (name));
938
939 stmt = SSA_NAME_DEF_STMT (name);
940 if (!gimple_nop_p (stmt))
941 {
942 basic_block bb = gimple_bb (stmt);
943 if (verify_def (bb, definition_block,
944 name, stmt, virtual_operand_p (name)))
945 goto err;
946 }
947 }
948 }
949
950 calculate_dominance_info (CDI_DOMINATORS);
951
952 /* Now verify all the uses and make sure they agree with the definitions
953 found in the previous pass. */
954 FOR_EACH_BB_FN (bb, cfun)
955 {
956 edge e;
957 edge_iterator ei;
958
959 /* Make sure that all edges have a clear 'aux' field. */
960 FOR_EACH_EDGE (e, ei, bb->preds)
961 {
962 if (e->aux)
963 {
964 error ("AUX pointer initialized for edge %d->%d", e->src->index,
965 e->dest->index);
966 goto err;
967 }
968 }
969
970 /* Verify the arguments for every PHI node in the block. */
971 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
972 {
973 gphi *phi = gsi.phi ();
974 if (verify_phi_args (phi, bb, definition_block))
975 goto err;
976
977 bitmap_set_bit (names_defined_in_bb,
978 SSA_NAME_VERSION (gimple_phi_result (phi)));
979 }
980
981 /* Now verify all the uses and vuses in every statement of the block. */
982 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
983 gsi_next (&gsi))
984 {
985 gimple *stmt = gsi_stmt (gsi);
986 use_operand_p use_p;
987
988 if (check_modified_stmt && gimple_modified_p (stmt))
989 {
990 error ("stmt (%p) marked modified after optimization pass: ",
991 (void *)stmt);
992 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
993 goto err;
994 }
995
996 if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
997 {
998 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
999 goto err;
1000 }
1001
1002 if (gimple_debug_bind_p (stmt)
1003 && !gimple_debug_bind_has_value_p (stmt))
1004 continue;
1005
1006 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
1007 {
1008 op = USE_FROM_PTR (use_p);
1009 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1010 use_p, stmt, false, names_defined_in_bb))
1011 goto err;
1012 }
1013
1014 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1015 {
1016 if (SSA_NAME_DEF_STMT (op) != stmt)
1017 {
1018 error ("SSA_NAME_DEF_STMT is wrong");
1019 fprintf (stderr, "Expected definition statement:\n");
1020 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1021 fprintf (stderr, "\nActual definition statement:\n");
1022 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1023 4, TDF_VOPS);
1024 goto err;
1025 }
1026 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1027 }
1028 }
1029
1030 bitmap_clear (names_defined_in_bb);
1031 }
1032
1033 free (definition_block);
1034
1035 /* Restore the dominance information to its prior known state, so
1036 that we do not perturb the compiler's subsequent behavior. */
1037 if (orig_dom_state == DOM_NONE)
1038 free_dominance_info (CDI_DOMINATORS);
1039 else
1040 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1041
1042 BITMAP_FREE (names_defined_in_bb);
1043 timevar_pop (TV_TREE_SSA_VERIFY);
1044 return;
1045
1046 err:
1047 internal_error ("verify_ssa failed");
1048 }
1049
1050
1051 /* Initialize global DFA and SSA structures. */
1052
1053 void
1054 init_tree_ssa (struct function *fn)
1055 {
1056 fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
1057 fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
1058 pt_solution_reset (&fn->gimple_df->escaped);
1059 init_ssanames (fn, 0);
1060 }
1061
1062 /* Do the actions required to initialize internal data structures used
1063 in tree-ssa optimization passes. */
1064
1065 static unsigned int
1066 execute_init_datastructures (void)
1067 {
1068 /* Allocate hash tables, arrays and other structures. */
1069 gcc_assert (!cfun->gimple_df);
1070 init_tree_ssa (cfun);
1071 return 0;
1072 }
1073
1074 namespace {
1075
1076 const pass_data pass_data_init_datastructures =
1077 {
1078 GIMPLE_PASS, /* type */
1079 "*init_datastructures", /* name */
1080 OPTGROUP_NONE, /* optinfo_flags */
1081 TV_NONE, /* tv_id */
1082 PROP_cfg, /* properties_required */
1083 0, /* properties_provided */
1084 0, /* properties_destroyed */
1085 0, /* todo_flags_start */
1086 0, /* todo_flags_finish */
1087 };
1088
1089 class pass_init_datastructures : public gimple_opt_pass
1090 {
1091 public:
1092 pass_init_datastructures (gcc::context *ctxt)
1093 : gimple_opt_pass (pass_data_init_datastructures, ctxt)
1094 {}
1095
1096 /* opt_pass methods: */
1097 virtual bool gate (function *fun)
1098 {
1099 /* Do nothing for functions that were already produced in SSA form. */
1100 return !(fun->curr_properties & PROP_ssa);
1101 }
1102
1103 virtual unsigned int execute (function *)
1104 {
1105 return execute_init_datastructures ();
1106 }
1107
1108 }; // class pass_init_datastructures
1109
1110 } // anon namespace
1111
1112 gimple_opt_pass *
1113 make_pass_init_datastructures (gcc::context *ctxt)
1114 {
1115 return new pass_init_datastructures (ctxt);
1116 }
1117
1118 /* Deallocate memory associated with SSA data structures for FNDECL. */
1119
1120 void
1121 delete_tree_ssa (struct function *fn)
1122 {
1123 fini_ssanames (fn);
1124
1125 /* We no longer maintain the SSA operand cache at this point. */
1126 if (ssa_operands_active (fn))
1127 fini_ssa_operands (fn);
1128
1129 fn->gimple_df->default_defs->empty ();
1130 fn->gimple_df->default_defs = NULL;
1131 pt_solution_reset (&fn->gimple_df->escaped);
1132 if (fn->gimple_df->decls_to_pointers != NULL)
1133 delete fn->gimple_df->decls_to_pointers;
1134 fn->gimple_df->decls_to_pointers = NULL;
1135 fn->gimple_df->modified_noreturn_calls = NULL;
1136 fn->gimple_df = NULL;
1137 }
1138
1139 /* Return true if EXPR is a useless type conversion, otherwise return
1140 false. */
1141
1142 bool
1143 tree_ssa_useless_type_conversion (tree expr)
1144 {
1145 /* If we have an assignment that merely uses a NOP_EXPR to change
1146 the top of the RHS to the type of the LHS and the type conversion
1147 is "safe", then strip away the type conversion so that we can
1148 enter LHS = RHS into the const_and_copies table. */
1149 if (CONVERT_EXPR_P (expr)
1150 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1151 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1152 return useless_type_conversion_p
1153 (TREE_TYPE (expr),
1154 TREE_TYPE (TREE_OPERAND (expr, 0)));
1155
1156 return false;
1157 }
1158
1159 /* Strip conversions from EXP according to
1160 tree_ssa_useless_type_conversion and return the resulting
1161 expression. */
1162
1163 tree
1164 tree_ssa_strip_useless_type_conversions (tree exp)
1165 {
1166 while (tree_ssa_useless_type_conversion (exp))
1167 exp = TREE_OPERAND (exp, 0);
1168 return exp;
1169 }
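/* For example (a hypothetical fragment): a cast between two object pointer
   types in the same address space is normally a useless conversion at the
   GIMPLE level, so

     tree op = tree_ssa_strip_useless_type_conversions (rhs);

   would look through such a NOP_EXPR and return the underlying operand,
   while a narrowing integer conversion would be left alone.  */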
1170
1171
1172 /* Return true if T, an SSA_NAME, has an undefined value. PARTIAL is what
1173 should be returned if the value is only partially undefined. */
1174
1175 bool
1176 ssa_undefined_value_p (tree t, bool partial)
1177 {
1178 gimple *def_stmt;
1179 tree var = SSA_NAME_VAR (t);
1180
1181 if (!var)
1182 ;
1183 /* Parameters get their initial value from the function entry. */
1184 else if (TREE_CODE (var) == PARM_DECL)
1185 return false;
1186 /* When returning by reference the return address is actually a hidden
1187 parameter. */
1188 else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1189 return false;
1190 /* Hard register variables get their initial value from the ether. */
1191 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1192 return false;
1193
1194 /* The value is undefined iff its definition statement is empty. */
1195 def_stmt = SSA_NAME_DEF_STMT (t);
1196 if (gimple_nop_p (def_stmt))
1197 return true;
1198
1199 /* Check if the complex was not only partially defined. */
1200 if (partial && is_gimple_assign (def_stmt)
1201 && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1202 {
1203 tree rhs1, rhs2;
1204
1205 rhs1 = gimple_assign_rhs1 (def_stmt);
1206 rhs2 = gimple_assign_rhs2 (def_stmt);
1207 return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1208 || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1209 }
1210 return false;
1211 }
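/* For instance (hypothetical GIMPLE), if x_1(D) is the default definition
   of an uninitialized local, then for

     _2 = COMPLEX_EXPR <x_1(D), 0.0>;

   ssa_undefined_value_p (_2, false) is false (the defining statement is not
   empty), while ssa_undefined_value_p (_2, true) is true because the real
   part is undefined.  */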
1212
1213
1214 /* If necessary, rewrite the base of the reference tree *TP from
1215 a MEM_REF to a plain or converted symbol. */
1216
1217 static void
1218 maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
1219 {
1220 tree sym;
1221
1222 while (handled_component_p (*tp))
1223 tp = &TREE_OPERAND (*tp, 0);
1224 if (TREE_CODE (*tp) == MEM_REF
1225 && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
1226 && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1227 && DECL_P (sym)
1228 && !TREE_ADDRESSABLE (sym)
1229 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1230 {
1231 if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1232 && useless_type_conversion_p (TREE_TYPE (*tp),
1233 TREE_TYPE (TREE_TYPE (sym)))
1234 && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1235 TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1236 {
1237 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1238 TYPE_SIZE (TREE_TYPE (*tp)),
1239 int_const_binop (MULT_EXPR,
1240 bitsize_int (BITS_PER_UNIT),
1241 TREE_OPERAND (*tp, 1)));
1242 }
1243 else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1244 && useless_type_conversion_p (TREE_TYPE (*tp),
1245 TREE_TYPE (TREE_TYPE (sym))))
1246 {
1247 *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1248 ? REALPART_EXPR : IMAGPART_EXPR,
1249 TREE_TYPE (*tp), sym);
1250 }
1251 else if (integer_zerop (TREE_OPERAND (*tp, 1)))
1252 {
1253 if (!useless_type_conversion_p (TREE_TYPE (*tp),
1254 TREE_TYPE (sym)))
1255 *tp = build1 (VIEW_CONVERT_EXPR,
1256 TREE_TYPE (*tp), sym);
1257 else
1258 *tp = sym;
1259 }
1260 }
1261 }
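/* Roughly, assuming SYM is a non-addressable decl recorded in
   SUITABLE_FOR_RENAMING, the rewrites above amount to (hypothetical GIMPLE
   trees):

     MEM[(int *)&v + 8B]    ->  BIT_FIELD_REF <v, 32, 64>    (v a vector of 32-bit ints)
     MEM[(float *)&c + 4B]  ->  IMAGPART_EXPR <c>            (c a _Complex float)
     MEM[(T *)&s + 0B]      ->  s, or VIEW_CONVERT_EXPR <T> (s) on a type mismatch
*/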
1262
1263 /* For a tree REF return its base if it is the base of a MEM_REF
1264 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1265
1266 static tree
1267 non_rewritable_mem_ref_base (tree ref)
1268 {
1269 tree base = ref;
1270
1271 /* A plain decl does not need it set. */
1272 if (DECL_P (ref))
1273 return NULL_TREE;
1274
1275 while (handled_component_p (base))
1276 base = TREE_OPERAND (base, 0);
1277
1278 /* But watch out for MEM_REFs we cannot lower to a
1279 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1280 if (TREE_CODE (base) == MEM_REF
1281 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1282 {
1283 tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1284 if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1285 || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
1286 && useless_type_conversion_p (TREE_TYPE (base),
1287 TREE_TYPE (TREE_TYPE (decl)))
1288 && wi::fits_uhwi_p (mem_ref_offset (base))
1289 && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1290 mem_ref_offset (base))
1291 && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1292 TYPE_SIZE_UNIT (TREE_TYPE (base))))
1293 return NULL_TREE;
1294 if (DECL_P (decl)
1295 && (!integer_zerop (TREE_OPERAND (base, 1))
1296 || (DECL_SIZE (decl)
1297 != TYPE_SIZE (TREE_TYPE (base)))
1298 || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
1299 return decl;
1300 }
1301
1302 return NULL_TREE;
1303 }
1304
1305 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1306 Otherwise return false. */
1307
1308 static bool
1309 non_rewritable_lvalue_p (tree lhs)
1310 {
1311 /* A plain decl is always rewritable. */
1312 if (DECL_P (lhs))
1313 return false;
1314
1315 /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
1316 a reasonably efficient manner... */
1317 if ((TREE_CODE (lhs) == REALPART_EXPR
1318 || TREE_CODE (lhs) == IMAGPART_EXPR)
1319 && DECL_P (TREE_OPERAND (lhs, 0)))
1320 return false;
1321
1322 /* A decl that is wrapped inside a MEM_REF that covers
1323 it in full is also rewritable.
1324 ??? The following could be relaxed allowing component
1325 references that do not change the access size. */
1326 if (TREE_CODE (lhs) == MEM_REF
1327 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1328 && integer_zerop (TREE_OPERAND (lhs, 1)))
1329 {
1330 tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1331 if (DECL_P (decl)
1332 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1333 && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1334 return false;
1335 }
1336
1337 return true;
1338 }
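/* Concretely (hypothetical lvalues, following the checks above, with D a
   local decl):

     d                    rewritable (plain decl)
     __real d             rewritable when the operand is a decl
     MEM[(T *)&d + 0B]    rewritable when the access covers d exactly
     d.field, a[i]        not rewritable here
*/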
1339
1340 /* When possible, clear the TREE_ADDRESSABLE bit or set the DECL_GIMPLE_REG_P
1341 bit of VAR and mark it for conversion into SSA by recording its DECL_UID
1342 in SUITABLE_FOR_RENAMING. */
1343
1344 static void
1345 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
1346 bitmap suitable_for_renaming)
1347 {
1348 /* Global variables and result decls cannot be changed. */
1349 if (is_global_var (var)
1350 || TREE_CODE (var) == RESULT_DECL
1351 || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1352 return;
1353
1354 if (TREE_ADDRESSABLE (var)
1355 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1356 a non-register. Otherwise we are confused and forget to
1357 add virtual operands for it. */
1358 && (!is_gimple_reg_type (TREE_TYPE (var))
1359 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1360 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1361 || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1362 {
1363 TREE_ADDRESSABLE (var) = 0;
1364 if (is_gimple_reg (var))
1365 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1366 if (dump_file)
1367 {
1368 fprintf (dump_file, "No longer having address taken: ");
1369 print_generic_expr (dump_file, var, 0);
1370 fprintf (dump_file, "\n");
1371 }
1372 }
1373
1374 if (!DECL_GIMPLE_REG_P (var)
1375 && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1376 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1377 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1378 && !TREE_THIS_VOLATILE (var)
1379 && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
1380 {
1381 DECL_GIMPLE_REG_P (var) = 1;
1382 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1383 if (dump_file)
1384 {
1385 fprintf (dump_file, "Now a gimple register: ");
1386 print_generic_expr (dump_file, var, 0);
1387 fprintf (dump_file, "\n");
1388 }
1389 }
1390 }
1391
1392 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
1393
1394 void
1395 execute_update_addresses_taken (void)
1396 {
1397 basic_block bb;
1398 bitmap addresses_taken = BITMAP_ALLOC (NULL);
1399 bitmap not_reg_needs = BITMAP_ALLOC (NULL);
1400 bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
1401 tree var;
1402 unsigned i;
1403
1404 timevar_push (TV_ADDRESS_TAKEN);
1405
1406 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1407 the function body. */
1408 FOR_EACH_BB_FN (bb, cfun)
1409 {
1410 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1411 gsi_next (&gsi))
1412 {
1413 gimple *stmt = gsi_stmt (gsi);
1414 enum gimple_code code = gimple_code (stmt);
1415 tree decl;
1416
1417 /* Note all addresses taken by the stmt. */
1418 gimple_ior_addresses_taken (addresses_taken, stmt);
1419
1420 /* If we have a call or an assignment, see if the lhs contains
1421 a local decl that requires not to be a gimple register. */
1422 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1423 {
1424 tree lhs = gimple_get_lhs (stmt);
1425 if (lhs
1426 && TREE_CODE (lhs) != SSA_NAME
1427 && non_rewritable_lvalue_p (lhs))
1428 {
1429 decl = get_base_address (lhs);
1430 if (DECL_P (decl))
1431 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1432 }
1433 }
1434
1435 if (gimple_assign_single_p (stmt))
1436 {
1437 tree rhs = gimple_assign_rhs1 (stmt);
1438 if ((decl = non_rewritable_mem_ref_base (rhs)))
1439 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1440 }
1441
1442 else if (code == GIMPLE_CALL)
1443 {
1444 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1445 {
1446 tree arg = gimple_call_arg (stmt, i);
1447 if ((decl = non_rewritable_mem_ref_base (arg)))
1448 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1449 }
1450 }
1451
1452 else if (code == GIMPLE_ASM)
1453 {
1454 gasm *asm_stmt = as_a <gasm *> (stmt);
1455 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1456 {
1457 tree link = gimple_asm_output_op (asm_stmt, i);
1458 tree lhs = TREE_VALUE (link);
1459 if (TREE_CODE (lhs) != SSA_NAME)
1460 {
1461 decl = get_base_address (lhs);
1462 if (DECL_P (decl)
1463 && (non_rewritable_lvalue_p (lhs)
1464 /* We cannot move required conversions from
1465 the lhs to the rhs in asm statements, so
1466 require we do not need any. */
1467 || !useless_type_conversion_p
1468 (TREE_TYPE (lhs), TREE_TYPE (decl))))
1469 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1470 }
1471 }
1472 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1473 {
1474 tree link = gimple_asm_input_op (asm_stmt, i);
1475 if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1476 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1477 }
1478 }
1479 }
1480
1481 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1482 gsi_next (&gsi))
1483 {
1484 size_t i;
1485 gphi *phi = gsi.phi ();
1486
1487 for (i = 0; i < gimple_phi_num_args (phi); i++)
1488 {
1489 tree op = PHI_ARG_DEF (phi, i), var;
1490 if (TREE_CODE (op) == ADDR_EXPR
1491 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1492 && DECL_P (var))
1493 bitmap_set_bit (addresses_taken, DECL_UID (var));
1494 }
1495 }
1496 }
1497
1498 /* We cannot iterate over all referenced vars because that can contain
1499 unused vars from BLOCK trees, which causes code generation differences
1500 for -g vs. -g0. */
1501 for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1502 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1503 suitable_for_renaming);
1504
1505 FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1506 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1507 suitable_for_renaming);
1508
1509 /* Operand caches need to be recomputed for operands referencing the updated
1510 variables and operands need to be rewritten to expose bare symbols. */
1511 if (!bitmap_empty_p (suitable_for_renaming))
1512 {
1513 FOR_EACH_BB_FN (bb, cfun)
1514 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1515 {
1516 gimple *stmt = gsi_stmt (gsi);
1517
1518 /* Re-write TARGET_MEM_REFs of symbols we want to
1519 rewrite into SSA form. */
1520 if (gimple_assign_single_p (stmt))
1521 {
1522 tree lhs = gimple_assign_lhs (stmt);
1523 tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1524 tree sym;
1525
1526 /* Rewrite LHS IMAG/REALPART_EXPR similar to
1527 gimplify_modify_expr_complex_part. */
1528 if ((TREE_CODE (lhs) == IMAGPART_EXPR
1529 || TREE_CODE (lhs) == REALPART_EXPR)
1530 && DECL_P (TREE_OPERAND (lhs, 0))
1531 && bitmap_bit_p (suitable_for_renaming,
1532 DECL_UID (TREE_OPERAND (lhs, 0))))
1533 {
1534 tree other = make_ssa_name (TREE_TYPE (lhs));
1535 tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
1536 ? REALPART_EXPR : IMAGPART_EXPR,
1537 TREE_TYPE (other),
1538 TREE_OPERAND (lhs, 0));
1539 gimple *load = gimple_build_assign (other, lrhs);
1540 location_t loc = gimple_location (stmt);
1541 gimple_set_location (load, loc);
1542 gimple_set_vuse (load, gimple_vuse (stmt));
1543 gsi_insert_before (&gsi, load, GSI_SAME_STMT);
1544 gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
1545 gimple_assign_set_rhs_with_ops
1546 (&gsi, COMPLEX_EXPR,
1547 TREE_CODE (lhs) == IMAGPART_EXPR
1548 ? other : gimple_assign_rhs1 (stmt),
1549 TREE_CODE (lhs) == IMAGPART_EXPR
1550 ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
1551 stmt = gsi_stmt (gsi);
1552 unlink_stmt_vdef (stmt);
1553 update_stmt (stmt);
1554 continue;
1555 }
1556
1557 /* We shouldn't have any fancy wrapping of
1558 component-refs on the LHS, but look through
1559 VIEW_CONVERT_EXPRs as that is easy. */
1560 while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1561 lhs = TREE_OPERAND (lhs, 0);
1562 if (TREE_CODE (lhs) == MEM_REF
1563 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1564 && integer_zerop (TREE_OPERAND (lhs, 1))
1565 && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1566 && DECL_P (sym)
1567 && !TREE_ADDRESSABLE (sym)
1568 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1569 lhs = sym;
1570 else
1571 lhs = gimple_assign_lhs (stmt);
1572
1573 /* Rewrite the RHS and make sure the resulting assignment
1574 is validly typed. */
1575 maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1576 rhs = gimple_assign_rhs1 (stmt);
1577 if (gimple_assign_lhs (stmt) != lhs
1578 && !useless_type_conversion_p (TREE_TYPE (lhs),
1579 TREE_TYPE (rhs)))
1580 rhs = fold_build1 (VIEW_CONVERT_EXPR,
1581 TREE_TYPE (lhs), rhs);
1582
1583 if (gimple_assign_lhs (stmt) != lhs)
1584 gimple_assign_set_lhs (stmt, lhs);
1585
1586 if (gimple_assign_rhs1 (stmt) != rhs)
1587 {
1588 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1589 gimple_assign_set_rhs_from_tree (&gsi, rhs);
1590 }
1591 }
1592
1593 else if (gimple_code (stmt) == GIMPLE_CALL)
1594 {
1595 unsigned i;
1596 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1597 {
1598 tree *argp = gimple_call_arg_ptr (stmt, i);
1599 maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
1600 }
1601 }
1602
1603 else if (gimple_code (stmt) == GIMPLE_ASM)
1604 {
1605 gasm *asm_stmt = as_a <gasm *> (stmt);
1606 unsigned i;
1607 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1608 {
1609 tree link = gimple_asm_output_op (asm_stmt, i);
1610 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1611 suitable_for_renaming);
1612 }
1613 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1614 {
1615 tree link = gimple_asm_input_op (asm_stmt, i);
1616 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1617 suitable_for_renaming);
1618 }
1619 }
1620
1621 else if (gimple_debug_bind_p (stmt)
1622 && gimple_debug_bind_has_value_p (stmt))
1623 {
1624 tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
1625 tree decl;
1626 maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
1627 decl = non_rewritable_mem_ref_base (*valuep);
1628 if (decl
1629 && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
1630 gimple_debug_bind_reset_value (stmt);
1631 }
1632
1633 if (gimple_references_memory_p (stmt)
1634 || is_gimple_debug (stmt))
1635 update_stmt (stmt);
1636
1637 gsi_next (&gsi);
1638 }
1639
1640 /* Update SSA form here, we are called as non-pass as well. */
1641 if (number_of_loops (cfun) > 1
1642 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
1643 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
1644 else
1645 update_ssa (TODO_update_ssa);
1646 }
1647
1648 BITMAP_FREE (not_reg_needs);
1649 BITMAP_FREE (addresses_taken);
1650 BITMAP_FREE (suitable_for_renaming);
1651 timevar_pop (TV_ADDRESS_TAKEN);
1652 }
1653
1654 namespace {
1655
1656 const pass_data pass_data_update_address_taken =
1657 {
1658 GIMPLE_PASS, /* type */
1659 "addressables", /* name */
1660 OPTGROUP_NONE, /* optinfo_flags */
1661 TV_ADDRESS_TAKEN, /* tv_id */
1662 PROP_ssa, /* properties_required */
1663 0, /* properties_provided */
1664 0, /* properties_destroyed */
1665 0, /* todo_flags_start */
1666 TODO_update_address_taken, /* todo_flags_finish */
1667 };
1668
1669 class pass_update_address_taken : public gimple_opt_pass
1670 {
1671 public:
1672 pass_update_address_taken (gcc::context *ctxt)
1673 : gimple_opt_pass (pass_data_update_address_taken, ctxt)
1674 {}
1675
1676 /* opt_pass methods: */
1677
1678 }; // class pass_update_address_taken
1679
1680 } // anon namespace
1681
1682 gimple_opt_pass *
1683 make_pass_update_address_taken (gcc::context *ctxt)
1684 {
1685 return new pass_update_address_taken (ctxt);
1686 }