1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "cfghooks.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "hard-reg-set.h"
28 #include "ssa.h"
29 #include "alias.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "flags.h"
33 #include "tm_p.h"
34 #include "target.h"
35 #include "langhooks.h"
36 #include "gimple-pretty-print.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimple-walk.h"
42 #include "tree-ssa-loop-manip.h"
43 #include "tree-into-ssa.h"
44 #include "tree-ssa.h"
45 #include "tree-inline.h"
46 #include "tree-pass.h"
47 #include "diagnostic-core.h"
48 #include "cfgloop.h"
49 #include "cfgexpand.h"
50
51 /* Hash map of variable mappings, keyed by edge. */
52 static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
53
54
55 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
56
57 void
58 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
59 {
60 edge_var_map new_node;
61
62 if (edge_var_maps == NULL)
63 edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
64
65 auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
66 new_node.def = def;
67 new_node.result = result;
68 new_node.locus = locus;
69
70 slot.safe_push (new_node);
71 }
72
73
74 /* Clear the var mappings in edge E. */
75
76 void
77 redirect_edge_var_map_clear (edge e)
78 {
79 if (!edge_var_maps)
80 return;
81
82 auto_vec<edge_var_map> *head = edge_var_maps->get (e);
83
84 if (head)
85 head->release ();
86 }
87
88
89 /* Duplicate the redirected var mappings from OLDE to NEWE.
90
91 This assumes a hash_map can have multiple edges mapping to the same
92 var_map (many to one mapping), since we don't remove the previous mappings.
93 */
94
95 void
96 redirect_edge_var_map_dup (edge newe, edge olde)
97 {
98 if (!edge_var_maps)
99 return;
100
101 auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
102 auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
103 if (!old_head)
104 return;
105
106 new_head->safe_splice (*old_head);
107 }
108
109
110 /* Return the variable mappings for a given edge. If there is none, return
111 NULL. */
112
113 vec<edge_var_map> *
114 redirect_edge_var_map_vector (edge e)
115 {
116 /* Hey, what kind of idiot would... you'd be surprised. */
117 if (!edge_var_maps)
118 return NULL;
119
120 auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
121 if (!slot)
122 return NULL;
123
124 return slot;
125 }
126
127 /* Clear the edge variable mappings. */
128
129 void
130 redirect_edge_var_map_destroy (void)
131 {
132 delete edge_var_maps;
133 edge_var_maps = NULL;
134 }
135
136
137 /* Remove the corresponding arguments from the PHI nodes in E's
138 destination block and redirect it to DEST. Return redirected edge.
139 The list of removed arguments is stored in a vector accessed
140 through edge_var_maps. */
141
142 edge
143 ssa_redirect_edge (edge e, basic_block dest)
144 {
145 gphi_iterator gsi;
146 gphi *phi;
147
148 redirect_edge_var_map_clear (e);
149
150 /* Remove the appropriate PHI arguments in E's destination block. */
151 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
152 {
153 tree def;
154 source_location locus;
155
156 phi = gsi.phi ();
157 def = gimple_phi_arg_def (phi, e->dest_idx);
158 locus = gimple_phi_arg_location (phi, e->dest_idx);
159
160 if (def == NULL_TREE)
161 continue;
162
163 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
164 }
165
166 e = redirect_edge_succ_nodup (e, dest);
167
168 return e;
169 }
170
171
172 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
173 E->dest. */
174
175 void
176 flush_pending_stmts (edge e)
177 {
178 gphi *phi;
179 edge_var_map *vm;
180 int i;
181 gphi_iterator gsi;
182
183 vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
184 if (!v)
185 return;
186
187 for (gsi = gsi_start_phis (e->dest), i = 0;
188 !gsi_end_p (gsi) && v->iterate (i, &vm);
189 gsi_next (&gsi), i++)
190 {
191 tree def;
192
193 phi = gsi.phi ();
194 def = redirect_edge_var_map_def (vm);
195 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
196 }
197
198 redirect_edge_var_map_clear (e);
199 }
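/* Illustrative usage sketch, with hypothetical names (E, NEW_DEST, E2):
   a caller that redirects an edge and later recreates the PHI nodes at
   the new destination typically pairs the two helpers above, e.g.

     edge e2 = ssa_redirect_edge (e, new_dest);
     ... create or adjust the PHI nodes in new_dest ...
     flush_pending_stmts (e2);

   ssa_redirect_edge saves the PHI arguments flowing along E in
   edge_var_maps, and flush_pending_stmts re-adds them as arguments of
   the PHI nodes in the edge's (new) destination.  */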
200
201 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
202 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
203 expression with a different value.
204
205 This will update any annotations (say debug bind stmts) referring
206 to the original LHS, so that they use the RHS instead. This is
207 done even if NLHS and LHS are the same, for it is understood that
208 the RHS will be modified afterwards, and NLHS will not be assigned
209 an equivalent value.
210
211 Adjusting any non-annotation uses of the LHS, if needed, is a
212 responsibility of the caller.
213
214 The effect of this call should be pretty much the same as that of
215 inserting a copy of STMT before STMT, and then removing the
216 original stmt, at which time gsi_remove() would have updated
217 annotations, but using this function saves all the inserting,
218 copying and removing. */
219
220 void
221 gimple_replace_ssa_lhs (gimple stmt, tree nlhs)
222 {
223 if (MAY_HAVE_DEBUG_STMTS)
224 {
225 tree lhs = gimple_get_lhs (stmt);
226
227 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
228
229 insert_debug_temp_for_var_def (NULL, lhs);
230 }
231
232 gimple_set_lhs (stmt, nlhs);
233 }
234
235
236 /* Given a tree for an expression for which we might want to emit
237 locations or values in debug information (generally a variable, but
238 we might deal with other kinds of trees in the future), return the
239 tree that should be used as the variable of a DEBUG_BIND STMT or
240 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
241
242 tree
243 target_for_debug_bind (tree var)
244 {
245 if (!MAY_HAVE_DEBUG_STMTS)
246 return NULL_TREE;
247
248 if (TREE_CODE (var) == SSA_NAME)
249 {
250 var = SSA_NAME_VAR (var);
251 if (var == NULL_TREE)
252 return NULL_TREE;
253 }
254
255 if ((TREE_CODE (var) != VAR_DECL
256 || VAR_DECL_IS_VIRTUAL_OPERAND (var))
257 && TREE_CODE (var) != PARM_DECL)
258 return NULL_TREE;
259
260 if (DECL_HAS_VALUE_EXPR_P (var))
261 return target_for_debug_bind (DECL_VALUE_EXPR (var));
262
263 if (DECL_IGNORED_P (var))
264 return NULL_TREE;
265
266 /* var-tracking only tracks registers. */
267 if (!is_gimple_reg_type (TREE_TYPE (var)))
268 return NULL_TREE;
269
270 return var;
271 }
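/* For example (illustrative, hypothetical names): for an SSA name i_3
   whose SSA_NAME_VAR is a user-visible PARM_DECL "i" of scalar type,
   target_for_debug_bind returns "i"; for an anonymous SSA temporary, a
   DECL_IGNORED_P variable, or a variable of aggregate type it returns
   NULL_TREE, meaning the value is not tracked for debug binds.  */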
272
273 /* Called via walk_tree, look for SSA_NAMEs that have already been
274 released. */
275
276 static tree
277 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
278 {
279 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
280
281 if (wi && wi->is_lhs)
282 return NULL_TREE;
283
284 if (TREE_CODE (*tp) == SSA_NAME)
285 {
286 if (SSA_NAME_IN_FREE_LIST (*tp))
287 return *tp;
288
289 *walk_subtrees = 0;
290 }
291 else if (IS_TYPE_OR_DECL_P (*tp))
292 *walk_subtrees = 0;
293
294 return NULL_TREE;
295 }
296
297 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
298 by other DEBUG stmts, and replace uses of the DEF with the
299 newly-created debug temp. */
300
301 void
302 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
303 {
304 imm_use_iterator imm_iter;
305 use_operand_p use_p;
306 gimple stmt;
307 gimple def_stmt = NULL;
308 int usecount = 0;
309 tree value = NULL;
310
311 if (!MAY_HAVE_DEBUG_STMTS)
312 return;
313
314 /* If this name has already been registered for replacement, do nothing
315 as anything that uses this name isn't in SSA form. */
316 if (name_registered_for_update_p (var))
317 return;
318
319 /* Check whether there are debug stmts that reference this variable and,
320 if there are, decide whether we should use a debug temp. */
321 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
322 {
323 stmt = USE_STMT (use_p);
324
325 if (!gimple_debug_bind_p (stmt))
326 continue;
327
328 if (usecount++)
329 break;
330
331 if (gimple_debug_bind_get_value (stmt) != var)
332 {
333 /* Count this as an additional use, so as to make sure we
334 use a temp unless VAR's definition has a SINGLE_RHS that
335 can be shared. */
336 usecount++;
337 break;
338 }
339 }
340
341 if (!usecount)
342 return;
343
344 if (gsi)
345 def_stmt = gsi_stmt (*gsi);
346 else
347 def_stmt = SSA_NAME_DEF_STMT (var);
348
349 /* If we didn't get an insertion point, and the stmt has already
350 been removed, we won't be able to insert the debug bind stmt, so
351 we'll have to drop debug information. */
352 if (gimple_code (def_stmt) == GIMPLE_PHI)
353 {
354 value = degenerate_phi_result (as_a <gphi *> (def_stmt));
355 if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
356 value = NULL;
357 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
358 to. */
359 else if (value == error_mark_node)
360 value = NULL;
361 }
362 else if (is_gimple_assign (def_stmt))
363 {
364 bool no_value = false;
365
366 if (!dom_info_available_p (CDI_DOMINATORS))
367 {
368 struct walk_stmt_info wi;
369
370 memset (&wi, 0, sizeof (wi));
371
372 /* When removing blocks without following reverse dominance
373 order, we may sometimes encounter SSA_NAMEs that have
374 already been released, referenced in other SSA_DEFs that
375 we're about to release. Consider:
376
377 <bb X>:
378 v_1 = foo;
379
380 <bb Y>:
381 w_2 = v_1 + bar;
382 # DEBUG w => w_2
383
384 If we deleted BB X first, propagating the value of w_2
385 won't do us any good. It's too late to recover the
386 original definition of v_1: when it was deleted, it was
387 only referenced in other DEFs, it couldn't possibly know
388 it should have been retained, and propagating every
389 single DEF just in case it might have to be propagated
390 into a DEBUG STMT would probably be too wasteful.
391
392 When dominator information is not readily available, we
393 check for and accept some loss of debug information. But
394 if it is available, there's no excuse for us to remove
395 blocks in the wrong order, so we don't even check for
396 dead SSA NAMEs. SSA verification shall catch any
397 errors. */
398 if ((!gsi && !gimple_bb (def_stmt))
399 || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
400 no_value = true;
401 }
402
403 if (!no_value)
404 value = gimple_assign_rhs_to_tree (def_stmt);
405 }
406
407 if (value)
408 {
409 /* If there's a single use of VAR, and VAR is the entire debug
410 expression (usecount would have been incremented again
411 otherwise), and the definition involves only constants and
412 SSA names, then we can propagate VALUE into this single use,
413 avoiding the temp.
414
415 We can also avoid using a temp if VALUE can be shared and
416 propagated into all uses, without generating expressions that
417 wouldn't be valid gimple RHSs.
418
419 Other cases that would require unsharing or non-gimple RHSs
420 are deferred to a debug temp, although we could avoid temps
421 at the expense of duplication of expressions. */
422
423 if (CONSTANT_CLASS_P (value)
424 || gimple_code (def_stmt) == GIMPLE_PHI
425 || (usecount == 1
426 && (!gimple_assign_single_p (def_stmt)
427 || is_gimple_min_invariant (value)))
428 || is_gimple_reg (value))
429 ;
430 else
431 {
432 gdebug *def_temp;
433 tree vexpr = make_node (DEBUG_EXPR_DECL);
434
435 def_temp = gimple_build_debug_bind (vexpr,
436 unshare_expr (value),
437 def_stmt);
438
439 DECL_ARTIFICIAL (vexpr) = 1;
440 TREE_TYPE (vexpr) = TREE_TYPE (value);
441 if (DECL_P (value))
442 DECL_MODE (vexpr) = DECL_MODE (value);
443 else
444 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));
445
446 if (gsi)
447 gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
448 else
449 {
450 gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
451 gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
452 }
453
454 value = vexpr;
455 }
456 }
457
458 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
459 {
460 if (!gimple_debug_bind_p (stmt))
461 continue;
462
463 if (value)
464 {
465 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
466 /* unshare_expr is not needed here. vexpr is either a
467 SINGLE_RHS, that can be safely shared, some other RHS
468 that was unshared when we found it had a single debug
469 use, or a DEBUG_EXPR_DECL, that can be safely
470 shared. */
471 SET_USE (use_p, unshare_expr (value));
472 /* If we didn't replace uses with a debug decl fold the
473 resulting expression. Otherwise we end up with invalid IL. */
474 if (TREE_CODE (value) != DEBUG_EXPR_DECL)
475 {
476 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
477 fold_stmt_inplace (&gsi);
478 }
479 }
480 else
481 gimple_debug_bind_reset_value (stmt);
482
483 update_stmt (stmt);
484 }
485 }
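/* Illustrative GIMPLE sketch (hypothetical SSA names): when x_1 below is
   about to be removed and has more than one debug use,

       x_1 = a_2 * b_3;
       # DEBUG x => x_1
       # DEBUG y => x_1 + 1

   the function above produces roughly

       # DEBUG D#1 => a_2 * b_3
       # DEBUG x => D#1
       # DEBUG y => D#1 + 1

   whereas a single debug use of the form "# DEBUG x => x_1" gets the RHS
   propagated directly into the bind and no temporary is created.  */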
486
487
488 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
489 other DEBUG stmts, and replace uses of the DEF with the
490 newly-created debug temp. */
491
492 void
493 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
494 {
495 gimple stmt;
496 ssa_op_iter op_iter;
497 def_operand_p def_p;
498
499 if (!MAY_HAVE_DEBUG_STMTS)
500 return;
501
502 stmt = gsi_stmt (*gsi);
503
504 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
505 {
506 tree var = DEF_FROM_PTR (def_p);
507
508 if (TREE_CODE (var) != SSA_NAME)
509 continue;
510
511 insert_debug_temp_for_var_def (gsi, var);
512 }
513 }
514
515 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
516
517 void
518 reset_debug_uses (gimple stmt)
519 {
520 ssa_op_iter op_iter;
521 def_operand_p def_p;
522 imm_use_iterator imm_iter;
523 gimple use_stmt;
524
525 if (!MAY_HAVE_DEBUG_STMTS)
526 return;
527
528 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
529 {
530 tree var = DEF_FROM_PTR (def_p);
531
532 if (TREE_CODE (var) != SSA_NAME)
533 continue;
534
535 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
536 {
537 if (!gimple_debug_bind_p (use_stmt))
538 continue;
539
540 gimple_debug_bind_reset_value (use_stmt);
541 update_stmt (use_stmt);
542 }
543 }
544 }
545
546 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
547 dominated stmts before their dominators, so that release_ssa_defs
548 stands a chance of propagating DEFs into debug bind stmts. */
549
550 void
551 release_defs_bitset (bitmap toremove)
552 {
553 unsigned j;
554 bitmap_iterator bi;
555
556 /* Performing a topological sort is probably overkill; this will
557 most likely run in slightly superlinear time, rather than the
558 pathological quadratic worst case. */
559 while (!bitmap_empty_p (toremove))
560 EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
561 {
562 bool remove_now = true;
563 tree var = ssa_name (j);
564 gimple stmt;
565 imm_use_iterator uit;
566
567 FOR_EACH_IMM_USE_STMT (stmt, uit, var)
568 {
569 ssa_op_iter dit;
570 def_operand_p def_p;
571
572 /* We can't propagate PHI nodes into debug stmts. */
573 if (gimple_code (stmt) == GIMPLE_PHI
574 || is_gimple_debug (stmt))
575 continue;
576
577 /* If we find another definition to remove that uses
578 the one we're looking at, defer the removal of this
579 one, so that it can be propagated into debug stmts
580 after the other is. */
581 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
582 {
583 tree odef = DEF_FROM_PTR (def_p);
584
585 if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
586 {
587 remove_now = false;
588 break;
589 }
590 }
591
592 if (!remove_now)
593 BREAK_FROM_IMM_USE_STMT (uit);
594 }
595
596 if (remove_now)
597 {
598 gimple def = SSA_NAME_DEF_STMT (var);
599 gimple_stmt_iterator gsi = gsi_for_stmt (def);
600
601 if (gimple_code (def) == GIMPLE_PHI)
602 remove_phi_node (&gsi, true);
603 else
604 {
605 gsi_remove (&gsi, true);
606 release_defs (def);
607 }
608
609 bitmap_clear_bit (toremove, j);
610 }
611 }
612 }
613
614 /* Return true if SSA_NAME is malformed and mark it visited.
615
616 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
617 operand. */
618
619 static bool
620 verify_ssa_name (tree ssa_name, bool is_virtual)
621 {
622 if (TREE_CODE (ssa_name) != SSA_NAME)
623 {
624 error ("expected an SSA_NAME object");
625 return true;
626 }
627
628 if (SSA_NAME_IN_FREE_LIST (ssa_name))
629 {
630 error ("found an SSA_NAME that had been released into the free pool");
631 return true;
632 }
633
634 if (SSA_NAME_VAR (ssa_name) != NULL_TREE
635 && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
636 {
637 error ("type mismatch between an SSA_NAME and its symbol");
638 return true;
639 }
640
641 if (is_virtual && !virtual_operand_p (ssa_name))
642 {
643 error ("found a virtual definition for a GIMPLE register");
644 return true;
645 }
646
647 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
648 {
649 error ("virtual SSA name for non-VOP decl");
650 return true;
651 }
652
653 if (!is_virtual && virtual_operand_p (ssa_name))
654 {
655 error ("found a real definition for a non-register");
656 return true;
657 }
658
659 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
660 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
661 {
662 error ("found a default name with a non-empty defining statement");
663 return true;
664 }
665
666 return false;
667 }
668
669
670 /* Return true if the definition of SSA_NAME at block BB is malformed.
671
672 STMT is the statement where SSA_NAME is created.
673
674 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
675 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
676 it means that the block in that array slot contains the
677 definition of SSA_NAME.
678
679 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
680
681 static bool
682 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
683 gimple stmt, bool is_virtual)
684 {
685 if (verify_ssa_name (ssa_name, is_virtual))
686 goto err;
687
688 if (SSA_NAME_VAR (ssa_name)
689 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
690 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
691 {
692 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
693 goto err;
694 }
695
696 if (definition_block[SSA_NAME_VERSION (ssa_name)])
697 {
698 error ("SSA_NAME created in two different blocks %i and %i",
699 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
700 goto err;
701 }
702
703 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
704
705 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
706 {
707 error ("SSA_NAME_DEF_STMT is wrong");
708 fprintf (stderr, "Expected definition statement:\n");
709 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
710 fprintf (stderr, "\nActual definition statement:\n");
711 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
712 goto err;
713 }
714
715 return false;
716
717 err:
718 fprintf (stderr, "while verifying SSA_NAME ");
719 print_generic_expr (stderr, ssa_name, 0);
720 fprintf (stderr, " in statement\n");
721 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
722
723 return true;
724 }
725
726
727 /* Return true if the use of SSA_NAME at statement STMT in block BB is
728 malformed.
729
730 DEF_BB is the block where SSA_NAME was found to be created.
731
732 IDOM contains immediate dominator information for the flowgraph.
733
734 CHECK_ABNORMAL is true if the caller wants to check whether this use
735 is flowing through an abnormal edge (only used when checking PHI
736 arguments).
737
738 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
739 that are defined before STMT in basic block BB. */
740
741 static bool
742 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
743 gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
744 {
745 bool err = false;
746 tree ssa_name = USE_FROM_PTR (use_p);
747
748 if (!TREE_VISITED (ssa_name))
749 if (verify_imm_links (stderr, ssa_name))
750 err = true;
751
752 TREE_VISITED (ssa_name) = 1;
753
754 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
755 && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
756 ; /* Default definitions have empty statements. Nothing to do. */
757 else if (!def_bb)
758 {
759 error ("missing definition");
760 err = true;
761 }
762 else if (bb != def_bb
763 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
764 {
765 error ("definition in block %i does not dominate use in block %i",
766 def_bb->index, bb->index);
767 err = true;
768 }
769 else if (bb == def_bb
770 && names_defined_in_bb != NULL
771 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
772 {
773 error ("definition in block %i follows the use", def_bb->index);
774 err = true;
775 }
776
777 if (check_abnormal
778 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
779 {
780 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
781 err = true;
782 }
783
784 /* Make sure the use is in an appropriate list by checking the previous
785 element to make sure it's the same. */
786 if (use_p->prev == NULL)
787 {
788 error ("no immediate_use list");
789 err = true;
790 }
791 else
792 {
793 tree listvar;
794 if (use_p->prev->use == NULL)
795 listvar = use_p->prev->loc.ssa_name;
796 else
797 listvar = USE_FROM_PTR (use_p->prev);
798 if (listvar != ssa_name)
799 {
800 error ("wrong immediate use list");
801 err = true;
802 }
803 }
804
805 if (err)
806 {
807 fprintf (stderr, "for SSA_NAME: ");
808 print_generic_expr (stderr, ssa_name, TDF_VOPS);
809 fprintf (stderr, " in statement:\n");
810 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
811 }
812
813 return err;
814 }
815
816
817 /* Return true if any of the arguments for PHI node PHI at block BB is
818 malformed.
819
820 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
821 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
822 it means that the block in that array slot contains the
823 definition of SSA_NAME. */
824
825 static bool
826 verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
827 {
828 edge e;
829 bool err = false;
830 size_t i, phi_num_args = gimple_phi_num_args (phi);
831
832 if (EDGE_COUNT (bb->preds) != phi_num_args)
833 {
834 error ("incoming edge count does not match number of PHI arguments");
835 err = true;
836 goto error;
837 }
838
839 for (i = 0; i < phi_num_args; i++)
840 {
841 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
842 tree op = USE_FROM_PTR (op_p);
843
844 e = EDGE_PRED (bb, i);
845
846 if (op == NULL_TREE)
847 {
848 error ("PHI argument is missing for edge %d->%d",
849 e->src->index,
850 e->dest->index);
851 err = true;
852 goto error;
853 }
854
855 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
856 {
857 error ("PHI argument is not SSA_NAME, or invariant");
858 err = true;
859 }
860
861 if (TREE_CODE (op) == SSA_NAME)
862 {
863 err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
864 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
865 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
866 }
867
868 if (TREE_CODE (op) == ADDR_EXPR)
869 {
870 tree base = TREE_OPERAND (op, 0);
871 while (handled_component_p (base))
872 base = TREE_OPERAND (base, 0);
873 if ((TREE_CODE (base) == VAR_DECL
874 || TREE_CODE (base) == PARM_DECL
875 || TREE_CODE (base) == RESULT_DECL)
876 && !TREE_ADDRESSABLE (base))
877 {
878 error ("address taken, but ADDRESSABLE bit not set");
879 err = true;
880 }
881 }
882
883 if (e->dest != bb)
884 {
885 error ("wrong edge %d->%d for PHI argument",
886 e->src->index, e->dest->index);
887 err = true;
888 }
889
890 if (err)
891 {
892 fprintf (stderr, "PHI argument\n");
893 print_generic_stmt (stderr, op, TDF_VOPS);
894 goto error;
895 }
896 }
897
898 error:
899 if (err)
900 {
901 fprintf (stderr, "for PHI node\n");
902 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
903 }
904
905
906 return err;
907 }
908
909
910 /* Verify common invariants in the SSA web.
911 TODO: verify the variable annotations. */
912
913 DEBUG_FUNCTION void
914 verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
915 {
916 size_t i;
917 basic_block bb;
918 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
919 ssa_op_iter iter;
920 tree op;
921 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
922 bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
923
924 gcc_assert (!need_ssa_update_p (cfun));
925
926 timevar_push (TV_TREE_SSA_VERIFY);
927
928 /* Keep track of SSA names present in the IL. */
929 for (i = 1; i < num_ssa_names; i++)
930 {
931 tree name = ssa_name (i);
932 if (name)
933 {
934 gimple stmt;
935 TREE_VISITED (name) = 0;
936
937 verify_ssa_name (name, virtual_operand_p (name));
938
939 stmt = SSA_NAME_DEF_STMT (name);
940 if (!gimple_nop_p (stmt))
941 {
942 basic_block bb = gimple_bb (stmt);
943 if (verify_def (bb, definition_block,
944 name, stmt, virtual_operand_p (name)))
945 goto err;
946 }
947 }
948 }
949
950 calculate_dominance_info (CDI_DOMINATORS);
951
952 /* Now verify all the uses and make sure they agree with the definitions
953 found in the previous pass. */
954 FOR_EACH_BB_FN (bb, cfun)
955 {
956 edge e;
957 edge_iterator ei;
958
959 /* Make sure that all edges have a clear 'aux' field. */
960 FOR_EACH_EDGE (e, ei, bb->preds)
961 {
962 if (e->aux)
963 {
964 error ("AUX pointer initialized for edge %d->%d", e->src->index,
965 e->dest->index);
966 goto err;
967 }
968 }
969
970 /* Verify the arguments for every PHI node in the block. */
971 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
972 {
973 gphi *phi = gsi.phi ();
974 if (verify_phi_args (phi, bb, definition_block))
975 goto err;
976
977 bitmap_set_bit (names_defined_in_bb,
978 SSA_NAME_VERSION (gimple_phi_result (phi)));
979 }
980
981 /* Now verify all the uses and vuses in every statement of the block. */
982 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
983 gsi_next (&gsi))
984 {
985 gimple stmt = gsi_stmt (gsi);
986 use_operand_p use_p;
987
988 if (check_modified_stmt && gimple_modified_p (stmt))
989 {
990 error ("stmt (%p) marked modified after optimization pass: ",
991 (void *)stmt);
992 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
993 goto err;
994 }
995
996 if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
997 {
998 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
999 goto err;
1000 }
1001
1002 if (gimple_debug_bind_p (stmt)
1003 && !gimple_debug_bind_has_value_p (stmt))
1004 continue;
1005
1006 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
1007 {
1008 op = USE_FROM_PTR (use_p);
1009 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1010 use_p, stmt, false, names_defined_in_bb))
1011 goto err;
1012 }
1013
1014 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1015 {
1016 if (SSA_NAME_DEF_STMT (op) != stmt)
1017 {
1018 error ("SSA_NAME_DEF_STMT is wrong");
1019 fprintf (stderr, "Expected definition statement:\n");
1020 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1021 fprintf (stderr, "\nActual definition statement:\n");
1022 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1023 4, TDF_VOPS);
1024 goto err;
1025 }
1026 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1027 }
1028 }
1029
1030 bitmap_clear (names_defined_in_bb);
1031 }
1032
1033 free (definition_block);
1034
1035 /* Restore the dominance information to its prior known state, so
1036 that we do not perturb the compiler's subsequent behavior. */
1037 if (orig_dom_state == DOM_NONE)
1038 free_dominance_info (CDI_DOMINATORS);
1039 else
1040 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1041
1042 BITMAP_FREE (names_defined_in_bb);
1043 timevar_pop (TV_TREE_SSA_VERIFY);
1044 return;
1045
1046 err:
1047 internal_error ("verify_ssa failed");
1048 }
1049
1050
1051 /* Initialize global DFA and SSA structures. */
1052
1053 void
1054 init_tree_ssa (struct function *fn)
1055 {
1056 fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
1057 fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
1058 pt_solution_reset (&fn->gimple_df->escaped);
1059 init_ssanames (fn, 0);
1060 }
1061
1062 /* Do the actions required to initialize internal data structures used
1063 in tree-ssa optimization passes. */
1064
1065 static unsigned int
1066 execute_init_datastructures (void)
1067 {
1068 /* Allocate hash tables, arrays and other structures. */
1069 gcc_assert (!cfun->gimple_df);
1070 init_tree_ssa (cfun);
1071 return 0;
1072 }
1073
1074 namespace {
1075
1076 const pass_data pass_data_init_datastructures =
1077 {
1078 GIMPLE_PASS, /* type */
1079 "*init_datastructures", /* name */
1080 OPTGROUP_NONE, /* optinfo_flags */
1081 TV_NONE, /* tv_id */
1082 PROP_cfg, /* properties_required */
1083 0, /* properties_provided */
1084 0, /* properties_destroyed */
1085 0, /* todo_flags_start */
1086 0, /* todo_flags_finish */
1087 };
1088
1089 class pass_init_datastructures : public gimple_opt_pass
1090 {
1091 public:
1092 pass_init_datastructures (gcc::context *ctxt)
1093 : gimple_opt_pass (pass_data_init_datastructures, ctxt)
1094 {}
1095
1096 /* opt_pass methods: */
1097 virtual bool gate (function *fun)
1098 {
1099 /* Do nothing for functions that were already produced in SSA form. */
1100 return !(fun->curr_properties & PROP_ssa);
1101 }
1102
1103 virtual unsigned int execute (function *)
1104 {
1105 return execute_init_datastructures ();
1106 }
1107
1108 }; // class pass_init_datastructures
1109
1110 } // anon namespace
1111
1112 gimple_opt_pass *
1113 make_pass_init_datastructures (gcc::context *ctxt)
1114 {
1115 return new pass_init_datastructures (ctxt);
1116 }
1117
1118 /* Deallocate memory associated with SSA data structures for the current function. */
1119
1120 void
1121 delete_tree_ssa (void)
1122 {
1123 fini_ssanames ();
1124
1125 /* We no longer maintain the SSA operand cache at this point. */
1126 if (ssa_operands_active (cfun))
1127 fini_ssa_operands (cfun);
1128
1129 cfun->gimple_df->default_defs->empty ();
1130 cfun->gimple_df->default_defs = NULL;
1131 pt_solution_reset (&cfun->gimple_df->escaped);
1132 if (cfun->gimple_df->decls_to_pointers != NULL)
1133 delete cfun->gimple_df->decls_to_pointers;
1134 cfun->gimple_df->decls_to_pointers = NULL;
1135 cfun->gimple_df->modified_noreturn_calls = NULL;
1136 cfun->gimple_df = NULL;
1137
1138 /* We no longer need the edge variable maps. */
1139 redirect_edge_var_map_destroy ();
1140 }
1141
1142 /* Return true if EXPR is a useless type conversion, otherwise return
1143 false. */
1144
1145 bool
1146 tree_ssa_useless_type_conversion (tree expr)
1147 {
1148 /* If we have an assignment that merely uses a NOP_EXPR to change
1149 the top of the RHS to the type of the LHS and the type conversion
1150 is "safe", then strip away the type conversion so that we can
1151 enter LHS = RHS into the const_and_copies table. */
1152 if (CONVERT_EXPR_P (expr)
1153 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1154 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1155 return useless_type_conversion_p
1156 (TREE_TYPE (expr),
1157 TREE_TYPE (TREE_OPERAND (expr, 0)));
1158
1159 return false;
1160 }
1161
1162 /* Strip conversions from EXP according to
1163 tree_ssa_useless_type_conversion and return the resulting
1164 expression. */
1165
1166 tree
1167 tree_ssa_strip_useless_type_conversions (tree exp)
1168 {
1169 while (tree_ssa_useless_type_conversion (exp))
1170 exp = TREE_OPERAND (exp, 0);
1171 return exp;
1172 }
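/* For example (illustrative): if EXP is (int) (int) x_1 where x_1 already
   has type int, or a NOP_EXPR between two types that
   useless_type_conversion_p treats as interchangeable (say int and a
   typedef of int), the conversions are peeled off and the bare operand is
   returned; a narrowing conversion such as (short) x_1 is kept.  */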
1173
1174
1175 /* Return true if T, an SSA_NAME, has an undefined value. PARTIAL is what
1176 should be returned if the value is only partially undefined. */
1177
1178 bool
1179 ssa_undefined_value_p (tree t, bool partial)
1180 {
1181 gimple def_stmt;
1182 tree var = SSA_NAME_VAR (t);
1183
1184 if (!var)
1185 ;
1186 /* Parameters get their initial value from the function entry. */
1187 else if (TREE_CODE (var) == PARM_DECL)
1188 return false;
1189 /* When returning by reference the return address is actually a hidden
1190 parameter. */
1191 else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1192 return false;
1193 /* Hard register variables get their initial value from the ether. */
1194 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1195 return false;
1196
1197 /* The value is undefined iff its definition statement is empty. */
1198 def_stmt = SSA_NAME_DEF_STMT (t);
1199 if (gimple_nop_p (def_stmt))
1200 return true;
1201
1202 /* Check whether the complex value was only partially defined. */
1203 if (partial && is_gimple_assign (def_stmt)
1204 && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1205 {
1206 tree rhs1, rhs2;
1207
1208 rhs1 = gimple_assign_rhs1 (def_stmt);
1209 rhs2 = gimple_assign_rhs2 (def_stmt);
1210 return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1211 || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1212 }
1213 return false;
1214 }
1215
1216
1217 /* If necessary, rewrite the base of the reference tree *TP from
1218 a MEM_REF to a plain or converted symbol. */
1219
1220 static void
1221 maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
1222 {
1223 tree sym;
1224
1225 while (handled_component_p (*tp))
1226 tp = &TREE_OPERAND (*tp, 0);
1227 if (TREE_CODE (*tp) == MEM_REF
1228 && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
1229 && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1230 && DECL_P (sym)
1231 && !TREE_ADDRESSABLE (sym)
1232 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1233 {
1234 if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1235 && useless_type_conversion_p (TREE_TYPE (*tp),
1236 TREE_TYPE (TREE_TYPE (sym)))
1237 && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1238 TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1239 {
1240 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1241 TYPE_SIZE (TREE_TYPE (*tp)),
1242 int_const_binop (MULT_EXPR,
1243 bitsize_int (BITS_PER_UNIT),
1244 TREE_OPERAND (*tp, 1)));
1245 }
1246 else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1247 && useless_type_conversion_p (TREE_TYPE (*tp),
1248 TREE_TYPE (TREE_TYPE (sym))))
1249 {
1250 *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1251 ? REALPART_EXPR : IMAGPART_EXPR,
1252 TREE_TYPE (*tp), sym);
1253 }
1254 else if (integer_zerop (TREE_OPERAND (*tp, 1)))
1255 {
1256 if (!useless_type_conversion_p (TREE_TYPE (*tp),
1257 TREE_TYPE (sym)))
1258 *tp = build1 (VIEW_CONVERT_EXPR,
1259 TREE_TYPE (*tp), sym);
1260 else
1261 *tp = sym;
1262 }
1263 }
1264 }
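/* Illustrative effect (hypothetical GIMPLE): for a local "_Complex float c"
   that is no longer addressable and is marked suitable for renaming, an
   access such as

       MEM[(float *)&c, 0]

   is rewritten to REALPART_EXPR <c> (a nonzero offset yields
   IMAGPART_EXPR <c>); a matching element access of a vector symbol becomes
   a BIT_FIELD_REF, and a whole-object access of a different type becomes a
   VIEW_CONVERT_EXPR of the symbol.  */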
1265
1266 /* For a tree REF return its base if it is the base of a MEM_REF
1267 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1268
1269 static tree
1270 non_rewritable_mem_ref_base (tree ref)
1271 {
1272 tree base = ref;
1273
1274 /* A plain decl does not need it set. */
1275 if (DECL_P (ref))
1276 return NULL_TREE;
1277
1278 while (handled_component_p (base))
1279 base = TREE_OPERAND (base, 0);
1280
1281 /* But watch out for MEM_REFs we cannot lower to a
1282 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1283 if (TREE_CODE (base) == MEM_REF
1284 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1285 {
1286 tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1287 if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1288 || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
1289 && useless_type_conversion_p (TREE_TYPE (base),
1290 TREE_TYPE (TREE_TYPE (decl)))
1291 && wi::fits_uhwi_p (mem_ref_offset (base))
1292 && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1293 mem_ref_offset (base))
1294 && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1295 TYPE_SIZE_UNIT (TREE_TYPE (base))))
1296 return NULL_TREE;
1297 if (DECL_P (decl)
1298 && (!integer_zerop (TREE_OPERAND (base, 1))
1299 || (DECL_SIZE (decl)
1300 != TYPE_SIZE (TREE_TYPE (base)))
1301 || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
1302 return decl;
1303 }
1304
1305 return NULL_TREE;
1306 }
1307
1308 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1309 Otherwise return false. */
1310
1311 static bool
1312 non_rewritable_lvalue_p (tree lhs)
1313 {
1314 /* A plain decl is always rewritable. */
1315 if (DECL_P (lhs))
1316 return false;
1317
1318 /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
1319 a reasonably efficient manner... */
1320 if ((TREE_CODE (lhs) == REALPART_EXPR
1321 || TREE_CODE (lhs) == IMAGPART_EXPR)
1322 && DECL_P (TREE_OPERAND (lhs, 0)))
1323 return false;
1324
1325 /* A decl that is wrapped inside a MEM-REF that covers
1326 it in full is also rewritable.
1327 ??? The following could be relaxed allowing component
1328 references that do not change the access size. */
1329 if (TREE_CODE (lhs) == MEM_REF
1330 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1331 && integer_zerop (TREE_OPERAND (lhs, 1)))
1332 {
1333 tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1334 if (DECL_P (decl)
1335 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1336 && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1337 return false;
1338 }
1339
1340 return true;
1341 }
1342
1343 /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
1344 mark the variable VAR for conversion into SSA. Variables that become
1345 suitable for renaming have their DECL_UID recorded in SUITABLE_FOR_RENAMING. */
1346
1347 static void
1348 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
1349 bitmap suitable_for_renaming)
1350 {
1351 /* Global variables and result decls cannot be changed. */
1352 if (is_global_var (var)
1353 || TREE_CODE (var) == RESULT_DECL
1354 || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1355 return;
1356
1357 if (TREE_ADDRESSABLE (var)
1358 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1359 a non-register. Otherwise we are confused and forget to
1360 add virtual operands for it. */
1361 && (!is_gimple_reg_type (TREE_TYPE (var))
1362 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1363 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1364 || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1365 {
1366 TREE_ADDRESSABLE (var) = 0;
1367 if (is_gimple_reg (var))
1368 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1369 if (dump_file)
1370 {
1371 fprintf (dump_file, "No longer having address taken: ");
1372 print_generic_expr (dump_file, var, 0);
1373 fprintf (dump_file, "\n");
1374 }
1375 }
1376
1377 if (!DECL_GIMPLE_REG_P (var)
1378 && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1379 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1380 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1381 && !TREE_THIS_VOLATILE (var)
1382 && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
1383 {
1384 DECL_GIMPLE_REG_P (var) = 1;
1385 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1386 if (dump_file)
1387 {
1388 fprintf (dump_file, "Now a gimple register: ");
1389 print_generic_expr (dump_file, var, 0);
1390 fprintf (dump_file, "\n");
1391 }
1392 }
1393 }
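/* For example (illustrative): a local "_Complex double d" whose address is
   never taken and that is not required to stay a non-register gets
   DECL_GIMPLE_REG_P set here, and a scalar local whose address-taking uses
   have all been removed has TREE_ADDRESSABLE cleared; in either case the
   decl's UID is recorded in SUITABLE_FOR_RENAMING so that the operands and
   SSA form are updated afterwards.  */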
1394
1395 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
1396
1397 void
1398 execute_update_addresses_taken (void)
1399 {
1400 basic_block bb;
1401 bitmap addresses_taken = BITMAP_ALLOC (NULL);
1402 bitmap not_reg_needs = BITMAP_ALLOC (NULL);
1403 bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
1404 tree var;
1405 unsigned i;
1406
1407 timevar_push (TV_ADDRESS_TAKEN);
1408
1409 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1410 the function body. */
1411 FOR_EACH_BB_FN (bb, cfun)
1412 {
1413 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1414 gsi_next (&gsi))
1415 {
1416 gimple stmt = gsi_stmt (gsi);
1417 enum gimple_code code = gimple_code (stmt);
1418 tree decl;
1419
1420 /* Note all addresses taken by the stmt. */
1421 gimple_ior_addresses_taken (addresses_taken, stmt);
1422
1423 /* If we have a call or an assignment, see if the lhs contains
1424 a local decl that requires not to be a gimple register. */
1425 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1426 {
1427 tree lhs = gimple_get_lhs (stmt);
1428 if (lhs
1429 && TREE_CODE (lhs) != SSA_NAME
1430 && non_rewritable_lvalue_p (lhs))
1431 {
1432 decl = get_base_address (lhs);
1433 if (DECL_P (decl))
1434 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1435 }
1436 }
1437
1438 if (gimple_assign_single_p (stmt))
1439 {
1440 tree rhs = gimple_assign_rhs1 (stmt);
1441 if ((decl = non_rewritable_mem_ref_base (rhs)))
1442 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1443 }
1444
1445 else if (code == GIMPLE_CALL)
1446 {
1447 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1448 {
1449 tree arg = gimple_call_arg (stmt, i);
1450 if ((decl = non_rewritable_mem_ref_base (arg)))
1451 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1452 }
1453 }
1454
1455 else if (code == GIMPLE_ASM)
1456 {
1457 gasm *asm_stmt = as_a <gasm *> (stmt);
1458 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1459 {
1460 tree link = gimple_asm_output_op (asm_stmt, i);
1461 tree lhs = TREE_VALUE (link);
1462 if (TREE_CODE (lhs) != SSA_NAME)
1463 {
1464 decl = get_base_address (lhs);
1465 if (DECL_P (decl)
1466 && (non_rewritable_lvalue_p (lhs)
1467 /* We cannot move required conversions from
1468 the lhs to the rhs in asm statements, so
1469 require we do not need any. */
1470 || !useless_type_conversion_p
1471 (TREE_TYPE (lhs), TREE_TYPE (decl))))
1472 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1473 }
1474 }
1475 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1476 {
1477 tree link = gimple_asm_input_op (asm_stmt, i);
1478 if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1479 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1480 }
1481 }
1482 }
1483
1484 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1485 gsi_next (&gsi))
1486 {
1487 size_t i;
1488 gphi *phi = gsi.phi ();
1489
1490 for (i = 0; i < gimple_phi_num_args (phi); i++)
1491 {
1492 tree op = PHI_ARG_DEF (phi, i), var;
1493 if (TREE_CODE (op) == ADDR_EXPR
1494 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1495 && DECL_P (var))
1496 bitmap_set_bit (addresses_taken, DECL_UID (var));
1497 }
1498 }
1499 }
1500
1501 /* We cannot iterate over all referenced vars because that can contain
1502 unused vars from BLOCK trees, which causes code generation differences
1503 for -g vs. -g0. */
1504 for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1505 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1506 suitable_for_renaming);
1507
1508 FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1509 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1510 suitable_for_renaming);
1511
1512 /* Operand caches need to be recomputed for operands referencing the updated
1513 variables and operands need to be rewritten to expose bare symbols. */
1514 if (!bitmap_empty_p (suitable_for_renaming))
1515 {
1516 FOR_EACH_BB_FN (bb, cfun)
1517 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1518 {
1519 gimple stmt = gsi_stmt (gsi);
1520
1521 /* Re-write TARGET_MEM_REFs of symbols we want to
1522 rewrite into SSA form. */
1523 if (gimple_assign_single_p (stmt))
1524 {
1525 tree lhs = gimple_assign_lhs (stmt);
1526 tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1527 tree sym;
1528
1529 /* Rewrite LHS IMAG/REALPART_EXPR similar to
1530 gimplify_modify_expr_complex_part. */
1531 if ((TREE_CODE (lhs) == IMAGPART_EXPR
1532 || TREE_CODE (lhs) == REALPART_EXPR)
1533 && DECL_P (TREE_OPERAND (lhs, 0))
1534 && bitmap_bit_p (suitable_for_renaming,
1535 DECL_UID (TREE_OPERAND (lhs, 0))))
1536 {
1537 tree other = make_ssa_name (TREE_TYPE (lhs));
1538 tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
1539 ? REALPART_EXPR : IMAGPART_EXPR,
1540 TREE_TYPE (other),
1541 TREE_OPERAND (lhs, 0));
1542 gimple load = gimple_build_assign (other, lrhs);
1543 location_t loc = gimple_location (stmt);
1544 gimple_set_location (load, loc);
1545 gimple_set_vuse (load, gimple_vuse (stmt));
1546 gsi_insert_before (&gsi, load, GSI_SAME_STMT);
1547 gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
1548 gimple_assign_set_rhs_with_ops
1549 (&gsi, COMPLEX_EXPR,
1550 TREE_CODE (lhs) == IMAGPART_EXPR
1551 ? other : gimple_assign_rhs1 (stmt),
1552 TREE_CODE (lhs) == IMAGPART_EXPR
1553 ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
1554 stmt = gsi_stmt (gsi);
1555 unlink_stmt_vdef (stmt);
1556 update_stmt (stmt);
1557 continue;
1558 }
1559
1560 /* We shouldn't have any fancy wrapping of
1561 component-refs on the LHS, but look through
1562 VIEW_CONVERT_EXPRs as that is easy. */
1563 while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1564 lhs = TREE_OPERAND (lhs, 0);
1565 if (TREE_CODE (lhs) == MEM_REF
1566 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1567 && integer_zerop (TREE_OPERAND (lhs, 1))
1568 && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1569 && DECL_P (sym)
1570 && !TREE_ADDRESSABLE (sym)
1571 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1572 lhs = sym;
1573 else
1574 lhs = gimple_assign_lhs (stmt);
1575
1576 /* Rewrite the RHS and make sure the resulting assignment
1577 is validly typed. */
1578 maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1579 rhs = gimple_assign_rhs1 (stmt);
1580 if (gimple_assign_lhs (stmt) != lhs
1581 && !useless_type_conversion_p (TREE_TYPE (lhs),
1582 TREE_TYPE (rhs)))
1583 rhs = fold_build1 (VIEW_CONVERT_EXPR,
1584 TREE_TYPE (lhs), rhs);
1585
1586 if (gimple_assign_lhs (stmt) != lhs)
1587 gimple_assign_set_lhs (stmt, lhs);
1588
1589 if (gimple_assign_rhs1 (stmt) != rhs)
1590 {
1591 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1592 gimple_assign_set_rhs_from_tree (&gsi, rhs);
1593 }
1594 }
1595
1596 else if (gimple_code (stmt) == GIMPLE_CALL)
1597 {
1598 unsigned i;
1599 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1600 {
1601 tree *argp = gimple_call_arg_ptr (stmt, i);
1602 maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
1603 }
1604 }
1605
1606 else if (gimple_code (stmt) == GIMPLE_ASM)
1607 {
1608 gasm *asm_stmt = as_a <gasm *> (stmt);
1609 unsigned i;
1610 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1611 {
1612 tree link = gimple_asm_output_op (asm_stmt, i);
1613 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1614 suitable_for_renaming);
1615 }
1616 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1617 {
1618 tree link = gimple_asm_input_op (asm_stmt, i);
1619 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1620 suitable_for_renaming);
1621 }
1622 }
1623
1624 else if (gimple_debug_bind_p (stmt)
1625 && gimple_debug_bind_has_value_p (stmt))
1626 {
1627 tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
1628 tree decl;
1629 maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
1630 decl = non_rewritable_mem_ref_base (*valuep);
1631 if (decl
1632 && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
1633 gimple_debug_bind_reset_value (stmt);
1634 }
1635
1636 if (gimple_references_memory_p (stmt)
1637 || is_gimple_debug (stmt))
1638 update_stmt (stmt);
1639
1640 gsi_next (&gsi);
1641 }
1642
1643 /* Update SSA form here; we are called as a non-pass as well. */
1644 if (number_of_loops (cfun) > 1
1645 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
1646 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
1647 else
1648 update_ssa (TODO_update_ssa);
1649 }
1650
1651 BITMAP_FREE (not_reg_needs);
1652 BITMAP_FREE (addresses_taken);
1653 BITMAP_FREE (suitable_for_renaming);
1654 timevar_pop (TV_ADDRESS_TAKEN);
1655 }
1656
1657 namespace {
1658
1659 const pass_data pass_data_update_address_taken =
1660 {
1661 GIMPLE_PASS, /* type */
1662 "addressables", /* name */
1663 OPTGROUP_NONE, /* optinfo_flags */
1664 TV_ADDRESS_TAKEN, /* tv_id */
1665 PROP_ssa, /* properties_required */
1666 0, /* properties_provided */
1667 0, /* properties_destroyed */
1668 0, /* todo_flags_start */
1669 TODO_update_address_taken, /* todo_flags_finish */
1670 };
1671
1672 class pass_update_address_taken : public gimple_opt_pass
1673 {
1674 public:
1675 pass_update_address_taken (gcc::context *ctxt)
1676 : gimple_opt_pass (pass_data_update_address_taken, ctxt)
1677 {}
1678
1679 /* opt_pass methods: */
1680
1681 }; // class pass_update_address_taken
1682
1683 } // anon namespace
1684
1685 gimple_opt_pass *
1686 make_pass_update_address_taken (gcc::context *ctxt)
1687 {
1688 return new pass_update_address_taken (ctxt);
1689 }