/* gcc/tree-ssa.c — miscellaneous SSA utility functions (GCC sources).  */
1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "cfghooks.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "diagnostic-core.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "gimple-fold.h"
34 #include "gimplify.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "tree-ssa-loop-manip.h"
38 #include "tree-into-ssa.h"
39 #include "tree-ssa.h"
40 #include "cfgloop.h"
41 #include "cfgexpand.h"
42 #include "tree-cfg.h"
43 #include "tree-dfa.h"
44
/* Map from edge to the vector of PHI (result, def, location) triples
   saved off that edge by ssa_redirect_edge / redirect_edge_var_map_add.
   Lazily allocated on first use; multiple edges may share entries after
   redirect_edge_var_map_dup.  */
static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
47
48
49 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
50
51 void
52 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
53 {
54 edge_var_map new_node;
55
56 if (edge_var_maps == NULL)
57 edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
58
59 auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
60 new_node.def = def;
61 new_node.result = result;
62 new_node.locus = locus;
63
64 slot.safe_push (new_node);
65 }
66
67
68 /* Clear the var mappings in edge E. */
69
70 void
71 redirect_edge_var_map_clear (edge e)
72 {
73 if (!edge_var_maps)
74 return;
75
76 auto_vec<edge_var_map> *head = edge_var_maps->get (e);
77
78 if (head)
79 head->release ();
80 }
81
82
83 /* Duplicate the redirected var mappings in OLDE in NEWE.
84
85 This assumes a hash_map can have multiple edges mapping to the same
86 var_map (many to one mapping), since we don't remove the previous mappings.
87 */
88
89 void
90 redirect_edge_var_map_dup (edge newe, edge olde)
91 {
92 if (!edge_var_maps)
93 return;
94
95 auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
96 auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
97 if (!old_head)
98 return;
99
100 new_head->safe_splice (*old_head);
101 }
102
103
104 /* Return the variable mappings for a given edge. If there is none, return
105 NULL. */
106
107 vec<edge_var_map> *
108 redirect_edge_var_map_vector (edge e)
109 {
110 /* Hey, what kind of idiot would... you'd be surprised. */
111 if (!edge_var_maps)
112 return NULL;
113
114 auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
115 if (!slot)
116 return NULL;
117
118 return slot;
119 }
120
121 /* Clear the edge variable mappings. */
122
123 void
124 redirect_edge_var_map_empty (void)
125 {
126 if (edge_var_maps)
127 edge_var_maps->empty ();
128 }
129
130
131 /* Remove the corresponding arguments from the PHI nodes in E's
132 destination block and redirect it to DEST. Return redirected edge.
133 The list of removed arguments is stored in a vector accessed
134 through edge_var_maps. */
135
136 edge
137 ssa_redirect_edge (edge e, basic_block dest)
138 {
139 gphi_iterator gsi;
140 gphi *phi;
141
142 redirect_edge_var_map_clear (e);
143
144 /* Remove the appropriate PHI arguments in E's destination block. */
145 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
146 {
147 tree def;
148 source_location locus ;
149
150 phi = gsi.phi ();
151 def = gimple_phi_arg_def (phi, e->dest_idx);
152 locus = gimple_phi_arg_location (phi, e->dest_idx);
153
154 if (def == NULL_TREE)
155 continue;
156
157 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
158 }
159
160 e = redirect_edge_succ_nodup (e, dest);
161
162 return e;
163 }
164
165
166 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
167 E->dest. */
168
169 void
170 flush_pending_stmts (edge e)
171 {
172 gphi *phi;
173 edge_var_map *vm;
174 int i;
175 gphi_iterator gsi;
176
177 vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
178 if (!v)
179 return;
180
181 for (gsi = gsi_start_phis (e->dest), i = 0;
182 !gsi_end_p (gsi) && v->iterate (i, &vm);
183 gsi_next (&gsi), i++)
184 {
185 tree def;
186
187 phi = gsi.phi ();
188 def = redirect_edge_var_map_def (vm);
189 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
190 }
191
192 redirect_edge_var_map_clear (e);
193 }
194
195 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
196 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
197 expression with a different value.
198
199 This will update any annotations (say debug bind stmts) referring
200 to the original LHS, so that they use the RHS instead. This is
201 done even if NLHS and LHS are the same, for it is understood that
202 the RHS will be modified afterwards, and NLHS will not be assigned
203 an equivalent value.
204
205 Adjusting any non-annotation uses of the LHS, if needed, is a
206 responsibility of the caller.
207
208 The effect of this call should be pretty much the same as that of
209 inserting a copy of STMT before STMT, and then removing the
210 original stmt, at which time gsi_remove() would have update
211 annotations, but using this function saves all the inserting,
212 copying and removing. */
213
214 void
215 gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
216 {
217 if (MAY_HAVE_DEBUG_STMTS)
218 {
219 tree lhs = gimple_get_lhs (stmt);
220
221 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
222
223 insert_debug_temp_for_var_def (NULL, lhs);
224 }
225
226 gimple_set_lhs (stmt, nlhs);
227 }
228
229
230 /* Given a tree for an expression for which we might want to emit
231 locations or values in debug information (generally a variable, but
232 we might deal with other kinds of trees in the future), return the
233 tree that should be used as the variable of a DEBUG_BIND STMT or
234 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
235
236 tree
237 target_for_debug_bind (tree var)
238 {
239 if (!MAY_HAVE_DEBUG_STMTS)
240 return NULL_TREE;
241
242 if (TREE_CODE (var) == SSA_NAME)
243 {
244 var = SSA_NAME_VAR (var);
245 if (var == NULL_TREE)
246 return NULL_TREE;
247 }
248
249 if ((TREE_CODE (var) != VAR_DECL
250 || VAR_DECL_IS_VIRTUAL_OPERAND (var))
251 && TREE_CODE (var) != PARM_DECL)
252 return NULL_TREE;
253
254 if (DECL_HAS_VALUE_EXPR_P (var))
255 return target_for_debug_bind (DECL_VALUE_EXPR (var));
256
257 if (DECL_IGNORED_P (var))
258 return NULL_TREE;
259
260 /* var-tracking only tracks registers. */
261 if (!is_gimple_reg_type (TREE_TYPE (var)))
262 return NULL_TREE;
263
264 return var;
265 }
266
267 /* Called via walk_tree, look for SSA_NAMEs that have already been
268 released. */
269
270 static tree
271 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
272 {
273 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
274
275 if (wi && wi->is_lhs)
276 return NULL_TREE;
277
278 if (TREE_CODE (*tp) == SSA_NAME)
279 {
280 if (SSA_NAME_IN_FREE_LIST (*tp))
281 return *tp;
282
283 *walk_subtrees = 0;
284 }
285 else if (IS_TYPE_OR_DECL_P (*tp))
286 *walk_subtrees = 0;
287
288 return NULL_TREE;
289 }
290
/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.

   GSI, if non-NULL, is the insertion point and is assumed to refer to
   VAR's defining statement; otherwise the definition is looked up via
   SSA_NAME_DEF_STMT.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple *stmt;
  gimple *def_stmt = NULL;
  /* Number of debug uses found; saturates at 2 — one means VALUE may
     be propagated directly, more forces a debug temp.  */
  int usecount = 0;
  tree value = NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
	continue;

      if (usecount++)
	break;

      if (gimple_debug_bind_get_value (stmt) != var)
	{
	  /* Count this as an additional use, so as to make sure we
	     use a temp unless VAR's definition has a SINGLE_RHS that
	     can be shared.  */
	  usecount++;
	  break;
	}
    }

  /* No debug uses at all: nothing to preserve.  */
  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      /* A PHI can only be propagated if it is degenerate (all
	 arguments identical) and that value is still live.  */
      value = degenerate_phi_result (as_a <gphi *> (def_stmt));
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
	value = NULL;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
	 to.  */
      else if (value == error_mark_node)
	value = NULL;
    }
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
	{
	  struct walk_stmt_info wi;

	  memset (&wi, 0, sizeof (wi));

	  /* When removing blocks without following reverse dominance
	     order, we may sometimes encounter SSA_NAMEs that have
	     already been released, referenced in other SSA_DEFs that
	     we're about to release.  Consider:

	     <bb X>:
	     v_1 = foo;

	     <bb Y>:
	     w_2 = v_1 + bar;
	     # DEBUG w => w_2

	     If we deleted BB X first, propagating the value of w_2
	     won't do us any good.  It's too late to recover their
	     original definition of v_1: when it was deleted, it was
	     only referenced in other DEFs, it couldn't possibly know
	     it should have been retained, and propagating every
	     single DEF just in case it might have to be propagated
	     into a DEBUG STMT would probably be too wasteful.

	     When dominator information is not readily available, we
	     check for and accept some loss of debug information.  But
	     if it is available, there's no excuse for us to remove
	     blocks in the wrong order, so we don't even check for
	     dead SSA NAMEs.  SSA verification shall catch any
	     errors.  */
	  if ((!gsi && !gimple_bb (def_stmt))
	      || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
	    no_value = true;
	}

      if (!no_value)
	value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
	 expression (usecount would have been incremented again
	 otherwise), and the definition involves only constants and
	 SSA names, then we can propagate VALUE into this single use,
	 avoiding the temp.

	 We can also avoid using a temp if VALUE can be shared and
	 propagated into all uses, without generating expressions that
	 wouldn't be valid gimple RHSs.

	 Other cases that would require unsharing or non-gimple RHSs
	 are deferred to a debug temp, although we could avoid temps
	 at the expense of duplication of expressions.  */

      if (CONSTANT_CLASS_P (value)
	  || gimple_code (def_stmt) == GIMPLE_PHI
	  || (usecount == 1
	      && (!gimple_assign_single_p (def_stmt)
		  || is_gimple_min_invariant (value)))
	  || is_gimple_reg (value))
	;
      else
	{
	  /* Introduce a DEBUG_EXPR_DECL temp bound to VALUE and
	     propagate the temp instead of VALUE itself.  */
	  gdebug *def_temp;
	  tree vexpr = make_node (DEBUG_EXPR_DECL);

	  def_temp = gimple_build_debug_bind (vexpr,
					      unshare_expr (value),
					      def_stmt);

	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (value);
	  if (DECL_P (value))
	    DECL_MODE (vexpr) = DECL_MODE (value);
	  else
	    DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));

	  if (gsi)
	    gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
	  else
	    {
	      gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
	      gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
	    }

	  value = vexpr;
	}
    }

  /* Rewrite every debug bind of VAR: either substitute VALUE, or
     reset the bind if no value could be recovered.  */
  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
	continue;

      if (value)
	{
	  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
	    /* unshare_expr is not needed here.  vexpr is either a
	       SINGLE_RHS, that can be safely shared, some other RHS
	       that was unshared when we found it had a single debug
	       use, or a DEBUG_EXPR_DECL, that can be safely
	       shared.  */
	    SET_USE (use_p, unshare_expr (value));
	  /* If we didn't replace uses with a debug decl fold the
	     resulting expression.  Otherwise we end up with invalid IL.  */
	  if (TREE_CODE (value) != DEBUG_EXPR_DECL)
	    {
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	      fold_stmt_inplace (&gsi);
	    }
	}
      else
	gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}
480
481
482 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
483 other DEBUG stmts, and replace uses of the DEF with the
484 newly-created debug temp. */
485
486 void
487 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
488 {
489 gimple *stmt;
490 ssa_op_iter op_iter;
491 def_operand_p def_p;
492
493 if (!MAY_HAVE_DEBUG_STMTS)
494 return;
495
496 stmt = gsi_stmt (*gsi);
497
498 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
499 {
500 tree var = DEF_FROM_PTR (def_p);
501
502 if (TREE_CODE (var) != SSA_NAME)
503 continue;
504
505 insert_debug_temp_for_var_def (gsi, var);
506 }
507 }
508
509 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
510
511 void
512 reset_debug_uses (gimple *stmt)
513 {
514 ssa_op_iter op_iter;
515 def_operand_p def_p;
516 imm_use_iterator imm_iter;
517 gimple *use_stmt;
518
519 if (!MAY_HAVE_DEBUG_STMTS)
520 return;
521
522 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
523 {
524 tree var = DEF_FROM_PTR (def_p);
525
526 if (TREE_CODE (var) != SSA_NAME)
527 continue;
528
529 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
530 {
531 if (!gimple_debug_bind_p (use_stmt))
532 continue;
533
534 gimple_debug_bind_reset_value (use_stmt);
535 update_stmt (use_stmt);
536 }
537 }
538 }
539
/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill, this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.  Each outer pass removes every
     def that is not used by another to-be-removed def; deferred defs
     become removable on a later pass.  */
  while (!bitmap_empty_p (toremove))
    EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
      {
	bool remove_now = true;
	tree var = ssa_name (j);
	gimple *stmt;
	imm_use_iterator uit;

	FOR_EACH_IMM_USE_STMT (stmt, uit, var)
	  {
	    ssa_op_iter dit;
	    def_operand_p def_p;

	    /* We can't propagate PHI nodes into debug stmts.  */
	    if (gimple_code (stmt) == GIMPLE_PHI
		|| is_gimple_debug (stmt))
	      continue;

	    /* If we find another definition to remove that uses
	       the one we're looking at, defer the removal of this
	       one, so that it can be propagated into debug stmts
	       after the other is.  */
	    FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
	      {
		tree odef = DEF_FROM_PTR (def_p);

		if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
		  {
		    remove_now = false;
		    break;
		  }
	      }

	    /* Must leave via BREAK_FROM_IMM_USE_STMT to unwind the
	       imm-use iterator state correctly.  */
	    if (!remove_now)
	      BREAK_FROM_IMM_USE_STMT (uit);
	  }

	if (remove_now)
	  {
	    gimple *def = SSA_NAME_DEF_STMT (var);
	    gimple_stmt_iterator gsi = gsi_for_stmt (def);

	    if (gimple_code (def) == GIMPLE_PHI)
	      remove_phi_node (&gsi, true);
	    else
	      {
		gsi_remove (&gsi, true);
		release_defs (def);
	      }

	    bitmap_clear_bit (toremove, j);
	  }
      }
}
607
608 /* Verify virtual SSA form. */
609
610 bool
611 verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
612 {
613 bool err = false;
614
615 if (bitmap_bit_p (visited, bb->index))
616 return false;
617
618 bitmap_set_bit (visited, bb->index);
619
620 /* Pick up the single virtual PHI def. */
621 gphi *phi = NULL;
622 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
623 gsi_next (&si))
624 {
625 tree res = gimple_phi_result (si.phi ());
626 if (virtual_operand_p (res))
627 {
628 if (phi)
629 {
630 error ("multiple virtual PHI nodes in BB %d", bb->index);
631 print_gimple_stmt (stderr, phi, 0, 0);
632 print_gimple_stmt (stderr, si.phi (), 0, 0);
633 err = true;
634 }
635 else
636 phi = si.phi ();
637 }
638 }
639 if (phi)
640 {
641 current_vdef = gimple_phi_result (phi);
642 if (TREE_CODE (current_vdef) != SSA_NAME)
643 {
644 error ("virtual definition is not an SSA name");
645 print_gimple_stmt (stderr, phi, 0, 0);
646 err = true;
647 }
648 }
649
650 /* Verify stmts. */
651 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
652 gsi_next (&gsi))
653 {
654 gimple *stmt = gsi_stmt (gsi);
655 tree vuse = gimple_vuse (stmt);
656 if (vuse)
657 {
658 if (vuse != current_vdef)
659 {
660 error ("stmt with wrong VUSE");
661 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
662 fprintf (stderr, "expected ");
663 print_generic_expr (stderr, current_vdef, 0);
664 fprintf (stderr, "\n");
665 err = true;
666 }
667 tree vdef = gimple_vdef (stmt);
668 if (vdef)
669 {
670 current_vdef = vdef;
671 if (TREE_CODE (current_vdef) != SSA_NAME)
672 {
673 error ("virtual definition is not an SSA name");
674 print_gimple_stmt (stderr, phi, 0, 0);
675 err = true;
676 }
677 }
678 }
679 }
680
681 /* Verify destination PHI uses and recurse. */
682 edge_iterator ei;
683 edge e;
684 FOR_EACH_EDGE (e, ei, bb->succs)
685 {
686 gphi *phi = get_virtual_phi (e->dest);
687 if (phi
688 && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
689 {
690 error ("PHI node with wrong VUSE on edge from BB %d",
691 e->src->index);
692 print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
693 fprintf (stderr, "expected ");
694 print_generic_expr (stderr, current_vdef, 0);
695 fprintf (stderr, "\n");
696 err = true;
697 }
698
699 /* Recurse. */
700 err |= verify_vssa (e->dest, current_vdef, visited);
701 }
702
703 return err;
704 }
705
706 /* Return true if SSA_NAME is malformed and mark it visited.
707
708 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
709 operand. */
710
711 static bool
712 verify_ssa_name (tree ssa_name, bool is_virtual)
713 {
714 if (TREE_CODE (ssa_name) != SSA_NAME)
715 {
716 error ("expected an SSA_NAME object");
717 return true;
718 }
719
720 if (SSA_NAME_IN_FREE_LIST (ssa_name))
721 {
722 error ("found an SSA_NAME that had been released into the free pool");
723 return true;
724 }
725
726 if (SSA_NAME_VAR (ssa_name) != NULL_TREE
727 && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
728 {
729 error ("type mismatch between an SSA_NAME and its symbol");
730 return true;
731 }
732
733 if (is_virtual && !virtual_operand_p (ssa_name))
734 {
735 error ("found a virtual definition for a GIMPLE register");
736 return true;
737 }
738
739 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
740 {
741 error ("virtual SSA name for non-VOP decl");
742 return true;
743 }
744
745 if (!is_virtual && virtual_operand_p (ssa_name))
746 {
747 error ("found a real definition for a non-register");
748 return true;
749 }
750
751 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
752 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
753 {
754 error ("found a default name with a non-empty defining statement");
755 return true;
756 }
757
758 return false;
759 }
760
761
/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.  On success this function records BB in
   that slot as a side effect.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
	    gimple *stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  /* A DECL_BY_REFERENCE result is addressable memory; it must never be
     written through an SSA definition.  */
  if (SSA_NAME_VAR (ssa_name)
      && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
      && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
    {
      error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
      goto err;
    }

  /* Each version may be defined exactly once.  */
  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
	     definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  /* Common diagnostic context for all failure paths above.  */
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name, 0);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}
817
818
/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
   is flowing through an abnormal edge (only used when checking PHI
   arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
   that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
	    gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  /* Check the immediate-use chain once per name; TREE_VISITED marks
     names whose chain has already been verified.  */
  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
	     def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
	   && names_defined_in_bb != NULL
	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      /* The list head stores the owning SSA name instead of a use.  */
      if (use_p->prev->use == NULL)
	listvar = use_p->prev->loc.ssa_name;
      else
	listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
	{
	  error ("wrong immediate use list");
	  err = true;
	}
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}
907
908
/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.  */

static bool
verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  /* PHI arguments correspond one-to-one to BB's incoming edges.  */
  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
	{
	  error ("PHI argument is missing for edge %d->%d",
	         e->src->index,
	         e->dest->index);
	  err = true;
	  goto error;
	}

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
	{
	  error ("PHI argument is not SSA_NAME, or invariant");
	  err = true;
	}

      if (TREE_CODE (op) == SSA_NAME)
	{
	  /* The use is checked at the source end of the edge, where
	     the value must be live.  */
	  err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
	  err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
			     op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
	}

      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  /* Taking the address of a decl requires its addressable bit.  */
	  tree base = TREE_OPERAND (op, 0);
	  while (handled_component_p (base))
	    base = TREE_OPERAND (base, 0);
	  if ((TREE_CODE (base) == VAR_DECL
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	      && !TREE_ADDRESSABLE (base))
	    {
	      error ("address taken, but ADDRESSABLE bit not set");
	      err = true;
	    }
	}

      if (e->dest != bb)
	{
	  error ("wrong edge %d->%d for PHI argument",
	         e->src->index, e->dest->index);
	  err = true;
	}

      if (err)
	{
	  fprintf (stderr, "PHI argument\n");
	  print_generic_stmt (stderr, op, TDF_VOPS);
	  goto error;
	}
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }


  return err;
}
1000
1001
/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.

   CHECK_MODIFIED_STMT: also flag stmts still marked modified after a
   pass.  CHECK_SSA_OPERANDS: also run verify_ssa_operands on each
   stmt.  Calls internal_error on the first failure.  */

DEBUG_FUNCTION void
verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
{
  size_t i;
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);

  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

  /* Keep track of SSA names present in the IL.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  gimple *stmt;
	  /* Cleared here; set by verify_use once the name's imm-use
	     chain has been checked.  */
	  TREE_VISITED (name) = 0;

	  verify_ssa_name (name, virtual_operand_p (name));

	  stmt = SSA_NAME_DEF_STMT (name);
	  if (!gimple_nop_p (stmt))
	    {
	      basic_block bb = gimple_bb (stmt);
	      if (verify_def (bb, definition_block,
			      name, stmt, virtual_operand_p (name)))
		goto err;
	    }
	}
    }

  /* Dominance is needed by verify_use; restored to its original
     availability at the end.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->aux)
	    {
	      error ("AUX pointer initialized for edge %d->%d", e->src->index,
		     e->dest->index);
	      goto err;
	    }
	}

      /* Verify the arguments for every PHI node in the block.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  if (verify_phi_args (phi, bb, definition_block))
	    goto err;

	  bitmap_set_bit (names_defined_in_bb,
			  SSA_NAME_VERSION (gimple_phi_result (phi)));
	}

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  use_operand_p use_p;

	  if (check_modified_stmt && gimple_modified_p (stmt))
	    {
	      error ("stmt (%p) marked modified after optimization pass: ",
		     (void *)stmt);
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
	    {
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  /* Valueless debug binds carry no uses worth checking.  */
	  if (gimple_debug_bind_p (stmt)
	      && !gimple_debug_bind_has_value_p (stmt))
	    continue;

	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
	    {
	      op = USE_FROM_PTR (use_p);
	      if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
			      use_p, stmt, false, names_defined_in_bb))
		goto err;
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
	    {
	      if (SSA_NAME_DEF_STMT (op) != stmt)
		{
		  error ("SSA_NAME_DEF_STMT is wrong");
		  fprintf (stderr, "Expected definition statement:\n");
		  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
		  fprintf (stderr, "\nActual definition statement:\n");
		  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
				     4, TDF_VOPS);
		  goto err;
		}
	      bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
	    }
	}

      /* NAMES_DEFINED_IN_BB is per-block; reset for the next one.  */
      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  /* Walk the virtual use-def chain from the function entry if there
     is a virtual operand at all.  */
  if (gimple_vop (cfun)
      && ssa_default_def (cfun, gimple_vop (cfun)))
    {
      auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
      bitmap_clear (visited);
      if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
		       ssa_default_def (cfun, gimple_vop (cfun)), visited))
	goto err;
    }

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  BITMAP_FREE (names_defined_in_bb);
  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}
1151
1152
1153 /* Initialize global DFA and SSA structures. */
1154
1155 void
1156 init_tree_ssa (struct function *fn)
1157 {
1158 fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
1159 fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
1160 pt_solution_reset (&fn->gimple_df->escaped);
1161 init_ssanames (fn, 0);
1162 }
1163
1164 /* Deallocate memory associated with SSA data structures for FNDECL. */
1165
1166 void
1167 delete_tree_ssa (struct function *fn)
1168 {
1169 fini_ssanames (fn);
1170
1171 /* We no longer maintain the SSA operand cache at this point. */
1172 if (ssa_operands_active (fn))
1173 fini_ssa_operands (fn);
1174
1175 fn->gimple_df->default_defs->empty ();
1176 fn->gimple_df->default_defs = NULL;
1177 pt_solution_reset (&fn->gimple_df->escaped);
1178 if (fn->gimple_df->decls_to_pointers != NULL)
1179 delete fn->gimple_df->decls_to_pointers;
1180 fn->gimple_df->decls_to_pointers = NULL;
1181 fn->gimple_df = NULL;
1182
1183 /* We no longer need the edge variable maps. */
1184 redirect_edge_var_map_empty ();
1185 }
1186
1187 /* Return true if EXPR is a useless type conversion, otherwise return
1188 false. */
1189
1190 bool
1191 tree_ssa_useless_type_conversion (tree expr)
1192 {
1193 /* If we have an assignment that merely uses a NOP_EXPR to change
1194 the top of the RHS to the type of the LHS and the type conversion
1195 is "safe", then strip away the type conversion so that we can
1196 enter LHS = RHS into the const_and_copies table. */
1197 if (CONVERT_EXPR_P (expr)
1198 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1199 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1200 return useless_type_conversion_p
1201 (TREE_TYPE (expr),
1202 TREE_TYPE (TREE_OPERAND (expr, 0)));
1203
1204 return false;
1205 }
1206
1207 /* Strip conversions from EXP according to
1208 tree_ssa_useless_type_conversion and return the resulting
1209 expression. */
1210
1211 tree
1212 tree_ssa_strip_useless_type_conversions (tree exp)
1213 {
1214 while (tree_ssa_useless_type_conversion (exp))
1215 exp = TREE_OPERAND (exp, 0);
1216 return exp;
1217 }
1218
1219
1220 /* Return true if T, an SSA_NAME, has an undefined value. PARTIAL is what
1221 should be returned if the value is only partially undefined. */
1222
1223 bool
1224 ssa_undefined_value_p (tree t, bool partial)
1225 {
1226 gimple *def_stmt;
1227 tree var = SSA_NAME_VAR (t);
1228
1229 if (!var)
1230 ;
1231 /* Parameters get their initial value from the function entry. */
1232 else if (TREE_CODE (var) == PARM_DECL)
1233 return false;
1234 /* When returning by reference the return address is actually a hidden
1235 parameter. */
1236 else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1237 return false;
1238 /* Hard register variables get their initial value from the ether. */
1239 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1240 return false;
1241
1242 /* The value is undefined iff its definition statement is empty. */
1243 def_stmt = SSA_NAME_DEF_STMT (t);
1244 if (gimple_nop_p (def_stmt))
1245 return true;
1246
1247 /* Check if the complex was not only partially defined. */
1248 if (partial && is_gimple_assign (def_stmt)
1249 && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1250 {
1251 tree rhs1, rhs2;
1252
1253 rhs1 = gimple_assign_rhs1 (def_stmt);
1254 rhs2 = gimple_assign_rhs2 (def_stmt);
1255 return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1256 || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1257 }
1258 return false;
1259 }
1260
1261
1262 /* Return TRUE iff STMT, a gimple statement, references an undefined
1263 SSA name. */
1264
1265 bool
1266 gimple_uses_undefined_value_p (gimple *stmt)
1267 {
1268 ssa_op_iter iter;
1269 tree op;
1270
1271 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1272 if (ssa_undefined_value_p (op))
1273 return true;
1274
1275 return false;
1276 }
1277
1278
1279
/* If necessary, rewrite the base of the reference tree *TP from
   a MEM_REF to a plain or converted symbol.  */

static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  /* Descend through handled components to the innermost base.  */
  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  /* Only rewrite MEM_REF (&SYM, offset) where SYM is a decl that is
     not addressable and has been marked suitable for SSA renaming.  */
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
    {
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
	  && useless_type_conversion_p (TREE_TYPE (*tp),
					TREE_TYPE (TREE_TYPE (sym)))
	  && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
	{
	  /* An element-aligned access into a vector decl becomes a
	     BIT_FIELD_REF; the byte offset is scaled by BITS_PER_UNIT
	     to produce the bit position.  */
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			int_const_binop (MULT_EXPR,
					 bitsize_int (BITS_PER_UNIT),
					 TREE_OPERAND (*tp, 1)));
	}
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
	       && useless_type_conversion_p (TREE_TYPE (*tp),
					     TREE_TYPE (TREE_TYPE (sym))))
	{
	  /* An access into a complex decl becomes REALPART_EXPR for
	     offset zero and IMAGPART_EXPR otherwise.  */
	  *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
			? REALPART_EXPR : IMAGPART_EXPR,
			TREE_TYPE (*tp), sym);
	}
      else if (integer_zerop (TREE_OPERAND (*tp, 1)))
	{
	  /* A zero-offset access is replaced by the decl itself,
	     wrapped in a VIEW_CONVERT_EXPR when the types differ.  */
	  if (!useless_type_conversion_p (TREE_TYPE (*tp),
					  TREE_TYPE (sym)))
	    *tp = build1 (VIEW_CONVERT_EXPR,
			  TREE_TYPE (*tp), sym);
	  else
	    *tp = sym;
	}
    }
}
1328
/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  /* Strip invariant wrappers; if none, look at the base address of
     the reference directly.  */
  if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
    {
      base = get_base_address (ref);
      if (DECL_P (base))
	return base;
      return NULL_TREE;
    }

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      /* In-bounds, element-aligned accesses into vector or complex
	 decls are rewritable (cf. maybe_rewrite_mem_ref_base), so do
	 not report those.  */
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
	  && useless_type_conversion_p (TREE_TYPE (base),
					TREE_TYPE (TREE_TYPE (decl)))
	  && wi::fits_uhwi_p (mem_ref_offset (base))
	  && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
			mem_ref_offset (base))
	  && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (base))))
	return NULL_TREE;
      /* Otherwise a nonzero offset, a size mismatch, or a volatility
	 mismatch makes the decl non-rewritable.  */
      if (DECL_P (decl)
	  && (!integer_zerop (TREE_OPERAND (base, 1))
	      || (DECL_SIZE (decl)
		  != TYPE_SIZE (TREE_TYPE (base)))
	      || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
	return decl;
    }

  return NULL_TREE;
}
1375
/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */

static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
     a reasonably efficient manner...  */
  if ((TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (lhs) == IMAGPART_EXPR)
      && DECL_P (TREE_OPERAND (lhs, 0)))
    return false;

  /* ??? The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);

      /* A decl that is wrapped inside a MEM-REF that covers
	 it full is also rewritable.  */
      if (integer_zerop (TREE_OPERAND (lhs, 1))
	  && DECL_P (decl)
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
	  /* If the dynamic type of the decl has larger precision than
	     the decl itself we can't use the decls type for SSA rewriting.  */
	  && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
	       || compare_tree_int (DECL_SIZE (decl),
				    TYPE_PRECISION (TREE_TYPE (decl))) == 0)
	      || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		  && (TYPE_PRECISION (TREE_TYPE (decl))
		      >= TYPE_PRECISION (TREE_TYPE (lhs)))))
	  /* Make sure we are not re-writing non-float copying into float
	     copying as that can incur normalization.  */
	  && (! FLOAT_TYPE_P (TREE_TYPE (decl))
	      || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
	  && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
	return false;

      /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
	 using a BIT_INSERT_EXPR.  The offset must be in bounds and
	 a multiple of the element size.  */
      if (DECL_P (decl)
	  && VECTOR_TYPE_P (TREE_TYPE (decl))
	  && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
	  && types_compatible_p (TREE_TYPE (lhs),
				 TREE_TYPE (TREE_TYPE (decl)))
	  && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
	  && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
			      TYPE_SIZE_UNIT (TREE_TYPE (decl)))
	  && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
	      % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
	return false;
    }

  /* A vector-insert using a BIT_FIELD_REF is rewritable using
     BIT_INSERT_EXPR.  The bit position (operand 2) must be a multiple
     of the inserted element's bit size.  */
  if (TREE_CODE (lhs) == BIT_FIELD_REF
      && DECL_P (TREE_OPERAND (lhs, 0))
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
      && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
      && types_compatible_p (TREE_TYPE (lhs),
			     TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0))))
      && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
	  % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
    return false;

  return true;
}
1449
/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
   mark the variable VAR for conversion into SSA by recording it in
   SUITABLE_FOR_RENAMING.  ADDRESSES_TAKEN and NOT_REG_NEEDS are the
   bitmaps of decl UIDs whose address is taken resp. which must stay
   non-registers.  */

static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
		    bitmap suitable_for_renaming)
{
  /* Global Variables, result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  if (TREE_ADDRESSABLE (var)
      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
	 a non-register.  Otherwise we are confused and forget to
	 add virtual operands for it.  */
      && (!is_gimple_reg_type (TREE_TYPE (var))
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
    {
      TREE_ADDRESSABLE (var) = 0;
      if (is_gimple_reg (var))
	bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "No longer having address taken: ");
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
    }

  /* Complex and vector decls can be promoted to gimple registers when
     no use requires them to stay in memory and they are not volatile
     or hard-register variables.  */
  if (!DECL_GIMPLE_REG_P (var)
      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (var)
      && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
    {
      DECL_GIMPLE_REG_P (var) = 1;
      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "Now a gimple register: ");
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
    }
}
1501
/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.
   Works in two phases: first collect, over all statements and PHIs,
   which decls have their address taken and which must remain
   non-registers; then, for decls that became suitable for renaming,
   rewrite statements to expose bare symbols and update SSA form.  */

void
execute_update_addresses_taken (void)
{
  basic_block bb;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
  bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
  tree var;
  unsigned i;

  timevar_push (TV_ADDRESS_TAKEN);

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  enum gimple_code code = gimple_code (stmt);
	  tree decl;

	  if (code == GIMPLE_CALL
	      && optimize_atomic_compare_exchange_p (stmt))
	    {
	      /* For __atomic_compare_exchange_N if the second argument
		 is &var, don't mark var addressable;
		 if it becomes non-addressable, we'll rewrite it into
		 ATOMIC_COMPARE_EXCHANGE call.  The argument is
		 temporarily nulled so gimple_ior_addresses_taken does
		 not see it, then restored.  */
	      tree arg = gimple_call_arg (stmt, 1);
	      gimple_call_set_arg (stmt, 1, null_pointer_node);
	      gimple_ior_addresses_taken (addresses_taken, stmt);
	      gimple_call_set_arg (stmt, 1, arg);
	    }
	  else
	    /* Note all addresses taken by the stmt.  */
	    gimple_ior_addresses_taken (addresses_taken, stmt);

	  /* If we have a call or an assignment, see if the lhs contains
	     a local decl that requires not to be a gimple register.  */
	  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      if (lhs
		  && TREE_CODE (lhs) != SSA_NAME
		  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
		      || non_rewritable_lvalue_p (lhs)))
		{
		  decl = get_base_address (lhs);
		  if (DECL_P (decl))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }

	  /* Likewise check the RHS of single assignments ...  */
	  if (gimple_assign_single_p (stmt))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      if ((decl = non_rewritable_mem_ref_base (rhs)))
		bitmap_set_bit (not_reg_needs, DECL_UID (decl));
	    }

	  /* ... and all arguments of calls ...  */
	  else if (code == GIMPLE_CALL)
	    {
	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
		{
		  tree arg = gimple_call_arg (stmt, i);
		  if ((decl = non_rewritable_mem_ref_base (arg)))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }

	  /* ... and asm outputs and inputs.  */
	  else if (code == GIMPLE_ASM)
	    {
	      gasm *asm_stmt = as_a <gasm *> (stmt);
	      for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
		{
		  tree link = gimple_asm_output_op (asm_stmt, i);
		  tree lhs = TREE_VALUE (link);
		  if (TREE_CODE (lhs) != SSA_NAME)
		    {
		      decl = get_base_address (lhs);
		      if (DECL_P (decl)
			  && (non_rewritable_lvalue_p (lhs)
			      /* We cannot move required conversions from
				 the lhs to the rhs in asm statements, so
				 require we do not need any.  */
			      || !useless_type_conversion_p
			            (TREE_TYPE (lhs), TREE_TYPE (decl))))
			bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		    }
		}
	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
		{
		  tree link = gimple_asm_input_op (asm_stmt, i);
		  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
		}
	    }
	}

      /* ADDR_EXPRs appearing as PHI arguments also take addresses.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  size_t i;
	  gphi *phi = gsi.phi ();

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree op = PHI_ARG_DEF (phi, i), var;
	      if (TREE_CODE (op) == ADDR_EXPR
		  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
		  && DECL_P (var))
		bitmap_set_bit (addresses_taken, DECL_UID (var));
	    }
	}
    }

  /* We cannot iterate over all referenced vars because that can contain
     unused vars from BLOCK trees, which causes code generation differences
     for -g vs. -g0.  */
  for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
			suitable_for_renaming);

  FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
			suitable_for_renaming);

  /* Operand caches need to be recomputed for operands referencing the updated
     variables and operands need to be rewritten to expose bare symbols.  */
  if (!bitmap_empty_p (suitable_for_renaming))
    {
      FOR_EACH_BB_FN (bb, cfun)
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple *stmt = gsi_stmt (gsi);

	    /* Re-write TARGET_MEM_REFs of symbols we want to
	       rewrite into SSA form.  */
	    if (gimple_assign_single_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
		tree sym;

		/* Rewrite LHS IMAG/REALPART_EXPR similar to
		   gimplify_modify_expr_complex_part.  A load of the
		   other part is inserted before the statement, which
		   becomes a full COMPLEX_EXPR store to the decl.  */
		if ((TREE_CODE (lhs) == IMAGPART_EXPR
		     || TREE_CODE (lhs) == REALPART_EXPR)
		    && DECL_P (TREE_OPERAND (lhs, 0))
		    && bitmap_bit_p (suitable_for_renaming,
				     DECL_UID (TREE_OPERAND (lhs, 0))))
		  {
		    tree other = make_ssa_name (TREE_TYPE (lhs));
		    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
					? REALPART_EXPR : IMAGPART_EXPR,
					TREE_TYPE (other),
					TREE_OPERAND (lhs, 0));
		    gimple *load = gimple_build_assign (other, lrhs);
		    location_t loc = gimple_location (stmt);
		    gimple_set_location (load, loc);
		    gimple_set_vuse (load, gimple_vuse (stmt));
		    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
		    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
		    gimple_assign_set_rhs_with_ops
		      (&gsi, COMPLEX_EXPR,
		       TREE_CODE (lhs) == IMAGPART_EXPR
		       ? other : gimple_assign_rhs1 (stmt),
		       TREE_CODE (lhs) == IMAGPART_EXPR
		       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
		   into a BIT_INSERT_EXPR.  */
		if (TREE_CODE (lhs) == BIT_FIELD_REF
		    && DECL_P (TREE_OPERAND (lhs, 0))
		    && bitmap_bit_p (suitable_for_renaming,
				     DECL_UID (TREE_OPERAND (lhs, 0)))
		    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
		    && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
		    && types_compatible_p (TREE_TYPE (lhs),
					   TREE_TYPE (TREE_TYPE
						       (TREE_OPERAND (lhs, 0))))
		    && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
			% tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
		  {
		    tree var = TREE_OPERAND (lhs, 0);
		    tree val = gimple_assign_rhs1 (stmt);
		    tree bitpos = TREE_OPERAND (lhs, 2);
		    gimple_assign_set_lhs (stmt, var);
		    gimple_assign_set_rhs_with_ops
		      (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* Rewrite a vector insert using a MEM_REF on the LHS
		   into a BIT_INSERT_EXPR.  */
		if (TREE_CODE (lhs) == MEM_REF
		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
		    && DECL_P (sym)
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
		    && VECTOR_TYPE_P (TREE_TYPE (sym))
		    && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
		    && types_compatible_p (TREE_TYPE (lhs),
					   TREE_TYPE (TREE_TYPE (sym)))
		    && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
		    && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
					TYPE_SIZE_UNIT (TREE_TYPE (sym)))
		    && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
			% tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
		  {
		    tree val = gimple_assign_rhs1 (stmt);
		    /* Byte offset of the MEM_REF scaled to bits.  */
		    tree bitpos
		      = wide_int_to_tree (bitsizetype,
					  mem_ref_offset (lhs) * BITS_PER_UNIT);
		    gimple_assign_set_lhs (stmt, sym);
		    gimple_assign_set_rhs_with_ops
		      (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
		    stmt = gsi_stmt (gsi);
		    unlink_stmt_vdef (stmt);
		    update_stmt (stmt);
		    continue;
		  }

		/* We shouldn't have any fancy wrapping of
		   component-refs on the LHS, but look through
		   VIEW_CONVERT_EXPRs as that is easy.  */
		while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
		  lhs = TREE_OPERAND (lhs, 0);
		if (TREE_CODE (lhs) == MEM_REF
		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
		    && integer_zerop (TREE_OPERAND (lhs, 1))
		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
		    && DECL_P (sym)
		    && !TREE_ADDRESSABLE (sym)
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
		  lhs = sym;
		else
		  lhs = gimple_assign_lhs (stmt);

		/* Rewrite the RHS and make sure the resulting assignment
		   is validly typed.  */
		maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
		rhs = gimple_assign_rhs1 (stmt);
		if (gimple_assign_lhs (stmt) != lhs
		    && !useless_type_conversion_p (TREE_TYPE (lhs),
						   TREE_TYPE (rhs)))
		  {
		    /* Clobbers get a fresh volatile CONSTRUCTOR of the
		       new LHS type instead of a VIEW_CONVERT_EXPR.  */
		    if (gimple_clobber_p (stmt))
		      {
			rhs = build_constructor (TREE_TYPE (lhs), NULL);
			TREE_THIS_VOLATILE (rhs) = 1;
		      }
		    else
		      rhs = fold_build1 (VIEW_CONVERT_EXPR,
					 TREE_TYPE (lhs), rhs);
		  }
		if (gimple_assign_lhs (stmt) != lhs)
		  gimple_assign_set_lhs (stmt, lhs);

		if (gimple_assign_rhs1 (stmt) != rhs)
		  {
		    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
		    gimple_assign_set_rhs_from_tree (&gsi, rhs);
		  }
	      }

	    else if (gimple_code (stmt) == GIMPLE_CALL)
	      {
		unsigned i;
		if (optimize_atomic_compare_exchange_p (stmt))
		  {
		    tree expected = gimple_call_arg (stmt, 1);
		    if (bitmap_bit_p (suitable_for_renaming,
				      DECL_UID (TREE_OPERAND (expected, 0))))
		      {
			fold_builtin_atomic_compare_exchange (&gsi);
			continue;
		      }
		  }
		for (i = 0; i < gimple_call_num_args (stmt); ++i)
		  {
		    tree *argp = gimple_call_arg_ptr (stmt, i);
		    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
		  }
	      }

	    else if (gimple_code (stmt) == GIMPLE_ASM)
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		unsigned i;
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
		  {
		    tree link = gimple_asm_output_op (asm_stmt, i);
		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
						suitable_for_renaming);
		  }
		for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
		  {
		    tree link = gimple_asm_input_op (asm_stmt, i);
		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
						suitable_for_renaming);
		  }
	      }

	    else if (gimple_debug_bind_p (stmt)
		     && gimple_debug_bind_has_value_p (stmt))
	      {
		tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
		tree decl;
		maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
		decl = non_rewritable_mem_ref_base (*valuep);
		/* Drop debug values we could not rewrite rather than
		   keeping a stale memory reference.  */
		if (decl
		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
		  gimple_debug_bind_reset_value (stmt);
	      }

	    if (gimple_references_memory_p (stmt)
		|| is_gimple_debug (stmt))
	      update_stmt (stmt);

	    gsi_next (&gsi);
	  }

      /* Update SSA form here, we are called as non-pass as well.  */
      if (number_of_loops (cfun) > 1
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA))
	rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
      else
	update_ssa (TODO_update_ssa);
    }

  BITMAP_FREE (not_reg_needs);
  BITMAP_FREE (addresses_taken);
  BITMAP_FREE (suitable_for_renaming);
  timevar_pop (TV_ADDRESS_TAKEN);
}
1850
namespace {

/* Pass descriptor for the "addressables" pass.  */
const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

/* This pass overrides no execute method; its effect comes entirely from
   TODO_update_address_taken in todo_flags_finish.  */
class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */

}; // class pass_update_address_taken

} // anon namespace
1878
/* Factory for the addressables pass; called by the pass manager with the
   compiler context CTXT.  */

gimple_opt_pass *
make_pass_update_address_taken (gcc::context *ctxt)
{
  return new pass_update_address_taken (ctxt);
}