1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "flags.h"
26 #include "tm_p.h"
27 #include "target.h"
28 #include "ggc.h"
29 #include "langhooks.h"
30 #include "basic-block.h"
31 #include "function.h"
32 #include "gimple-pretty-print.h"
33 #include "pointer-set.h"
34 #include "gimple.h"
35 #include "gimple-ssa.h"
36 #include "tree-phinodes.h"
37 #include "ssa-iterators.h"
38 #include "tree-ssanames.h"
39 #include "tree-ssa-loop-manip.h"
40 #include "tree-into-ssa.h"
41 #include "tree-ssa.h"
42 #include "tree-inline.h"
43 #include "hashtab.h"
44 #include "tree-pass.h"
45 #include "diagnostic-core.h"
46 #include "cfgloop.h"
47
48 /* Pointer map of variable mappings, keyed by edge. */
49 static struct pointer_map_t *edge_var_maps;
50
51
52 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
53
54 void
55 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
56 {
57 void **slot;
58 edge_var_map_vector *head;
59 edge_var_map new_node;
60
61 if (edge_var_maps == NULL)
62 edge_var_maps = pointer_map_create ();
63
64 slot = pointer_map_insert (edge_var_maps, e);
65 head = (edge_var_map_vector *) *slot;
66 if (!head)
67 vec_safe_reserve (head, 5);
68 new_node.def = def;
69 new_node.result = result;
70 new_node.locus = locus;
71
72 vec_safe_push (head, new_node);
73 *slot = head;
74 }
75
76
77 /* Clear the var mappings in edge E. */
78
79 void
80 redirect_edge_var_map_clear (edge e)
81 {
82 void **slot;
83 edge_var_map_vector *head;
84
85 if (!edge_var_maps)
86 return;
87
88 slot = pointer_map_contains (edge_var_maps, e);
89
90 if (slot)
91 {
92 head = (edge_var_map_vector *) *slot;
93 vec_free (head);
94 *slot = NULL;
95 }
96 }
97
98
99 /* Duplicate the redirected var mappings in OLDE in NEWE.
100
101 Since we can't remove a mapping, let's just duplicate it. This assumes a
102 pointer_map can have multiple edges mapping to the same var_map (many to
103 one mapping), since we don't remove the previous mappings. */
104
105 void
106 redirect_edge_var_map_dup (edge newe, edge olde)
107 {
108 void **new_slot, **old_slot;
109 edge_var_map_vector *head;
110
111 if (!edge_var_maps)
112 return;
113
114 new_slot = pointer_map_insert (edge_var_maps, newe);
115 old_slot = pointer_map_contains (edge_var_maps, olde);
116 if (!old_slot)
117 return;
118 head = (edge_var_map_vector *) *old_slot;
119
120 edge_var_map_vector *new_head = NULL;
121 if (head)
122 new_head = vec_safe_copy (head);
123 else
124 vec_safe_reserve (new_head, 5);
125 *new_slot = new_head;
126 }
127
128
129 /* Return the variable mappings for a given edge. If there is none, return
130 NULL. */
131
132 edge_var_map_vector *
133 redirect_edge_var_map_vector (edge e)
134 {
135 void **slot;
136
137       /* Callers may legitimately query an edge before any mappings exist.  */
138 if (!edge_var_maps)
139 return NULL;
140
141 slot = pointer_map_contains (edge_var_maps, e);
142 if (!slot)
143 return NULL;
144
145 return (edge_var_map_vector *) *slot;
146 }
147
148 /* Used by redirect_edge_var_map_destroy to free all memory. */
149
150 static bool
151 free_var_map_entry (const void *key ATTRIBUTE_UNUSED,
152 void **value,
153 void *data ATTRIBUTE_UNUSED)
154 {
155 edge_var_map_vector *head = (edge_var_map_vector *) *value;
156 vec_free (head);
157 return true;
158 }
159
160 /* Clear the edge variable mappings. */
161
162 void
163 redirect_edge_var_map_destroy (void)
164 {
165 if (edge_var_maps)
166 {
167 pointer_map_traverse (edge_var_maps, free_var_map_entry, NULL);
168 pointer_map_destroy (edge_var_maps);
169 edge_var_maps = NULL;
170 }
171 }
172
173
174 /* Remove the corresponding arguments from the PHI nodes in E's
175 destination block and redirect it to DEST. Return redirected edge.
176 The list of removed arguments is stored in a vector accessed
177 through edge_var_maps. */
178
179 edge
180 ssa_redirect_edge (edge e, basic_block dest)
181 {
182 gimple_stmt_iterator gsi;
183 gimple phi;
184
185 redirect_edge_var_map_clear (e);
186
187 /* Remove the appropriate PHI arguments in E's destination block. */
188 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
189 {
190 tree def;
191           source_location locus;
192
193 phi = gsi_stmt (gsi);
194 def = gimple_phi_arg_def (phi, e->dest_idx);
195 locus = gimple_phi_arg_location (phi, e->dest_idx);
196
197 if (def == NULL_TREE)
198 continue;
199
200 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
201 }
202
203 e = redirect_edge_succ_nodup (e, dest);
204
205 return e;
206 }
207
208
209 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
210 E->dest. */
211
212 void
213 flush_pending_stmts (edge e)
214 {
215 gimple phi;
216 edge_var_map_vector *v;
217 edge_var_map *vm;
218 int i;
219 gimple_stmt_iterator gsi;
220
221 v = redirect_edge_var_map_vector (e);
222 if (!v)
223 return;
224
225 for (gsi = gsi_start_phis (e->dest), i = 0;
226 !gsi_end_p (gsi) && v->iterate (i, &vm);
227 gsi_next (&gsi), i++)
228 {
229 tree def;
230
231 phi = gsi_stmt (gsi);
232 def = redirect_edge_var_map_def (vm);
233 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
234 }
235
236 redirect_edge_var_map_clear (e);
237 }
238
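/* Illustrative sketch, not part of this file: a hypothetical caller that
   redirects an edge and then materializes the queued PHI arguments at the
   new destination combines the two helpers above like so:

     edge
     redirect_and_flush (edge e, basic_block new_dest)
     {
       e = ssa_redirect_edge (e, new_dest);
       flush_pending_stmts (e);
       return e;
     }

   ssa_redirect_edge queues the removed PHI arguments in edge_var_maps and
   flush_pending_stmts re-adds them to the PHI nodes in NEW_DEST.  The
   names redirect_and_flush and new_dest are made up for this example.  */
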
239 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
240 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
241 expression with a different value.
242
243 This will update any annotations (say debug bind stmts) referring
244 to the original LHS, so that they use the RHS instead. This is
245 done even if NLHS and LHS are the same, for it is understood that
246 the RHS will be modified afterwards, and NLHS will not be assigned
247 an equivalent value.
248
249 Adjusting any non-annotation uses of the LHS, if needed, is a
250 responsibility of the caller.
251
252 The effect of this call should be pretty much the same as that of
253 inserting a copy of STMT before STMT, and then removing the
254    original stmt, at which time gsi_remove() would have updated
255 annotations, but using this function saves all the inserting,
256 copying and removing. */
257
258 void
259 gimple_replace_ssa_lhs (gimple stmt, tree nlhs)
260 {
261 if (MAY_HAVE_DEBUG_STMTS)
262 {
263 tree lhs = gimple_get_lhs (stmt);
264
265 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
266
267 insert_debug_temp_for_var_def (NULL, lhs);
268 }
269
270 gimple_set_lhs (stmt, nlhs);
271 }
272
273
274 /* Given a tree for an expression for which we might want to emit
275 locations or values in debug information (generally a variable, but
276 we might deal with other kinds of trees in the future), return the
277 tree that should be used as the variable of a DEBUG_BIND STMT or
278 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
279
280 tree
281 target_for_debug_bind (tree var)
282 {
283 if (!MAY_HAVE_DEBUG_STMTS)
284 return NULL_TREE;
285
286 if (TREE_CODE (var) == SSA_NAME)
287 {
288 var = SSA_NAME_VAR (var);
289 if (var == NULL_TREE)
290 return NULL_TREE;
291 }
292
293 if ((TREE_CODE (var) != VAR_DECL
294 || VAR_DECL_IS_VIRTUAL_OPERAND (var))
295 && TREE_CODE (var) != PARM_DECL)
296 return NULL_TREE;
297
298 if (DECL_HAS_VALUE_EXPR_P (var))
299 return target_for_debug_bind (DECL_VALUE_EXPR (var));
300
301 if (DECL_IGNORED_P (var))
302 return NULL_TREE;
303
304 /* var-tracking only tracks registers. */
305 if (!is_gimple_reg_type (TREE_TYPE (var)))
306 return NULL_TREE;
307
308 return var;
309 }
310
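/* Illustrative sketch, not taken from this file: a hypothetical pass that
   wants to emit a debug bind for VAR before rewriting it would typically
   do something like

     tree tgt = target_for_debug_bind (var);
     if (tgt)
       {
         gimple bind = gimple_build_debug_bind (tgt, value, stmt);
         gsi_insert_before (&gsi, bind, GSI_SAME_STMT);
       }

   where var, value, stmt and gsi come from the caller's context and are
   assumed names for this example.  */
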
311 /* Called via walk_tree, look for SSA_NAMEs that have already been
312 released. */
313
314 static tree
315 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
316 {
317 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
318
319 if (wi && wi->is_lhs)
320 return NULL_TREE;
321
322 if (TREE_CODE (*tp) == SSA_NAME)
323 {
324 if (SSA_NAME_IN_FREE_LIST (*tp))
325 return *tp;
326
327 *walk_subtrees = 0;
328 }
329 else if (IS_TYPE_OR_DECL_P (*tp))
330 *walk_subtrees = 0;
331
332 return NULL_TREE;
333 }
334
335 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
336 by other DEBUG stmts, and replace uses of the DEF with the
337 newly-created debug temp. */
338
339 void
340 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
341 {
342 imm_use_iterator imm_iter;
343 use_operand_p use_p;
344 gimple stmt;
345 gimple def_stmt = NULL;
346 int usecount = 0;
347 tree value = NULL;
348
349 if (!MAY_HAVE_DEBUG_STMTS)
350 return;
351
352 /* If this name has already been registered for replacement, do nothing
353 as anything that uses this name isn't in SSA form. */
354 if (name_registered_for_update_p (var))
355 return;
356
357 /* Check whether there are debug stmts that reference this variable and,
358 if there are, decide whether we should use a debug temp. */
359 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
360 {
361 stmt = USE_STMT (use_p);
362
363 if (!gimple_debug_bind_p (stmt))
364 continue;
365
366 if (usecount++)
367 break;
368
369 if (gimple_debug_bind_get_value (stmt) != var)
370 {
371 /* Count this as an additional use, so as to make sure we
372 use a temp unless VAR's definition has a SINGLE_RHS that
373 can be shared. */
374 usecount++;
375 break;
376 }
377 }
378
379 if (!usecount)
380 return;
381
382 if (gsi)
383 def_stmt = gsi_stmt (*gsi);
384 else
385 def_stmt = SSA_NAME_DEF_STMT (var);
386
387 /* If we didn't get an insertion point, and the stmt has already
388 been removed, we won't be able to insert the debug bind stmt, so
389 we'll have to drop debug information. */
390 if (gimple_code (def_stmt) == GIMPLE_PHI)
391 {
392 value = degenerate_phi_result (def_stmt);
393 if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
394 value = NULL;
395 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
396 to. */
397 else if (value == error_mark_node)
398 value = NULL;
399 }
400 else if (is_gimple_assign (def_stmt))
401 {
402 bool no_value = false;
403
404 if (!dom_info_available_p (CDI_DOMINATORS))
405 {
406 struct walk_stmt_info wi;
407
408 memset (&wi, 0, sizeof (wi));
409
410 /* When removing blocks without following reverse dominance
411 order, we may sometimes encounter SSA_NAMEs that have
412 already been released, referenced in other SSA_DEFs that
413 we're about to release. Consider:
414
415 <bb X>:
416 v_1 = foo;
417
418 <bb Y>:
419 w_2 = v_1 + bar;
420 # DEBUG w => w_2
421
422 If we deleted BB X first, propagating the value of w_2
423            won't do us any good.  It's too late to recover the
424 original definition of v_1: when it was deleted, it was
425 only referenced in other DEFs, it couldn't possibly know
426 it should have been retained, and propagating every
427 single DEF just in case it might have to be propagated
428 into a DEBUG STMT would probably be too wasteful.
429
430 When dominator information is not readily available, we
431 check for and accept some loss of debug information. But
432 if it is available, there's no excuse for us to remove
433 blocks in the wrong order, so we don't even check for
434 dead SSA NAMEs. SSA verification shall catch any
435 errors. */
436 if ((!gsi && !gimple_bb (def_stmt))
437 || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
438 no_value = true;
439 }
440
441 if (!no_value)
442 value = gimple_assign_rhs_to_tree (def_stmt);
443 }
444
445 if (value)
446 {
447 /* If there's a single use of VAR, and VAR is the entire debug
448 expression (usecount would have been incremented again
449 otherwise), and the definition involves only constants and
450 SSA names, then we can propagate VALUE into this single use,
451 avoiding the temp.
452
453 We can also avoid using a temp if VALUE can be shared and
454 propagated into all uses, without generating expressions that
455 wouldn't be valid gimple RHSs.
456
457 Other cases that would require unsharing or non-gimple RHSs
458 are deferred to a debug temp, although we could avoid temps
459 at the expense of duplication of expressions. */
460
461 if (CONSTANT_CLASS_P (value)
462 || gimple_code (def_stmt) == GIMPLE_PHI
463 || (usecount == 1
464 && (!gimple_assign_single_p (def_stmt)
465 || is_gimple_min_invariant (value)))
466 || is_gimple_reg (value))
467 ;
468 else
469 {
470 gimple def_temp;
471 tree vexpr = make_node (DEBUG_EXPR_DECL);
472
473 def_temp = gimple_build_debug_bind (vexpr,
474 unshare_expr (value),
475 def_stmt);
476
477 DECL_ARTIFICIAL (vexpr) = 1;
478 TREE_TYPE (vexpr) = TREE_TYPE (value);
479 if (DECL_P (value))
480 DECL_MODE (vexpr) = DECL_MODE (value);
481 else
482 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));
483
484 if (gsi)
485 gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
486 else
487 {
488 gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
489 gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
490 }
491
492 value = vexpr;
493 }
494 }
495
496 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
497 {
498 if (!gimple_debug_bind_p (stmt))
499 continue;
500
501 if (value)
502 {
503 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
504             /* VALUE is either a SINGLE_RHS that can be safely shared, some
505                other RHS that was unshared when we found it had a single
506                debug use, or a DEBUG_EXPR_DECL.  unshare_expr below gives
507                each rewritten debug use its own copy of the expression so
508                the copies stay independent.  */
509 SET_USE (use_p, unshare_expr (value));
510           /* If we didn't replace uses with a debug decl, fold the
511 resulting expression. Otherwise we end up with invalid IL. */
512 if (TREE_CODE (value) != DEBUG_EXPR_DECL)
513 {
514 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
515 fold_stmt_inplace (&gsi);
516 }
517 }
518 else
519 gimple_debug_bind_reset_value (stmt);
520
521 update_stmt (stmt);
522 }
523 }
524
525
526 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
527 other DEBUG stmts, and replace uses of the DEF with the
528 newly-created debug temp. */
529
530 void
531 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
532 {
533 gimple stmt;
534 ssa_op_iter op_iter;
535 def_operand_p def_p;
536
537 if (!MAY_HAVE_DEBUG_STMTS)
538 return;
539
540 stmt = gsi_stmt (*gsi);
541
542 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
543 {
544 tree var = DEF_FROM_PTR (def_p);
545
546 if (TREE_CODE (var) != SSA_NAME)
547 continue;
548
549 insert_debug_temp_for_var_def (gsi, var);
550 }
551 }
552
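/* Illustrative sketch of the usual calling pattern (hypothetical caller,
   not code from this file): a pass that deletes a dead statement preserves
   debug information by materializing debug temps first:

     gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     insert_debug_temps_for_defs (&gsi);
     gsi_remove (&gsi, true);
     release_defs (stmt);

   Here stmt stands for whatever statement the caller has decided to
   remove.  */
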
553 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
554
555 void
556 reset_debug_uses (gimple stmt)
557 {
558 ssa_op_iter op_iter;
559 def_operand_p def_p;
560 imm_use_iterator imm_iter;
561 gimple use_stmt;
562
563 if (!MAY_HAVE_DEBUG_STMTS)
564 return;
565
566 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
567 {
568 tree var = DEF_FROM_PTR (def_p);
569
570 if (TREE_CODE (var) != SSA_NAME)
571 continue;
572
573 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
574 {
575 if (!gimple_debug_bind_p (use_stmt))
576 continue;
577
578 gimple_debug_bind_reset_value (use_stmt);
579 update_stmt (use_stmt);
580 }
581 }
582 }
583
584 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
585 dominated stmts before their dominators, so that release_ssa_defs
586 stands a chance of propagating DEFs into debug bind stmts. */
587
588 void
589 release_defs_bitset (bitmap toremove)
590 {
591 unsigned j;
592 bitmap_iterator bi;
593
594   /* Performing a topological sort is probably overkill; this will
595 most likely run in slightly superlinear time, rather than the
596 pathological quadratic worst case. */
597 while (!bitmap_empty_p (toremove))
598 EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
599 {
600 bool remove_now = true;
601 tree var = ssa_name (j);
602 gimple stmt;
603 imm_use_iterator uit;
604
605 FOR_EACH_IMM_USE_STMT (stmt, uit, var)
606 {
607 ssa_op_iter dit;
608 def_operand_p def_p;
609
610 /* We can't propagate PHI nodes into debug stmts. */
611 if (gimple_code (stmt) == GIMPLE_PHI
612 || is_gimple_debug (stmt))
613 continue;
614
615 /* If we find another definition to remove that uses
616 the one we're looking at, defer the removal of this
617 one, so that it can be propagated into debug stmts
618 after the other is. */
619 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
620 {
621 tree odef = DEF_FROM_PTR (def_p);
622
623 if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
624 {
625 remove_now = false;
626 break;
627 }
628 }
629
630 if (!remove_now)
631 BREAK_FROM_IMM_USE_STMT (uit);
632 }
633
634 if (remove_now)
635 {
636 gimple def = SSA_NAME_DEF_STMT (var);
637 gimple_stmt_iterator gsi = gsi_for_stmt (def);
638
639 if (gimple_code (def) == GIMPLE_PHI)
640 remove_phi_node (&gsi, true);
641 else
642 {
643 gsi_remove (&gsi, true);
644 release_defs (def);
645 }
646
647 bitmap_clear_bit (toremove, j);
648 }
649 }
650 }
651
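/* Illustrative sketch (hypothetical caller): a pass that has collected a
   set of SSA definitions it wants to delete queues their version numbers
   and lets release_defs_bitset remove them in a debug-friendly order:

     bitmap toremove = BITMAP_ALLOC (NULL);
     bitmap_set_bit (toremove, SSA_NAME_VERSION (name1));
     bitmap_set_bit (toremove, SSA_NAME_VERSION (name2));
     release_defs_bitset (toremove);
     BITMAP_FREE (toremove);

   name1 and name2 stand for SSA names the caller already knows are dead;
   they are assumptions for this example.  */
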
652 /* Return true if SSA_NAME is malformed.
653
654 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
655 operand. */
656
657 static bool
658 verify_ssa_name (tree ssa_name, bool is_virtual)
659 {
660 if (TREE_CODE (ssa_name) != SSA_NAME)
661 {
662 error ("expected an SSA_NAME object");
663 return true;
664 }
665
666 if (SSA_NAME_IN_FREE_LIST (ssa_name))
667 {
668 error ("found an SSA_NAME that had been released into the free pool");
669 return true;
670 }
671
672 if (SSA_NAME_VAR (ssa_name) != NULL_TREE
673 && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
674 {
675 error ("type mismatch between an SSA_NAME and its symbol");
676 return true;
677 }
678
679 if (is_virtual && !virtual_operand_p (ssa_name))
680 {
681 error ("found a virtual definition for a GIMPLE register");
682 return true;
683 }
684
685 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
686 {
687 error ("virtual SSA name for non-VOP decl");
688 return true;
689 }
690
691 if (!is_virtual && virtual_operand_p (ssa_name))
692 {
693 error ("found a real definition for a non-register");
694 return true;
695 }
696
697 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
698 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
699 {
700 error ("found a default name with a non-empty defining statement");
701 return true;
702 }
703
704 return false;
705 }
706
707
708 /* Return true if the definition of SSA_NAME at block BB is malformed.
709
710 STMT is the statement where SSA_NAME is created.
711
712 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
713 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
714 it means that the block in that array slot contains the
715 definition of SSA_NAME.
716
717 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
718
719 static bool
720 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
721 gimple stmt, bool is_virtual)
722 {
723 if (verify_ssa_name (ssa_name, is_virtual))
724 goto err;
725
726 if (SSA_NAME_VAR (ssa_name)
727 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
728 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
729 {
730 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
731 goto err;
732 }
733
734 if (definition_block[SSA_NAME_VERSION (ssa_name)])
735 {
736 error ("SSA_NAME created in two different blocks %i and %i",
737 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
738 goto err;
739 }
740
741 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
742
743 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
744 {
745 error ("SSA_NAME_DEF_STMT is wrong");
746 fprintf (stderr, "Expected definition statement:\n");
747 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
748 fprintf (stderr, "\nActual definition statement:\n");
749 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
750 goto err;
751 }
752
753 return false;
754
755 err:
756 fprintf (stderr, "while verifying SSA_NAME ");
757 print_generic_expr (stderr, ssa_name, 0);
758 fprintf (stderr, " in statement\n");
759 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
760
761 return true;
762 }
763
764
765 /* Return true if the use of SSA_NAME at statement STMT in block BB is
766 malformed.
767
768 DEF_BB is the block where SSA_NAME was found to be created.
769
770 IDOM contains immediate dominator information for the flowgraph.
771
772 CHECK_ABNORMAL is true if the caller wants to check whether this use
773 is flowing through an abnormal edge (only used when checking PHI
774 arguments).
775
776 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
777 that are defined before STMT in basic block BB. */
778
779 static bool
780 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
781 gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
782 {
783 bool err = false;
784 tree ssa_name = USE_FROM_PTR (use_p);
785
786 if (!TREE_VISITED (ssa_name))
787 if (verify_imm_links (stderr, ssa_name))
788 err = true;
789
790 TREE_VISITED (ssa_name) = 1;
791
792 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
793 && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
794 ; /* Default definitions have empty statements. Nothing to do. */
795 else if (!def_bb)
796 {
797 error ("missing definition");
798 err = true;
799 }
800 else if (bb != def_bb
801 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
802 {
803 error ("definition in block %i does not dominate use in block %i",
804 def_bb->index, bb->index);
805 err = true;
806 }
807 else if (bb == def_bb
808 && names_defined_in_bb != NULL
809 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
810 {
811 error ("definition in block %i follows the use", def_bb->index);
812 err = true;
813 }
814
815 if (check_abnormal
816 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
817 {
818 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
819 err = true;
820 }
821
822   /* Make sure the use is on an appropriate immediate-use list by
823      checking that the previous element refers to the same SSA name.  */
824 if (use_p->prev == NULL)
825 {
826 error ("no immediate_use list");
827 err = true;
828 }
829 else
830 {
831 tree listvar;
832 if (use_p->prev->use == NULL)
833 listvar = use_p->prev->loc.ssa_name;
834 else
835 listvar = USE_FROM_PTR (use_p->prev);
836 if (listvar != ssa_name)
837 {
838 error ("wrong immediate use list");
839 err = true;
840 }
841 }
842
843 if (err)
844 {
845 fprintf (stderr, "for SSA_NAME: ");
846 print_generic_expr (stderr, ssa_name, TDF_VOPS);
847 fprintf (stderr, " in statement:\n");
848 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
849 }
850
851 return err;
852 }
853
854
855 /* Return true if any of the arguments for PHI node PHI at block BB is
856 malformed.
857
858 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
859 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
860 it means that the block in that array slot contains the
861 definition of SSA_NAME. */
862
863 static bool
864 verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
865 {
866 edge e;
867 bool err = false;
868 size_t i, phi_num_args = gimple_phi_num_args (phi);
869
870 if (EDGE_COUNT (bb->preds) != phi_num_args)
871 {
872 error ("incoming edge count does not match number of PHI arguments");
873 err = true;
874 goto error;
875 }
876
877 for (i = 0; i < phi_num_args; i++)
878 {
879 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
880 tree op = USE_FROM_PTR (op_p);
881
882 e = EDGE_PRED (bb, i);
883
884 if (op == NULL_TREE)
885 {
886 error ("PHI argument is missing for edge %d->%d",
887 e->src->index,
888 e->dest->index);
889 err = true;
890 goto error;
891 }
892
893 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
894 {
895 error ("PHI argument is not SSA_NAME, or invariant");
896 err = true;
897 }
898
899 if (TREE_CODE (op) == SSA_NAME)
900 {
901 err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
902 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
903 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
904 }
905
906 if (TREE_CODE (op) == ADDR_EXPR)
907 {
908 tree base = TREE_OPERAND (op, 0);
909 while (handled_component_p (base))
910 base = TREE_OPERAND (base, 0);
911 if ((TREE_CODE (base) == VAR_DECL
912 || TREE_CODE (base) == PARM_DECL
913 || TREE_CODE (base) == RESULT_DECL)
914 && !TREE_ADDRESSABLE (base))
915 {
916 error ("address taken, but ADDRESSABLE bit not set");
917 err = true;
918 }
919 }
920
921 if (e->dest != bb)
922 {
923 error ("wrong edge %d->%d for PHI argument",
924 e->src->index, e->dest->index);
925 err = true;
926 }
927
928 if (err)
929 {
930 fprintf (stderr, "PHI argument\n");
931 print_generic_stmt (stderr, op, TDF_VOPS);
932 goto error;
933 }
934 }
935
936 error:
937 if (err)
938 {
939 fprintf (stderr, "for PHI node\n");
940 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
941 }
942
943
944 return err;
945 }
946
947
948 /* Verify common invariants in the SSA web.
949 TODO: verify the variable annotations. */
950
951 DEBUG_FUNCTION void
952 verify_ssa (bool check_modified_stmt)
953 {
954 size_t i;
955 basic_block bb;
956 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
957 ssa_op_iter iter;
958 tree op;
959 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
960 bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
961
962 gcc_assert (!need_ssa_update_p (cfun));
963
964 timevar_push (TV_TREE_SSA_VERIFY);
965
966 /* Keep track of SSA names present in the IL. */
967 for (i = 1; i < num_ssa_names; i++)
968 {
969 tree name = ssa_name (i);
970 if (name)
971 {
972 gimple stmt;
973 TREE_VISITED (name) = 0;
974
975 verify_ssa_name (name, virtual_operand_p (name));
976
977 stmt = SSA_NAME_DEF_STMT (name);
978 if (!gimple_nop_p (stmt))
979 {
980 basic_block bb = gimple_bb (stmt);
981 verify_def (bb, definition_block,
982 name, stmt, virtual_operand_p (name));
983
984 }
985 }
986 }
987
988 calculate_dominance_info (CDI_DOMINATORS);
989
990 /* Now verify all the uses and make sure they agree with the definitions
991 found in the previous pass. */
992 FOR_EACH_BB (bb)
993 {
994 edge e;
995 gimple phi;
996 edge_iterator ei;
997 gimple_stmt_iterator gsi;
998
999 /* Make sure that all edges have a clear 'aux' field. */
1000 FOR_EACH_EDGE (e, ei, bb->preds)
1001 {
1002 if (e->aux)
1003 {
1004 error ("AUX pointer initialized for edge %d->%d", e->src->index,
1005 e->dest->index);
1006 goto err;
1007 }
1008 }
1009
1010 /* Verify the arguments for every PHI node in the block. */
1011 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1012 {
1013 phi = gsi_stmt (gsi);
1014 if (verify_phi_args (phi, bb, definition_block))
1015 goto err;
1016
1017 bitmap_set_bit (names_defined_in_bb,
1018 SSA_NAME_VERSION (gimple_phi_result (phi)));
1019 }
1020
1021 /* Now verify all the uses and vuses in every statement of the block. */
1022 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1023 {
1024 gimple stmt = gsi_stmt (gsi);
1025 use_operand_p use_p;
1026
1027 if (check_modified_stmt && gimple_modified_p (stmt))
1028 {
1029 error ("stmt (%p) marked modified after optimization pass: ",
1030 (void *)stmt);
1031 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1032 goto err;
1033 }
1034
1035 if (verify_ssa_operands (stmt))
1036 {
1037 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1038 goto err;
1039 }
1040
1041 if (gimple_debug_bind_p (stmt)
1042 && !gimple_debug_bind_has_value_p (stmt))
1043 continue;
1044
1045 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
1046 {
1047 op = USE_FROM_PTR (use_p);
1048 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1049 use_p, stmt, false, names_defined_in_bb))
1050 goto err;
1051 }
1052
1053 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1054 {
1055 if (SSA_NAME_DEF_STMT (op) != stmt)
1056 {
1057 error ("SSA_NAME_DEF_STMT is wrong");
1058 fprintf (stderr, "Expected definition statement:\n");
1059 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1060 fprintf (stderr, "\nActual definition statement:\n");
1061 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1062 4, TDF_VOPS);
1063 goto err;
1064 }
1065 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1066 }
1067 }
1068
1069 bitmap_clear (names_defined_in_bb);
1070 }
1071
1072 free (definition_block);
1073
1074 /* Restore the dominance information to its prior known state, so
1075 that we do not perturb the compiler's subsequent behavior. */
1076 if (orig_dom_state == DOM_NONE)
1077 free_dominance_info (CDI_DOMINATORS);
1078 else
1079 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1080
1081 BITMAP_FREE (names_defined_in_bb);
1082 timevar_pop (TV_TREE_SSA_VERIFY);
1083 return;
1084
1085 err:
1086 internal_error ("verify_ssa failed");
1087 }
1088
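/* Illustrative note, not code from this file: passes normally do not call
   the verification helpers above directly.  With checking enabled the pass
   manager runs the whole-web check roughly as

     #ifdef ENABLE_CHECKING
       verify_ssa (true);
     #endif

   after passes that request SSA verification in their TODO flags.  */
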
1089 /* Return true if the DECL_UIDs of the variables underlying both SSA
     names are equal.  */
1090
1091 static int
1092 uid_ssaname_map_eq (const void *va, const void *vb)
1093 {
1094 const_tree a = (const_tree) va;
1095 const_tree b = (const_tree) vb;
1096 return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid);
1097 }
1098
1099 /* Hash a tree in a uid_ssaname_map.  */
1100
1101 static unsigned int
1102 uid_ssaname_map_hash (const void *item)
1103 {
1104 return ((const_tree)item)->ssa_name.var->decl_minimal.uid;
1105 }
1106
1107
1108 /* Initialize global DFA and SSA structures. */
1109
1110 void
1111 init_tree_ssa (struct function *fn)
1112 {
1113 fn->gimple_df = ggc_alloc_cleared_gimple_df ();
1114 fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
1115 uid_ssaname_map_eq, NULL);
1116 pt_solution_reset (&fn->gimple_df->escaped);
1117 init_ssanames (fn, 0);
1118 }
1119
1120 /* Do the actions required to initialize internal data structures used
1121 in tree-ssa optimization passes. */
1122
1123 static unsigned int
1124 execute_init_datastructures (void)
1125 {
1126 /* Allocate hash tables, arrays and other structures. */
1127 gcc_assert (!cfun->gimple_df);
1128 init_tree_ssa (cfun);
1129 return 0;
1130 }
1131
1132 /* Gate for the init_datastructures pass.  */
1133
1134 static bool
1135 gate_init_datastructures (void)
1136 {
1137   /* Do nothing for functions that were already produced in SSA form.  */
1138 return !(cfun->curr_properties & PROP_ssa);
1139 }
1140
1141 namespace {
1142
1143 const pass_data pass_data_init_datastructures =
1144 {
1145 GIMPLE_PASS, /* type */
1146 "*init_datastructures", /* name */
1147 OPTGROUP_NONE, /* optinfo_flags */
1148 true, /* has_gate */
1149 true, /* has_execute */
1150 TV_NONE, /* tv_id */
1151 PROP_cfg, /* properties_required */
1152 0, /* properties_provided */
1153 0, /* properties_destroyed */
1154 0, /* todo_flags_start */
1155 0, /* todo_flags_finish */
1156 };
1157
1158 class pass_init_datastructures : public gimple_opt_pass
1159 {
1160 public:
1161 pass_init_datastructures (gcc::context *ctxt)
1162 : gimple_opt_pass (pass_data_init_datastructures, ctxt)
1163 {}
1164
1165 /* opt_pass methods: */
1166 bool gate () { return gate_init_datastructures (); }
1167 unsigned int execute () { return execute_init_datastructures (); }
1168
1169 }; // class pass_init_datastructures
1170
1171 } // anon namespace
1172
1173 gimple_opt_pass *
1174 make_pass_init_datastructures (gcc::context *ctxt)
1175 {
1176 return new pass_init_datastructures (ctxt);
1177 }
1178
1179 /* Deallocate memory associated with SSA data structures for the current
     function.  */
1180
1181 void
1182 delete_tree_ssa (void)
1183 {
1184 fini_ssanames ();
1185
1186 /* We no longer maintain the SSA operand cache at this point. */
1187 if (ssa_operands_active (cfun))
1188 fini_ssa_operands ();
1189
1190 htab_delete (cfun->gimple_df->default_defs);
1191 cfun->gimple_df->default_defs = NULL;
1192 pt_solution_reset (&cfun->gimple_df->escaped);
1193 if (cfun->gimple_df->decls_to_pointers != NULL)
1194 pointer_map_destroy (cfun->gimple_df->decls_to_pointers);
1195 cfun->gimple_df->decls_to_pointers = NULL;
1196 cfun->gimple_df->modified_noreturn_calls = NULL;
1197 cfun->gimple_df = NULL;
1198
1199 /* We no longer need the edge variable maps. */
1200 redirect_edge_var_map_destroy ();
1201 }
1202
1203 /* Return true if EXPR is a useless type conversion, otherwise return
1204 false. */
1205
1206 bool
1207 tree_ssa_useless_type_conversion (tree expr)
1208 {
1209 /* If we have an assignment that merely uses a NOP_EXPR to change
1210 the top of the RHS to the type of the LHS and the type conversion
1211 is "safe", then strip away the type conversion so that we can
1212 enter LHS = RHS into the const_and_copies table. */
1213 if (CONVERT_EXPR_P (expr)
1214 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1215 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1216 return useless_type_conversion_p
1217 (TREE_TYPE (expr),
1218 TREE_TYPE (TREE_OPERAND (expr, 0)));
1219
1220 return false;
1221 }
1222
1223 /* Strip conversions from EXP according to
1224 tree_ssa_useless_type_conversion and return the resulting
1225 expression. */
1226
1227 tree
1228 tree_ssa_strip_useless_type_conversions (tree exp)
1229 {
1230 while (tree_ssa_useless_type_conversion (exp))
1231 exp = TREE_OPERAND (exp, 0);
1232 return exp;
1233 }
1234
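/* Illustrative sketch: given an RHS wrapped in value-preserving NOP_EXPRs,
   a caller recovers the underlying operand with

     tree op = tree_ssa_strip_useless_type_conversions (rhs);

   and can then treat op as the real copy source.  rhs here stands for
   whatever expression the caller is inspecting; the name is an assumption
   for this example.  */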
1235
1236 /* Return true if T, an SSA_NAME, has an undefined value. */
1237
1238 bool
1239 ssa_undefined_value_p (tree t)
1240 {
1241 tree var = SSA_NAME_VAR (t);
1242
1243 if (!var)
1244 ;
1245 /* Parameters get their initial value from the function entry. */
1246 else if (TREE_CODE (var) == PARM_DECL)
1247 return false;
1248 /* When returning by reference the return address is actually a hidden
1249 parameter. */
1250 else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1251 return false;
1252 /* Hard register variables get their initial value from the ether. */
1253 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1254 return false;
1255
1256 /* The value is undefined iff its definition statement is empty. */
1257 return gimple_nop_p (SSA_NAME_DEF_STMT (t));
1258 }
1259
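/* Illustrative sketch (hypothetical source): for

     int x;
     use (x);

   the SSA name read by the call has a GIMPLE_NOP defining statement and
   does not correspond to a PARM_DECL, DECL_BY_REFERENCE RESULT_DECL or
   hard-register VAR_DECL, so ssa_undefined_value_p returns true.  The
   uninitialized-use warnings key off this predicate.  */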
1260
1261 /* If necessary, rewrite the base of the reference tree *TP from
1262 a MEM_REF to a plain or converted symbol. */
1263
1264 static void
1265 maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
1266 {
1267 tree sym;
1268
1269 while (handled_component_p (*tp))
1270 tp = &TREE_OPERAND (*tp, 0);
1271 if (TREE_CODE (*tp) == MEM_REF
1272 && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
1273 && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1274 && DECL_P (sym)
1275 && !TREE_ADDRESSABLE (sym)
1276 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1277 {
1278 if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1279 && useless_type_conversion_p (TREE_TYPE (*tp),
1280 TREE_TYPE (TREE_TYPE (sym)))
1281 && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1282 TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1283 {
1284 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1285 TYPE_SIZE (TREE_TYPE (*tp)),
1286 int_const_binop (MULT_EXPR,
1287 bitsize_int (BITS_PER_UNIT),
1288 TREE_OPERAND (*tp, 1)));
1289 }
1290 else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1291 && useless_type_conversion_p (TREE_TYPE (*tp),
1292 TREE_TYPE (TREE_TYPE (sym))))
1293 {
1294 *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1295 ? REALPART_EXPR : IMAGPART_EXPR,
1296 TREE_TYPE (*tp), sym);
1297 }
1298 else if (integer_zerop (TREE_OPERAND (*tp, 1)))
1299 {
1300 if (!useless_type_conversion_p (TREE_TYPE (*tp),
1301 TREE_TYPE (sym)))
1302 *tp = build1 (VIEW_CONVERT_EXPR,
1303 TREE_TYPE (*tp), sym);
1304 else
1305 *tp = sym;
1306 }
1307 }
1308 }
1309
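/* Illustrative sketch (hypothetical GIMPLE): for a local 'int a' whose
   address no longer escapes, the rewrite above turns

     MEM[(int *)&a] = x_1;    into    a = x_1;

   while an access at a constant offset into a vector or complex decl is
   turned into a BIT_FIELD_REF or a REALPART_EXPR/IMAGPART_EXPR of the
   bare symbol, as the cases above show.  */
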
1310 /* For a tree REF return its base if it is the base of a MEM_REF
1311 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1312
1313 static tree
1314 non_rewritable_mem_ref_base (tree ref)
1315 {
1316 tree base = ref;
1317
1318 /* A plain decl does not need it set. */
1319 if (DECL_P (ref))
1320 return NULL_TREE;
1321
1322 while (handled_component_p (base))
1323 base = TREE_OPERAND (base, 0);
1324
1325 /* But watch out for MEM_REFs we cannot lower to a
1326 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1327 if (TREE_CODE (base) == MEM_REF
1328 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1329 {
1330 tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1331 if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1332 || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
1333 && useless_type_conversion_p (TREE_TYPE (base),
1334 TREE_TYPE (TREE_TYPE (decl)))
1335 && mem_ref_offset (base).fits_uhwi ()
1336 && tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
1337 .ugt (mem_ref_offset (base))
1338 && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1339 TYPE_SIZE_UNIT (TREE_TYPE (base))))
1340 return NULL_TREE;
1341 if (DECL_P (decl)
1342 && (!integer_zerop (TREE_OPERAND (base, 1))
1343 || (DECL_SIZE (decl)
1344 != TYPE_SIZE (TREE_TYPE (base)))
1345 || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
1346 return decl;
1347 }
1348
1349 return NULL_TREE;
1350 }
1351
1352 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1353    Otherwise return false.  */
1354
1355 static bool
1356 non_rewritable_lvalue_p (tree lhs)
1357 {
1358 /* A plain decl is always rewritable. */
1359 if (DECL_P (lhs))
1360 return false;
1361
1362   /* A decl that is wrapped inside a MEM_REF that covers
1363      it in full is also rewritable.
1364 ??? The following could be relaxed allowing component
1365 references that do not change the access size. */
1366 if (TREE_CODE (lhs) == MEM_REF
1367 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1368 && integer_zerop (TREE_OPERAND (lhs, 1)))
1369 {
1370 tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1371 if (DECL_P (decl)
1372 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1373 && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1374 return false;
1375 }
1376
1377 return true;
1378 }
1379
1380 /* When possible, clear the TREE_ADDRESSABLE bit or set the DECL_GIMPLE_REG_P
1381    bit of VAR and record it in SUITABLE_FOR_RENAMING so that it can be
1382    rewritten into SSA form.  */
1383
1384 static void
1385 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
1386 bitmap suitable_for_renaming)
1387 {
1388   /* Global variables and result decls cannot be changed.  */
1389 if (is_global_var (var)
1390 || TREE_CODE (var) == RESULT_DECL
1391 || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1392 return;
1393
1394 if (TREE_ADDRESSABLE (var)
1395 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1396 a non-register. Otherwise we are confused and forget to
1397 add virtual operands for it. */
1398 && (!is_gimple_reg_type (TREE_TYPE (var))
1399 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1400 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1401 || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1402 {
1403 TREE_ADDRESSABLE (var) = 0;
1404 if (is_gimple_reg (var))
1405 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1406 if (dump_file)
1407 {
1408 fprintf (dump_file, "No longer having address taken: ");
1409 print_generic_expr (dump_file, var, 0);
1410 fprintf (dump_file, "\n");
1411 }
1412 }
1413
1414 if (!DECL_GIMPLE_REG_P (var)
1415 && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1416 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1417 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1418 && !TREE_THIS_VOLATILE (var)
1419 && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
1420 {
1421 DECL_GIMPLE_REG_P (var) = 1;
1422 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1423 if (dump_file)
1424 {
1425 fprintf (dump_file, "Now a gimple register: ");
1426 print_generic_expr (dump_file, var, 0);
1427 fprintf (dump_file, "\n");
1428 }
1429 }
1430 }
1431
1432 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
1433
1434 void
1435 execute_update_addresses_taken (void)
1436 {
1437 gimple_stmt_iterator gsi;
1438 basic_block bb;
1439 bitmap addresses_taken = BITMAP_ALLOC (NULL);
1440 bitmap not_reg_needs = BITMAP_ALLOC (NULL);
1441 bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
1442 tree var;
1443 unsigned i;
1444
1445 timevar_push (TV_ADDRESS_TAKEN);
1446
1447 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1448 the function body. */
1449 FOR_EACH_BB (bb)
1450 {
1451 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1452 {
1453 gimple stmt = gsi_stmt (gsi);
1454 enum gimple_code code = gimple_code (stmt);
1455 tree decl;
1456
1457 /* Note all addresses taken by the stmt. */
1458 gimple_ior_addresses_taken (addresses_taken, stmt);
1459
1460 /* If we have a call or an assignment, see if the lhs contains
1461    a local decl that must not become a gimple register.  */
1462 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1463 {
1464 tree lhs = gimple_get_lhs (stmt);
1465 if (lhs
1466 && TREE_CODE (lhs) != SSA_NAME
1467 && non_rewritable_lvalue_p (lhs))
1468 {
1469 decl = get_base_address (lhs);
1470 if (DECL_P (decl))
1471 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1472 }
1473 }
1474
1475 if (gimple_assign_single_p (stmt))
1476 {
1477 tree rhs = gimple_assign_rhs1 (stmt);
1478 if ((decl = non_rewritable_mem_ref_base (rhs)))
1479 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1480 }
1481
1482 else if (code == GIMPLE_CALL)
1483 {
1484 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1485 {
1486 tree arg = gimple_call_arg (stmt, i);
1487 if ((decl = non_rewritable_mem_ref_base (arg)))
1488 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1489 }
1490 }
1491
1492 else if (code == GIMPLE_ASM)
1493 {
1494 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1495 {
1496 tree link = gimple_asm_output_op (stmt, i);
1497 tree lhs = TREE_VALUE (link);
1498 if (TREE_CODE (lhs) != SSA_NAME)
1499 {
1500 decl = get_base_address (lhs);
1501 if (DECL_P (decl)
1502 && (non_rewritable_lvalue_p (lhs)
1503 /* We cannot move required conversions from
1504 the lhs to the rhs in asm statements, so
1505 require we do not need any. */
1506 || !useless_type_conversion_p
1507 (TREE_TYPE (lhs), TREE_TYPE (decl))))
1508 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1509 }
1510 }
1511 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1512 {
1513 tree link = gimple_asm_input_op (stmt, i);
1514 if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1515 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1516 }
1517 }
1518 }
1519
1520 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1521 {
1522 size_t i;
1523 gimple phi = gsi_stmt (gsi);
1524
1525 for (i = 0; i < gimple_phi_num_args (phi); i++)
1526 {
1527 tree op = PHI_ARG_DEF (phi, i), var;
1528 if (TREE_CODE (op) == ADDR_EXPR
1529 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1530 && DECL_P (var))
1531 bitmap_set_bit (addresses_taken, DECL_UID (var));
1532 }
1533 }
1534 }
1535
1536 /* We cannot iterate over all referenced vars because that can contain
1537 unused vars from BLOCK trees, which causes code generation differences
1538 for -g vs. -g0. */
1539 for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1540 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1541 suitable_for_renaming);
1542
1543 FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1544 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1545 suitable_for_renaming);
1546
1547 /* Operand caches need to be recomputed for operands referencing the updated
1548 variables and operands need to be rewritten to expose bare symbols. */
1549 if (!bitmap_empty_p (suitable_for_renaming))
1550 {
1551 FOR_EACH_BB (bb)
1552 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1553 {
1554 gimple stmt = gsi_stmt (gsi);
1555
1556 /* Re-write TARGET_MEM_REFs of symbols we want to
1557 rewrite into SSA form. */
1558 if (gimple_assign_single_p (stmt))
1559 {
1560 tree lhs = gimple_assign_lhs (stmt);
1561 tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1562 tree sym;
1563
1564 /* We shouldn't have any fancy wrapping of
1565 component-refs on the LHS, but look through
1566 VIEW_CONVERT_EXPRs as that is easy. */
1567 while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1568 lhs = TREE_OPERAND (lhs, 0);
1569 if (TREE_CODE (lhs) == MEM_REF
1570 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1571 && integer_zerop (TREE_OPERAND (lhs, 1))
1572 && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1573 && DECL_P (sym)
1574 && !TREE_ADDRESSABLE (sym)
1575 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1576 lhs = sym;
1577 else
1578 lhs = gimple_assign_lhs (stmt);
1579
1580 /* Rewrite the RHS and make sure the resulting assignment
1581 is validly typed. */
1582 maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1583 rhs = gimple_assign_rhs1 (stmt);
1584 if (gimple_assign_lhs (stmt) != lhs
1585 && !useless_type_conversion_p (TREE_TYPE (lhs),
1586 TREE_TYPE (rhs)))
1587 rhs = fold_build1 (VIEW_CONVERT_EXPR,
1588 TREE_TYPE (lhs), rhs);
1589
1590 if (gimple_assign_lhs (stmt) != lhs)
1591 gimple_assign_set_lhs (stmt, lhs);
1592
1593 /* For var ={v} {CLOBBER}; where var lost
1594 TREE_ADDRESSABLE just remove the stmt. */
1595 if (DECL_P (lhs)
1596 && TREE_CLOBBER_P (rhs)
1597 && bitmap_bit_p (suitable_for_renaming, DECL_UID (lhs)))
1598 {
1599 unlink_stmt_vdef (stmt);
1600 gsi_remove (&gsi, true);
1601 release_defs (stmt);
1602 continue;
1603 }
1604
1605 if (gimple_assign_rhs1 (stmt) != rhs)
1606 {
1607 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1608 gimple_assign_set_rhs_from_tree (&gsi, rhs);
1609 }
1610 }
1611
1612 else if (gimple_code (stmt) == GIMPLE_CALL)
1613 {
1614 unsigned i;
1615 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1616 {
1617 tree *argp = gimple_call_arg_ptr (stmt, i);
1618 maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
1619 }
1620 }
1621
1622 else if (gimple_code (stmt) == GIMPLE_ASM)
1623 {
1624 unsigned i;
1625 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1626 {
1627 tree link = gimple_asm_output_op (stmt, i);
1628 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1629 suitable_for_renaming);
1630 }
1631 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1632 {
1633 tree link = gimple_asm_input_op (stmt, i);
1634 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1635 suitable_for_renaming);
1636 }
1637 }
1638
1639 else if (gimple_debug_bind_p (stmt)
1640 && gimple_debug_bind_has_value_p (stmt))
1641 {
1642 tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
1643 tree decl;
1644 maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
1645 decl = non_rewritable_mem_ref_base (*valuep);
1646 if (decl
1647 && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
1648 gimple_debug_bind_reset_value (stmt);
1649 }
1650
1651 if (gimple_references_memory_p (stmt)
1652 || is_gimple_debug (stmt))
1653 update_stmt (stmt);
1654
1655 gsi_next (&gsi);
1656 }
1657
1658       /* Update SSA form here; we are also called directly, not only as a pass.  */
1659 if (number_of_loops (cfun) > 1
1660 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
1661 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
1662 else
1663 update_ssa (TODO_update_ssa);
1664 }
1665
1666 BITMAP_FREE (not_reg_needs);
1667 BITMAP_FREE (addresses_taken);
1668 BITMAP_FREE (suitable_for_renaming);
1669 timevar_pop (TV_ADDRESS_TAKEN);
1670 }
1671
1672 namespace {
1673
1674 const pass_data pass_data_update_address_taken =
1675 {
1676 GIMPLE_PASS, /* type */
1677 "addressables", /* name */
1678 OPTGROUP_NONE, /* optinfo_flags */
1679 false, /* has_gate */
1680 false, /* has_execute */
1681 TV_ADDRESS_TAKEN, /* tv_id */
1682 PROP_ssa, /* properties_required */
1683 0, /* properties_provided */
1684 0, /* properties_destroyed */
1685 0, /* todo_flags_start */
1686 TODO_update_address_taken, /* todo_flags_finish */
1687 };
1688
1689 class pass_update_address_taken : public gimple_opt_pass
1690 {
1691 public:
1692 pass_update_address_taken (gcc::context *ctxt)
1693 : gimple_opt_pass (pass_data_update_address_taken, ctxt)
1694 {}
1695
1696 /* opt_pass methods: */
1697
1698 }; // class pass_update_address_taken
1699
1700 } // anon namespace
1701
1702 gimple_opt_pass *
1703 make_pass_update_address_taken (gcc::context *ctxt)
1704 {
1705 return new pass_update_address_taken (ctxt);
1706 }