4ee9c684 1/* Miscellaneous SSA utility functions.
a05063ec 2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
cfaf579d 3 Free Software Foundation, Inc.
4ee9c684 4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
8c4c00c1 9the Free Software Foundation; either version 3, or (at your option)
4ee9c684 10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
4ee9c684 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "flags.h"
4ee9c684 27#include "tm_p.h"
9fa67218 28#include "target.h"
4ee9c684 29#include "ggc.h"
30#include "langhooks.h"
4ee9c684 31#include "basic-block.h"
4ee9c684 32#include "function.h"
ce084dfc 33#include "tree-pretty-print.h"
34#include "gimple-pretty-print.h"
4ee9c684 35#include "bitmap.h"
dd940949 36#include "pointer-set.h"
4ee9c684 37#include "tree-flow.h"
75a70cf9 38#include "gimple.h"
4ee9c684 39#include "tree-inline.h"
4ee9c684 40#include "timevar.h"
4ee9c684 41#include "hashtab.h"
42#include "tree-dump.h"
43#include "tree-pass.h"
0b205f4c 44#include "diagnostic-core.h"
0ab72b53 45#include "cfgloop.h"
4ee9c684 46
d03ba86f 47/* Pointer map of variable mappings, keyed by edge. */
48static struct pointer_map_t *edge_var_maps;
49
50
51/* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
52
53void
60d535d2 54redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
d03ba86f 55{
56 void **slot;
57 edge_var_map_vector old_head, head;
58 edge_var_map new_node;
59
60 if (edge_var_maps == NULL)
61 edge_var_maps = pointer_map_create ();
62
63 slot = pointer_map_insert (edge_var_maps, e);
45ba1503 64 old_head = head = (edge_var_map_vector) *slot;
d03ba86f 65 if (!head)
66 {
67 head = VEC_alloc (edge_var_map, heap, 5);
68 *slot = head;
69 }
70 new_node.def = def;
71 new_node.result = result;
efbcb6de 72 new_node.locus = locus;
d03ba86f 73
74 VEC_safe_push (edge_var_map, heap, head, &new_node);
75 if (old_head != head)
76 {
77 /* The push did some reallocation. Update the pointer map. */
78 *slot = head;
79 }
80}
81
82
83/* Clear the var mappings in edge E. */
84
85void
86redirect_edge_var_map_clear (edge e)
87{
88 void **slot;
89 edge_var_map_vector head;
90
91 if (!edge_var_maps)
92 return;
93
94 slot = pointer_map_contains (edge_var_maps, e);
95
96 if (slot)
97 {
45ba1503 98 head = (edge_var_map_vector) *slot;
d03ba86f 99 VEC_free (edge_var_map, heap, head);
100 *slot = NULL;
101 }
102}
103
104
105/* Duplicate the redirected var mappings from OLDE to NEWE.
106
107 Since we can't remove a mapping, let's just duplicate it. This assumes a
108 pointer_map can have multiple edges mapping to the same var_map (many to
109 one mapping), since we don't remove the previous mappings. */
110
111void
112redirect_edge_var_map_dup (edge newe, edge olde)
113{
182db9cf 114 void **new_slot, **old_slot;
115 edge_var_map_vector head;
d03ba86f 116
117 if (!edge_var_maps)
118 return;
119
120 new_slot = pointer_map_insert (edge_var_maps, newe);
121 old_slot = pointer_map_contains (edge_var_maps, olde);
122 if (!old_slot)
123 return;
45ba1503 124 head = (edge_var_map_vector) *old_slot;
d03ba86f 125
126 if (head)
127 *new_slot = VEC_copy (edge_var_map, heap, head);
128 else
129 *new_slot = VEC_alloc (edge_var_map, heap, 5);
130}
131
132
f0b5f617 133/* Return the variable mappings for a given edge. If there is none, return
d03ba86f 134 NULL. */
135
136edge_var_map_vector
137redirect_edge_var_map_vector (edge e)
138{
139 void **slot;
140
141 /* Callers may legitimately ask before any mapping has been recorded. */
142 if (!edge_var_maps)
143 return NULL;
144
145 slot = pointer_map_contains (edge_var_maps, e);
146 if (!slot)
147 return NULL;
148
149 return (edge_var_map_vector) *slot;
150}
151
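/* A minimal usage sketch (illustrative only; E, V, VM and I are
   hypothetical locals, and redirect_edge_var_map_result is assumed to be
   the accessor declared alongside redirect_edge_var_map_def):

     edge_var_map_vector v = redirect_edge_var_map_vector (e);
     edge_var_map *vm;
     int i;

     if (v)
       for (i = 0; VEC_iterate (edge_var_map, v, i, vm); i++)
         {
           tree result = redirect_edge_var_map_result (vm);
           tree def = redirect_edge_var_map_def (vm);
           ...
         }

   Each element pairs a PHI result with the argument that used to flow
   along the redirected edge.  */
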
182db9cf 152/* Used by redirect_edge_var_map_destroy to free all memory. */
153
154static bool
155free_var_map_entry (const void *key ATTRIBUTE_UNUSED,
156 void **value,
157 void *data ATTRIBUTE_UNUSED)
158{
159 edge_var_map_vector head = (edge_var_map_vector) *value;
160 VEC_free (edge_var_map, heap, head);
161 return true;
162}
d03ba86f 163
164/* Clear the edge variable mappings. */
165
166void
167redirect_edge_var_map_destroy (void)
168{
169 if (edge_var_maps)
170 {
182db9cf 171 pointer_map_traverse (edge_var_maps, free_var_map_entry, NULL);
d03ba86f 172 pointer_map_destroy (edge_var_maps);
173 edge_var_maps = NULL;
174 }
175}
176
177
a61b7bc4 178/* Remove the corresponding arguments from the PHI nodes in E's
 179 destination block, redirect E to DEST, and return the redirected edge.
d03ba86f 180 The list of removed arguments is stored in a vector accessed
181 through edge_var_maps. */
4ee9c684 182
183edge
184ssa_redirect_edge (edge e, basic_block dest)
185{
75a70cf9 186 gimple_stmt_iterator gsi;
187 gimple phi;
d03ba86f 188
189 redirect_edge_var_map_clear (e);
4ee9c684 190
191 /* Remove the appropriate PHI arguments in E's destination block. */
75a70cf9 192 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 193 {
75a70cf9 194 tree def;
efbcb6de 195 source_location locus;
75a70cf9 196
197 phi = gsi_stmt (gsi);
198 def = gimple_phi_arg_def (phi, e->dest_idx);
efbcb6de 199 locus = gimple_phi_arg_location (phi, e->dest_idx);
d03ba86f 200
201 if (def == NULL_TREE)
4ee9c684 202 continue;
203
60d535d2 204 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
4ee9c684 205 }
206
207 e = redirect_edge_succ_nodup (e, dest);
4ee9c684 208
209 return e;
210}
211
75a70cf9 212
4fb5e5ca 213/* Add the PHI arguments queued in the PENDING_STMT list on edge E to
44a46103 214 the PHI nodes in E->dest. */
215
216void
217flush_pending_stmts (edge e)
218{
75a70cf9 219 gimple phi;
d03ba86f 220 edge_var_map_vector v;
221 edge_var_map *vm;
222 int i;
75a70cf9 223 gimple_stmt_iterator gsi;
44a46103 224
d03ba86f 225 v = redirect_edge_var_map_vector (e);
226 if (!v)
44a46103 227 return;
228
75a70cf9 229 for (gsi = gsi_start_phis (e->dest), i = 0;
230 !gsi_end_p (gsi) && VEC_iterate (edge_var_map, v, i, vm);
231 gsi_next (&gsi), i++)
44a46103 232 {
75a70cf9 233 tree def;
234
235 phi = gsi_stmt (gsi);
236 def = redirect_edge_var_map_def (vm);
60d535d2 237 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
44a46103 238 }
239
d03ba86f 240 redirect_edge_var_map_clear (e);
44a46103 241}
4ee9c684 242
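/* Typical usage sketch (illustrative only; E and NEW_DEST are
   hypothetical):

     e = ssa_redirect_edge (e, new_dest);
     ... create or fix up PHI nodes in NEW_DEST ...
     flush_pending_stmts (e);

   ssa_redirect_edge records the PHI arguments it removed in the edge var
   map above; flush_pending_stmts replays them onto the PHI nodes of the
   new destination and clears the map.  */
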
9845d120 243/* Given a tree for an expression for which we might want to emit
244 locations or values in debug information (generally a variable, but
245 we might deal with other kinds of trees in the future), return the
246 tree that should be used as the variable of a DEBUG_BIND STMT or
247 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
248
249tree
250target_for_debug_bind (tree var)
251{
252 if (!MAY_HAVE_DEBUG_STMTS)
253 return NULL_TREE;
254
255 if (TREE_CODE (var) != VAR_DECL
256 && TREE_CODE (var) != PARM_DECL)
257 return NULL_TREE;
258
259 if (DECL_HAS_VALUE_EXPR_P (var))
260 return target_for_debug_bind (DECL_VALUE_EXPR (var));
261
262 if (DECL_IGNORED_P (var))
263 return NULL_TREE;
264
265 if (!is_gimple_reg (var))
88f2e16b 266 {
267 if (is_gimple_reg_type (TREE_TYPE (var))
268 && referenced_var_lookup (cfun, DECL_UID (var)) == NULL_TREE)
269 return var;
270 return NULL_TREE;
271 }
9845d120 272
273 return var;
274}
275
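/* Hedged examples of the intent (illustrative only; the variable names
   are hypothetical):

     target_for_debug_bind (user_local)   == user_local  (tracked VAR_DECL)
     target_for_debug_bind (ignored_tmp)  == NULL_TREE   (DECL_IGNORED_P)
     target_for_debug_bind (frame_member) == whatever its DECL_VALUE_EXPR
                                             resolves to  */
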
276/* Called via walk_tree, look for SSA_NAMEs that have already been
277 released. */
278
279static tree
280find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
281{
282 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
283
43e54ec3 284 if (wi && wi->is_lhs)
9845d120 285 return NULL_TREE;
286
287 if (TREE_CODE (*tp) == SSA_NAME)
288 {
289 if (SSA_NAME_IN_FREE_LIST (*tp))
290 return *tp;
291
292 *walk_subtrees = 0;
293 }
294 else if (IS_TYPE_OR_DECL_P (*tp))
295 *walk_subtrees = 0;
296
297 return NULL_TREE;
298}
299
688ff29b 300/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
301 by other DEBUG stmts, and replace uses of the DEF with the
302 newly-created debug temp. */
303
9845d120 304void
688ff29b 305insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
9845d120 306{
307 imm_use_iterator imm_iter;
9845d120 308 use_operand_p use_p;
688ff29b 309 gimple stmt;
310 gimple def_stmt = NULL;
311 int usecount = 0;
9845d120 312 tree value = NULL;
9845d120 313
314 if (!MAY_HAVE_DEBUG_STMTS)
315 return;
316
afed704c 317 /* If this name has already been registered for replacement, do nothing
318 as anything that uses this name isn't in SSA form. */
319 if (name_registered_for_update_p (var))
320 return;
321
322 /* Check whether there are debug stmts that reference this variable and,
323 if there are, decide whether we should use a debug temp. */
688ff29b 324 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
9845d120 325 {
688ff29b 326 stmt = USE_STMT (use_p);
9845d120 327
688ff29b 328 if (!gimple_debug_bind_p (stmt))
9845d120 329 continue;
330
688ff29b 331 if (usecount++)
332 break;
333
334 if (gimple_debug_bind_get_value (stmt) != var)
9845d120 335 {
688ff29b 336 /* Count this as an additional use, so as to make sure we
337 use a temp unless VAR's definition has a SINGLE_RHS that
338 can be shared. */
339 usecount++;
340 break;
341 }
342 }
9845d120 343
688ff29b 344 if (!usecount)
345 return;
9845d120 346
688ff29b 347 if (gsi)
348 def_stmt = gsi_stmt (*gsi);
349 else
350 def_stmt = SSA_NAME_DEF_STMT (var);
9845d120 351
688ff29b 352 /* If we didn't get an insertion point, and the stmt has already
353 been removed, we won't be able to insert the debug bind stmt, so
354 we'll have to drop debug information. */
43e54ec3 355 if (gimple_code (def_stmt) == GIMPLE_PHI)
356 {
357 value = degenerate_phi_result (def_stmt);
358 if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
359 value = NULL;
a05063ec 360 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
361 to. */
362 else if (value == error_mark_node)
363 value = NULL;
43e54ec3 364 }
365 else if (is_gimple_assign (def_stmt))
688ff29b 366 {
367 bool no_value = false;
9845d120 368
688ff29b 369 if (!dom_info_available_p (CDI_DOMINATORS))
370 {
371 struct walk_stmt_info wi;
372
373 memset (&wi, 0, sizeof (wi));
374
375 /* When removing blocks without following reverse dominance
376 order, we may sometimes encounter SSA_NAMEs that have
377 already been released, referenced in other SSA_DEFs that
378 we're about to release. Consider:
379
380 <bb X>:
381 v_1 = foo;
382
383 <bb Y>:
384 w_2 = v_1 + bar;
385 # DEBUG w => w_2
386
387 If we deleted BB X first, propagating the value of w_2
388 won't do us any good. It's too late to recover their
389 original definition of v_1: when it was deleted, it was
390 only referenced in other DEFs, it couldn't possibly know
391 it should have been retained, and propagating every
392 single DEF just in case it might have to be propagated
393 into a DEBUG STMT would probably be too wasteful.
394
395 When dominator information is not readily available, we
396 check for and accept some loss of debug information. But
397 if it is available, there's no excuse for us to remove
398 blocks in the wrong order, so we don't even check for
399 dead SSA NAMEs. SSA verification shall catch any
400 errors. */
401 if ((!gsi && !gimple_bb (def_stmt))
7f9256fe 402 || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
688ff29b 403 no_value = true;
9845d120 404 }
405
688ff29b 406 if (!no_value)
407 value = gimple_assign_rhs_to_tree (def_stmt);
408 }
409
410 if (value)
411 {
412 /* If there's a single use of VAR, and VAR is the entire debug
413 expression (usecount would have been incremented again
414 otherwise), and the definition involves only constants and
415 SSA names, then we can propagate VALUE into this single use,
416 avoiding the temp.
417
418 We can also avoid using a temp if VALUE can be shared and
419 propagated into all uses, without generating expressions that
420 wouldn't be valid gimple RHSs.
421
422 Other cases that would require unsharing or non-gimple RHSs
423 are deferred to a debug temp, although we could avoid temps
424 at the expense of duplication of expressions. */
425
426 if (CONSTANT_CLASS_P (value)
43e54ec3 427 || gimple_code (def_stmt) == GIMPLE_PHI
688ff29b 428 || (usecount == 1
429 && (!gimple_assign_single_p (def_stmt)
430 || is_gimple_min_invariant (value)))
431 || is_gimple_reg (value))
432 value = unshare_expr (value);
433 else
9845d120 434 {
688ff29b 435 gimple def_temp;
436 tree vexpr = make_node (DEBUG_EXPR_DECL);
437
438 def_temp = gimple_build_debug_bind (vexpr,
439 unshare_expr (value),
440 def_stmt);
441
442 DECL_ARTIFICIAL (vexpr) = 1;
443 TREE_TYPE (vexpr) = TREE_TYPE (value);
444 if (DECL_P (value))
445 DECL_MODE (vexpr) = DECL_MODE (value);
446 else
447 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));
9845d120 448
688ff29b 449 if (gsi)
450 gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
451 else
9845d120 452 {
688ff29b 453 gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
454 gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
9845d120 455 }
456
688ff29b 457 value = vexpr;
9845d120 458 }
688ff29b 459 }
9845d120 460
688ff29b 461 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
462 {
463 if (!gimple_debug_bind_p (stmt))
464 continue;
465
466 if (value)
02a562db 467 {
468 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
469 /* unshare_expr is not needed here. vexpr is either a
470 SINGLE_RHS, that can be safely shared, some other RHS
471 that was unshared when we found it had a single debug
472 use, or a DEBUG_EXPR_DECL, that can be safely
473 shared. */
474 SET_USE (use_p, value);
475 /* If we didn't replace uses with a debug decl fold the
476 resulting expression. Otherwise we end up with invalid IL. */
477 if (TREE_CODE (value) != DEBUG_EXPR_DECL)
50aacf4c 478 {
479 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
480 fold_stmt_inplace (&gsi);
481 }
02a562db 482 }
688ff29b 483 else
484 gimple_debug_bind_reset_value (stmt);
9845d120 485
486 update_stmt (stmt);
487 }
488}
489
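/* Illustrative IL sketch (not taken from a real dump; the names are made
   up).  When the definition of x_1 below is about to be removed and x_1
   has more than one debug use, a debug temporary keeps the binds
   meaningful:

     x_1 = a_2 + b_3;
     # DEBUG d => x_1
     # DEBUG e => x_1

   becomes

     # DEBUG D#1 => a_2 + b_3
     # DEBUG d => D#1
     # DEBUG e => D#1

   With a single debug use the value is propagated directly into the bind
   and no temporary is created.  */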
490
688ff29b 491/* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
492 other DEBUG stmts, and replace uses of the DEF with the
493 newly-created debug temp. */
9845d120 494
495void
688ff29b 496insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
9845d120 497{
688ff29b 498 gimple stmt;
9845d120 499 ssa_op_iter op_iter;
500 def_operand_p def_p;
501
502 if (!MAY_HAVE_DEBUG_STMTS)
503 return;
504
688ff29b 505 stmt = gsi_stmt (*gsi);
506
43e54ec3 507 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
9845d120 508 {
509 tree var = DEF_FROM_PTR (def_p);
510
511 if (TREE_CODE (var) != SSA_NAME)
512 continue;
513
688ff29b 514 insert_debug_temp_for_var_def (gsi, var);
9845d120 515 }
516}
517
8ebd8f29 518/* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
519
520void
521reset_debug_uses (gimple stmt)
522{
523 ssa_op_iter op_iter;
524 def_operand_p def_p;
525 imm_use_iterator imm_iter;
526 gimple use_stmt;
527
528 if (!MAY_HAVE_DEBUG_STMTS)
529 return;
530
531 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
532 {
533 tree var = DEF_FROM_PTR (def_p);
534
535 if (TREE_CODE (var) != SSA_NAME)
536 continue;
537
538 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
539 {
540 if (!gimple_debug_bind_p (use_stmt))
541 continue;
542
543 gimple_debug_bind_reset_value (use_stmt);
544 update_stmt (use_stmt);
545 }
546 }
547}
548
1b223194 549/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
550 dominated stmts before their dominators, so that release_ssa_defs
551 stands a chance of propagating DEFs into debug bind stmts. */
552
553void
554release_defs_bitset (bitmap toremove)
555{
556 unsigned j;
557 bitmap_iterator bi;
558
 559 /* Performing a topological sort is probably overkill; this will
560 most likely run in slightly superlinear time, rather than the
561 pathological quadratic worst case. */
562 while (!bitmap_empty_p (toremove))
563 EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
564 {
565 bool remove_now = true;
566 tree var = ssa_name (j);
567 gimple stmt;
568 imm_use_iterator uit;
569
570 FOR_EACH_IMM_USE_STMT (stmt, uit, var)
571 {
572 ssa_op_iter dit;
573 def_operand_p def_p;
574
575 /* We can't propagate PHI nodes into debug stmts. */
576 if (gimple_code (stmt) == GIMPLE_PHI
577 || is_gimple_debug (stmt))
578 continue;
579
580 /* If we find another definition to remove that uses
581 the one we're looking at, defer the removal of this
582 one, so that it can be propagated into debug stmts
583 after the other is. */
584 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
585 {
586 tree odef = DEF_FROM_PTR (def_p);
587
588 if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
589 {
590 remove_now = false;
591 break;
592 }
593 }
594
595 if (!remove_now)
596 BREAK_FROM_IMM_USE_STMT (uit);
597 }
598
599 if (remove_now)
600 {
601 gimple def = SSA_NAME_DEF_STMT (var);
602 gimple_stmt_iterator gsi = gsi_for_stmt (def);
603
604 if (gimple_code (def) == GIMPLE_PHI)
605 remove_phi_node (&gsi, true);
606 else
607 {
608 gsi_remove (&gsi, true);
609 release_defs (def);
610 }
611
612 bitmap_clear_bit (toremove, j);
613 }
614 }
615}
616
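/* Ordering sketch (illustrative only; the names are made up).  With both
   definitions below queued in TOREMOVE,

     a_1 = ...;
     b_2 = a_1 + 1;
     # DEBUG x => b_2

   b_2 is released first so that its value can still be propagated into
   the debug bind; releasing a_1 first would leave nothing to propagate
   for b_2.  */
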
81d08033 617/* Return true if SSA_NAME is malformed and mark it visited.
4ee9c684 618
81d08033 619 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
620 operand. */
4ee9c684 621
622static bool
81d08033 623verify_ssa_name (tree ssa_name, bool is_virtual)
4ee9c684 624{
4ee9c684 625 if (TREE_CODE (ssa_name) != SSA_NAME)
626 {
0a81f5a0 627 error ("expected an SSA_NAME object");
81d08033 628 return true;
4ee9c684 629 }
630
cbbefea4 631 if (TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
632 {
0a81f5a0 633 error ("type mismatch between an SSA_NAME and its symbol");
cbbefea4 634 return true;
635 }
636
81d08033 637 if (SSA_NAME_IN_FREE_LIST (ssa_name))
638 {
0a81f5a0 639 error ("found an SSA_NAME that had been released into the free pool");
81d08033 640 return true;
641 }
642
643 if (is_virtual && is_gimple_reg (ssa_name))
644 {
0a81f5a0 645 error ("found a virtual definition for a GIMPLE register");
81d08033 646 return true;
647 }
648
dd277d48 649 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
650 {
651 error ("virtual SSA name for non-VOP decl");
652 return true;
653 }
654
81d08033 655 if (!is_virtual && !is_gimple_reg (ssa_name))
656 {
0a81f5a0 657 error ("found a real definition for a non-register");
81d08033 658 return true;
659 }
660
4fb5e5ca 661 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
75a70cf9 662 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
4fb5e5ca 663 {
664 error ("found a default name with a non-empty defining statement");
665 return true;
666 }
667
81d08033 668 return false;
669}
670
671
672/* Return true if the definition of SSA_NAME at block BB is malformed.
673
674 STMT is the statement where SSA_NAME is created.
675
676 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
677 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
678 it means that the block in that array slot contains the
679 definition of SSA_NAME.
680
4fb5e5ca 681 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
81d08033 682
683static bool
684verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
75a70cf9 685 gimple stmt, bool is_virtual)
81d08033 686{
687 if (verify_ssa_name (ssa_name, is_virtual))
688 goto err;
689
524a0531 690 if (TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
691 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
692 {
bf776685 693 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
524a0531 694 goto err;
695 }
696
4ee9c684 697 if (definition_block[SSA_NAME_VERSION (ssa_name)])
698 {
699 error ("SSA_NAME created in two different blocks %i and %i",
700 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
81d08033 701 goto err;
4ee9c684 702 }
703
704 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
705
706 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
707 {
708 error ("SSA_NAME_DEF_STMT is wrong");
4ee9c684 709 fprintf (stderr, "Expected definition statement:\n");
75a70cf9 710 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
4ee9c684 711 fprintf (stderr, "\nActual definition statement:\n");
75a70cf9 712 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
81d08033 713 goto err;
4ee9c684 714 }
715
81d08033 716 return false;
717
718err:
719 fprintf (stderr, "while verifying SSA_NAME ");
720 print_generic_expr (stderr, ssa_name, 0);
721 fprintf (stderr, " in statement\n");
75a70cf9 722 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
81d08033 723
724 return true;
4ee9c684 725}
726
727
728/* Return true if the use of SSA_NAME at statement STMT in block BB is
729 malformed.
730
731 DEF_BB is the block where SSA_NAME was found to be created.
732
733 IDOM contains immediate dominator information for the flowgraph.
734
735 CHECK_ABNORMAL is true if the caller wants to check whether this use
736 is flowing through an abnormal edge (only used when checking PHI
81d08033 737 arguments).
738
e599eacb 739 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
740 that are defined before STMT in basic block BB. */
4ee9c684 741
742static bool
22aa74c4 743verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
75a70cf9 744 gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
4ee9c684 745{
746 bool err = false;
22aa74c4 747 tree ssa_name = USE_FROM_PTR (use_p);
4ee9c684 748
22aa74c4 749 if (!TREE_VISITED (ssa_name))
750 if (verify_imm_links (stderr, ssa_name))
751 err = true;
752
06c1e160 753 TREE_VISITED (ssa_name) = 1;
81d08033 754
75a70cf9 755 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
4fb5e5ca 756 && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
81d08033 757 ; /* Default definitions have empty statements. Nothing to do. */
4ee9c684 758 else if (!def_bb)
759 {
0a81f5a0 760 error ("missing definition");
4ee9c684 761 err = true;
762 }
763 else if (bb != def_bb
764 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
765 {
0a81f5a0 766 error ("definition in block %i does not dominate use in block %i",
4ee9c684 767 def_bb->index, bb->index);
768 err = true;
769 }
e599eacb 770 else if (bb == def_bb
771 && names_defined_in_bb != NULL
772 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
773 {
0a81f5a0 774 error ("definition in block %i follows the use", def_bb->index);
e599eacb 775 err = true;
776 }
4ee9c684 777
778 if (check_abnormal
779 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
780 {
781 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
782 err = true;
783 }
784
48e1416a 785 /* Make sure the use is on an appropriate list by checking that the
20833d12 786 previous element refers to the same SSA name. */
22aa74c4 787 if (use_p->prev == NULL)
788 {
0a81f5a0 789 error ("no immediate_use list");
22aa74c4 790 err = true;
791 }
792 else
793 {
75a70cf9 794 tree listvar;
22aa74c4 795 if (use_p->prev->use == NULL)
75a70cf9 796 listvar = use_p->prev->loc.ssa_name;
22aa74c4 797 else
798 listvar = USE_FROM_PTR (use_p->prev);
799 if (listvar != ssa_name)
800 {
0a81f5a0 801 error ("wrong immediate use list");
22aa74c4 802 err = true;
803 }
804 }
805
4ee9c684 806 if (err)
807 {
808 fprintf (stderr, "for SSA_NAME: ");
94dcc41f 809 print_generic_expr (stderr, ssa_name, TDF_VOPS);
88dbf20f 810 fprintf (stderr, " in statement:\n");
75a70cf9 811 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
4ee9c684 812 }
813
814 return err;
815}
816
817
818/* Return true if any of the arguments for PHI node PHI at block BB is
819 malformed.
820
4fb5e5ca 821 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
822 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
823 it means that the block in that array slot contains the
824 definition of SSA_NAME. */
4ee9c684 825
826static bool
75a70cf9 827verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
4ee9c684 828{
829 edge e;
830 bool err = false;
75a70cf9 831 size_t i, phi_num_args = gimple_phi_num_args (phi);
4ee9c684 832
79aae762 833 if (EDGE_COUNT (bb->preds) != phi_num_args)
834 {
0a81f5a0 835 error ("incoming edge count does not match number of PHI arguments");
79aae762 836 err = true;
837 goto error;
838 }
839
4ee9c684 840 for (i = 0; i < phi_num_args; i++)
841 {
75a70cf9 842 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
22aa74c4 843 tree op = USE_FROM_PTR (op_p);
844
e0ebfc19 845 e = EDGE_PRED (bb, i);
cc890649 846
847 if (op == NULL_TREE)
848 {
0a81f5a0 849 error ("PHI argument is missing for edge %d->%d",
cc890649 850 e->src->index,
851 e->dest->index);
852 err = true;
853 goto error;
854 }
855
79aae762 856 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
857 {
858 error ("PHI argument is not SSA_NAME, or invariant");
859 err = true;
860 }
861
4ee9c684 862 if (TREE_CODE (op) == SSA_NAME)
4fb5e5ca 863 {
75a70cf9 864 err = verify_ssa_name (op, !is_gimple_reg (gimple_phi_result (phi)));
4fb5e5ca 865 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
866 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
867 }
4ee9c684 868
86f2ad37 869 if (TREE_CODE (op) == ADDR_EXPR)
870 {
871 tree base = TREE_OPERAND (op, 0);
872 while (handled_component_p (base))
873 base = TREE_OPERAND (base, 0);
874 if ((TREE_CODE (base) == VAR_DECL
875 || TREE_CODE (base) == PARM_DECL
876 || TREE_CODE (base) == RESULT_DECL)
877 && !TREE_ADDRESSABLE (base))
878 {
879 error ("address taken, but ADDRESSABLE bit not set");
880 err = true;
881 }
882 }
883
4ee9c684 884 if (e->dest != bb)
885 {
0a81f5a0 886 error ("wrong edge %d->%d for PHI argument",
7781aa77 887 e->src->index, e->dest->index);
4ee9c684 888 err = true;
889 }
890
4ee9c684 891 if (err)
892 {
893 fprintf (stderr, "PHI argument\n");
94dcc41f 894 print_generic_stmt (stderr, op, TDF_VOPS);
81d08033 895 goto error;
4ee9c684 896 }
4ee9c684 897 }
898
81d08033 899error:
4ee9c684 900 if (err)
901 {
902 fprintf (stderr, "for PHI node\n");
75a70cf9 903 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
4ee9c684 904 }
905
906
907 return err;
908}
909
910
911/* Verify common invariants in the SSA web.
912 TODO: verify the variable annotations. */
913
4b987fac 914DEBUG_FUNCTION void
22aa74c4 915verify_ssa (bool check_modified_stmt)
4ee9c684 916{
81d08033 917 size_t i;
4ee9c684 918 basic_block bb;
680a19b9 919 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
43daa21e 920 ssa_op_iter iter;
921 tree op;
50b08d37 922 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
27335ffd 923 bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
4ee9c684 924
dd277d48 925 gcc_assert (!need_ssa_update_p (cfun));
095dcfa3 926
4ee9c684 927 timevar_push (TV_TREE_SSA_VERIFY);
928
81d08033 929 /* Keep track of SSA names present in the IL. */
930 for (i = 1; i < num_ssa_names; i++)
4ee9c684 931 {
79aae762 932 tree name = ssa_name (i);
933 if (name)
4ee9c684 934 {
75a70cf9 935 gimple stmt;
79aae762 936 TREE_VISITED (name) = 0;
4ee9c684 937
85f3d834 938 verify_ssa_name (name, !is_gimple_reg (name));
939
79aae762 940 stmt = SSA_NAME_DEF_STMT (name);
75a70cf9 941 if (!gimple_nop_p (stmt))
81d08033 942 {
75a70cf9 943 basic_block bb = gimple_bb (stmt);
79aae762 944 verify_def (bb, definition_block,
945 name, stmt, !is_gimple_reg (name));
946
4ee9c684 947 }
948 }
949 }
950
79aae762 951 calculate_dominance_info (CDI_DOMINATORS);
4ee9c684 952
953 /* Now verify all the uses and make sure they agree with the definitions
954 found in the previous pass. */
955 FOR_EACH_BB (bb)
956 {
957 edge e;
75a70cf9 958 gimple phi;
cd665a06 959 edge_iterator ei;
75a70cf9 960 gimple_stmt_iterator gsi;
4ee9c684 961
962 /* Make sure that all edges have a clear 'aux' field. */
cd665a06 963 FOR_EACH_EDGE (e, ei, bb->preds)
4ee9c684 964 {
965 if (e->aux)
966 {
0a81f5a0 967 error ("AUX pointer initialized for edge %d->%d", e->src->index,
4ee9c684 968 e->dest->index);
81d08033 969 goto err;
4ee9c684 970 }
971 }
972
973 /* Verify the arguments for every PHI node in the block. */
75a70cf9 974 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
e599eacb 975 {
75a70cf9 976 phi = gsi_stmt (gsi);
e599eacb 977 if (verify_phi_args (phi, bb, definition_block))
978 goto err;
4fb5e5ca 979
e599eacb 980 bitmap_set_bit (names_defined_in_bb,
75a70cf9 981 SSA_NAME_VERSION (gimple_phi_result (phi)));
e599eacb 982 }
4ee9c684 983
81d08033 984 /* Now verify all the uses and vuses in every statement of the block. */
75a70cf9 985 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4ee9c684 986 {
75a70cf9 987 gimple stmt = gsi_stmt (gsi);
22aa74c4 988 use_operand_p use_p;
989
75a70cf9 990 if (check_modified_stmt && gimple_modified_p (stmt))
22aa74c4 991 {
4fb5e5ca 992 error ("stmt (%p) marked modified after optimization pass: ",
b66731e8 993 (void *)stmt);
75a70cf9 994 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
22aa74c4 995 goto err;
996 }
4ee9c684 997
85f3d834 998 if (verify_ssa_operands (stmt))
dd277d48 999 {
85f3d834 1000 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
dd277d48 1001 goto err;
4fb5e5ca 1002 }
1003
85f3d834 1004 if (gimple_debug_bind_p (stmt)
1005 && !gimple_debug_bind_has_value_p (stmt))
1006 continue;
4fb5e5ca 1007
1008 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
4ee9c684 1009 {
22aa74c4 1010 op = USE_FROM_PTR (use_p);
81d08033 1011 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
4fb5e5ca 1012 use_p, stmt, false, names_defined_in_bb))
e599eacb 1013 goto err;
1014 }
1015
1016 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
dd277d48 1017 {
1018 if (SSA_NAME_DEF_STMT (op) != stmt)
1019 {
1020 error ("SSA_NAME_DEF_STMT is wrong");
1021 fprintf (stderr, "Expected definition statement:\n");
1022 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1023 fprintf (stderr, "\nActual definition statement:\n");
1024 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1025 4, TDF_VOPS);
1026 goto err;
1027 }
1028 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1029 }
e599eacb 1030 }
1031
e599eacb 1032 bitmap_clear (names_defined_in_bb);
4ee9c684 1033 }
1034
81d08033 1035 free (definition_block);
095dcfa3 1036
c26a6416 1037 /* Restore the dominance information to its prior known state, so
1038 that we do not perturb the compiler's subsequent behavior. */
08323300 1039 if (orig_dom_state == DOM_NONE)
1040 free_dominance_info (CDI_DOMINATORS);
1041 else
50b08d37 1042 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
48e1416a 1043
27335ffd 1044 BITMAP_FREE (names_defined_in_bb);
4ee9c684 1045 timevar_pop (TV_TREE_SSA_VERIFY);
81d08033 1046 return;
4ee9c684 1047
81d08033 1048err:
0a81f5a0 1049 internal_error ("verify_ssa failed");
4ee9c684 1050}
1051
a55dc2cd 1052/* Return true if the UIDs in both int_tree_maps are equal. */
1053
1054int
1055int_tree_map_eq (const void *va, const void *vb)
1056{
680a19b9 1057 const struct int_tree_map *a = (const struct int_tree_map *) va;
1058 const struct int_tree_map *b = (const struct int_tree_map *) vb;
a55dc2cd 1059 return (a->uid == b->uid);
1060}
1061
1062/* Hash a UID in a int_tree_map. */
1063
1064unsigned int
1065int_tree_map_hash (const void *item)
1066{
1067 return ((const struct int_tree_map *)item)->uid;
1068}
1069
c0ff55b3 1070/* Return true if the DECL_UIDs of both trees are equal. */
1071
1072int
1073uid_decl_map_eq (const void *va, const void *vb)
1074{
1075 const_tree a = (const_tree) va;
1076 const_tree b = (const_tree) vb;
1077 return (a->decl_minimal.uid == b->decl_minimal.uid);
1078}
1079
1080/* Hash a tree in a uid_decl_map. */
1081
1082unsigned int
1083uid_decl_map_hash (const void *item)
1084{
1085 return ((const_tree)item)->decl_minimal.uid;
1086}
1087
24239da3 1088/* Return true if the DECL_UIDs of both SSA names' variables are equal. */
1089
1090static int
1091uid_ssaname_map_eq (const void *va, const void *vb)
1092{
1093 const_tree a = (const_tree) va;
1094 const_tree b = (const_tree) vb;
1095 return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid);
1096}
1097
 1098/* Hash an SSA name in a uid_ssaname_map by its variable's DECL_UID. */
1099
1100static unsigned int
1101uid_ssaname_map_hash (const void *item)
1102{
1103 return ((const_tree)item)->ssa_name.var->decl_minimal.uid;
1104}
1105
4ee9c684 1106
4ee9c684 1107/* Initialize global DFA and SSA structures. */
1108
1109void
bcaa2770 1110init_tree_ssa (struct function *fn)
4ee9c684 1111{
ba72912a 1112 fn->gimple_df = ggc_alloc_cleared_gimple_df ();
48e1416a 1113 fn->gimple_df->referenced_vars = htab_create_ggc (20, uid_decl_map_hash,
bcaa2770 1114 uid_decl_map_eq, NULL);
48e1416a 1115 fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
bcaa2770 1116 uid_ssaname_map_eq, NULL);
dd277d48 1117 pt_solution_reset (&fn->gimple_df->escaped);
bcaa2770 1118 init_ssanames (fn, 0);
4ee9c684 1119}
1120
3db65b62 1121/* Do the actions required to initialize internal data structures used
1122 in tree-ssa optimization passes. */
1123
1124static unsigned int
1125execute_init_datastructures (void)
1126{
1127 /* Allocate hash tables, arrays and other structures. */
1128 init_tree_ssa (cfun);
1129 return 0;
1130}
1131
1132struct gimple_opt_pass pass_init_datastructures =
1133{
1134 {
1135 GIMPLE_PASS,
1136 "*init_datastructures", /* name */
1137 NULL, /* gate */
1138 execute_init_datastructures, /* execute */
1139 NULL, /* sub */
1140 NULL, /* next */
1141 0, /* static_pass_number */
1142 TV_NONE, /* tv_id */
1143 PROP_cfg, /* properties_required */
1144 0, /* properties_provided */
1145 0, /* properties_destroyed */
1146 0, /* todo_flags_start */
1147 0 /* todo_flags_finish */
1148 }
1149};
4ee9c684 1150
1151/* Deallocate memory associated with SSA data structures for FNDECL. */
1152
1153void
1154delete_tree_ssa (void)
1155{
a55dc2cd 1156 referenced_var_iterator rvi;
1157 tree var;
4ee9c684 1158
dd7b9387 1159 /* Remove annotations from every referenced local variable. */
a23e00ae 1160 FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
4ee9c684 1161 {
dd277d48 1162 if (is_global_var (var))
1163 continue;
8cee8dc0 1164 if (var_ann (var))
1165 {
1166 ggc_free (var_ann (var));
1167 *DECL_VAR_ANN_PTR (var) = NULL;
1168 }
4ee9c684 1169 }
c0ff55b3 1170 htab_delete (gimple_referenced_vars (cfun));
2d04fd8d 1171 cfun->gimple_df->referenced_vars = NULL;
4ee9c684 1172
1173 fini_ssanames ();
75a70cf9 1174
1175 /* We no longer maintain the SSA operand cache at this point. */
94b3ed0b 1176 if (ssa_operands_active ())
1177 fini_ssa_operands ();
4ee9c684 1178
2d04fd8d 1179 htab_delete (cfun->gimple_df->default_defs);
f5ab006b 1180 cfun->gimple_df->default_defs = NULL;
dd277d48 1181 pt_solution_reset (&cfun->gimple_df->escaped);
3a443843 1182 if (cfun->gimple_df->decls_to_pointers != NULL)
1183 pointer_map_destroy (cfun->gimple_df->decls_to_pointers);
1184 cfun->gimple_df->decls_to_pointers = NULL;
2d04fd8d 1185 cfun->gimple_df->modified_noreturn_calls = NULL;
f5ab006b 1186 cfun->gimple_df = NULL;
d03ba86f 1187
1188 /* We no longer need the edge variable maps. */
1189 redirect_edge_var_map_destroy ();
4ee9c684 1190}
1191
4cf75304 1192/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
1193 useless type conversion, otherwise return false.
4ee9c684 1194
4cf75304 1195 This function implicitly defines the middle-end type system. With
1196 the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
1197 holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
1198 the following invariants shall be fulfilled:
1199
1200 1) useless_type_conversion_p is transitive.
1201 If a < b and b < c then a < c.
1202
1203 2) useless_type_conversion_p is not symmetric.
1204 From a < b does not follow a > b.
1205
1206 3) Types define the available set of operations applicable to values.
1207 A type conversion is useless if the operations for the target type
1208 is a subset of the operations for the source type. For example
1209 casts to void* are useless, casts from void* are not (void* can't
1210 be dereferenced or offsetted, but copied, hence its set of operations
1211 is a strict subset of that of all other data pointer types). Casts
1212 to const T* are useless (can't be written to), casts from const T*
1213 to T* are not. */
1214
1215bool
1216useless_type_conversion_p (tree outer_type, tree inner_type)
4ee9c684 1217{
fb8ed03f 1218 /* Do the following before stripping toplevel qualifiers. */
1219 if (POINTER_TYPE_P (inner_type)
1220 && POINTER_TYPE_P (outer_type))
1221 {
bd1a81f7 1222 /* Do not lose casts between pointers to different address spaces. */
1223 if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
1224 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
1225 return false;
fb8ed03f 1226 }
1227
1228 /* From now on qualifiers on value types do not matter. */
f757850a 1229 inner_type = TYPE_MAIN_VARIANT (inner_type);
1230 outer_type = TYPE_MAIN_VARIANT (outer_type);
1231
a9fc9109 1232 if (inner_type == outer_type)
1233 return true;
1234
73bb980c 1235 /* If we know the canonical types, compare them. */
1236 if (TYPE_CANONICAL (inner_type)
1237 && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
1238 return true;
1239
9fa67218 1240 /* Changes in machine mode are never useless conversions unless we
1241 deal with aggregate types in which case we defer to later checks. */
1242 if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type)
1243 && !AGGREGATE_TYPE_P (inner_type))
a9fc9109 1244 return false;
1245
5bd657e9 1246 /* If both the inner and outer types are integral types, then the
1247 conversion is not necessary if they have the same mode and
1248 signedness and precision, and both or neither are boolean. */
1249 if (INTEGRAL_TYPE_P (inner_type)
1250 && INTEGRAL_TYPE_P (outer_type))
1251 {
1252 /* Preserve changes in signedness or precision. */
1253 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
1254 || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
1255 return false;
1256
4b5f1658 1257 /* Preserve conversions to/from BOOLEAN_TYPE if types are not
1258 of precision one. */
1259 if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
1260 != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
9ad3a576 1261 && TYPE_PRECISION (outer_type) != 1)
1262 return false;
1263
79307120 1264 /* We don't need to preserve changes in the types' minimum or
 1265 maximum value in general, as these do not generate code
 1266 unless the types' precisions differ. */
5bd657e9 1267 return true;
1268 }
a3986bc0 1269
f757850a 1270 /* Scalar floating point types with the same mode are compatible. */
1271 else if (SCALAR_FLOAT_TYPE_P (inner_type)
1272 && SCALAR_FLOAT_TYPE_P (outer_type))
1273 return true;
1274
2ed8de55 1275 /* Fixed point types with the same mode are compatible. */
1276 else if (FIXED_POINT_TYPE_P (inner_type)
1277 && FIXED_POINT_TYPE_P (outer_type))
1278 return true;
1279
5bd657e9 1280 /* We need to take special care recursing to pointed-to types. */
4ee9c684 1281 else if (POINTER_TYPE_P (inner_type)
5bd657e9 1282 && POINTER_TYPE_P (outer_type))
1283 {
182cf5a9 1284 /* Do not lose casts to function pointer types. */
1285 if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
1286 || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
39f59e65 1287 && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
1288 || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
5bd657e9 1289 return false;
1290
8ac2d49b 1291 /* We do not care for const qualification of the pointed-to types
1292 as const qualification has no semantic value to the middle-end. */
f757850a 1293
182cf5a9 1294 /* Otherwise pointers/references are equivalent. */
1295 return true;
4f7f7efd 1296 }
4ee9c684 1297
1298 /* Recurse for complex types. */
1299 else if (TREE_CODE (inner_type) == COMPLEX_TYPE
5bd657e9 1300 && TREE_CODE (outer_type) == COMPLEX_TYPE)
6b42d67d 1301 return useless_type_conversion_p (TREE_TYPE (outer_type),
1302 TREE_TYPE (inner_type));
4ee9c684 1303
f757850a 1304 /* Recurse for vector types with the same number of subparts. */
1305 else if (TREE_CODE (inner_type) == VECTOR_TYPE
1306 && TREE_CODE (outer_type) == VECTOR_TYPE
1307 && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
6b42d67d 1308 return useless_type_conversion_p (TREE_TYPE (outer_type),
1309 TREE_TYPE (inner_type));
f757850a 1310
9fa67218 1311 else if (TREE_CODE (inner_type) == ARRAY_TYPE
1312 && TREE_CODE (outer_type) == ARRAY_TYPE)
f757850a 1313 {
9fa67218 1314 /* Preserve string attributes. */
1315 if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
f757850a 1316 return false;
1317
9225ad44 1318 /* Conversions from array types with unknown extent to
1319 array types with known extent are not useless. */
9fa67218 1320 if (!TYPE_DOMAIN (inner_type)
9225ad44 1321 && TYPE_DOMAIN (outer_type))
1322 return false;
1323
9fa67218 1324 /* Nor are conversions from array types with non-constant size to
 1325 array types with constant size or to a different size.
1326 if (TYPE_SIZE (outer_type)
1327 && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
1328 && (!TYPE_SIZE (inner_type)
1329 || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
1330 || !tree_int_cst_equal (TYPE_SIZE (outer_type),
1331 TYPE_SIZE (inner_type))))
1332 return false;
1333
1334 /* Check conversions between arrays with partially known extents.
1335 If the array min/max values are constant they have to match.
1336 Otherwise allow conversions to unknown and variable extents.
1337 In particular this declares conversions that may change the
1338 mode to BLKmode as useless. */
1339 if (TYPE_DOMAIN (inner_type)
1340 && TYPE_DOMAIN (outer_type)
1341 && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
1342 {
1343 tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
1344 tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
1345 tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
1346 tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));
1347
1348 /* After gimplification a variable min/max value carries no
1349 additional information compared to a NULL value. All that
1350 matters has been lowered to be part of the IL. */
1351 if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
1352 inner_min = NULL_TREE;
1353 if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
1354 outer_min = NULL_TREE;
1355 if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
1356 inner_max = NULL_TREE;
1357 if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
1358 outer_max = NULL_TREE;
1359
1360 /* Conversions NULL / variable <- cst are useless, but not
1361 the other way around. */
1362 if (outer_min
1363 && (!inner_min
1364 || !tree_int_cst_equal (inner_min, outer_min)))
1365 return false;
1366 if (outer_max
1367 && (!inner_max
1368 || !tree_int_cst_equal (inner_max, outer_max)))
1369 return false;
1370 }
1371
1372 /* Recurse on the element check. */
1373 return useless_type_conversion_p (TREE_TYPE (outer_type),
1374 TREE_TYPE (inner_type));
1375 }
1376
1377 else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
1378 || TREE_CODE (inner_type) == METHOD_TYPE)
1379 && TREE_CODE (inner_type) == TREE_CODE (outer_type))
1380 {
1381 tree outer_parm, inner_parm;
1382
1383 /* If the return types are not compatible bail out. */
1384 if (!useless_type_conversion_p (TREE_TYPE (outer_type),
1385 TREE_TYPE (inner_type)))
1386 return false;
1387
1388 /* Method types should belong to a compatible base class. */
1389 if (TREE_CODE (inner_type) == METHOD_TYPE
1390 && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
1391 TYPE_METHOD_BASETYPE (inner_type)))
1392 return false;
1393
1394 /* A conversion to an unprototyped argument list is ok. */
a36cf284 1395 if (!prototype_p (outer_type))
9fa67218 1396 return true;
1397
9fc8f414 1398 /* If the unqualified argument types are compatible the conversion
1399 is useless. */
9fa67218 1400 if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
1401 return true;
1402
1403 for (outer_parm = TYPE_ARG_TYPES (outer_type),
1404 inner_parm = TYPE_ARG_TYPES (inner_type);
1405 outer_parm && inner_parm;
1406 outer_parm = TREE_CHAIN (outer_parm),
1407 inner_parm = TREE_CHAIN (inner_parm))
9fc8f414 1408 if (!useless_type_conversion_p
1409 (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
1410 TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
9fa67218 1411 return false;
1412
1413 /* If there is a mismatch in the number of arguments the functions
1414 are not compatible. */
1415 if (outer_parm || inner_parm)
1416 return false;
1417
1418 /* Defer to the target if necessary. */
1419 if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
309303cf 1420 return comp_type_attributes (outer_type, inner_type) != 0;
9fa67218 1421
1422 return true;
1423 }
1424
dfe8e806 1425 /* For aggregates we rely on TYPE_CANONICAL exclusively and require
 1426 explicit conversions for types that have to be structurally
 1427 compared. */
9fa67218 1428 else if (AGGREGATE_TYPE_P (inner_type)
1429 && TREE_CODE (inner_type) == TREE_CODE (outer_type))
7a3ccc3b 1430 return false;
48e1416a 1431
f757850a 1432 return false;
4ee9c684 1433}
1434
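/* A few hedged examples of how the checks above play out (illustrative
   only, under the usual C type mappings):

     int    -> unsigned int    : not useless (signedness differs)
     int    -> typedef of int  : useless (same main variant)
     float  -> double          : not useless (machine mode changes)
     int *  -> char *          : useless (data pointers are treated as
                                 equivalent here, modulo address spaces)
     char * -> void (*)(void)  : not useless (cast to a function pointer
                                 type)  */
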
c8ca3ee7 1435/* Return true if a conversion from either type of TYPE1 and TYPE2
1436 to the other is not required. Otherwise return false. */
1437
1438bool
1439types_compatible_p (tree type1, tree type2)
1440{
1441 return (type1 == type2
1442 || (useless_type_conversion_p (type1, type2)
1443 && useless_type_conversion_p (type2, type1)));
1444}
1445
4ee9c684 1446/* Return true if EXPR is a useless type conversion, otherwise return
1447 false. */
1448
1449bool
1450tree_ssa_useless_type_conversion (tree expr)
1451{
1452 /* If we have an assignment that merely uses a NOP_EXPR to change
1453 the top of the RHS to the type of the LHS and the type conversion
1454 is "safe", then strip away the type conversion so that we can
1455 enter LHS = RHS into the const_and_copies table. */
72dd6141 1456 if (CONVERT_EXPR_P (expr)
b6b93727 1457 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1458 || TREE_CODE (expr) == NON_LVALUE_EXPR)
548044d8 1459 return useless_type_conversion_p
c2f47e15 1460 (TREE_TYPE (expr),
75a70cf9 1461 TREE_TYPE (TREE_OPERAND (expr, 0)));
4ee9c684 1462
1463 return false;
1464}
1465
98112881 1466/* Strip conversions from EXP according to
1467 tree_ssa_useless_type_conversion and return the resulting
1468 expression. */
1469
1470tree
1471tree_ssa_strip_useless_type_conversions (tree exp)
1472{
1473 while (tree_ssa_useless_type_conversion (exp))
1474 exp = TREE_OPERAND (exp, 0);
1475 return exp;
1476}
1477
4ee9c684 1478
1479/* Internal helper for walk_use_def_chains. VAR, FN and DATA are as
81d08033 1480 described in walk_use_def_chains.
48e1416a 1481
dd940949 1482 VISITED is a pointer set used to mark visited SSA_NAMEs to avoid
1483 infinite loops. We used to have a bitmap for this to just mark
1484 SSA versions we had visited. But non-sparse bitmaps are way too
1485 expensive, while sparse bitmaps may cause quadratic behavior.
81d08033 1486
1487 IS_DFS is true if the caller wants to perform a depth-first search
1488 when visiting PHI nodes. A DFS will visit each PHI argument and
1489 call FN after each one. Otherwise, all the arguments are
1490 visited first and then FN is called with each of the visited
1491 arguments in a separate pass. */
4ee9c684 1492
1493static bool
1494walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
dd940949 1495 struct pointer_set_t *visited, bool is_dfs)
4ee9c684 1496{
75a70cf9 1497 gimple def_stmt;
4ee9c684 1498
dd940949 1499 if (pointer_set_insert (visited, var))
4ee9c684 1500 return false;
1501
4ee9c684 1502 def_stmt = SSA_NAME_DEF_STMT (var);
1503
75a70cf9 1504 if (gimple_code (def_stmt) != GIMPLE_PHI)
4ee9c684 1505 {
1506 /* If we reached the end of the use-def chain, call FN. */
81d08033 1507 return fn (var, def_stmt, data);
4ee9c684 1508 }
1509 else
1510 {
75a70cf9 1511 size_t i;
4ee9c684 1512
81d08033 1513 /* When doing a breadth-first search, call FN before following the
1514 use-def links for each argument. */
1515 if (!is_dfs)
75a70cf9 1516 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1517 if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
81d08033 1518 return true;
1519
1520 /* Follow use-def links out of each PHI argument. */
75a70cf9 1521 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
4ee9c684 1522 {
75a70cf9 1523 tree arg = gimple_phi_arg_def (def_stmt, i);
4fb5e5ca 1524
1525 /* ARG may be NULL for newly introduced PHI nodes. */
1526 if (arg
1527 && TREE_CODE (arg) == SSA_NAME
81d08033 1528 && walk_use_def_chains_1 (arg, fn, data, visited, is_dfs))
4ee9c684 1529 return true;
1530 }
81d08033 1531
1532 /* When doing a depth-first search, call FN after following the
1533 use-def links for each argument. */
1534 if (is_dfs)
75a70cf9 1535 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1536 if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
81d08033 1537 return true;
4ee9c684 1538 }
48e1416a 1539
4ee9c684 1540 return false;
1541}
48e1416a 1542
4ee9c684 1543
1544
81d08033 1545/* Walk use-def chains starting at the SSA variable VAR. Call
1546 function FN at each reaching definition found. FN takes three
1547 arguments: VAR, its defining statement (DEF_STMT) and a generic
 1548 pointer to whatever state information FN may want to maintain
 1549 (DATA). FN can stop the walk by returning true; to continue the
48e1416a 1550 walk, FN should return false.
4ee9c684 1551
 1552 Note that if DEF_STMT is a PHI node, the semantics are slightly
81d08033 1553 different. The first argument to FN is no longer the original
1554 variable VAR, but the PHI argument currently being examined. If FN
1555 wants to get at VAR, it should call PHI_RESULT (PHI).
1556
1557 If IS_DFS is true, this function will:
4ee9c684 1558
81d08033 1559 1- walk the use-def chains for all the PHI arguments, and,
1560 2- call (*FN) (ARG, PHI, DATA) on all the PHI arguments.
1561
1562 If IS_DFS is false, the two steps above are done in reverse order
1563 (i.e., a breadth-first search). */
4ee9c684 1564
4ee9c684 1565void
81d08033 1566walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
1567 bool is_dfs)
4ee9c684 1568{
75a70cf9 1569 gimple def_stmt;
4ee9c684 1570
8c0963c4 1571 gcc_assert (TREE_CODE (var) == SSA_NAME);
4ee9c684 1572
1573 def_stmt = SSA_NAME_DEF_STMT (var);
1574
1575 /* We only need to recurse if the reaching definition comes from a PHI
1576 node. */
75a70cf9 1577 if (gimple_code (def_stmt) != GIMPLE_PHI)
4ee9c684 1578 (*fn) (var, def_stmt, data);
1579 else
1580 {
dd940949 1581 struct pointer_set_t *visited = pointer_set_create ();
81d08033 1582 walk_use_def_chains_1 (var, fn, data, visited, is_dfs);
dd940949 1583 pointer_set_destroy (visited);
4ee9c684 1584 }
1585}
1586
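/* A minimal usage sketch (illustrative only; the callback and counter
   are hypothetical, using the (tree, gimple, void *) callback signature
   expected above):

     static bool
     count_reaching_defs (tree var ATTRIBUTE_UNUSED,
                          gimple def_stmt ATTRIBUTE_UNUSED, void *data)
     {
       (*(unsigned *) data)++;
       return false;
     }

     unsigned n = 0;
     walk_use_def_chains (name, count_reaching_defs, &n, false);

   Returning false from the callback continues the walk; returning true
   stops it.  */
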
4ee9c684 1587\f
1588/* Emit warnings for uninitialized variables. This is done in two passes.
1589
a70770d2 1590 The first pass notices real uses of SSA names with undefined values.
4ee9c684 1591 Such uses are unconditionally uninitialized, and we can be certain that
1592 such a use is a mistake. This pass is run before most optimizations,
1593 so that we catch as many as we can.
1594
1595 The second pass follows PHI nodes to find uses that are potentially
1596 uninitialized. In this case we can't necessarily prove that the use
1597 is really uninitialized. This pass is run after most optimizations,
 1598 so that we thread as many jumps as possible, and delete as much dead
1599 code as possible, in order to reduce false positives. We also look
1600 again for plain uninitialized variables, since optimization may have
1601 changed conditionally uninitialized to unconditionally uninitialized. */
1602
33da34ea 1603/* Emit a warning for EXPR, based on variable VAR, at the point in the
 1604 program where T, an SSA_NAME, is used uninitialized. The warning
8c3e8d68 1605 text is in GMSGID; DATA may carry the GIMPLE statement that provides
 1606 the location. WC is the warning code. */
4ee9c684 1607
a7d4604b 1608void
33da34ea 1609warn_uninit (enum opt_code wc, tree t,
1610 tree expr, tree var, const char *gmsgid, void *data)
4ee9c684 1611{
75a70cf9 1612 gimple context = (gimple) data;
18f0e0e5 1613 location_t location, cfun_loc;
960217d2 1614 expanded_location xloc, floc;
4ee9c684 1615
a70770d2 1616 if (!ssa_undefined_value_p (t))
4ee9c684 1617 return;
1618
1619 /* TREE_NO_WARNING either means we already warned, or the front end
1620 wishes to suppress the warning. */
33da34ea 1621 if ((context
1622 && (gimple_no_warning_p (context)
1623 || (gimple_assign_single_p (context)
1624 && TREE_NO_WARNING (gimple_assign_rhs1 (context)))))
1625 || TREE_NO_WARNING (expr))
1c32170e 1626 return;
48e1416a 1627
75a70cf9 1628 location = (context != NULL && gimple_has_location (context))
1629 ? gimple_location (context)
1630 : DECL_SOURCE_LOCATION (var);
18f0e0e5 1631 location = linemap_resolve_location (line_table, location,
1632 LRK_SPELLING_LOCATION,
1633 NULL);
1634 cfun_loc = DECL_SOURCE_LOCATION (cfun->decl);
75a70cf9 1635 xloc = expand_location (location);
18f0e0e5 1636 floc = expand_location (cfun_loc);
33da34ea 1637 if (warning_at (location, wc, gmsgid, expr))
a52d5726 1638 {
33da34ea 1639 TREE_NO_WARNING (expr) = 1;
4f652d47 1640
8e3b5e90 1641 if (location == DECL_SOURCE_LOCATION (var))
1642 return;
a52d5726 1643 if (xloc.file != floc.file
18f0e0e5 1644 || linemap_location_before_p (line_table,
1645 location, cfun_loc)
1646 || linemap_location_before_p (line_table,
1647 cfun->function_end_locus,
1648 location))
712d2297 1649 inform (DECL_SOURCE_LOCATION (var), "%qD was declared here", var);
a52d5726 1650 }
4ee9c684 1651}
25084579 1652
a7d4604b 1653unsigned int
6955de20 1654warn_uninitialized_vars (bool warn_possibly_uninitialized)
4ee9c684 1655{
75a70cf9 1656 gimple_stmt_iterator gsi;
4ee9c684 1657 basic_block bb;
1658
1659 FOR_EACH_BB (bb)
25084579 1660 {
33da34ea 1661 bool always_executed = dominated_by_p (CDI_POST_DOMINATORS,
25084579 1662 single_succ (ENTRY_BLOCK_PTR), bb);
75a70cf9 1663 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1664 {
33da34ea 1665 gimple stmt = gsi_stmt (gsi);
1666 use_operand_p use_p;
1667 ssa_op_iter op_iter;
1668 tree use;
1669
1670 if (is_gimple_debug (stmt))
9845d120 1671 continue;
33da34ea 1672
1673 /* We only do data flow with SSA_NAMEs, so that's all we
1674 can warn about. */
1675 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, op_iter, SSA_OP_USE)
1676 {
1677 use = USE_FROM_PTR (use_p);
1678 if (always_executed)
1679 warn_uninit (OPT_Wuninitialized, use,
1680 SSA_NAME_VAR (use), SSA_NAME_VAR (use),
1681 "%qD is used uninitialized in this function",
1682 stmt);
1683 else if (warn_possibly_uninitialized)
1684 warn_uninit (OPT_Wuninitialized, use,
1685 SSA_NAME_VAR (use), SSA_NAME_VAR (use),
1686 "%qD may be used uninitialized in this function",
1687 stmt);
1688 }
1689
1690 /* For memory the only cheap thing we can do is see if we
1691 have a use of the default def of the virtual operand.
1692 ??? Note that at -O0 we do not have virtual operands.
1693 ??? Not so cheap would be to use the alias oracle via
1694 walk_aliased_vdefs, if we don't find any aliasing vdef
1695 warn as is-used-uninitialized, if we don't find an aliasing
1696 vdef that kills our use (stmt_kills_ref_p), warn as
1697 may-be-used-uninitialized. But this walk is quadratic and
1698 so must be limited which means we would miss warning
1699 opportunities. */
1700 use = gimple_vuse (stmt);
1701 if (use
1702 && gimple_assign_single_p (stmt)
1703 && !gimple_vdef (stmt)
1704 && SSA_NAME_IS_DEFAULT_DEF (use))
1705 {
1706 tree rhs = gimple_assign_rhs1 (stmt);
1707 tree base = get_base_address (rhs);
1708
1709 /* Do not warn if it can be initialized outside this function. */
1710 if (TREE_CODE (base) != VAR_DECL
1711 || DECL_HARD_REGISTER (base)
1712 || is_global_var (base))
1713 continue;
1714
1715 if (always_executed)
1716 warn_uninit (OPT_Wuninitialized, use, gimple_assign_rhs1 (stmt),
1717 base,
1718 "%qE is used uninitialized in this function",
1719 stmt);
1720 else if (warn_possibly_uninitialized)
1721 warn_uninit (OPT_Wuninitialized, use, gimple_assign_rhs1 (stmt),
1722 base,
1723 "%qE may be used uninitialized in this function",
1724 stmt);
1725 }
75a70cf9 1726 }
25084579 1727 }
e19b015b 1728
2a1990e9 1729 return 0;
4ee9c684 1730}
1731
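/* Source-level sketch of the two flavors above (illustrative only):

     int
     f (int c)
     {
       int x;
       if (c)
         return x;
       return 0;
     }

   The use of X sits on a conditional path, so it gets the "may be used
   uninitialized" form, and only when WARN_POSSIBLY_UNINITIALIZED is set.
   If the function body were just "return x;", the use would be in a
   block that post-dominates the entry and the unconditional "is used
   uninitialized" form would be emitted instead.  */
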
6955de20 1732static unsigned int
1733execute_early_warn_uninitialized (void)
1734{
 1735 /* Currently, this pass always runs, but
 1736 execute_late_warn_uninitialized only runs with optimization. With
 1737 optimization we want to warn about possibly uninitialized uses as
 1738 late as possible, thus we don't do it here. However, without
1739 optimization we need to warn here about "may be uninitialized".
1740 */
a7d4604b 1741 calculate_dominance_info (CDI_POST_DOMINATORS);
4ee9c684 1742
a7d4604b 1743 warn_uninitialized_vars (/*warn_possibly_uninitialized=*/!optimize);
4ee9c684 1744
a7d4604b 1745	  /* Post-dominator information cannot be reliably updated.  Free it
1746 after the use. */
75a70cf9 1747
a7d4604b 1748 free_dominance_info (CDI_POST_DOMINATORS);
2a1990e9 1749 return 0;
4ee9c684 1750}
1751
1752static bool
1753gate_warn_uninitialized (void)
1754{
1755 return warn_uninitialized != 0;
1756}
1757
20099e35 1758struct gimple_opt_pass pass_early_warn_uninitialized =
4ee9c684 1759{
20099e35 1760 {
1761 GIMPLE_PASS,
0c297edc 1762 "*early_warn_uninitialized", /* name */
4ee9c684 1763 gate_warn_uninitialized, /* gate */
1764 execute_early_warn_uninitialized, /* execute */
1765 NULL, /* sub */
1766 NULL, /* next */
1767 0, /* static_pass_number */
4b366dd3 1768 TV_TREE_UNINIT, /* tv_id */
4ee9c684 1769 PROP_ssa, /* properties_required */
1770 0, /* properties_provided */
1771 0, /* properties_destroyed */
1772 0, /* todo_flags_start */
20099e35 1773 0 /* todo_flags_finish */
1774 }
4ee9c684 1775};
1776
182cf5a9 1777
1778/* If necessary, rewrite the base of the reference tree *TP from
1779 a MEM_REF to a plain or converted symbol. */
1780
1781static void
1782maybe_rewrite_mem_ref_base (tree *tp)
1783{
1784 tree sym;
1785
1786 while (handled_component_p (*tp))
1787 tp = &TREE_OPERAND (*tp, 0);
1788 if (TREE_CODE (*tp) == MEM_REF
1789 && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
182cf5a9 1790 && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1791 && DECL_P (sym)
1792 && !TREE_ADDRESSABLE (sym)
1793 && symbol_marked_for_renaming (sym))
1794 {
8a62fbb0 1795 if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1796 && useless_type_conversion_p (TREE_TYPE (*tp),
1797 TREE_TYPE (TREE_TYPE (sym)))
1798 && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1799 TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1800 {
1801 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1802 TYPE_SIZE (TREE_TYPE (*tp)),
1803 int_const_binop (MULT_EXPR,
1804 bitsize_int (BITS_PER_UNIT),
317e2a67 1805 TREE_OPERAND (*tp, 1)));
8a62fbb0 1806 }
8ba0d70d 1807 else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1808 && useless_type_conversion_p (TREE_TYPE (*tp),
1809 TREE_TYPE (TREE_TYPE (sym))))
1810 {
1811 *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1812 ? REALPART_EXPR : IMAGPART_EXPR,
1813 TREE_TYPE (*tp), sym);
1814 }
8a62fbb0 1815 else if (integer_zerop (TREE_OPERAND (*tp, 1)))
1816 {
1817 if (!useless_type_conversion_p (TREE_TYPE (*tp),
1818 TREE_TYPE (sym)))
1819 *tp = build1 (VIEW_CONVERT_EXPR,
1820 TREE_TYPE (*tp), sym);
1821 else
1822 *tp = sym;
1823 }
182cf5a9 1824 }
1825}
1826
cfe3cae3 1827/* For a tree REF return its base if it is the base of a MEM_REF
1828 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1829
1830static tree
1831non_rewritable_mem_ref_base (tree ref)
1832{
1833 tree base = ref;
1834
 1835	  /* A plain decl never needs to be recorded as non-rewritable.  */
1836 if (DECL_P (ref))
1837 return NULL_TREE;
1838
1839 while (handled_component_p (base))
1840 base = TREE_OPERAND (base, 0);
1841
1842 /* But watch out for MEM_REFs we cannot lower to a
8a62fbb0 1843 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
cfe3cae3 1844 if (TREE_CODE (base) == MEM_REF
1845 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1846 {
1847 tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
8ba0d70d 1848 if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1849 || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
8a62fbb0 1850 && useless_type_conversion_p (TREE_TYPE (base),
1851 TREE_TYPE (TREE_TYPE (decl)))
1852 && double_int_fits_in_uhwi_p (mem_ref_offset (base))
8ba0d70d 1853 && double_int_ucmp
1854 (tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1855 mem_ref_offset (base)) == 1
8a62fbb0 1856 && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1857 TYPE_SIZE_UNIT (TREE_TYPE (base))))
1858 return NULL_TREE;
cfe3cae3 1859 if (DECL_P (decl)
1860 && (!integer_zerop (TREE_OPERAND (base, 1))
1861 || (DECL_SIZE (decl)
f698147c 1862 != TYPE_SIZE (TREE_TYPE (base)))
1863 || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
cfe3cae3 1864 return decl;
1865 }
1866
1867 return NULL_TREE;
1868}
1869
b88d109e 1870/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
 1871	   Otherwise return false.  */
1872
1873static bool
1874non_rewritable_lvalue_p (tree lhs)
1875{
1876 /* A plain decl is always rewritable. */
1877 if (DECL_P (lhs))
1878 return false;
1879
 1880	  /* A decl that is wrapped inside a MEM_REF that covers
 1881	     it in full is also rewritable.
 1882	     ??? The following could be relaxed, allowing component
ef618bb8 1883	     references that do not change the access size.  */
b88d109e 1884 if (TREE_CODE (lhs) == MEM_REF
1885 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1886 && integer_zerop (TREE_OPERAND (lhs, 1)))
1887 {
1888 tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1889 if (DECL_P (decl)
1890 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1891 && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1892 return false;
1893 }
1894
1895 return true;
1896}
1897
5ea2e42f 1898/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
1899 mark the variable VAR for conversion into SSA. Return true when updating
1900 stmts is required. */
cfe3cae3 1901
1902static bool
1903maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs)
1904{
1905 bool update_vops = false;
1906
 1907	  /* Global variables and result decls cannot be changed.  */
1908 if (is_global_var (var)
1909 || TREE_CODE (var) == RESULT_DECL
1910 || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1911 return false;
1912
11bbec13 1913 /* If the variable is not in the list of referenced vars then we
1914 do not need to touch it nor can we rename it. */
29e4566c 1915 if (!referenced_var_lookup (cfun, DECL_UID (var)))
11bbec13 1916 return false;
1917
cfe3cae3 1918 if (TREE_ADDRESSABLE (var)
1919 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1920 a non-register. Otherwise we are confused and forget to
1921 add virtual operands for it. */
1922 && (!is_gimple_reg_type (TREE_TYPE (var))
7e21e36b 1923 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1924 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
cfe3cae3 1925 || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1926 {
1927 TREE_ADDRESSABLE (var) = 0;
1928 if (is_gimple_reg (var))
1929 mark_sym_for_renaming (var);
1930 update_vops = true;
1931 if (dump_file)
1932 {
5ea2e42f 1933	  fprintf (dump_file, "No longer has its address taken: ");
cfe3cae3 1934 print_generic_expr (dump_file, var, 0);
1935 fprintf (dump_file, "\n");
1936 }
1937 }
5ea2e42f 1938
cfe3cae3 1939 if (!DECL_GIMPLE_REG_P (var)
1940 && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1941 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1942 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1943 && !TREE_THIS_VOLATILE (var)
1944 && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
1945 {
1946 DECL_GIMPLE_REG_P (var) = 1;
1947 mark_sym_for_renaming (var);
1948 update_vops = true;
1949 if (dump_file)
1950 {
5ea2e42f 1951 fprintf (dump_file, "Now a gimple register: ");
cfe3cae3 1952 print_generic_expr (dump_file, var, 0);
1953 fprintf (dump_file, "\n");
1954 }
1955 }
1956
1957 return update_vops;
1958}
1959
3f99bd94 1960/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
83cfcdaf 1961
dd277d48 1962void
5ea2e42f 1963execute_update_addresses_taken (void)
83cfcdaf 1964{
75a70cf9 1965 gimple_stmt_iterator gsi;
83cfcdaf 1966 basic_block bb;
1967 bitmap addresses_taken = BITMAP_ALLOC (NULL);
3f99bd94 1968 bitmap not_reg_needs = BITMAP_ALLOC (NULL);
7c550341 1969 bool update_vops = false;
5ea2e42f 1970 tree var;
cfe3cae3 1971 unsigned i;
83cfcdaf 1972
4b366dd3 1973 timevar_push (TV_ADDRESS_TAKEN);
1974
83cfcdaf 1975 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1976 the function body. */
1977 FOR_EACH_BB (bb)
1978 {
75a70cf9 1979 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
83cfcdaf 1980 {
6d5ec6f8 1981 gimple stmt = gsi_stmt (gsi);
3f99bd94 1982 enum gimple_code code = gimple_code (stmt);
cfe3cae3 1983 tree decl;
6d5ec6f8 1984
1985 /* Note all addresses taken by the stmt. */
1986 gimple_ior_addresses_taken (addresses_taken, stmt);
1987
3f99bd94 1988 /* If we have a call or an assignment, see if the lhs contains
 1989	     a local decl that is required not to be a gimple register.  */
1990 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1991 {
d29f7fa8 1992 tree lhs = gimple_get_lhs (stmt);
b88d109e 1993 if (lhs
1994 && TREE_CODE (lhs) != SSA_NAME
1995 && non_rewritable_lvalue_p (lhs))
182cf5a9 1996 {
b88d109e 1997 decl = get_base_address (lhs);
1998 if (DECL_P (decl))
1999 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
182cf5a9 2000 }
2001 }
2002
2003 if (gimple_assign_single_p (stmt))
2004 {
2005 tree rhs = gimple_assign_rhs1 (stmt);
cfe3cae3 2006 if ((decl = non_rewritable_mem_ref_base (rhs)))
2007 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
2008 }
d29f7fa8 2009
cfe3cae3 2010 else if (code == GIMPLE_CALL)
2011 {
2012 for (i = 0; i < gimple_call_num_args (stmt); ++i)
182cf5a9 2013 {
cfe3cae3 2014 tree arg = gimple_call_arg (stmt, i);
2015 if ((decl = non_rewritable_mem_ref_base (arg)))
2016 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
2017 }
2018 }
2019
2020 else if (code == GIMPLE_ASM)
2021 {
2022 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
2023 {
2024 tree link = gimple_asm_output_op (stmt, i);
737f8866 2025 tree lhs = TREE_VALUE (link);
ef618bb8 2026 if (TREE_CODE (lhs) != SSA_NAME)
737f8866 2027 {
b88d109e 2028 decl = get_base_address (lhs);
ef618bb8 2029 if (DECL_P (decl)
2030 && (non_rewritable_lvalue_p (lhs)
2031 /* We cannot move required conversions from
2032 the lhs to the rhs in asm statements, so
 2033			    require that we do not need any.  */
2034 || !useless_type_conversion_p
2035 (TREE_TYPE (lhs), TREE_TYPE (decl))))
b88d109e 2036 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
737f8866 2037 }
cfe3cae3 2038 }
2039 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2040 {
2041 tree link = gimple_asm_input_op (stmt, i);
2042 if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
2043 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
2044 }
3f99bd94 2045 }
83cfcdaf 2046 }
75a70cf9 2047
2048 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
83cfcdaf 2049 {
75a70cf9 2050 size_t i;
2051 gimple phi = gsi_stmt (gsi);
2052
2053 for (i = 0; i < gimple_phi_num_args (phi); i++)
83cfcdaf 2054 {
2055 tree op = PHI_ARG_DEF (phi, i), var;
2056 if (TREE_CODE (op) == ADDR_EXPR
75a70cf9 2057 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
83cfcdaf 2058 && DECL_P (var))
2059 bitmap_set_bit (addresses_taken, DECL_UID (var));
2060 }
2061 }
2062 }
2063
5ea2e42f 2064	  /* We cannot iterate over all referenced vars because that set can
 2065	     contain unused vars from BLOCK trees, which would cause code
 2066	     generation differences for -g vs. -g0.  */
2067 for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
2068 update_vops |= maybe_optimize_var (var, addresses_taken, not_reg_needs);
2069
2070 FOR_EACH_VEC_ELT (tree, cfun->local_decls, i, var)
2071 update_vops |= maybe_optimize_var (var, addresses_taken, not_reg_needs);
83cfcdaf 2072
5ea2e42f 2073 /* Operand caches need to be recomputed for operands referencing the updated
83cfcdaf 2074 variables. */
2075 if (update_vops)
dd277d48 2076 {
2077 FOR_EACH_BB (bb)
9b57c1ed 2078 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
182cf5a9 2079 {
2080 gimple stmt = gsi_stmt (gsi);
dd277d48 2081
182cf5a9 2082	    /* Rewrite MEM_REFs of symbols we want to
 2083	       rewrite into SSA form.  */
2084 if (gimple_assign_single_p (stmt))
2085 {
2086 tree lhs = gimple_assign_lhs (stmt);
2087 tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
2088 tree sym;
2089
2090 /* We shouldn't have any fancy wrapping of
2091 component-refs on the LHS, but look through
2092 VIEW_CONVERT_EXPRs as that is easy. */
2093 while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
2094 lhs = TREE_OPERAND (lhs, 0);
2095 if (TREE_CODE (lhs) == MEM_REF
2096 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
2097 && integer_zerop (TREE_OPERAND (lhs, 1))
2098 && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
2099 && DECL_P (sym)
2100 && !TREE_ADDRESSABLE (sym)
2101 && symbol_marked_for_renaming (sym))
2102 lhs = sym;
2103 else
2104 lhs = gimple_assign_lhs (stmt);
2105
2106 /* Rewrite the RHS and make sure the resulting assignment
2107 is validly typed. */
2108 maybe_rewrite_mem_ref_base (rhsp);
2109 rhs = gimple_assign_rhs1 (stmt);
2110 if (gimple_assign_lhs (stmt) != lhs
2111 && !useless_type_conversion_p (TREE_TYPE (lhs),
2112 TREE_TYPE (rhs)))
2113 rhs = fold_build1 (VIEW_CONVERT_EXPR,
2114 TREE_TYPE (lhs), rhs);
2115
2116 if (gimple_assign_lhs (stmt) != lhs)
2117 gimple_assign_set_lhs (stmt, lhs);
2118
9b57c1ed 2119 /* For var ={v} {CLOBBER}; where var lost
 2120	       TREE_ADDRESSABLE, just remove the stmt.  */
2121 if (DECL_P (lhs)
2122 && TREE_CLOBBER_P (rhs)
2123 && symbol_marked_for_renaming (lhs))
2124 {
2125 unlink_stmt_vdef (stmt);
2126 gsi_remove (&gsi, true);
2127 release_defs (stmt);
2128 continue;
2129 }
2130
182cf5a9 2131 if (gimple_assign_rhs1 (stmt) != rhs)
2132 {
2133 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2134 gimple_assign_set_rhs_from_tree (&gsi, rhs);
2135 }
2136 }
2137
cfe3cae3 2138 else if (gimple_code (stmt) == GIMPLE_CALL)
2139 {
2140 unsigned i;
2141 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2142 {
2143 tree *argp = gimple_call_arg_ptr (stmt, i);
2144 maybe_rewrite_mem_ref_base (argp);
2145 }
2146 }
2147
2148 else if (gimple_code (stmt) == GIMPLE_ASM)
182cf5a9 2149 {
2150 unsigned i;
2151 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
2152 {
2153 tree link = gimple_asm_output_op (stmt, i);
2154 maybe_rewrite_mem_ref_base (&TREE_VALUE (link));
2155 }
2156 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2157 {
2158 tree link = gimple_asm_input_op (stmt, i);
2159 maybe_rewrite_mem_ref_base (&TREE_VALUE (link));
2160 }
2161 }
2162
fd2b4bf0 2163 else if (gimple_debug_bind_p (stmt)
2164 && gimple_debug_bind_has_value_p (stmt))
2165 {
2166 tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
2167 tree decl;
2168 maybe_rewrite_mem_ref_base (valuep);
2169 decl = non_rewritable_mem_ref_base (*valuep);
2170 if (decl && symbol_marked_for_renaming (decl))
2171 gimple_debug_bind_reset_value (stmt);
2172 }
2173
182cf5a9 2174 if (gimple_references_memory_p (stmt)
2175 || is_gimple_debug (stmt))
2176 update_stmt (stmt);
9b57c1ed 2177
2178 gsi_next (&gsi);
182cf5a9 2179 }
dd277d48 2180
2181 /* Update SSA form here, we are called as non-pass as well. */
0ab72b53 2182 if (number_of_loops () > 1 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
2183 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
2184 else
2185 update_ssa (TODO_update_ssa);
dd277d48 2186 }
83cfcdaf 2187
3f99bd94 2188 BITMAP_FREE (not_reg_needs);
83cfcdaf 2189 BITMAP_FREE (addresses_taken);
4b366dd3 2190 timevar_pop (TV_ADDRESS_TAKEN);
83cfcdaf 2191}
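
/* Illustrative example (editorial sketch) of the overall effect, with
   BITS standing for some int-valued operand.  For

     float f;
     *(int *) &f = bits;

   the scan above finds no real address-take of F and nothing that forces
   it to stay in memory, maybe_optimize_var clears TREE_ADDRESSABLE, and
   the rewrite loop replaces the store by (roughly)

     f = VIEW_CONVERT_EXPR <float> (bits);

   after which update_ssa turns F into an SSA name.  */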
2192
20099e35 2193struct gimple_opt_pass pass_update_address_taken =
83cfcdaf 2194{
20099e35 2195 {
2196 GIMPLE_PASS,
83cfcdaf 2197 "addressables", /* name */
2198 NULL, /* gate */
dd277d48 2199 NULL, /* execute */
83cfcdaf 2200 NULL, /* sub */
2201 NULL, /* next */
2202 0, /* static_pass_number */
4b366dd3 2203 TV_ADDRESS_TAKEN, /* tv_id */
83cfcdaf 2204 PROP_ssa, /* properties_required */
2205 0, /* properties_provided */
2206 0, /* properties_destroyed */
2207 0, /* todo_flags_start */
771e2890 2208 TODO_update_address_taken /* todo_flags_finish */
20099e35 2209 }
83cfcdaf 2210};