/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
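
/* For example, given a statement

       a = b + c;

   the parser records one real DEF (a pointer to 'a' within the stmt)
   and two real USEs ('b' and 'c').  A statement that touches memory,
   such as

       *p = x;

   additionally receives virtual operands: a VDEF (and implied VUSE)
   of the single artificial '.MEM' variable created by create_vop_var
   below.  */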

/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;


/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit (1 << 2)

/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (const_tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  /* This function may be invoked from contexts where CFUN is NULL
     (IPA passes), return false for now.  FIXME: operands may be
     active in each individual function, maybe this function should
     take CFUN as a parameter.  */
  if (cfun == NULL)
    return false;

  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}


/* Create the VOP variable, an artificial global variable to act as a
   representative of the FUD chain of all the virtual operands.  */

static void
create_vop_var (void)
{
  tree global_var;

  gcc_assert (cfun->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
                           get_identifier (".MEM"),
                           void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;

  create_var_ann (global_var);
  add_referenced_var (global_var);
  cfun->gimple_df->vop = global_var;
}

/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operand space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT 0
#define OP_SIZE_1    (1024 - sizeof (void *))
#define OP_SIZE_2    (1024 * 4 - sizeof (void *))
#define OP_SIZE_3    (1024 * 16 - sizeof (void *))
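
/* A quick sanity check of the numbers above, assuming 8-byte pointers:
   a struct use_optype_d is a next pointer plus a four-pointer
   ssa_use_operand_t, about 40 bytes, and 1024 / 40 gives the 25 use
   operands; a struct def_optype_d is just a next pointer plus a
   tree *, 16 bytes, and roughly (1024 - 8) / 16 gives the 63 def
   operands.  */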

/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index
    = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var ();
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
        = gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars: %d\n",
               clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided: %d\n",
               clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided: %d\n",
               clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
               clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers: %d\n",
               clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
               clobber_stats.static_readonly_clobbers_avoided);
    }
}


/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d)
              || size == sizeof (struct def_optype_d));

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
        {
        case OP_SIZE_INIT:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
          break;
        case OP_SIZE_1:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
          break;
        case OP_SIZE_2:
        case OP_SIZE_3:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
          break;
        default:
          gcc_unreachable ();
        }

      ptr = (struct ssa_operand_memory_d *)
        ggc_alloc (sizeof (void *)
                   + gimple_ssa_operands (cfun)->ssa_operand_mem_size);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
          ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}


/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
        = gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
      ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
        = gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
      ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}


/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new_def;

  new_def = alloc_def ();
  DEF_OP_PTR (new_def) = op;
  last->next = new_def;
  new_def->next = NULL;
  return new_def;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}
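
/* Note that add_use_op, via link_imm_use_stmt, also links the new use
   into the immediate-use list of the SSA name being referenced; that
   list is what the FOR_EACH_IMM_USE_FAST iterators and verify_imm_links
   below walk.  */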


/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);

  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
          && TREE_CODE (oldvdef) == SSA_NAME)
        oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
        gimple_set_vdef (stmt, build_vdef);
      VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt));
      ++num;
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_def_ops (stmt);

  new_i = 0;

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (gimple_vdef (stmt));
        }
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    mark_sym_for_renaming (gimple_vdef (stmt));

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  gimple_set_def_ops (stmt, new_list.next);
}


/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
          && TREE_CODE (oldvuse) == SSA_NAME)
        oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
                      ? build_vuse : build_vdef))
        gimple_set_vuse (stmt, NULL_TREE);
      VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      mark_sym_for_renaming (gimple_vop (cfun));
    }

  /* Now create nodes for all the new uses.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
                       (tree *) VEC_index (tree, build_uses, new_i),
                       last);

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}


/* Clear and empty the build arrays for VDEFs, VUSEs, defs and uses.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
}


/* Finalize all the build vectors, fill the new ones into STMT.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}


/* Start the process of building up operands vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}


/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  if (!optimize)
    return;

  gcc_assert ((build_vdef == NULL_TREE
               || build_vdef == var)
              && (build_vuse == NULL_TREE
                  || build_vuse == var));

  /* A VDEF implies a VUSE of the same variable.  */
  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  if (!optimize)
    return;

  gcc_assert (build_vuse == NULL_TREE
              || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}
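
/* For instance, a statement like '*p = x' is parsed with opf_def on
   its LHS, producing a VDEF of the .MEM variable, while 'x = *p'
   reaches here with opf_use and produces only a VUSE.  */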


/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var, sym;
  var_ann_t v_ann;

  gcc_assert (SSA_VAR_P (*var_p));

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  */
  if (TREE_THIS_VOLATILE (sym))
    gimple_set_has_volatile_ops (stmt, true);

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
        append_def (var_p);
      else
        append_use (var_p);
    }
  else
    add_virtual_operand (stmt, flags);
}

/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && DECL_P (var))
    TREE_ADDRESSABLE (var) = 1;
}
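
/* E.g., for 'p = &s.f' the base address returned for 's.f' is 's'
   itself, so the whole structure 's', not just the field 'f', becomes
   TREE_ADDRESSABLE.  */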


/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
   that got us here.

   FLAGS is as in get_expr_operands.

   RECURSE_ON_BASE should be set to true if we want to continue
   calling get_expr_operands on the base pointer, and false if
   something else will do it for us.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags,
                           bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr,
                       opf_use | (flags & opf_no_vops));
}


/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));

  if (TMR_SYMBOL (expr))
    mark_address_taken (TMR_SYMBOL (expr));

  add_virtual_operand (stmt, flags);
}


/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
         A 'noreturn' function might, but since we don't return anyway
         there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_virtual_operand (stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
        add_virtual_operand (stmt, opf_use);
    }
}
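
/* So a plain call such as 'foo ()' gets a VDEF (it may write memory),
   a pure call such as 'n = strlen (s)' gets only a VUSE (it may read
   but not write memory), and a const call gets no virtual operands at
   all.  */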


/* Scan operands in the ASM_EXPR stmt STMT.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                               &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t))
            mark_address_taken (t);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
                              &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t))
            mark_address_taken (t);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree link = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
        {
          add_virtual_operand (stmt, opf_def);
          break;
        }
    }
}
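
/* As an example, an asm such as

       asm volatile ("" : "=m" (x) : "r" (y) : "memory");

   marks 'x' addressable (it is a memory output) and parses it with
   opf_def, records a USE of 'y', and, because of the "memory"
   clobber, adds a VDEF of the .MEM variable.  */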


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
         reference to it, but the fact that the statement takes its
         address will be of interest to some passes (e.g. alias
         resolution).  */
      if (!is_gimple_debug (stmt))
        mark_address_taken (TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
         variable references inside.  */
      if (is_gimple_min_invariant (expr))
        return;

      /* Otherwise, there may be variables referenced inside but there
         should be no VUSEs created, since the referenced objects are
         not really accessed.  The only operands that we should find
         here are ARRAY_REF indices which will always be real operands
         (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        if (TREE_THIS_VOLATILE (expr))
          gimple_set_has_volatile_ops (stmt, true);

        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

        if (code == COMPONENT_REF)
          {
            if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
              gimple_set_has_volatile_ops (stmt, true);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
          }
        else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
          {
            get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
          }

        return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */
        constructor_elt *ce;
        unsigned HOST_WIDE_INT idx;

        for (idx = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
             idx++)
          get_expr_operands (stmt, &ce->value, uflags);

        return;
      }

    case BIT_FIELD_REF:
      if (TREE_THIS_VOLATILE (expr))
        gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
        return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
        goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
        goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
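
/* To trace the recursion on a concrete case: for 'x = a.b[i]' the
   COMPONENT_REF/ARRAY_REF cases above walk down to the base 'a'
   (yielding a VUSE via add_stmt_operand when 'a' is not a GIMPLE
   register) and add a real USE for the index 'i', while the LHS 'x'
   is parsed separately with opf_def.  */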


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASM)
    get_asm_expr_operands (stmt);
  else if (is_gimple_debug (stmt))
    {
      if (gimple_debug_bind_p (stmt)
          && gimple_debug_bind_has_value_p (stmt))
        get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
                           opf_use | opf_no_vops);
    }
  else
    {
      size_t i, start = 0;

      if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
        {
          get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
          start = 1;
        }

      for (i = start; i < gimple_num_ops (stmt); i++)
        get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);

      /* Add call-clobbered operands, if needed.  */
      if (code == GIMPLE_CALL)
        maybe_add_call_vops (stmt);
    }
}


/* Create an operands cache for STMT.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}


/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  def_optype_p defs = gimple_def_ops (stmt), last_def;
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (defs)
    {
      for (last_def = defs; last_def->next; last_def = last_def->next)
        continue;
      last_def->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = defs;
      gimple_set_def_ops (stmt, NULL);
    }

  if (uses)
    {
      /* The loop delinks all uses but the last one; delink that one
         too before threading the list onto the freelist.  */
      for (last_use = uses; last_use->next; last_use = last_use->next)
        delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}


/* Get the operands of statement STMT.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  timevar_push (TV_TREE_OPS);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
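
/* Callers are expected to have flagged the statement as modified
   first (note the gimple_modified_p assert above); the usual entry
   point is the update_stmt wrapper, so passes rarely need to call
   this function directly.  */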


/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is done
   to test the validity of the swap operation.  */

void
swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
            break;
          }

      /* If both uses don't have operand entries, there isn't much we can do
         at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
        {
          tree *tmp = USE_OP_PTR (use1)->use;
          USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
          USE_OP_PTR (use0)->use = tmp;
        }
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
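
/* A typical caller is canonicalization code rewriting, e.g.,
   'a_1 < b_2' as 'b_2 > a_1': the trees are swapped in place, and the
   pointer exchange above keeps each cached use entry referring to the
   same SSA name as before, so the immediate-use lists need not be
   relinked.  */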


/* Scan the immediate_use list for VAR making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
         problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
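
/* The list being checked is circular and doubly linked, rooted at the
   SSA name's own IMM_USE_NODE (the only node whose 'use' field is
   NULL):

       root <-> use1 <-> use2 <-> ... <-> root

   so walking 'next' until the root is reached again must visit exactly
   the nodes that walking 'prev' visits in reverse.  */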


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else
    if (has_single_use (var))
      fprintf (file, " single use.\n");
    else
      fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else
        if (!is_gimple_reg (USE_FROM_PTR (use_p)))
          print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
        else
          print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}


/* Unlink STMT's virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (stmt))
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
        SET_USE (use_p, gimple_vuse (stmt));
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vuse (stmt)) = 1;
}
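/* For instance, when dead code elimination deletes a store

       # .MEM_3 = VDEF <.MEM_2>
       *p_1 = x_4;

   every later statement using .MEM_3 is rewritten to use .MEM_2
   instead, splicing the deleted definition out of the virtual
   use-def chain.  */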