]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-ssa-operands.c
[Ada] Replace low-level membership tests with high-level routines
[thirdparty/gcc.git] / gcc / tree-ssa-operands.c
CommitLineData
4ee9c684 1/* SSA operands management for trees.
fbd26352 2 Copyright (C) 2003-2019 Free Software Foundation, Inc.
4ee9c684 3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8c4c00c1 8the Free Software Foundation; either version 3, or (at your option)
4ee9c684 9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
4ee9c684 19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
9ef16211 23#include "backend.h"
4ee9c684 24#include "tree.h"
9ef16211 25#include "gimple.h"
7c29e30e 26#include "timevar.h"
9ef16211 27#include "ssa.h"
7c29e30e 28#include "gimple-pretty-print.h"
29#include "diagnostic-core.h"
9ed99284 30#include "stmt.h"
31#include "print-tree.h"
b9ed1410 32#include "dumpfile.h"
85f3d834 33
5b110d39 34
48e1416a 35/* This file contains the code required to manage the operands cache of the
36 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
37 annotation. This cache contains operands that will be of interest to
38 optimizers and other passes wishing to manipulate the IL.
5b110d39 39
   The operand types are broken up into REAL and VIRTUAL operands.  The real
41 operands are represented as pointers into the stmt's operand tree. Thus
5b110d39 42 any manipulation of the real operands will be reflected in the actual tree.
48e1416a 43 Virtual operands are represented solely in the cache, although the base
44 variable for the SSA_NAME may, or may not occur in the stmt's tree.
5b110d39 45 Manipulation of the virtual operands will not be reflected in the stmt tree.
46
48e1416a 47 The routines in this file are concerned with creating this operand cache
5b110d39 48 from a stmt tree.
49
   The operand tree is then parsed by the various get_* routines which look
51 through the stmt tree for the occurrence of operands which may be of
52 interest, and calls are made to the append_* routines whenever one is
53 found. There are 4 of these routines, each representing one of the
4fb5e5ca 54 4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs.
5b110d39 55
48e1416a 56 The append_* routines check for duplication, and simply keep a list of
5b110d39 57 unique objects for each operand type in the build_* extendable vectors.
58
48e1416a 59 Once the stmt tree is completely parsed, the finalize_ssa_operands()
60 routine is called, which proceeds to perform the finalization routine
4fb5e5ca 61 on each of the 4 operand vectors which have been built up.
5b110d39 62
48e1416a 63 If the stmt had a previous operand cache, the finalization routines
64 attempt to match up the new operands with the old ones. If it's a perfect
65 match, the old vector is simply reused. If it isn't a perfect match, then
66 a new vector is created and the new operands are placed there. For
67 virtual operands, if the previous cache had SSA_NAME version of a
68 variable, and that same variable occurs in the same operands cache, then
5b110d39 69 the new cache vector will also get the same SSA_NAME.
70
4ec25329 71 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
72 operand vector for VUSE, then the new vector will also be modified
73 such that it contains 'a_5' rather than 'a'. */
5b110d39 74
4fb5e5ca 75
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def 	(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 1)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Operand is having its address taken.  */
#define opf_address_taken (1 << 5)

/* NOTE(review): bit 2 is not used by any flag above — presumably a
   historical flag was removed; confirm against version history before
   reusing it.  */

/* Array for building all the use operands.  Holds pointers into the
   statements so the finalized uses can be linked in place.  */
static vec<tree *> build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our datastructures that needs to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

/* Forward declaration of the central recursive operand scanner.  */
static void get_expr_operands (struct function *, gimple *, tree *, int);

/* Number of functions with initialized ssa_operands; the build_*
   state above is shared and set up only for the first one.  */
static int n_initialized = 0;
5b110d39 119
/* Accessor to tree-ssa-operands.c caches.  Returns the per-function
   SSA operands cache embedded in FUN's gimple_df.  FUN must have a
   non-NULL gimple_df.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}
126
fa999566 127
f6255040 128/* Return true if the SSA operands cache is active. */
5b110d39 129
b66731e8 130bool
8d672d12 131ssa_operands_active (struct function *fun)
4ee9c684 132{
8d672d12 133 if (fun == NULL)
75a70cf9 134 return false;
135
8d672d12 136 return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
b66731e8 137}
4ee9c684 138
48e1416a 139
/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands FUD chain.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  /* Only ever create the variable once per function.  */
  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  /* ".MEM" is purely an internal name; the decl is artificial and
     ignored for debug output.  */
  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  DECL_IGNORED_P (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  fn->gimple_df->vop = global_var;
}
dadb7503 166
/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operands space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT	0
/* Each size leaves room for the chunk's leading 'next' pointer.  */
#define OP_SIZE_1	(1024 - sizeof (void *))
#define OP_SIZE_2	(1024 * 4 - sizeof (void *))
#define OP_SIZE_3	(1024 * 16 - sizeof (void *))
dadb7503 178
/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  /* The build_* scratch state is shared across functions; set it up
     only when the first function is initialized.  */
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  /* Setting the index equal to the chunk size makes the very first
     ssa_operand_alloc call allocate a fresh chunk.  */
  gimple_ssa_operands (fn)->operand_memory_index
     = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}
4ee9c684 199
5b110d39 200
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (struct function *fn)
{
  struct ssa_operand_memory_d *ptr;

  /* Release the shared scratch state when tearing down the last
     initialized function.  */
  if (!--n_initialized)
    {
      build_uses.release ();
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (fn)->free_uses = NULL;

  /* Free every chunk on FN's operand memory list.  */
  while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
    {
      gimple_ssa_operands (fn)->operand_memory
	= gimple_ssa_operands (fn)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (fn)->ops_active = false;

  /* The obstack is shared; only release it with the last function.  */
  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  fn->gimple_df->vop = NULL_TREE;
}
5b110d39 231
4ee9c684 232
/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (struct function *fn, unsigned size)
{
  char *ptr;

  /* Only use operands are allocated through here.  */
  gcc_assert (size == sizeof (struct use_optype_d));

  if (gimple_ssa_operands (fn)->operand_memory_index + size
      >= gimple_ssa_operands (fn)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      /* Grow the chunk size geometrically up to OP_SIZE_3, then keep
	 allocating OP_SIZE_3 chunks.  */
      switch (gimple_ssa_operands (fn)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Allocate the new chunk (plus room for its 'next' pointer),
	 push it on the function's chunk list and start carving
	 operands from its beginning.  */
      ptr = (ssa_operand_memory_d *) ggc_internal_alloc
	(sizeof (void *) + gimple_ssa_operands (fn)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (fn)->operand_memory;
      gimple_ssa_operands (fn)->operand_memory = ptr;
      gimple_ssa_operands (fn)->operand_memory_index = 0;
    }

  /* Hand out the next SIZE bytes of the current chunk.  */
  ptr = &(gimple_ssa_operands (fn)->operand_memory
	  ->mem[gimple_ssa_operands (fn)->operand_memory_index]);
  gimple_ssa_operands (fn)->operand_memory_index += size;
  return ptr;
}
277
5b110d39 278
dadb7503 279/* Allocate a USE operand. */
280
4fb5e5ca 281static inline struct use_optype_d *
861b4e39 282alloc_use (struct function *fn)
4fb5e5ca 283{
284 struct use_optype_d *ret;
861b4e39 285 if (gimple_ssa_operands (fn)->free_uses)
4fb5e5ca 286 {
861b4e39 287 ret = gimple_ssa_operands (fn)->free_uses;
288 gimple_ssa_operands (fn)->free_uses
289 = gimple_ssa_operands (fn)->free_uses->next;
4fb5e5ca 290 }
291 else
dadb7503 292 ret = (struct use_optype_d *)
861b4e39 293 ssa_operand_alloc (fn, sizeof (struct use_optype_d));
4fb5e5ca 294 return ret;
295}
296
297
dadb7503 298/* Adds OP to the list of uses of statement STMT after LAST. */
b5b59dda 299
4fb5e5ca 300static inline use_optype_p
42acab1c 301add_use_op (struct function *fn, gimple *stmt, tree *op, use_optype_p last)
b5b59dda 302{
f0d6e81c 303 use_optype_p new_use;
304
861b4e39 305 new_use = alloc_use (fn);
f0d6e81c 306 USE_OP_PTR (new_use)->use = op;
307 link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
308 last->next = new_use;
309 new_use->next = NULL;
310 return new_use;
b5b59dda 311}
312
b5b59dda 313
b5b59dda 314
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs vec of tree *.  */

static inline void
finalize_ssa_defs (struct function *fn, gimple *stmt)
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	/* Compare against the underlying variable so that an existing
	   SSA name for the same variable is preserved.  */
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name_fn (fn, gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }
}
b66731e8 352
4ee9c684 353
/* Takes elements from build_uses and turns them into use operands of STMT.  */

static inline void
finalize_ssa_uses (struct function *fn, gimple *stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      /* Reset a stale VUSE; the operand slot itself is pushed onto
	 build_uses below so it gets finalized like any other use.  */
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      build_uses.safe_insert (0, gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      /* Unlink every old use from its immediate-use chain, then hand
	 the whole list back to the free list.  */
      for (ptr = old_ops; ptr->next; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      delink_imm_use (USE_OP_PTR (ptr));
      ptr->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (fn));
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = build_uses[new_i];
      last = add_use_op (fn, stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}
5b110d39 416
4fb5e5ca 417
418/* Clear the in_list bits and empty the build array for VDEFs and
419 VUSEs. */
b5b59dda 420
421static inline void
4fb5e5ca 422cleanup_build_arrays (void)
b5b59dda 423{
dd277d48 424 build_vdef = NULL_TREE;
425 build_vuse = NULL_TREE;
f1f41a6c 426 build_uses.truncate (0);
2cf24776 427}
428
4ee9c684 429
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (struct function *fn, gimple *stmt)
{
  /* Defs first, then uses (the built VUSE is consumed there), then
     clear the shared build state for the next statement.  */
  finalize_ssa_defs (fn, stmt);
  finalize_ssa_uses (fn, stmt);
  cleanup_build_arrays ();
}
439
440
5b110d39 441/* Start the process of building up operands vectors in INFO. */
442
443static inline void
444start_ssa_stmt_operands (void)
4ee9c684 445{
f1f41a6c 446 gcc_assert (build_uses.length () == 0);
dd277d48 447 gcc_assert (build_vuse == NULL_TREE);
448 gcc_assert (build_vdef == NULL_TREE);
4ee9c684 449}
450
451
/* Add USE_P (a pointer into the statement's operand tree) to the
   build array of use operands.  */

static inline void
append_use (tree *use_p)
{
  build_uses.safe_push (use_p);
}
459
460
4fb5e5ca 461/* Add VAR to the set of variables that require a VDEF operator. */
4ee9c684 462
5b110d39 463static inline void
4fb5e5ca 464append_vdef (tree var)
4ee9c684 465{
dd277d48 466 gcc_assert ((build_vdef == NULL_TREE
467 || build_vdef == var)
468 && (build_vuse == NULL_TREE
469 || build_vuse == var));
4fb5e5ca 470
dd277d48 471 build_vdef = var;
472 build_vuse = var;
4ee9c684 473}
474
475
4fb5e5ca 476/* Add VAR to the set of variables that require a VUSE operator. */
4ee9c684 477
5b110d39 478static inline void
479append_vuse (tree var)
4ee9c684 480{
dd277d48 481 gcc_assert (build_vuse == NULL_TREE
482 || build_vuse == var);
4ee9c684 483
dd277d48 484 build_vuse = var;
22aa74c4 485}
486
dd277d48 487/* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */
f0e6e3c1 488
dd277d48 489static void
861b4e39 490add_virtual_operand (struct function *fn,
42acab1c 491 gimple *stmt ATTRIBUTE_UNUSED, int flags)
dd277d48 492{
493 /* Add virtual operands to the stmt, unless the caller has specifically
494 requested not to do that (used when adding operands inside an
495 ADDR_EXPR expression). */
496 if (flags & opf_no_vops)
497 return;
498
9845d120 499 gcc_assert (!is_gimple_debug (stmt));
500
dd277d48 501 if (flags & opf_def)
861b4e39 502 append_vdef (gimple_vop (fn));
dd277d48 503 else
861b4e39 504 append_vuse (gimple_vop (fn));
b66731e8 505}
506
b66731e8 507
/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (struct function *fn, tree *var_p, gimple *stmt, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p)
	      || TREE_CODE (*var_p) == STRING_CST
	      || TREE_CODE (*var_p) == CONST_DECL);

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.
	 Note the deliberately empty def branch: real defs are not
	 tracked through a build array, only uses are collected.  */
      if (flags & opf_def)
	;
      else
	append_use (var_p);
      if (DECL_P (*var_p))
	fn->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (var))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (fn, stmt, flags);
    }
}
b66731e8 543
/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
	TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
	/* For MEM[&decl ...] the underlying decl escapes as well.  */
	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}
570
4ec25329 571
/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
   that got us here.

   FLAGS is as in get_expr_operands.  */

static void
get_mem_ref_operands (struct function *fn,
		      gimple *stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (fn, stmt, flags);

  /* If requested, add a USE operand for the base pointer.  The base
     is always a use (even when the MEM_REF is a store target), and
     appearing here does not make it addressable.  */
  get_expr_operands (fn, stmt, pptr,
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
}
a002e999 597
4ec25329 598
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (struct function *fn, gimple *stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands: base and both indices are plain
     rvalue uses.  */
  get_expr_operands (fn, stmt,
		     &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (fn, stmt,
		     &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (fn, stmt,
		     &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  /* The memory access itself becomes a virtual operand.  */
  add_virtual_operand (fn, stmt, flags);
}
618
619
/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (struct function *fn, gcall *stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST)))
	add_virtual_operand (fn, stmt, opf_def);
      /* A 'pure' (but not 'const') function may still read memory.  */
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (fn, stmt, opf_use);
    }
}
640
641
642/* Scan operands in the ASM_EXPR stmt referred to in INFO. */
643
644static void
1a91d914 645get_asm_stmt_operands (struct function *fn, gasm *stmt)
fa999566 646{
75a70cf9 647 size_t i, noutputs;
4fb5e5ca 648 const char **oconstraints;
fa999566 649 const char *constraint;
650 bool allows_mem, allows_reg, is_inout;
4fb5e5ca 651
75a70cf9 652 noutputs = gimple_asm_noutputs (stmt);
4fb5e5ca 653 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
fa999566 654
4fb5e5ca 655 /* Gather all output operands. */
75a70cf9 656 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
fa999566 657 {
75a70cf9 658 tree link = gimple_asm_output_op (stmt, i);
f6255040 659 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
660 oconstraints[i] = constraint;
661 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
662 &allows_reg, &is_inout);
fa999566 663
664 /* This should have been split in gimplify_asm_expr. */
665 gcc_assert (!allows_reg || !is_inout);
666
667 /* Memory operands are addressable. Note that STMT needs the
668 address of this operand. */
669 if (!allows_reg && allows_mem)
7f2d9047 670 mark_address_taken (TREE_VALUE (link));
fa999566 671
861b4e39 672 get_expr_operands (fn, stmt,
673 &TREE_VALUE (link), opf_def | opf_not_non_addressable);
fa999566 674 }
675
4fb5e5ca 676 /* Gather all input operands. */
75a70cf9 677 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
fa999566 678 {
75a70cf9 679 tree link = gimple_asm_input_op (stmt, i);
fa999566 680 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4fb5e5ca 681 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
682 &allows_mem, &allows_reg);
fa999566 683
684 /* Memory operands are addressable. Note that STMT needs the
685 address of this operand. */
686 if (!allows_reg && allows_mem)
7f2d9047 687 mark_address_taken (TREE_VALUE (link));
fa999566 688
861b4e39 689 get_expr_operands (fn, stmt, &TREE_VALUE (link), opf_not_non_addressable);
fa999566 690 }
691
4fb5e5ca 692 /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
97cf41ec 693 if (gimple_asm_clobbers_memory_p (stmt))
861b4e39 694 add_virtual_operand (fn, stmt, opf_def);
f6255040 695}
696
697
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (struct function *fn, gimple *stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  /* Flags used when recursing into sub-expressions that are plain
     rvalue uses (indices, conditions, ...).  */
  int uflags = opf_use;

  if (expr == NULL)
    return;

  /* In debug statements, keep suppressing virtual operands when the
     caller asked for it.  */
  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable | opf_address_taken);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case STRING_CST:
    case CONST_DECL:
      /* A variable whose address is being taken contributes no
	 operand of its own.  */
      if (!(flags & opf_address_taken))
	add_stmt_operand (fn, expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_mem_ref_operands (fn, stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (fn, stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	/* The base object inherits the caller's def/use flags.  */
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    /* Index plus the optional element-size and offset
	       operands are plain rvalue uses.  */
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P, transfer
	   the volatility to the statement, don't use TREE_CLOBBER_P for
	   mirroring the other uses of THIS_VOLATILE in this file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
	     idx++)
	  get_expr_operands (fn, stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case BIT_INSERT_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      /* Dispatch remaining codes by tree code class.  */
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
  if (flag_checking)
    {
      fprintf (stderr, "unhandled expression in get_expr_operands():\n");
      debug_tree (expr);
      fputs ("\n", stderr);
      gcc_unreachable ();
    }
}
887
a002e999 888
/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (struct function *fn, gimple *stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_stmt_operands (fn, as_a <gasm *> (stmt));
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (fn, stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (fn, stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      /* A return reads all of memory via the VOP.  */
      append_vuse (gimple_vop (fn));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (fn, as_a <gcall *> (stmt));
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* Operand 0 is the LHS: scan it as a definition, the remaining
	 operands (handled by the fallthrough) as uses.  */
      get_expr_operands (fn, stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (fn, stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}
938
a002e999 939
fa999566 940/* Create an operands cache for STMT.  */
c9a1e1e0 941
 942static void
42acab1c 943build_ssa_operands (struct function *fn, gimple *stmt)
c9a1e1e0 944{
6d5ec6f8 945  /* Initially assume that the statement has no volatile operands.
     Scanning sets the flag again if a volatile operand is seen
     (see get_expr_operands).  */
75a70cf9 946  gimple_set_has_volatile_ops (stmt, false);
75a70cf9 947
  /* Collect operands into the build_* vectors, then commit them to
     STMT's operand lists.  */
fa999566 948  start_ssa_stmt_operands ();
861b4e39 949  parse_ssa_operands (fn, stmt);
 950  finalize_ssa_stmt_operands (fn, stmt);
fa999566 951}
39b644e9 952
85f3d834 953/* Verifies SSA statement operands.  Returns true if a problem was
 954   found (the operand cache on STMT is out of date).  */
 955
 956DEBUG_FUNCTION bool
42acab1c 957verify_ssa_operands (struct function *fn, gimple *stmt)
85f3d834 958{
 958  use_operand_p use_p;
 959  def_operand_p def_p;
 960  ssa_op_iter iter;
 961  unsigned i;
1762861d 962  tree def;
85f3d834 963  bool volatile_p = gimple_has_volatile_ops (stmt);
 964
 965  /* build_ssa_operands w/o finalizing them.  */
 966  gimple_set_has_volatile_ops (stmt, false);
 967  start_ssa_stmt_operands ();
861b4e39 968  parse_ssa_operands (fn, stmt);
85f3d834 969
 970  /* Now verify the built operands are the same as present in STMT.  */
 971  def = gimple_vdef (stmt);
 972  if (def
 973      && TREE_CODE (def) == SSA_NAME)
 974    def = SSA_NAME_VAR (def);
 975  if (build_vdef != def)
 976    {
62c34df8 977      error ("virtual definition of statement not up to date");
85f3d834 978      return true;
 979    }
 980  if (gimple_vdef (stmt)
 981      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
 982	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
 983    {
62c34df8 984      error ("virtual def operand missing for statement");
85f3d834 985      return true;
 986    }
 987
1762861d 988  tree use = gimple_vuse (stmt);
85f3d834 989  if (use
 990      && TREE_CODE (use) == SSA_NAME)
 991    use = SSA_NAME_VAR (use);
 992  if (build_vuse != use)
 993    {
62c34df8 994      error ("virtual use of statement not up to date");
85f3d834 995      return true;
 996    }
 997  if (gimple_vuse (stmt)
 998      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
 999	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
 1000    {
62c34df8 1001      error ("virtual use operand missing for statement");
85f3d834 1002      return true;
 1003    }
 1004
  /* Cross off each real use present on STMT against the freshly built
     build_uses vector.  Matched slots are NULLed so that any non-NULL
     leftover below indicates a use the cache is missing.  */
 1005  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
 1006    {
1762861d 1007      tree *op;
 1008      FOR_EACH_VEC_ELT (build_uses, i, op)
85f3d834 1009	{
1762861d 1010	  if (use_p->use == op)
85f3d834 1011	    {
1762861d 1012	      build_uses[i] = NULL;
85f3d834 1013	      break;
 1014	    }
 1015	}
f1f41a6c 1016      if (i == build_uses.length ())
85f3d834 1017	{
62c34df8 1018	  error ("excess use operand for statement");
85f3d834 1019	  debug_generic_expr (USE_FROM_PTR (use_p));
 1020	  return true;
 1021	}
 1022    }
1762861d 1023
 1024  tree *op;
 1025  FOR_EACH_VEC_ELT (build_uses, i, op)
 1026    if (op != NULL)
85f3d834 1027      {
62c34df8 1028	error ("use operand missing for statement");
1762861d 1029	debug_generic_expr (*op);
85f3d834 1030	return true;
 1031      }
 1032
  /* Rescanning must reproduce the volatile flag STMT had on entry.  */
85f3d834 1033  if (gimple_has_volatile_ops (stmt) != volatile_p)
 1034    {
62c34df8 1035      error ("statement volatile flag not up to date");
85f3d834 1036      return true;
 1037    }
 1038
 1039  cleanup_build_arrays ();
 1040  return false;
 1041}
1042
4ec25329 1043
28c92cbb 1044/* Releases the operands of STMT back to their freelists, and clears
 1045   the stmt operand lists.  */
 1046
 1047void
42acab1c 1048free_stmt_operands (struct function *fn, gimple *stmt)
28c92cbb 1049{
75a70cf9 1050  use_optype_p uses = gimple_use_ops (stmt), last_use;
28c92cbb 1051
28c92cbb 1052  if (uses)
 1053    {
      /* Delink every use from its immediate-use chain (the loop handles
	 all but the last node, which is delinked separately), then
	 splice the whole list onto FN's free list for reuse.  */
 1054      for (last_use = uses; last_use->next; last_use = last_use->next)
 1055	delink_imm_use (USE_OP_PTR (last_use));
 1056      delink_imm_use (USE_OP_PTR (last_use));
861b4e39 1057      last_use->next = gimple_ssa_operands (fn)->free_uses;
 1058      gimple_ssa_operands (fn)->free_uses = uses;
75a70cf9 1059      gimple_set_use_ops (stmt, NULL);
28c92cbb 1060    }
 1061
  /* Virtual operands are plain trees on the statement; just clear
     them.  */
75a70cf9 1062  if (gimple_has_mem_ops (stmt))
 1063    {
dd277d48 1064      gimple_set_vuse (stmt, NULL_TREE);
 1065      gimple_set_vdef (stmt, NULL_TREE);
75a70cf9 1066    }
c9a1e1e0 1067}
1068
0b3f639d 1069
7dd75889 1070/* Get the operands of statement STMT.  */
a002e999 1071
fa999566 1072void
42acab1c 1073update_stmt_operands (struct function *fn, gimple *stmt)
fa999566 1074{
f6255040 1075  /* If update_stmt_operands is called before SSA is initialized, do
 1076     nothing.  */
861b4e39 1077  if (!ssa_operands_active (fn))
fa999566 1078    return;
2b99acb8 1079
fa999566 1080  timevar_push (TV_TREE_OPS);
2b99acb8 1081
  /* Only statements flagged as modified may be rescanned; rebuilding
     the cache clears the flag.  */
75a70cf9 1082  gcc_assert (gimple_modified_p (stmt));
861b4e39 1083  build_ssa_operands (fn, stmt);
75a70cf9 1084  gimple_set_modified (stmt, false);
4ee9c684 1085
fa999566 1086  timevar_pop (TV_TREE_OPS);
 1087}
b0b70f22 1088
f6255040 1089
fa999566 1090/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is done
 1091   to test the validity of the swap operation.  */
 1092
fa999566 1093void
42acab1c 1094swap_ssa_operands (gimple *stmt, tree *exp0, tree *exp1)
fa999566 1095{
 1096  tree op0, op1;
 1097  op0 = *exp0;
 1098  op1 = *exp1;
0b3f639d 1099
8f6fa493 1100  if (op0 != op1)
fa999566 1101    {
8f6fa493 1102      /* Attempt to preserve the relative positions of these two operands in
 1103	 their respective immediate use lists by adjusting their use pointer
 1104	 to point to the new operand position.  */
fa999566 1105      use_optype_p use0, use1, ptr;
 1106      use0 = use1 = NULL;
0b3f639d 1107
fa999566 1108      /* Find the 2 operands in the cache, if they are there.  */
75a70cf9 1109      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
fa999566 1110	if (USE_OP_PTR (ptr)->use == exp0)
 1111	  {
 1112	    use0 = ptr;
 1113	    break;
 1114	  }
0b3f639d 1115
75a70cf9 1116      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
fa999566 1117	if (USE_OP_PTR (ptr)->use == exp1)
 1118	  {
 1119	    use1 = ptr;
 1120	    break;
 1121	  }
 1122
f3f02af0 1123      /* And adjust their location to point to the new position of the
 1124	 operand.  */
 1125      if (use0)
 1126	USE_OP_PTR (use0)->use = exp1;
 1127      if (use1)
 1128	USE_OP_PTR (use1)->use = exp0;
fa999566 1129
8f6fa493 1130      /* Now swap the data.  */
 1131      *exp0 = op1;
 1132      *exp1 = op0;
 1133    }
0b3f639d 1134}
1135
75a70cf9 1136
22aa74c4 1137/* Scan the immediate_use list for VAR making sure it is linked properly.
f6255040 1138   Return TRUE if there is a problem and emit an error message to F.  */
22aa74c4 1139
4b987fac 1140DEBUG_FUNCTION bool
22aa74c4 1141verify_imm_links (FILE *f, tree var)
 1142{
b66731e8 1143  use_operand_p ptr, prev, list;
cc8edffe 1144  unsigned int count;
22aa74c4 1145
 1146  gcc_assert (TREE_CODE (var) == SSA_NAME);
 1147
  /* The list is circular and rooted at VAR's immediate-use node; the
     root node is recognizable by its NULL use pointer.  */
 1148  list = &(SSA_NAME_IMM_USE_NODE (var));
 1149  gcc_assert (list->use == NULL);
 1150
 1151  if (list->prev == NULL)
 1152    {
 1153      gcc_assert (list->next == NULL);
 1154      return false;
 1155    }
 1156
  /* Walk forward, checking back-links and counting nodes.  */
 1157  prev = list;
 1158  count = 0;
 1159  for (ptr = list->next; ptr != list; )
 1160    {
 1161      if (prev != ptr->prev)
cc8edffe 1162	{
 1163	  fprintf (f, "prev != ptr->prev\n");
 1164	  goto error;
 1165	}
48e1416a 1166
22aa74c4 1167      if (ptr->use == NULL)
cc8edffe 1168	{
 1169	  fprintf (f, "ptr->use == NULL\n");
 1170	  goto error; /* 2 roots, or SAFE guard node.  */
 1171	}
1fa3a8f6 1172      else if (*(ptr->use) != var)
cc8edffe 1173	{
 1174	  fprintf (f, "*(ptr->use) != var\n");
 1175	  goto error;
 1176	}
22aa74c4 1177
 1178      prev = ptr;
 1179      ptr = ptr->next;
a002e999 1180
      /* Guard against the counter wrapping around on a corrupted
	 list.  */
cc8edffe 1181      count++;
 1182      if (count == 0)
 1183	{
 1184	  fprintf (f, "number of immediate uses doesn't fit unsigned int\n");
 1185	  goto error;
 1186	}
22aa74c4 1187    }
 1188
 1189  /* Verify list in the other direction.  */
 1190  prev = list;
 1191  for (ptr = list->prev; ptr != list; )
 1192    {
 1193      if (prev != ptr->next)
cc8edffe 1194	{
 1195	  fprintf (f, "prev != ptr->next\n");
 1196	  goto error;
 1197	}
22aa74c4 1198      prev = ptr;
 1199      ptr = ptr->prev;
cc8edffe 1200      if (count == 0)
 1201	{
 1202	  fprintf (f, "count-- < 0\n");
 1203	  goto error;
 1204	}
 1205      count--;
22aa74c4 1206    }
 1207
  /* Both traversals must have seen the same number of nodes.  */
 1208  if (count != 0)
cc8edffe 1209    {
 1210      fprintf (f, "count != 0\n");
 1211      goto error;
 1212    }
22aa74c4 1213
 1214  return false;
1fa3a8f6 1215
 1216 error:
  /* Dump the offending node (and its statement, if it is pending an
     operand rescan) before reporting failure.  */
75a70cf9 1217  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
1fa3a8f6 1218    {
75a70cf9 1219      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
 1220      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
1fa3a8f6 1221    }
48e1416a 1222  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
1fa3a8f6 1223	   (void *)ptr->use);
 1224  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
9af5ce0c 1225  fprintf (f, "\n");
1fa3a8f6 1226  return true;
22aa74c4 1227}
1228
1229
 1230/* Dump all the immediate uses of VAR to FILE.  */
 1231
 1232void
 1233dump_immediate_uses_for (FILE *file, tree var)
 1234{
 1235  imm_use_iterator iter;
 1236  use_operand_p use_p;
 1237
 1238  gcc_assert (var && TREE_CODE (var) == SSA_NAME);
 1239
  /* Print VAR followed by a summary of how many uses it has.  */
 1240  print_generic_expr (file, var, TDF_SLIM);
 1241  fprintf (file, " : -->");
 1242  if (has_zero_uses (var))
 1243    fprintf (file, " no uses.\n");
 1244  else
 1245    if (has_single_use (var))
 1246      fprintf (file, " single use.\n");
 1247    else
 1248      fprintf (file, "%d uses.\n", num_imm_uses (var));
 1249
  /* Then print each use statement; virtual uses get the verbose
     operand/memory-symbol dump flags.  */
 1250  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
 1251    {
75a70cf9 1252      if (use_p->loc.stmt == NULL && use_p->use == NULL)
66c8f3a9 1253	fprintf (file, "***end of stmt iterator marker***\n");
b66731e8 1254      else
66c8f3a9 1255	if (!is_gimple_reg (USE_FROM_PTR (use_p)))
75a70cf9 1256	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
66c8f3a9 1257	else
75a70cf9 1258	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
22aa74c4 1259    }
9af5ce0c 1260  fprintf (file, "\n");
22aa74c4 1261}
1262
a002e999 1263
22aa74c4 1264/* Dump all the immediate uses to FILE.  */
 1265
 1266void
 1267dump_immediate_uses (FILE *file)
 1268{
 1269  tree var;
 1270  unsigned int x;
 1271
 1272  fprintf (file, "Immediate_uses: \n\n");
  /* Walk every SSA name of CFUN and dump its immediate uses.  */
f211616e 1273  FOR_EACH_SSA_NAME (x, var, cfun)
22aa74c4 1274    {
22aa74c4 1275      dump_immediate_uses_for (file, var);
 1276    }
 1277}
1278
1279
 1280/* Dump def-use edges on stderr.  Debugger convenience wrapper around
   dump_immediate_uses.  */
 1281
4b987fac 1282DEBUG_FUNCTION void
22aa74c4 1283debug_immediate_uses (void)
 1284{
 1285  dump_immediate_uses (stderr);
 1286}
1287
f6255040 1288
22aa74c4 1289/* Dump def-use edges for VAR on stderr.  Debugger convenience wrapper
   around dump_immediate_uses_for.  */
 1290
4b987fac 1291DEBUG_FUNCTION void
22aa74c4 1292debug_immediate_uses_for (tree var)
 1293{
 1294  dump_immediate_uses_for (stderr, var);
5b110d39 1295}
de6ed584 1296
1297
dd277d48 1298/* Unlink STMTs virtual definition from the IL by propagating its use.  */
 1299
 1300void
42acab1c 1301unlink_stmt_vdef (gimple *stmt)
dd277d48 1302{
 1303  use_operand_p use_p;
 1304  imm_use_iterator iter;
42acab1c 1305  gimple *use_stmt;
dd277d48 1306  tree vdef = gimple_vdef (stmt);
13ff78a4 1307  tree vuse = gimple_vuse (stmt);
dd277d48 1308
  /* Nothing to do if STMT has no SSA-name virtual definition.  */
 1309  if (!vdef
 1310      || TREE_CODE (vdef) != SSA_NAME)
 1311    return;
 1312
  /* Replace every use of STMT's VDEF with STMT's own VUSE.  */
13ff78a4 1313  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
dd277d48 1314    {
 1315      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
13ff78a4 1316	SET_USE (use_p, vuse);
dd277d48 1317    }
de6ed584 1318
  /* Keep the abnormal-PHI marker consistent on the name that was
     propagated into VDEF's former uses.  */
13ff78a4 1319  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
 1320    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
de6ed584 1321}
dd277d48 1322
8f6fa493 1323/* Return true if the var whose chain of uses starts at PTR has a
 1324   single nondebug use.  Set USE_P and STMT to that single nondebug
 1325   use, if so, or to NULL otherwise.  */
 1326bool
 1327single_imm_use_1 (const ssa_use_operand_t *head,
42acab1c 1328		  use_operand_p *use_p, gimple **stmt)
8f6fa493 1329{
 1330  ssa_use_operand_t *ptr, *single_use = 0;
 1331
 1332  for (ptr = head->next; ptr != head; ptr = ptr->next)
    /* NOTE(review): the USE_STMT (ptr) NULL check presumably skips
       iterator marker nodes that carry no statement -- confirm.  */
44a57701 1333    if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr)))
8f6fa493 1334      {
	/* A second nondebug use disqualifies: reset and stop.  */
 1335	if (single_use)
 1336	  {
 1337	    single_use = NULL;
 1338	    break;
 1339	  }
 1340	single_use = ptr;
 1341      }
 1342
 1343  if (use_p)
 1344    *use_p = single_use;
 1345
 1346  if (stmt)
 1347    *stmt = single_use ? single_use->loc.stmt : NULL;
 1348
 1349  return single_use;
 1350}
44a57701 1351