/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
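
/* As an illustration (not itself part of the operand machinery), a pass
   that wants to walk the cached real operands of a statement can use the
   iterators from tree-ssa-operands.h.  A minimal sketch, assuming STMT is
   a GIMPLE statement with up-to-date operands and 'process_use' is a
   hypothetical callback:

	use_operand_p use_p;
	ssa_op_iter iter;

	FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
	  process_use (USE_FROM_PTR (use_p));

   The iterator only walks the cache built by the routines below; it
   never re-parses the statement.  */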

/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;


/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit (1 << 2)

/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Stack of statements to change.  Every call to
   push_stmt_changes pushes the stmt onto the stack.  Calls to
   pop_stmt_changes pop a stmt off of the stack and compute the set
   of changes for the popped statement.  */
static VEC(gimple_p,heap) *scb_stack;

/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (const_tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  /* This function may be invoked from contexts where CFUN is NULL
     (IPA passes), return false for now.  FIXME: operands may be
     active in each individual function, maybe this function should
     take CFUN as a parameter.  */
  if (cfun == NULL)
    return false;

  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}


/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands' FUD chain.  */

static void
create_vop_var (void)
{
  tree global_var;

  gcc_assert (cfun->gimple_df->vop == NULL_TREE);

  global_var = build_decl (VAR_DECL, get_identifier (".MEM"),
			   void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;

  create_var_ann (global_var);
  add_referenced_var (global_var);
  cfun->gimple_df->vop = global_var;
}

/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operand space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8-byte pointers; that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT 0
#define OP_SIZE_1 (1024 - sizeof (void *))
#define OP_SIZE_2 (1024 * 4 - sizeof (void *))
#define OP_SIZE_3 (1024 * 16 - sizeof (void *))
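
/* As a sanity check of the numbers quoted above (an assumption about a
   host with 8-byte pointers, not a guarantee): a struct def_optype_d is
   a next pointer plus a tree *, i.e. 16 bytes, and a struct use_optype_d
   is a next pointer plus a four-pointer ssa_use_operand_d, i.e. 40 bytes,
   so the 1024 - 8 = 1016 usable bytes of OP_SIZE_1 hold 1016 / 16 = 63
   defs or 1016 / 40 = 25 uses.  */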

/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
      scb_stack = VEC_alloc (gimple_p, heap, 20);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index
    = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var ();
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;

      /* The change buffer stack had better be empty.  */
      gcc_assert (VEC_length (gimple_p, scb_stack) == 0);
      VEC_free (gimple_p, heap, scb_stack);
      scb_stack = NULL;
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars: %d\n",
	       clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided: %d\n",
	       clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided: %d\n",
	       clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
	       clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers: %d\n",
	       clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
	       clobber_stats.static_readonly_clobbers_avoided);
    }
}


/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d)
	      || size == sizeof (struct def_optype_d));

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}

      ptr = (struct ssa_operand_memory_d *)
	ggc_alloc (sizeof (void *)
		   + gimple_ssa_operands (cfun)->ssa_operand_mem_size);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}


/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
	= gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
	  ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
	= gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
	  ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}


/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new_def;

  new_def = alloc_def ();
  DEF_OP_PTR (new_def) = op;
  last->next = new_def;
  new_def->next = NULL;
  return new_def;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}


/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);

  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
      VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt));
      ++num;
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_def_ops (stmt);

  new_i = 0;

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    mark_sym_for_renaming (gimple_vdef (stmt));

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  gimple_set_def_ops (stmt, new_list.next);
}


/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      mark_sym_for_renaming (gimple_vop (cfun));
    }

  /* Now create nodes for all the new uses.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
		       (tree *) VEC_index (tree, build_uses, new_i),
		       last);

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}

/* Reset the build vectors and the built VDEF and VUSE operands.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
}


/* Finalize all the build vectors and attach them to STMT's operand
   cache.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}


/* Start the process of building up operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}


/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}


/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var, sym;
  var_ann_t v_ann;

  gcc_assert (SSA_VAR_P (*var_p));

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  */
  if (TREE_THIS_VOLATILE (sym))
    gimple_set_has_volatile_ops (stmt, true);

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    add_virtual_operand (stmt, flags);
}

/* Add the base address of REF to SET.  */

static void
add_to_addressable_set (tree ref, bitmap *set)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*set == NULL)
	*set = BITMAP_ALLOC (&operands_bitmap_obstack);

      bitmap_set_bit (*set, DECL_UID (var));
      TREE_ADDRESSABLE (var) = 1;
    }
}

/* Add the base address of REF to the set of addresses taken by STMT.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  If the base address of REF is a decl that has sub-variables,
   also add all of its sub-variables.  */

static void
gimple_add_to_addresses_taken (gimple stmt, tree ref)
{
  gcc_assert (gimple_has_ops (stmt));
  add_to_addressable_set (ref, gimple_addresses_taken_ptr (stmt));
}


/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
   that got us here.

   FLAGS is as in get_expr_operands.

   RECURSE_ON_BASE should be set to true if we want to continue
   calling get_expr_operands on the base pointer, and false if
   something else will do it for us.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags,
			   bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr, opf_use);
}


/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);

  if (TMR_SYMBOL (expr))
    gimple_add_to_addresses_taken (stmt, TMR_SYMBOL (expr));

  add_virtual_operand (stmt, flags);
}


/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_virtual_operand (stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (stmt, opf_use);
    }
}
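
/* To make the classification above concrete (a sketch of the intended
   behavior, not code from this file): a call to a 'const' function such
   as __builtin_clz (x) reads no memory and gets no virtual operands at
   all; a 'pure' call such as strlen (s) may read memory and gets a VUSE;
   an ordinary call such as memcpy (d, s, n) may both read and write
   memory and gets a VDEF (which, via append_vdef, also acts as a VUSE of
   the single .MEM symbol).  */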


/* Scan operands in STMT, a GIMPLE_ASM statement.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    gimple_add_to_addresses_taken (stmt, t);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    gimple_add_to_addresses_taken (stmt, t);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree link = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
	{
	  add_virtual_operand (stmt, opf_def);
	  break;
	}
    }
}


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      gimple_add_to_addresses_taken (stmt, TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
	 variable references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	HOST_WIDE_INT offset, size, maxsize;

	if (TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
	if (TREE_CODE (ref) == INDIRECT_REF)
	  {
	    get_indirect_ref_operands (stmt, ref, flags, false);
	    flags |= opf_no_vops;
	  }

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, opf_use);

	return;
      }

    case BIT_FIELD_REF:
      if (TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case CHANGE_DYNAMIC_TYPE_EXPR:
      gcc_unreachable ();

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
    case FILTER_EXPR:
    case EXC_PTR_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASM)
    get_asm_expr_operands (stmt);
  else
    {
      size_t i, start = 0;

      if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
	{
	  get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
	  start = 1;
	}

      for (i = start; i < gimple_num_ops (stmt); i++)
	get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);

      /* Add call-clobbered operands, if needed.  */
      if (code == GIMPLE_CALL)
	maybe_add_call_vops (stmt);
    }
}


/* Create an operands cache for STMT.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  /* Just clear the bitmap so we don't end up reallocating it over and over.  */
  if (gimple_addresses_taken (stmt))
    bitmap_clear (gimple_addresses_taken (stmt));

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}
28c92cbb 1102/* Releases the operands of STMT back to their freelists, and clears
1103 the stmt operand lists. */
1104
1105void
75a70cf9 1106free_stmt_operands (gimple stmt)
28c92cbb 1107{
75a70cf9 1108 def_optype_p defs = gimple_def_ops (stmt), last_def;
1109 use_optype_p uses = gimple_use_ops (stmt), last_use;
28c92cbb 1110
1111 if (defs)
1112 {
1113 for (last_def = defs; last_def->next; last_def = last_def->next)
1114 continue;
1115 last_def->next = gimple_ssa_operands (cfun)->free_defs;
1116 gimple_ssa_operands (cfun)->free_defs = defs;
75a70cf9 1117 gimple_set_def_ops (stmt, NULL);
28c92cbb 1118 }
1119
1120 if (uses)
1121 {
1122 for (last_use = uses; last_use->next; last_use = last_use->next)
1123 delink_imm_use (USE_OP_PTR (last_use));
1124 delink_imm_use (USE_OP_PTR (last_use));
1125 last_use->next = gimple_ssa_operands (cfun)->free_uses;
1126 gimple_ssa_operands (cfun)->free_uses = uses;
75a70cf9 1127 gimple_set_use_ops (stmt, NULL);
28c92cbb 1128 }
1129
75a70cf9 1130 if (gimple_has_ops (stmt))
1131 gimple_set_addresses_taken (stmt, NULL);
f6255040 1132
75a70cf9 1133 if (gimple_has_mem_ops (stmt))
1134 {
dd277d48 1135 gimple_set_vuse (stmt, NULL_TREE);
1136 gimple_set_vdef (stmt, NULL_TREE);
75a70cf9 1137 }
c9a1e1e0 1138}
1139
0b3f639d 1140


/* Get the operands of statement STMT.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  timevar_push (TV_TREE_OPS);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}


/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* If we couldn't find both uses in the cache, there isn't much we
	 can do at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
	{
	  tree *tmp = USE_OP_PTR (use1)->use;
	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
	  USE_OP_PTR (use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
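
/* A sketch of a typical use (an illustration, not code from this file),
   assuming an up-to-date GIMPLE assignment STMT whose two RHS operands of
   a commutative operation should be exchanged; gimple_assign_rhs1_ptr and
   gimple_assign_rhs2_ptr are the operand accessors from gimple.h:

	swap_tree_operands (stmt,
			    gimple_assign_rhs1_ptr (stmt),
			    gimple_assign_rhs2_ptr (stmt));

   This keeps the immediate-use lists consistent without forcing a full
   operand re-scan of STMT.  */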

/* Scan the immediate_use list for VAR making sure it's linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else
    if (has_single_use (var))
      fprintf (file, " single use.\n");
    else
      fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
	fprintf (file, "***end of stmt iterator marker***\n");
      else
	if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
	else
	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}


/* Push *STMT_P on the SCB_STACK.  This function is deprecated, do not
   introduce new uses of it.  */

void
push_stmt_changes (gimple *stmt_p)
{
  gimple stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    return;

  VEC_safe_push (gimple_p, heap, scb_stack, stmt_p);
}

/* Pop the top stmt from SCB_STACK and act on the differences between
   what was recorded by push_stmt_changes and the current state of
   the statement.  This function is deprecated, do not introduce
   new uses of it.  */

void
pop_stmt_changes (gimple *stmt_p)
{
  gimple *stmt2_p, stmt = *stmt_p;
  ssa_op_iter iter;
  tree op;

  /* It makes no sense to keep track of PHI nodes.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    return;

  stmt2_p = VEC_pop (gimple_p, scb_stack);
  gcc_assert (stmt_p == stmt2_p);

  /* Force an operand re-scan on the statement and mark any newly
     exposed variables.  This also will mark the virtual operand
     for renaming if necessary.  */
  update_stmt (stmt);

  /* Mark all the naked GIMPLE register operands for renaming.
     ??? Especially this is considered bad behavior of the caller,
     it should have updated SSA form manually.  Even more so as
     we do not have a way to verify that no SSA names for op are
     already in use.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
    if (DECL_P (op))
      mark_sym_for_renaming (op);
}

/* Discard the topmost stmt from SCB_STACK.  This is useful
   when the caller realized that it did not actually modify the
   statement.  It avoids the expensive operand re-scan.
   This function is deprecated, do not introduce new uses of it.  */

void
discard_stmt_changes (gimple *stmt_p)
{
  gimple *stmt2_p, stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    return;

  stmt2_p = VEC_pop (gimple_p, scb_stack);
  gcc_assert (stmt_p == stmt2_p);
}
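
/* The (deprecated) protocol for the three functions above, in sketch
   form:

	push_stmt_changes (&stmt);
	... modify STMT in place ...
	if (something_changed)
	  pop_stmt_changes (&stmt);
	else
	  discard_stmt_changes (&stmt);

   so that the expensive operand re-scan in pop_stmt_changes only runs
   when the statement was really modified ('something_changed' here is
   the caller's own bookkeeping, not a symbol from this file).  */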

/* Unlink STMT's virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (stmt))
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, gimple_vuse (stmt));
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vuse (stmt)) = 1;
}
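
/* A common pattern when deleting a statement that writes memory is to
   unlink its VDEF first and then remove it; a sketch (assuming GSI is a
   valid gimple_stmt_iterator pointing at STMT):

	unlink_stmt_vdef (stmt);
	gsi_remove (&gsi, true);
	release_defs (stmt);

   The VDEF's immediate uses are redirected to STMT's VUSE before the
   statement leaves the IL, keeping the virtual FUD chain intact.  */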