/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "flags.h"
27#include "function.h"
28#include "diagnostic.h"
29#include "tree-flow.h"
30#include "tree-inline.h"
31#include "tree-pass.h"
32#include "ggc.h"
33#include "timevar.h"
4c714dd4 34#include "toplev.h"
6674a6ce 35#include "langhooks.h"
ea900239 36#include "ipa-reference.h"
1a24f92f 37

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
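
/* Illustrative sketch, not an additional interface of this file: once the
   cache is built, consumer passes normally walk it with the iterators from
   tree-ssa-operands.h (also used further down in this file).  The loop body
   below is hypothetical:

	tree op;
	ssa_op_iter iter;

	FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VUSE)
	  process_virtual_use (op);

   Real operands are visited the same way, with a different SSA_OP_* filter
   (process_virtual_use is a made-up consumer).  */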

/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This is used
   to distinguish "reset the world" events from explicit MODIFY_EXPRs.  */
#define opf_non_specific  (1 << 3)
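
/* As an illustration of how these flags combine (cf. parse_ssa_operands
   and get_expr_operands below): the LHS of an assignment that overwrites
   an entire variable is scanned with

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
			   opf_is_def | opf_kill_def);

   while a partial store (e.g. through an ARRAY_REF) drops opf_kill_def,
   and ADDR_EXPR traversal adds opf_no_vops so that only real operands
   (array indices) are collected.  */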

/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Array for building all the v_may_def operands.  */
static VEC(tree,heap) *build_v_may_defs;

/* Array for building all the vuse operands.  */
static VEC(tree,heap) *build_vuses;

/* Array for building all the v_must_def operands.  */
static VEC(tree,heap) *build_v_must_defs;

/* True if the operands for call clobbered vars are cached and valid.  */
bool ssa_call_clobbered_cache_valid;
bool ssa_ro_call_cache_valid;

/* These arrays are the cached operand vectors for call clobbered calls.  */
static VEC(tree,heap) *clobbered_v_may_defs;
static VEC(tree,heap) *clobbered_vuses;
static VEC(tree,heap) *ro_call_vuses;
static bool clobbered_aliased_loads;
static bool clobbered_aliased_stores;
static bool ro_call_aliased_loads;
static bool ops_active = false;

static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;

static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_tmr_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree, tree);
static void add_call_read_ops (tree);
static void add_stmt_operand (tree *, stmt_ann_t, int);
static void build_ssa_operands (tree stmt);

static def_optype_p free_defs = NULL;
static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static maydef_optype_p free_maydefs = NULL;
static mustdef_optype_p free_mustdefs = NULL;

/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}

/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *) p);
  tree e2 = *((const tree *) q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}

/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static inline void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);
  if (num < 2)
    return;
  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }
  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
	 VEC_length (tree, list),
	 sizeof (tree),
	 operand_build_cmp);
}

/* Return true if the ssa operands cache is active.  */

bool
ssa_operands_active (void)
{
  return ops_active;
}

/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  build_defs = VEC_alloc (tree, heap, 5);
  build_uses = VEC_alloc (tree, heap, 10);
  build_vuses = VEC_alloc (tree, heap, 25);
  build_v_may_defs = VEC_alloc (tree, heap, 25);
  build_v_must_defs = VEC_alloc (tree, heap, 25);

  gcc_assert (operand_memory == NULL);
  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  ops_active = true;
}

/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  VEC_free (tree, heap, build_defs);
  VEC_free (tree, heap, build_uses);
  VEC_free (tree, heap, build_v_must_defs);
  VEC_free (tree, heap, build_v_may_defs);
  VEC_free (tree, heap, build_vuses);
  free_defs = NULL;
  free_uses = NULL;
  free_vuses = NULL;
  free_maydefs = NULL;
  free_mustdefs = NULL;
  while ((ptr = operand_memory) != NULL)
    {
      operand_memory = operand_memory->next;
      ggc_free (ptr);
    }

  VEC_free (tree, heap, clobbered_v_may_defs);
  VEC_free (tree, heap, clobbered_vuses);
  VEC_free (tree, heap, ro_call_vuses);
  ops_active = false;
}

/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = GGC_NEW (struct ssa_operand_memory_d);
      ptr->next = operand_memory;
      operand_memory = ptr;
      operand_memory_index = 0;
    }
  ptr = &(operand_memory->mem[operand_memory_index]);
  operand_memory_index += size;
  return ptr;
}

/* Make sure PTR is in the correct immediate use list.  Since uses are simply
   pointers into the stmt TREE, there is no way of telling if anyone has
   changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
   The contents are different, but the pointer is still the same.  This
   routine will check to make sure PTR is in the correct list, and if it isn't
   put it in the correct list.  We cannot simply check the previous node
   because all nodes in the same stmt might have been changed.  */

static inline void
correct_use_link (use_operand_p ptr, tree stmt)
{
  use_operand_p prev;
  tree root;

  /* fold_stmt () may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  prev = ptr->prev;
  if (prev)
    {
      /* Find the root element, making sure we skip any safe iterators.  */
      while (prev->use != NULL || prev->stmt == NULL)
	prev = prev->prev;

      /* Get the ssa_name of the list the node is in.  */
      root = prev->stmt;
      /* If it's the right list, simply return.  */
      if (root == *(ptr->use))
	return;
    }
  /* It is in the wrong list if we reach here.  */
  delink_imm_use (ptr);
  link_imm_use (ptr, *(ptr->use));
}

/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  Virtual uses do not need
   the overhead of correct_use_link since they cannot be directly manipulated
   like a real use can be.  (They don't exist in the TREE_OPERAND nodes.)  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt () may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}

#define FINALIZE_OPBUILD	build_defs
#define FINALIZE_OPBUILD_BASE(I)	(tree *) VEC_index (tree, build_defs, (I))
#define FINALIZE_OPBUILD_ELEM(I)	(tree *) VEC_index (tree, build_defs, (I))
#define FINALIZE_FUNC		finalize_ssa_def_ops
#define FINALIZE_ALLOC		alloc_def
#define FINALIZE_FREE		free_defs
#define FINALIZE_TYPE		struct def_optype_d
#define FINALIZE_ELEM(PTR)	((PTR)->def_ptr)
#define FINALIZE_OPS		DEF_OPS
#define FINALIZE_BASE(VAR)	VAR
#define FINALIZE_BASE_TYPE	tree *
#define FINALIZE_BASE_ZERO	NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	FINALIZE_ELEM (PTR) = (VAL)
#include "tree-ssa-opfinalize.h"

/* This routine will create stmt operands for STMT from the def build list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = VEC_length (tree, build_defs);
  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */

  finalize_ssa_def_ops (stmt);
  VEC_truncate (tree, build_defs, 0);
}

#define FINALIZE_OPBUILD	build_uses
#define FINALIZE_OPBUILD_BASE(I)	(tree *) VEC_index (tree, build_uses, (I))
#define FINALIZE_OPBUILD_ELEM(I)	(tree *) VEC_index (tree, build_uses, (I))
#define FINALIZE_FUNC		finalize_ssa_use_ops
#define FINALIZE_ALLOC		alloc_use
#define FINALIZE_FREE		free_uses
#define FINALIZE_TYPE		struct use_optype_d
#define FINALIZE_ELEM(PTR)	((PTR)->use_ptr.use)
#define FINALIZE_OPS		USE_OPS
#define FINALIZE_USE_PTR(PTR)	USE_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE	correct_use_link
#define FINALIZE_BASE(VAR)	VAR
#define FINALIZE_BASE_TYPE	tree *
#define FINALIZE_BASE_ZERO	NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)		\
  (PTR)->use_ptr.use = (VAL);				\
  link_imm_use_stmt (&((PTR)->use_ptr), *(VAL), (STMT))
#include "tree-ssa-opfinalize.h"

/* Return a new use operand vector for STMT, comparing to OLD_OPS_P.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *) VEC_index (tree, build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  VEC_truncate (tree, build_uses, 0);
}

/* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P.  */

#define FINALIZE_OPBUILD	build_v_may_defs
#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_v_may_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree, build_v_may_defs, (I)))
#define FINALIZE_FUNC		finalize_ssa_v_may_def_ops
#define FINALIZE_ALLOC		alloc_maydef
#define FINALIZE_FREE		free_maydefs
#define FINALIZE_TYPE		struct maydef_optype_d
#define FINALIZE_ELEM(PTR)	MAYDEF_RESULT (PTR)
#define FINALIZE_OPS		MAYDEF_OPS
#define FINALIZE_USE_PTR(PTR)	MAYDEF_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE	set_virtual_use_link
#define FINALIZE_BASE_ZERO	0
#define FINALIZE_BASE(VAR)	get_name_decl (VAR)
#define FINALIZE_BASE_TYPE	unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)		\
  (PTR)->def_var = (VAL);				\
  (PTR)->use_var = (VAL);				\
  (PTR)->use_ptr.use = &((PTR)->use_var);		\
  link_imm_use_stmt (&((PTR)->use_ptr), (VAL), (STMT))
#include "tree-ssa-opfinalize.h"


static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}

/* Clear the in_list bits and empty the build array for v_may_defs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = VEC_length (tree, build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  VEC_truncate (tree, build_v_may_defs, 0);
}

#define FINALIZE_OPBUILD	build_vuses
#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_vuses, (I))
#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree, build_vuses, (I)))
#define FINALIZE_FUNC		finalize_ssa_vuse_ops
#define FINALIZE_ALLOC		alloc_vuse
#define FINALIZE_FREE		free_vuses
#define FINALIZE_TYPE		struct vuse_optype_d
#define FINALIZE_ELEM(PTR)	VUSE_OP (PTR)
#define FINALIZE_OPS		VUSE_OPS
#define FINALIZE_USE_PTR(PTR)	VUSE_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE	set_virtual_use_link
#define FINALIZE_BASE_ZERO	0
#define FINALIZE_BASE(VAR)	get_name_decl (VAR)
#define FINALIZE_BASE_TYPE	unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)		\
  (PTR)->use_var = (VAL);				\
  (PTR)->use_ptr.use = &((PTR)->use_var);		\
  link_imm_use_stmt (&((PTR)->use_ptr), (VAL), (STMT))
#include "tree-ssa-opfinalize.h"

/* Return a new vuse operand vector, comparing to OLD_OPS_P.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  num = VEC_length (tree, build_vuses);
  num_v_may_defs = VEC_length (tree, build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
	{
	  tree vuse;
	  vuse = VEC_index (tree, build_vuses, vuse_index);
	  if (TREE_CODE (vuse) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (vuse);
	      ann->in_vuse_list = 0;
	      if (ann->in_v_may_def_list)
		{
		  VEC_ordered_remove (tree, build_vuses, vuse_index);
		  continue;
		}
	    }
	  vuse_index++;
	}
    }
  else
    /* Clear out the in_list bits.  */
    for (vuse_index = 0;
	 vuse_index < VEC_length (tree, build_vuses);
	 vuse_index++)
      {
	tree t = VEC_index (tree, build_vuses, vuse_index);
	if (TREE_CODE (t) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (t);
	    ann->in_vuse_list = 0;
	  }
      }

  finalize_ssa_vuse_ops (stmt);
  /* The v_may_def build vector wasn't cleaned up because we needed it.  */
  cleanup_v_may_defs ();

  /* Free the vuses build vector.  */
  VEC_truncate (tree, build_vuses, 0);
}

/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P.  */

#define FINALIZE_OPBUILD	build_v_must_defs
#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_v_must_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree, build_v_must_defs, (I)))
#define FINALIZE_FUNC		finalize_ssa_v_must_def_ops
#define FINALIZE_ALLOC		alloc_mustdef
#define FINALIZE_FREE		free_mustdefs
#define FINALIZE_TYPE		struct mustdef_optype_d
#define FINALIZE_ELEM(PTR)	MUSTDEF_RESULT (PTR)
#define FINALIZE_OPS		MUSTDEF_OPS
#define FINALIZE_USE_PTR(PTR)	MUSTDEF_KILL_PTR (PTR)
#define FINALIZE_CORRECT_USE	set_virtual_use_link
#define FINALIZE_BASE_ZERO	0
#define FINALIZE_BASE(VAR)	get_name_decl (VAR)
#define FINALIZE_BASE_TYPE	unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)		\
  (PTR)->def_var = (VAL);				\
  (PTR)->kill_var = (VAL);				\
  (PTR)->use_ptr.use = &((PTR)->kill_var);		\
  link_imm_use_stmt (&((PTR)->use_ptr), (VAL), (STMT))
#include "tree-ssa-opfinalize.h"


static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF per
     statement (one for each subvar).  It is a bit expensive to verify that
     all must-defs in a statement belong to subvars if there is more than one
     MUST-def, so we don't do it.  Suffice to say, if you reach here without
     having subvars, and have num > 1, you have hit a bug.  */

  finalize_ssa_v_must_def_ops (stmt);
  VEC_truncate (tree, build_v_must_defs, 0);
}

/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}


/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
  gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
}
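
/* For reference, the calling sequence these routines expect is the one
   build_ssa_operands follows below (a sketch, not a separate entry point):

	start_ssa_stmt_operands ();
	parse_ssa_operands (stmt);
	operand_build_sort_virtual (build_vuses);
	operand_build_sort_virtual (build_v_may_defs);
	operand_build_sort_virtual (build_v_must_defs);
	finalize_ssa_stmt_operands (stmt);

   The start/finalize pair brackets every build, so the static build_*
   vectors are always left empty for the next statement.  */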

/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}


/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
	return;
      ann->in_v_may_def_list = 1;
    }

  VEC_safe_push (tree, heap, build_v_may_defs, (tree) var);
}


/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
	return;
      ann->in_vuse_list = 1;
    }

  VEC_safe_push (tree, heap, build_vuses, (tree) var);
}


/* Add VAR to the list of virtual must definitions for INFO.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
    if (var == VEC_index (tree, build_v_must_defs, i))
      return;

  VEC_safe_push (tree, heap, build_v_must_defs, (tree) var);
}

/* Parse STMT looking for operands.  OLD_OPS is the original stmt operand
   cache for STMT, if it existed before.  When finished, the various build_*
   operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      /* First get operands from the RHS.  For the LHS, we use a V_MAY_DEF if
	 either only part of LHS is modified or if the RHS might throw,
	 otherwise, use V_MUST_DEF.

	 ??? If it might throw, we should represent somehow that it is killed
	 on the fallthrough path.  */
      {
	tree lhs = TREE_OPERAND (stmt, 0);
	int lhs_flags = opf_is_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);

	/* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
	   or not the entire LHS is modified; that depends on what's
	   inside the VIEW_CONVERT_EXPR.  */
	if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
	  lhs = TREE_OPERAND (lhs, 0);

	if (TREE_CODE (lhs) != ARRAY_REF
	    && TREE_CODE (lhs) != ARRAY_RANGE_REF
	    && TREE_CODE (lhs) != BIT_FIELD_REF
	    && TREE_CODE (lhs) != REALPART_EXPR
	    && TREE_CODE (lhs) != IMAGPART_EXPR)
	  lhs_flags |= opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
      }
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will fail in
	 append_use.  This default will handle statements like empty
	 statements, or CALL_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}

/* Create an operands cache for STMT.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  if (ann)
    {
      ann->has_volatile_ops = false;
      ann->makes_aliased_stores = false;
      ann->makes_aliased_loads = false;
    }

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_v_may_defs);
  operand_build_sort_virtual (build_v_must_defs);

  finalize_ssa_stmt_operands (stmt);
}


/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->maydef_ops = NULL;
  ops->mustdef_ops = NULL;
  ops->vuse_ops = NULL;
}

/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to mark_stmt_modified ().  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);
  /* If get_stmt_operands is called before SSA is initialized, don't
     do anything.  */
  if (!ssa_operands_active ())
    return;
  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to mark_stmt_modified ().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
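
/* Illustrative usage, with hypothetical pass code: after rewriting a
   statement in place, a pass first flags it and then rebuilds the cache:

	mark_stmt_modified (stmt);
	update_stmt_operands (stmt);

   mark_stmt_modified sets ann->modified, which is asserted on entry above,
   so calling update_stmt_operands on an unmodified statement is a bug in
   the caller.  */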

/* Copies virtual operands from SRC to DEST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  if (VEC_length (tree, build_vuses) == 0
      && VEC_length (tree, build_v_may_defs) == 0
      && VEC_length (tree, build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the fields to the same values as the originals.  */

  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));
}
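
/* Illustrative usage, with hypothetical pass code: a pass that substitutes
   an equivalent statement can preserve the virtual SSA web without
   triggering a renaming pass:

	copy_virtual_operands (new_stmt, old_stmt);

   The SSA_NAME versions of the virtual operands are carried over from
   OLD_STMT rather than recomputed.  */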

/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_OPS are the operands
   from the store stmt, and NEW_STMT is the new load which represents a
   load of the values stored.  */

void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
  stmt_ann_t ann;
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned x;

  ann = get_stmt_ann (new_stmt);

  /* Process the stmt looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (x = 0; x < VEC_length (tree, build_vuses); x++)
    {
      tree t = VEC_index (tree, build_vuses, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_vuse_list = 0;
	}
    }

  for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_v_may_defs, 0);
  VEC_truncate (tree, build_v_must_defs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
			     (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
    append_vuse (op);

  /* Now build the operands for this new stmt.  */
  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}

/* Swap operands EXP0 and EXP1 in statement STMT, keeping the operand
   cache consistent.  */

void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative positions
     of these two operands in their respective immediate use lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;
      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }
      /* If both uses don't have operand entries, there isn't much we can do
	 at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
	{
	  tree *tmp = USE_OP_PTR (use1)->use;
	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
	  USE_OP_PTR (use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
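
/* Illustrative usage, with a hypothetical caller: canonicalizing a
   commutative expression while keeping the operand cache consistent:

	swap_tree_operands (stmt,
			    &TREE_OPERAND (rhs, 0),
			    &TREE_OPERAND (rhs, 1));

   Swapping through this routine, rather than assigning the trees directly,
   keeps the cached use operands pointing at the right TREE_OPERAND slots.  */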

/* Recursively scan the expression pointed to by EXPR_P in statement referred
   to by INFO.  FLAGS is one of the OPF_* constants modifying how to interpret
   the operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* We could have the address of a component, array member,
	 etc which has interesting variable references.  */
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the stmt takes its address will be
	 of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, s_ann, 0);

      /* If the address is invariant, there may be no interesting variable
	 references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find here
	 are ARRAY_REF indices which will always be real operands (GIMPLE
	 does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case STRUCT_FIELD_TAG:
    case TYPE_MEMORY_TAG:
    case NAME_MEMORY_TAG:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case CONST_DECL:
      {
	subvar_t svars;

	/* Add the subvars for a variable if it has subvars, to DEFS or USES.
	   Otherwise, add the variable itself.
	   Whether it goes to USES or DEFS depends on the operand flags.  */
	if (var_can_have_subvars (expr)
	    && (svars = get_subvars_for_var (expr)))
	  {
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      add_stmt_operand (&sv->var, s_ann, flags);
	  }
	else
	  {
	    add_stmt_operand (expr_p, s_ann, flags);
	  }
	return;
      }
    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
	 representing the array.  The virtual variable for an ARRAY_REF
	 is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to the value of IS_DEF.  Recurse if the LHS of the
	 ARRAY_REF node is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, s_ann, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	HOST_WIDE_INT offset, size, maxsize;
	/* This component ref becomes an access to all of the subvariables
	   it can touch, if we can determine that, but *NOT* the real one.
	   If we can't determine which fields we could touch, the recursion
	   will eventually get to a variable and add *all* of its subvars, or
	   whatever is the minimum correct subset.  */

	ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
	if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
	  {
	    subvar_t svars = get_subvars_for_var (ref);
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      {
		bool exact;
		if (overlap_subvar (offset, maxsize, sv, &exact))
		  {
		    int subvar_flags = flags;
		    if (!exact || size != maxsize)
		      subvar_flags &= ~opf_kill_def;
		    add_stmt_operand (&sv->var, s_ann, subvar_flags);
		  }
	      }
	  }
	else
	  get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			     flags & ~opf_kill_def);

	if (code == COMPONENT_REF)
	  {
	    if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      s_ann->has_volatile_ops = true;
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	  }
	return;
      }
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      {
	int subflags;
	tree op;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

	op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == ARRAY_REF
	    || TREE_CODE (op) == ARRAY_RANGE_REF
	    || TREE_CODE (op) == REALPART_EXPR
	    || TREE_CODE (op) == IMAGPART_EXPR)
	  subflags = opf_is_def;
	else
	  subflags = opf_is_def | opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
	return;
      }

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, opf_none);

	return;
      }

    case TRUTH_NOT_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  internal_error ("internal error");
#endif
  gcc_unreachable ();
}
a6d02559
DN
1285
1286static void
1a24f92f 1287get_asm_expr_operands (tree stmt)
a6d02559 1288{
1a24f92f 1289 stmt_ann_t s_ann = stmt_ann (stmt);
a6d02559
DN
1290 int noutputs = list_length (ASM_OUTPUTS (stmt));
1291 const char **oconstraints
1292 = (const char **) alloca ((noutputs) * sizeof (const char *));
1293 int i;
1294 tree link;
1295 const char *constraint;
1296 bool allows_mem, allows_reg, is_inout;
a6d02559
DN
1297
1298 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1299 {
1300 oconstraints[i] = constraint
1301 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1302 parse_output_constraint (&constraint, i, 0, 0,
1303 &allows_mem, &allows_reg, &is_inout);
1304
a6d02559 1305 /* This should have been split in gimplify_asm_expr. */
1e128c5f 1306 gcc_assert (!allows_reg || !is_inout);
a6d02559
DN
1307
1308 /* Memory operands are addressable. Note that STMT needs the
1309 address of this operand. */
1310 if (!allows_reg && allows_mem)
1311 {
1312 tree t = get_base_address (TREE_VALUE (link));
e8ca4159
DN
1313 if (t && DECL_P (t) && s_ann)
1314 add_to_addressable_set (t, &s_ann->addresses_taken);
a6d02559
DN
1315 }
1316
1a24f92f 1317 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
a6d02559
DN
1318 }
1319
1320 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1321 {
1322 constraint
1323 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1324 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1325 oconstraints, &allows_mem, &allows_reg);
1326
1327 /* Memory operands are addressable. Note that STMT needs the
1328 address of this operand. */
1329 if (!allows_reg && allows_mem)
1330 {
1331 tree t = get_base_address (TREE_VALUE (link));
e8ca4159
DN
1332 if (t && DECL_P (t) && s_ann)
1333 add_to_addressable_set (t, &s_ann->addresses_taken);
a6d02559
DN
1334 }
1335
1a24f92f 1336 get_expr_operands (stmt, &TREE_VALUE (link), 0);
a6d02559
DN
1337 }
1338
7c35745c 1339
a6d02559 1340 /* Clobber memory for asm ("" : : : "memory"); */
7c35745c
DN
1341 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1342 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1343 {
3cd8c58a 1344 unsigned i;
87c476a2 1345 bitmap_iterator bi;
7c35745c 1346
7c35745c
DN
1347 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1348 decided to group them). */
1349 if (global_var)
e288e2f5 1350 add_stmt_operand (&global_var, s_ann, opf_is_def);
7c35745c 1351 else
87c476a2 1352 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
a3648cfc
DB
1353 {
1354 tree var = referenced_var (i);
1355 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1356 }
a6d02559 1357
7c35745c 1358 /* Now clobber all addressables. */
87c476a2 1359 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
7c35745c
DN
1360 {
1361 tree var = referenced_var (i);
d19e9499
DB
1362
1363 /* Subvars are explicitly represented in this list, so
1364 we don't need the original to be added to the clobber
1365 ops, but the original *will* be in this list because
1366 we keep the addressability of the original
1367 variable up-to-date so we don't screw up the rest of
1368 the backend. */
1369 if (var_can_have_subvars (var)
1370 && get_subvars_for_var (var) != NULL)
1371 continue;
1372
0d2bf6f0 1373 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
87c476a2 1374 }
a6d02559 1375
7c35745c
DN
1376 break;
1377 }
a6d02559
DN
1378}
1379

/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its type memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);
	  if (v_ann->type_mem_tag)
	    add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we are in finding ways to write invalid code.  We
     cannot just assert here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical...  */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
	   && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, s_ann, 0);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broken.  */
  else
    gcc_unreachable ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}

/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag = TMR_TAG (expr);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);

  /* MEM_REFs should never be killing.  */
  flags &= ~opf_kill_def;

  if (TMR_SYMBOL (expr))
    {
      stmt_ann_t ann = stmt_ann (stmt);
      add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
    }

  if (tag)
    get_expr_operands (stmt, &tag, flags);
  else
    /* Something weird, so ensure that we will be careful.  */
    stmt_ann (stmt)->has_volatile_ops = true;
}

/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt);
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}
/* Add *VAR_P to the appropriate operand array of the statement with
   annotation S_ANN.  FLAGS is as in get_expr_operands.  If *VAR_P is a
   GIMPLE register, it will be added to the statement's real operands,
   otherwise it is added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  bool is_real_op;
  tree var, sym;
  var_ann_t v_ann;

  var = *var_p;
  STRIP_NOPS (var);

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR && s_ann)
    {
      add_to_addressable_set (TREE_OPERAND (var, 0), &s_ann->addresses_taken);
      return;
    }

  /* If the original variable is not a scalar, it will be added to the list
     of virtual operands.  In that case, use its base symbol as the virtual
     variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a V_MAY_DEF, change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  We check that
     this suppression only happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     modify_expr, then we can't suppress the V_DEF, lest we run into
     validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_non_specific) && unmodifiable_var_p (var))
    {
      gcc_assert (!is_real_op);
      flags &= ~(opf_is_def | opf_kill_def);
    }

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
        append_def (var_p);
      else
        append_use (var_p);
    }
  else
    {
      varray_type aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases)
         to virtual operands, unless the caller has specifically requested
         not to add virtual operands (used when adding operands inside an
         ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
        return;

      aliases = v_ann->may_aliases;

      if (aliases == NULL)
        {
          /* The variable is not aliased or it is an alias tag.  */
          if (flags & opf_is_def)
            {
              if (flags & opf_kill_def)
                {
                  /* Only regular variables or struct fields may get a
                     V_MUST_DEF operand.  */
                  gcc_assert (!MTAG_P (var)
                              || TREE_CODE (var) == STRUCT_FIELD_TAG);

                  /* V_MUST_DEF for non-aliased, non-GIMPLE register
                     variable definitions.  */
                  append_v_must_def (var);
                }
              else
                {
                  /* Add a V_MAY_DEF for call-clobbered variables and
                     memory tags.  */
                  append_v_may_def (var);
                }
            }
          else
            {
              append_vuse (var);
              if (s_ann && v_ann->is_alias_tag)
                s_ann->makes_aliased_loads = 1;
            }
        }
      else
        {
          size_t i;

          /* The variable is aliased.  Add its aliases to the virtual
             operands.  */
          gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);

          if (flags & opf_is_def)
            {
              /* If the variable is also an alias tag, add a virtual
                 operand for it, otherwise we will miss representing
                 references to the members of the variable's alias set.
                 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
              if (v_ann->is_alias_tag)
                append_v_may_def (var);

              for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
                append_v_may_def (VARRAY_TREE (aliases, i));

              if (s_ann)
                s_ann->makes_aliased_stores = 1;
            }
          else
            {
              /* Similarly, append a virtual use for VAR itself, when
                 it is an alias tag.  */
              if (v_ann->is_alias_tag)
                append_vuse (var);

              for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
                append_vuse (VARRAY_TREE (aliases, i));

              if (s_ann)
                s_ann->makes_aliased_loads = 1;
            }
        }
    }
}

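/* Illustrative sketch, not part of the original code (the variable
   names are made up): for a statement 'a = b + 1' where 'a' and 'b'
   are GIMPLE registers, add_stmt_operand appends a real DEF for 'a'
   and a real USE for 'b'.  For '*p = x', the store target is not a
   GIMPLE register, so virtual operands are used instead: a V_MUST_DEF
   when the target is a non-aliased variable being killed, V_MAY_DEFs
   for aliased or call-clobbered storage (one per alias), and VUSEs on
   the load side.  */
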
/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
        *addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
          && (svars = get_subvars_for_var (var)))
        {
          subvar_t sv;
          for (sv = svars; sv; sv = sv->next)
            {
              bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
              TREE_ADDRESSABLE (sv->var) = 1;
            }
        }
      else
        {
          bitmap_set_bit (*addresses_taken, DECL_UID (var));
          TREE_ADDRESSABLE (var) = 1;
        }
    }
}

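/* Illustrative sketch, not part of the original code: for 'q = &s.f',
   the base address of the COMPONENT_REF 's.f' is the whole structure
   's', so 's' (and each of its sub-variables, if it has any) is marked
   TREE_ADDRESSABLE and added to the addresses-taken set.  Marking only
   the field would be wrong because the address can be used to reach
   the rest of 's' through pointer arithmetic (see PR 21407 above).  */
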
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;
  bitmap not_read_b, not_written_b;

  /* Functions that are neither const, pure nor noreturn may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_is_def);
      return;
    }

  /* FIXME - if we have better information from the static vars
     analysis, we need to make the cache call site specific.  This way
     we can have the performance benefits even if we are doing good
     optimization.  */

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* If the cache is valid, copy the elements into the build vectors.  */
  if (ssa_call_clobbered_cache_valid
      && (!not_read_b || bitmap_empty_p (not_read_b))
      && (!not_written_b || bitmap_empty_p (not_written_b)))
    {
      for (u = 0; u < VEC_length (tree, clobbered_vuses); u++)
        {
          t = VEC_index (tree, clobbered_vuses, u);
          gcc_assert (TREE_CODE (t) != SSA_NAME);
          var_ann (t)->in_vuse_list = 1;
          VEC_safe_push (tree, heap, build_vuses, (tree)t);
        }
      for (u = 0; u < VEC_length (tree, clobbered_v_may_defs); u++)
        {
          t = VEC_index (tree, clobbered_v_may_defs, u);
          gcc_assert (TREE_CODE (t) != SSA_NAME);
          var_ann (t)->in_v_may_def_list = 1;
          VEC_safe_push (tree, heap, build_v_may_defs, (tree)t);
        }
      if (s_ann)
        {
          s_ann->makes_aliased_loads = clobbered_aliased_loads;
          s_ann->makes_aliased_stores = clobbered_aliased_stores;
        }
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a V_MAY_DEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      if (unmodifiable_var_p (var))
        add_stmt_operand (&var, &empty_ann, opf_none);
      else
        {
          bool not_read
            = not_read_b ? bitmap_bit_p (not_read_b, u) : false;
          bool not_written
            = not_written_b ? bitmap_bit_p (not_written_b, u) : false;

          if (not_written)
            {
              if (!not_read)
                add_stmt_operand (&var, &empty_ann, opf_none);
            }
          else
            add_stmt_operand (&var, &empty_ann, opf_is_def);
        }
    }

  if ((!not_read_b || bitmap_empty_p (not_read_b))
      && (!not_written_b || bitmap_empty_p (not_written_b)))
    {
      clobbered_aliased_loads = empty_ann.makes_aliased_loads;
      clobbered_aliased_stores = empty_ann.makes_aliased_stores;

      /* Set the flags for a stmt's annotation.  */
      if (s_ann)
        {
          s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
          s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
        }

      /* Prepare empty cache vectors.  */
      VEC_truncate (tree, clobbered_vuses, 0);
      VEC_truncate (tree, clobbered_v_may_defs, 0);

      /* Now fill the clobbered cache with the values that have been found.  */
      for (u = 0; u < VEC_length (tree, build_vuses); u++)
        VEC_safe_push (tree, heap, clobbered_vuses,
                       VEC_index (tree, build_vuses, u));

      gcc_assert (VEC_length (tree, build_vuses)
                  == VEC_length (tree, clobbered_vuses));

      for (u = 0; u < VEC_length (tree, build_v_may_defs); u++)
        VEC_safe_push (tree, heap, clobbered_v_may_defs,
                       VEC_index (tree, build_v_may_defs, u));

      gcc_assert (VEC_length (tree, build_v_may_defs)
                  == VEC_length (tree, clobbered_v_may_defs));

      ssa_call_clobbered_cache_valid = true;
    }
}

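/* Illustrative summary, not part of the original code, of how the
   ipa-reference bitmaps are consulted above for each call-clobbered
   variable VAR (unmodifiable variables always get at most a VUSE):

     not_written | not_read | operand added for VAR
     ------------+----------+----------------------
     true        | true     | none
     true        | false    | VUSE
     false       | either   | V_MAY_DEF  */
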
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt)
{
  unsigned u;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_none);
      return;
    }

  /* If the cache is valid, copy the elements into the build vector.  */
  if (ssa_ro_call_cache_valid)
    {
      for (u = 0; u < VEC_length (tree, ro_call_vuses); u++)
        {
          t = VEC_index (tree, ro_call_vuses, u);
          gcc_assert (TREE_CODE (t) != SSA_NAME);
          var_ann (t)->in_vuse_list = 1;
          VEC_safe_push (tree, heap, build_vuses, (tree)t);
        }
      if (s_ann)
        s_ann->makes_aliased_loads = ro_call_aliased_loads;
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
    }

  ro_call_aliased_loads = empty_ann.makes_aliased_loads;
  if (s_ann)
    s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;

  /* Prepare an empty cache vector.  */
  VEC_truncate (tree, ro_call_vuses, 0);

  /* Now fill the read-only call cache with the values that have been
     found.  */
  for (u = 0; u < VEC_length (tree, build_vuses); u++)
    VEC_safe_push (tree, heap, ro_call_vuses,
                   VEC_index (tree, build_vuses, u));

  gcc_assert (VEC_length (tree, build_vuses)
              == VEC_length (tree, ro_call_vuses));

  ssa_ro_call_cache_valid = true;
}

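/* Illustrative sketch, not part of the original code: for a call to a
   'pure' function, e.g. 'n = strlen (s)', the call may read global
   memory but never writes it, so the loop above adds only VUSEs
   (flagged opf_non_specific) for the call-clobbered variables; no
   V_MAY_DEFs are created for such calls.  */
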
/* Scan the immediate_use list for VAR, making sure it is linked
   properly.  Return TRUE if there is a problem.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error;  /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates
         a problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}

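/* Illustrative sketch, not part of the original code: the immediate
   uses of an SSA_NAME form a circular, doubly-linked list threaded
   through its use_operand_p nodes.  SSA_NAME_IMM_USE_NODE (var) acts
   as the sentinel and is the only node whose 'use' field is NULL; a
   detached sentinel has both links NULL.  verify_imm_links walks the
   list forward counting nodes, walks it backward decrementing the
   count, and flags an error if the two traversals disagree or if any
   node's use does not refer back to VAR.  */
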
/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (!is_gimple_reg (USE_FROM_PTR (use_p)))
        print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
      else
        print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}

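/* Example output, illustrative only (the exact text depends on the IL
   being dumped): for an SSA name a_2 with two uses, the function above
   prints something like

     a_2 : -->2 uses.
     b_3 = a_2 + 1
     return a_2;

   Statements whose use is not a GIMPLE register are printed with
   TDF_VOPS so their virtual operands are visible.  */
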
/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}

/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

#include "gt-tree-ssa-operands.h"