/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */

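/* A rough sketch of how these routines are driven by the operand
   scanner in this file:

       start_ssa_stmt_operands ();
       get_expr_operands (stmt, &expr, flags);
       finalize_ssa_stmt_operands (stmt);

   get_expr_operands fills the build_* arrays via the append_*
   routines, and finalize_ssa_stmt_operands turns those arrays into
   the statement's operand cache.  */
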
/* Helper functions from gimple.c.  These are GIMPLE manipulation
   routines that only the operand scanner should need.  */
void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;


/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def		(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit	(1 << 2)

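/* For example, when scanning the assignment 'a = b', the scanner
   visits the LHS with opf_def and the RHS with opf_use.  Clobbering
   sites such as calls and ASM_EXPRs combine opf_def with opf_implicit,
   which lets add_virtual_operand downgrade a VDEF of an unmodifiable
   variable into a VUSE.  */
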
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Set for building all the VDEF operands.  */
static VEC(tree,heap) *build_vdefs;

/* Set for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

/* Set for building all the loaded symbols.  */
static bitmap build_loads;

/* Set for building all the stored symbols.  */
static bitmap build_stores;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Statement change buffer.  Data structure used to record state
   information for statements.  This is used to determine what needs
   to be done in order to update the SSA web after a statement is
   modified by a pass.  If STMT is a statement that has just been
   created, or needs to be folded via fold_stmt, or anything that
   changes its physical structure then the pass should:

   1- Call push_stmt_changes (&stmt) to record the current state of
      STMT before any modifications are made.

   2- Make all appropriate modifications to the statement.

   3- Call pop_stmt_changes (&stmt) to find new symbols that
      need to be put in SSA form, SSA name mappings for names that
      have disappeared, recompute invariantness for address
      expressions, cleanup EH information, etc.

   If it is possible to determine that the statement was not modified,
   instead of calling pop_stmt_changes it is quicker to call
   discard_stmt_changes to avoid the expensive and unnecessary operand
   re-scan and change comparison.  */

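/* A minimal sketch of that protocol, for a pass about to rewrite STMT
   in place (the actual modification between push and pop is whatever
   the pass needs to do):

       push_stmt_changes (&stmt);
       ... modify stmt ...
       pop_stmt_changes (&stmt);

   A pass that discovers it made no change would call
   discard_stmt_changes (&stmt) instead of pop_stmt_changes.  */
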
struct scb_d
{
  /* Pointer to the statement being modified.  */
  gimple *stmt_p;

  /* If the statement references memory these are the sets of symbols
     loaded and stored by the statement.  */
  bitmap loads;
  bitmap stores;
};

typedef struct scb_d *scb_t;
DEF_VEC_P(scb_t);
DEF_VEC_ALLOC_P(scb_t,heap);

/* Stack of statement change buffers (SCB).  Every call to
   push_stmt_changes pushes a new buffer onto the stack.  Calls to
   pop_stmt_changes pop a buffer off of the stack and compute the set
   of changes for the popped statement.  */
static VEC(scb_t,heap) *scb_stack;

/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (const_tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}

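
/* For instance, given the SSA name 'a_5', get_name_decl returns
   DECL_UID (a); given the bare symbol 'a', it returns DECL_UID (a)
   directly.  This is what lets operand_build_cmp below order SSA
   names by their underlying variable.  */
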
/* Comparison function for qsort used in operand_build_sort_virtual.  */

int
operand_build_cmp (const void *p, const void *q)
{
  const_tree const e1 = *((const_tree const *)p);
  const_tree const e2 = *((const_tree const *)q);
  const unsigned int u1 = get_name_decl (e1);
  const unsigned int u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}


/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static inline void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
	 VEC_length (tree, list),
	 sizeof (tree),
	 operand_build_cmp);
}

/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  /* This function may be invoked from contexts where CFUN is NULL
     (IPA passes), return false for now.  FIXME: operands may be
     active in each individual function, maybe this function should
     take CFUN as a parameter.  */
  if (cfun == NULL)
    return false;

  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}


280/* VOPs are of variable sized, so the free list maps "free buckets" to the
281 following table:
282 bucket # operands
283 ------ ----------
284 0 1
285 1 2
286 ...
287 15 16
288 16 17-24
289 17 25-32
290 18 31-40
291 ...
292 29 121-128
293 Any VOPs larger than this are simply added to the largest bucket when they
294 are freed. */
295
296
297/* Return the number of operands used in bucket BUCKET. */
298
299static inline int
300vop_free_bucket_size (int bucket)
301{
302#ifdef ENABLE_CHECKING
303 gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
304#endif
305 if (bucket < 16)
306 return bucket + 1;
307 return (bucket - 13) * 8;
308}
309
310
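/* For instance, bucket 18 holds VOPs of 33 to 40 operands:
   vop_free_bucket_size (17) == (17 - 13) * 8 == 32 caps bucket 17,
   and vop_free_bucket_size (18) == (18 - 13) * 8 == 40 caps
   bucket 18.  */
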
/* For a vop of NUM operands, return the bucket NUM belongs to.  If NUM is
   beyond the end of the bucket table, return -1.  */

static inline int
vop_free_bucket_index (int num)
{
  gcc_assert (num > 0 && NUM_VOP_FREE_BUCKETS > 16);

  /* Sizes 1 through 16 use buckets 0-15.  */
  if (num <= 16)
    return num - 1;
  /* Buckets 16 - NUM_VOP_FREE_BUCKETS represent 8 unit chunks.  */
  num = 14 + (num - 1) / 8;
  if (num >= NUM_VOP_FREE_BUCKETS)
    return -1;
  else
    return num;
}


/* Initialize the VOP free buckets.  */

static inline void
init_vop_buckets (void)
{
  int x;

  for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
    gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
}


/* Add PTR to the appropriate VOP bucket.  */

static inline void
add_vop_to_freelist (voptype_p ptr)
{
  int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));

  /* Too large, use the largest bucket so it's not a complete throwaway.  */
  if (bucket == -1)
    bucket = NUM_VOP_FREE_BUCKETS - 1;

  ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
}


/* These are the sizes of the operand memory buffer which gets allocated each
   time more operands space is required.  The final value is the amount that is
   allocated every time after that.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	30
#define OP_SIZE_2	110
#define OP_SIZE_3	511

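/* With sizeof (struct voptype_d) factored in, successive buffers thus
   hold 30, then 110, then 511 vop-sized chunks, and every buffer after
   the third stays at the OP_SIZE_3 size; see ssa_operand_alloc below
   for the escalation logic.  */
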
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuses = VEC_alloc (tree, heap, 25);
      build_vdefs = VEC_alloc (tree, heap, 25);
      bitmap_obstack_initialize (&operands_bitmap_obstack);
      build_loads = BITMAP_ALLOC (&operands_bitmap_obstack);
      build_stores = BITMAP_ALLOC (&operands_bitmap_obstack);
      scb_stack = VEC_alloc (scb_t, heap, 20);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index
     = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  init_vop_buckets ();
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  unsigned ix;
  tree mpt;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      VEC_free (tree, heap, build_vdefs);
      VEC_free (tree, heap, build_vuses);
      BITMAP_FREE (build_loads);
      BITMAP_FREE (build_stores);

      /* The change buffer stack had better be empty.  */
      gcc_assert (VEC_length (scb_t, scb_stack) == 0);
      VEC_free (scb_t, heap, scb_stack);
      scb_stack = NULL;
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  for (ix = 0;
       VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
       ix++)
    {
      if (mpt)
	BITMAP_FREE (MPT_SYMBOLS (mpt));
    }

  VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars: %d\n",
	       clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided: %d\n",
	       clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided: %d\n",
	       clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
	       clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers: %d\n",
	       clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
	       clobber_stats.static_readonly_clobbers_avoided);
    }
}


/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      if (gimple_ssa_operands (cfun)->ssa_operand_mem_size == OP_SIZE_INIT)
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	   = OP_SIZE_1 * sizeof (struct voptype_d);
      else
	if (gimple_ssa_operands (cfun)->ssa_operand_mem_size
	    == OP_SIZE_1 * sizeof (struct voptype_d))
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size
	     = OP_SIZE_2 * sizeof (struct voptype_d);
	else
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size
	     = OP_SIZE_3 * sizeof (struct voptype_d);

      /* Go right to the maximum size if the request is too large.  */
      if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_3 * sizeof (struct voptype_d);

      /* We can reliably trigger the case that we need arbitrary many
	 operands (see PR34093), so allocate a buffer just for this request.  */
      if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	gimple_ssa_operands (cfun)->ssa_operand_mem_size = size;

      ptr = (struct ssa_operand_memory_d *)
	ggc_alloc (sizeof (struct ssa_operand_memory_d)
		   + gimple_ssa_operands (cfun)->ssa_operand_mem_size - 1);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }
  ptr = &(gimple_ssa_operands (cfun)->operand_memory
	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}

/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
	= gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
	  ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
	= gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
	  ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}


/* Allocate a vop with NUM elements.  */

static inline struct voptype_d *
alloc_vop (int num)
{
  struct voptype_d *ret = NULL;
  int alloc_size = 0;

  int bucket = vop_free_bucket_index (num);
  if (bucket != -1)
    {
      /* If there is a free operand, use it.  */
      if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL)
	{
	  ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
	  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] =
	    gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next;
	}
      else
	alloc_size = vop_free_bucket_size (bucket);
    }
  else
    alloc_size = num;

  if (alloc_size > 0)
    ret = (struct voptype_d *)ssa_operand_alloc (
	sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t));

  VUSE_VECT_NUM_ELEM (ret->usev) = num;
  return ret;
}


/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  */

static inline void
set_virtual_use_link (use_operand_p ptr, gimple stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->loc.stmt != stmt)
    ptr->loc.stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}


/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new_def;

  new_def = alloc_def ();
  DEF_OP_PTR (new_def) = op;
  last->next = new_def;
  new_def->next = NULL;
  return new_def;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}


/* Return a virtual op pointer with NUM elements which are all
   initialized to OP and are linked into the immediate uses for STMT.
   The new vop is appended after PREV.  */

static inline voptype_p
add_vop (gimple stmt, tree op, int num, voptype_p prev)
{
  voptype_p new_vop;
  int x;

  new_vop = alloc_vop (num);
  for (x = 0; x < num; x++)
    {
      VUSE_OP_PTR (new_vop, x)->prev = NULL;
      SET_VUSE_OP (new_vop, x, op);
      VUSE_OP_PTR (new_vop, x)->use = &new_vop->usev.uses[x].use_var;
      link_imm_use_stmt (VUSE_OP_PTR (new_vop, x),
			 new_vop->usev.uses[x].use_var, stmt);
    }

  if (prev)
    prev->next = new_vop;
  new_vop->next = NULL;
  return new_vop;
}


/* Adds OP to the list of vuses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline voptype_p
add_vuse_op (gimple stmt, tree op, int num, voptype_p last)
{
  voptype_p new_vop = add_vop (stmt, op, num, last);
  VDEF_RESULT (new_vop) = NULL_TREE;
  return new_vop;
}


/* Adds OP to the list of vdefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline voptype_p
add_vdef_op (gimple stmt, tree op, int num, voptype_p last)
{
  voptype_p new_vop = add_vop (stmt, op, num, last);
  VDEF_RESULT (new_vop) = op;
  return new_vop;
}


/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_def_ops (stmt);

  new_i = 0;

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  gimple_set_def_ops (stmt, new_list.next);

#ifdef ENABLE_CHECKING
  {
    def_optype_p ptr;
    unsigned x = 0;
    for (ptr = gimple_def_ops (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == num);
  }
#endif
}


/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
		       (tree *) VEC_index (tree, build_uses, new_i),
		       last);

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}


/* Takes elements from BUILD_VDEFS and turns them into vdef operands of
   STMT.  */

static inline void
finalize_ssa_vdefs (gimple stmt)
{
  unsigned new_i;
  struct voptype_d new_list;
  voptype_p old_ops, ptr, last;

  /* Set the symbols referenced by STMT.  */
  gimple_set_stored_syms (stmt, build_stores, &operands_bitmap_obstack);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_vdef_ops (stmt);
  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vdefs))
    {
      tree op = VEC_index (tree, build_vdefs, new_i);
      unsigned new_uid = get_name_decl (op);
      unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));

      /* FIXME, for now each VDEF operator should have at most one
	 operand in their RHS.  */
      gcc_assert (VDEF_NUM (old_ops) == 1);

      if (old_uid == new_uid)
	{
	  /* If the symbols are the same, reuse the existing operand.  */
	  last->next = old_ops;
	  last = old_ops;
	  old_ops = old_ops->next;
	  last->next = NULL;
	  set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
	  new_i++;
	}
      else if (old_uid < new_uid)
	{
	  /* If old is less than new, old goes to the free list.  */
	  voptype_p next;
	  delink_imm_use (VDEF_OP_PTR (old_ops, 0));
	  next = old_ops->next;
	  add_vop_to_freelist (old_ops);
	  old_ops = next;
	}
      else
	{
	  /* This is a new operand.  */
	  last = add_vdef_op (stmt, op, 1, last);
	  new_i++;
	}
    }

  /* If there is anything remaining in BUILD_VDEFS, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
    last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = last)
	{
	  last = ptr->next;
	  delink_imm_use (VDEF_OP_PTR (ptr, 0));
	  add_vop_to_freelist (ptr);
	}
    }

  /* Now set STMT's operands.  */
  gimple_set_vdef_ops (stmt, new_list.next);

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = gimple_vdef_ops (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vdefs));
  }
#endif
}


/* Takes elements from BUILD_VUSES and turns them into VUSE operands of
   STMT.  */

static inline void
finalize_ssa_vuse_ops (gimple stmt)
{
  unsigned new_i, old_i;
  voptype_p old_ops, last;
  VEC(tree,heap) *new_ops;

  /* Set the symbols referenced by STMT.  */
  gimple_set_loaded_syms (stmt, build_loads, &operands_bitmap_obstack);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  /* STMT should have at most one VUSE operator.  */
  old_ops = gimple_vuse_ops (stmt);
  gcc_assert (old_ops == NULL || old_ops->next == NULL);

  new_ops = NULL;
  new_i = old_i = 0;
  while (old_ops
	 && old_i < VUSE_NUM (old_ops)
	 && new_i < VEC_length (tree, build_vuses))
    {
      tree new_op = VEC_index (tree, build_vuses, new_i);
      tree old_op = VUSE_OP (old_ops, old_i);
      unsigned new_uid = get_name_decl (new_op);
      unsigned old_uid = get_name_decl (old_op);

      if (old_uid == new_uid)
	{
	  /* If the symbols are the same, reuse the existing operand.  */
	  VEC_safe_push (tree, heap, new_ops, old_op);
	  new_i++;
	  old_i++;
	}
      else if (old_uid < new_uid)
	{
	  /* If OLD_UID is less than NEW_UID, the old operand has
	     disappeared, skip to the next old operand.  */
	  old_i++;
	}
      else
	{
	  /* This is a new operand.  */
	  VEC_safe_push (tree, heap, new_ops, new_op);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
	delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
      add_vop_to_freelist (old_ops);
      gimple_set_vuse_ops (stmt, NULL);
    }

  /* If there are any operands, instantiate a VUSE operator for STMT.  */
  if (new_ops)
    {
      tree op;
      unsigned i;

      last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL);

      for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
	SET_USE (VUSE_OP_PTR (last, (int) i), op);

      gimple_set_vuse_ops (stmt, last);
      VEC_free (tree, heap, new_ops);
    }

#ifdef ENABLE_CHECKING
  {
    unsigned x;

    if (gimple_vuse_ops (stmt))
      {
	gcc_assert (gimple_vuse_ops (stmt)->next == NULL);
	x = VUSE_NUM (gimple_vuse_ops (stmt));
      }
    else
      x = 0;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}

/* Finalize the VUSE operand vector for STMT, removing VUSEs that are
   already implied by a VDEF.  */

static void
finalize_ssa_vuses (gimple stmt)
{
  unsigned num, num_vdefs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     VDEF operator for a variable 'a', then a VUSE for 'a' is not
     needed because VDEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is pointed-to by p and q:

	      # VUSE <a_2>
	      # a_3 = VDEF <a_2>
	      *p = *q;

     The VUSE <a_2> is superfluous because it is implied by the
     VDEF operator.  */
  num = VEC_length (tree, build_vuses);
  num_vdefs = VEC_length (tree, build_vdefs);

  if (num > 0 && num_vdefs > 0)
    for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
      {
	tree vuse;
	vuse = VEC_index (tree, build_vuses, vuse_index);
	if (TREE_CODE (vuse) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (vuse);
	    ann->in_vuse_list = 0;
	    if (ann->in_vdef_list)
	      {
		VEC_ordered_remove (tree, build_vuses, vuse_index);
		continue;
	      }
	  }
	vuse_index++;
      }

  finalize_ssa_vuse_ops (stmt);
}


/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  unsigned i;
  tree t;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vdef_list = false;

  for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vuse_list = false;

  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
  bitmap_clear (build_loads);
  bitmap_clear (build_stores);
}


/* Finalize all the build vectors and turn them into the operand
   vectors of STMT.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  if (gimple_has_mem_ops (stmt))
    {
      finalize_ssa_vdefs (stmt);
      finalize_ssa_vuses (stmt);
    }
  cleanup_build_arrays ();
}


/* Start the process of building up operands vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_vdefs) == 0);
  gcc_assert (bitmap_empty_p (build_loads));
  gcc_assert (bitmap_empty_p (build_stores));
}


/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
	var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vdef_list)
	return;

      ann->in_vdef_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vdefs, var);
  bitmap_set_bit (build_stores, DECL_UID (sym));
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
	var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vuse_list)
	return;
      else if (ann->in_vdef_list)
	{
	  /* We don't want a vuse if we already have a vdef, but we must
	     still put this in build_loads.  */
	  bitmap_set_bit (build_loads, DECL_UID (var));
	  return;
	}

      ann->in_vuse_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vuses, var);
  bitmap_set_bit (build_loads, DECL_UID (sym));
}

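
/* For example, when scanning '*p = *q' where both p and q may point
   to 'a', the statement ends up with a VDEF of 'a' and a candidate
   VUSE of 'a'.  The redundant VUSE is dropped either here (when the
   VDEF was appended first) or later in finalize_ssa_vuses, since a
   VDEF already implies a use of the variable.  */
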

/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.

   XXX: We should handle the NO_ALIAS attributes here.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
			   HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;

  /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
     using a call-clobbered memory tag.  By definition, call-clobbered
     memory tags can always touch .GLOBAL_VAR.  */
  if (alias == gimple_global_var (cfun))
    return true;

  /* If ref is a TARGET_MEM_REF, just return true, as we can't really
     disambiguate them right now.  */
  if (ref && TREE_CODE (ref) == TARGET_MEM_REF)
    return true;

  /* Without strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     That is, given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:

     struct foos { int l; };
     int foo;
     static struct foos *getfoo(void);
     int main (void)
     {
       struct foos *f = getfoo();
       f->l = 1;
       foo = 2;
       if (f->l == 1)
	 abort();
       exit(0);
     }
     static struct foos *getfoo(void)
     { return (struct foos *)&foo; }

     (taken from 20000623-1.c)

     The docs also say/imply that access through union pointers
     is legal (but *not* if you take the address of the union member,
     i.e. the inverse), such that you can do

     typedef union {
       int d;
     } U;

     int rv;
     void breakme()
     {
       U *rv0;
       U *pretmp = (U*)&rv;
       rv0 = pretmp;
       rv0->d = 42;
     }
     To implement this, we just punt on accesses through union
     pointers entirely.

     Another case we have to allow is accessing a variable
     through an array access at offset zero.  This happens from
     code generated by the fortran frontend like

     char[1:1] & my_char_ref;
     char my_char;
     my_char_ref_1 = (char[1:1] &) &my_char;
     D.874_2 = (*my_char_ref_1)[1]{lb: 1 sz: 1};
  */
  if (ref
      && flag_strict_aliasing
      && TREE_CODE (ref) != INDIRECT_REF
      && !MTAG_P (alias)
      && base
      && (TREE_CODE (base) != INDIRECT_REF
	  || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
      && (TREE_CODE (base) != INDIRECT_REF
	  || TREE_CODE (ref) != ARRAY_REF
	  || offset != 0
	  || (DECL_SIZE (alias)
	      && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
	      && size != -1
	      && (unsigned HOST_WIDE_INT)size
		 != TREE_INT_CST_LOW (DECL_SIZE (alias))))
      && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
      && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
      && !var_ann (alias)->is_heapvar
      /* When the struct has may_alias attached to it, we need not
	 return true.  */
      && get_alias_set (base))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && !POINTER_TYPE_P (TREE_TYPE (alias))
	   && offsetgtz
	   && DECL_SIZE (alias)
	   && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
	   && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  return true;
}

/* Add VAR to the virtual operands for STMT.  FLAGS is as in
   get_expr_operands.  FULL_REF is a tree that contains the entire
   pointer dereference expression, if available, or NULL otherwise.
   OFFSET and SIZE come from the memory access expression that
   generated this virtual operand.  IS_CALL_SITE is true if the
   affected statement is a call site.  */

static void
add_virtual_operand (tree var, gimple stmt, int flags,
		     tree full_ref, HOST_WIDE_INT offset,
		     HOST_WIDE_INT size, bool is_call_site)
{
  bitmap aliases = NULL;
  tree sym;
  var_ann_t v_ann;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark the statement as having memory operands.  */
  gimple_set_references_memory (stmt, true);

  /* If the variable cannot be modified and this is a VDEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     check that this only happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     MODIFY_EXPR, then we can't suppress the VDEF, lest we run
     into validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_implicit) && unmodifiable_var_p (var))
    flags &= ~opf_def;

  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
     virtual operands, unless the caller has specifically requested
     not to add virtual operands (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  if (MTAG_P (var))
    aliases = MTAG_ALIASES (var);

  if (aliases == NULL)
    {
      if (!gimple_aliases_computed_p (cfun) && (flags & opf_def))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is not aliased or it is an alias tag.  */
      if (flags & opf_def)
	append_vdef (var);
      else
	append_vuse (var);
    }
  else
    {
      bitmap_iterator bi;
      unsigned int i;
      bool none_added = true;

      /* The variable is aliased.  Add its aliases to the virtual
	 operands.  */
      gcc_assert (!bitmap_empty_p (aliases));

      EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
	{
	  tree al = referenced_var (i);

	  /* Call-clobbered tags may have non-call-clobbered
	     symbols in their alias sets.  Ignore them if we are
	     adding VOPs for a call site.  */
	  if (is_call_site && !is_call_clobbered (al))
	    continue;

	  /* If we do not know the full reference tree or if the access is
	     unspecified [0, -1], we cannot prune it.  Otherwise try doing
	     so using access_can_touch_variable.  */
	  if (full_ref
	      && !access_can_touch_variable (full_ref, al, offset, size))
	    continue;

	  if (flags & opf_def)
	    append_vdef (al);
	  else
	    append_vuse (al);
	  none_added = false;
	}

      if (flags & opf_def)
	{
	  /* If the variable is also an alias tag, add a virtual
	     operand for it, otherwise we will miss representing
	     references to the members of the variable's alias set.
	     This fixes the bug in gcc.c-torture/execute/20020503-1.c.

	     It is also necessary to add bare defs on clobbers for
	     SMT's, so that bare SMT uses caused by pruning all the
	     aliases will link up properly with calls.  In order to
	     keep the number of these bare defs we add down to the
	     minimum necessary, we keep track of which SMT's were used
	     alone in statement vdefs or VUSEs.  */
	  if (none_added
	      || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
		  && is_call_site))
	    append_vdef (var);
	}
      else
	{
	  /* Even if no aliases have been added, we still need to
	     establish def-use and use-def chains, lest
	     transformations think that this is not a memory
	     reference.  For an example of this scenario, see
	     testsuite/g++.dg/opt/cleanup1.C.  */
	  if (none_added)
	    append_vuse (var);
	}
    }
}


/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var, sym;
  var_ann_t v_ann;

  gcc_assert (SSA_VAR_P (*var_p));

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  */
  if (TREE_THIS_VOLATILE (sym))
    gimple_set_has_volatile_ops (stmt, true);

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    add_virtual_operand (var, stmt, flags, NULL_TREE, 0, -1, false);
}

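/* For instance, for the assignment 'x_1 = y_2' where x and y are
   GIMPLE registers, add_stmt_operand appends the LHS pointer to
   build_defs and the RHS pointer to build_uses; a non-register symbol
   such as a global 'g' is routed to add_virtual_operand instead.  */
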
/* Subroutine of get_indirect_ref_operands.  ADDR is the address
   that is dereferenced, the meaning of the rest of the arguments
   is the same as in get_indirect_ref_operands.  */

static void
get_addr_dereference_operands (gimple stmt, tree *addr, int flags,
			       tree full_ref, HOST_WIDE_INT offset,
			       HOST_WIDE_INT size, bool recurse_on_base)
{
  tree ptr = *addr;

  /* Mark the statement as having memory operands.  */
  gimple_set_references_memory (stmt, true);

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_virtual_operand (pi->name_mem_tag, stmt, flags,
			       full_ref, offset, size, false);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its symbol memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again or that a propagation pass forgot to
	     update the alias information on the pointers.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && (pi == NULL
		  || (pi->name_mem_tag == NULL_TREE
		      && !pi->pt_anything)))
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_gimple_stmt (dump_file, stmt, 0, 0);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);

	  /* If we don't know what this pointer points to then we have
	     to make sure to not prune virtual operands based on offset
	     and size.  */
	  if (v_ann->symbol_mem_tag)
	    {
	      add_virtual_operand (v_ann->symbol_mem_tag, stmt, flags,
				   full_ref, 0, -1, false);
	      /* Make sure we add the SMT itself.  */
	      if (!(flags & opf_no_vops))
		{
		  if (flags & opf_def)
		    append_vdef (v_ann->symbol_mem_tag);
		  else
		    append_vuse (v_ann->symbol_mem_tag);
		}
	    }

	  /* Aliasing information is missing; mark statement as
	     volatile so we won't optimize it out too actively.  */
	  else if (!gimple_aliases_computed_p (cfun)
		   && (flags & opf_def))
	    gimple_set_has_volatile_ops (stmt, true);
	}
    }
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      /* If a constant is used as a pointer, we can't generate a real
	 operand for it but we mark the statement volatile to prevent
	 optimizations from messing things up.  */
      gimple_set_has_volatile_ops (stmt, true);
      return;
    }
  else
    {
      /* Ok, this isn't even is_gimple_min_invariant.  Something's broken.  */
      gcc_unreachable ();
    }

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, addr, opf_use);
}

/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
   that got us here.

   FLAGS is as in get_expr_operands.

   FULL_REF contains the full pointer dereference expression, if we
   have it, or NULL otherwise.

   OFFSET and SIZE are the location of the access inside the
   dereferenced pointer, if known.

   RECURSE_ON_BASE should be set to true if we want to continue
   calling get_expr_operands on the base pointer, and false if
   something else will do it for us.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags, tree full_ref,
			   HOST_WIDE_INT offset, HOST_WIDE_INT size,
			   bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  get_addr_dereference_operands (stmt, pptr, flags, full_ref, offset, size,
				 recurse_on_base);
}


DN
1598
1599static void
726a989a 1600get_tmr_operands (gimple stmt, tree expr, int flags)
6de9cd9a 1601{
7b765bed 1602 tree tag;
6de9cd9a 1603
726a989a
RB
1604 /* Mark the statement as having memory operands. */
1605 gimple_set_references_memory (stmt, true);
6de9cd9a 1606
38635499
DN
1607 /* First record the real operands. */
1608 get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
1609 get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);
6de9cd9a 1610
02075bb2 1611 if (TMR_SYMBOL (expr))
726a989a 1612 gimple_add_to_addresses_taken (stmt, TMR_SYMBOL (expr));
6de9cd9a 1613
38635499 1614 tag = TMR_TAG (expr);
02075bb2
DN
1615 if (!tag)
1616 {
1617 /* Something weird, so ensure that we will be careful. */
726a989a 1618 gimple_set_has_volatile_ops (stmt, true);
310de761 1619 return;
02075bb2 1620 }
7b765bed 1621 if (!MTAG_P (tag))
02075bb2
DN
1622 {
1623 get_expr_operands (stmt, &tag, flags);
1624 return;
1625 }
643519b7 1626
726a989a 1627 add_virtual_operand (tag, stmt, flags, expr, 0, -1, false);
02075bb2 1628}


/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (gimple stmt, tree callee ATTRIBUTE_UNUSED)
{
  unsigned u;
  bitmap_iterator bi;
  bitmap not_read_b, not_written_b;

  gcc_assert (!(gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)));

  /* If we created .GLOBAL_VAR earlier, just use it.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_virtual_operand (var, stmt, opf_def, NULL, 0, -1, true);
      return;
    }

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* Add a VDEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var_lookup (u);
      tree real_var = var;
      bool not_read;
      bool not_written;

      not_read = not_read_b
		 ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
		 : false;

      not_written = not_written_b
		    ? bitmap_bit_p (not_written_b, DECL_UID (real_var))
		    : false;
      gcc_assert (!unmodifiable_var_p (var));

      clobber_stats.clobbered_vars++;

      /* See if this variable is really clobbered by this function.  */
      if (not_written)
	{
	  clobber_stats.static_write_clobbers_avoided++;
	  if (!not_read)
	    add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true);
	  else
	    clobber_stats.static_read_clobbers_avoided++;
	}
      else
	add_virtual_operand (var, stmt, opf_def, NULL, 0, -1, true);
    }
}


/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (gimple stmt, tree callee ATTRIBUTE_UNUSED)
{
  unsigned u;
  bitmap_iterator bi;
  bitmap not_read_b;

  /* Const functions do not reference memory.  */
  if (gimple_call_flags (stmt) & ECF_CONST)
    return;

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;

  /* For pure functions we compute non-escaped uses separately.  */
  if (gimple_call_flags (stmt) & ECF_PURE)
    EXECUTE_IF_SET_IN_BITMAP (gimple_call_used_vars (cfun), 0, u, bi)
      {
	tree var = referenced_var_lookup (u);
	tree real_var = var;
	bool not_read;

	if (unmodifiable_var_p (var))
	  continue;

	not_read = not_read_b
		   ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
		   : false;

	clobber_stats.readonly_clobbers++;

	/* See if this variable is really used by this function.  */
	if (!not_read)
	  add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true);
	else
	  clobber_stats.static_readonly_clobbers_avoided++;
      }

  /* Add a VUSE for .GLOBAL_VAR if it has been created.  See
     add_referenced_var for the heuristic used to decide whether to
     create .GLOBAL_VAR.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true);
      return;
    }

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var (u);
      tree real_var = var;
      bool not_read;

      clobber_stats.readonly_clobbers++;

      not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
			    : false;

      if (not_read)
	{
	  clobber_stats.static_readonly_clobbers_avoided++;
	  continue;
	}

      add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true);
    }
}


/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_clobbered_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* Mark the statement as having memory operands.  */
  gimple_set_references_memory (stmt, true);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (gimple_aliases_computed_p (cfun) && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, gimple_call_fndecl (stmt));
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt, gimple_call_fndecl (stmt));
    }
}


/* Scan operands in the GIMPLE_ASM statement STMT.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca (noutputs * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    gimple_add_to_addresses_taken (stmt, t);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    gimple_add_to_addresses_taken (stmt, t);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory").  */
  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree link = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
	{
	  unsigned i;
	  bitmap_iterator bi;

	  /* Mark the statement as having memory operands.  */
	  gimple_set_references_memory (stmt, true);

	  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, stmt, opf_def | opf_implicit);
	    }

	  EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, stmt, opf_def | opf_implicit);
	    }
	  break;
	}
    }
}


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      gimple_add_to_addresses_taken (stmt, TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
	 variable references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case SYMBOL_MEMORY_TAG:
    case NAME_MEMORY_TAG:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, expr, 0, -1, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	HOST_WIDE_INT offset, size, maxsize;

	if (TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
	if (TREE_CODE (ref) == INDIRECT_REF)
	  {
	    get_indirect_ref_operands (stmt, ref, flags, expr, offset,
				       maxsize, false);
	    flags |= opf_no_vops;
	  }

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, opf_use);

	return;
      }

    case BIT_FIELD_REF:
    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case CHANGE_DYNAMIC_TYPE_EXPR:
      gcc_unreachable ();

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
    case FILTER_EXPR:
    case EXC_PTR_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASM)
    get_asm_expr_operands (stmt);
  else
    {
      size_t i, start = 0;

      if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
	{
	  get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
	  start = 1;
	}

      for (i = start; i < gimple_num_ops (stmt); i++)
	get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);

      /* Add call-clobbered operands, if needed.  */
      if (code == GIMPLE_CALL)
	maybe_add_call_clobbered_vops (stmt);
    }
}
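
/* Illustrative sketch, not part of the original sources: a pass that has
   rewritten a statement in place can ask for a fresh operand scan by
   marking the statement modified and calling update_stmt_operands below
   (assuming the usual declarations from tree-flow.h and gimple.h).  The
   helper name is made up for exposition.  */

static void ATTRIBUTE_UNUSED
example_rescan_operands (gimple stmt)
{
  /* update_stmt_operands asserts gimple_modified_p, so flag the
     statement first, then let it rebuild the operand cache.  */
  gimple_set_modified (stmt, true);
  update_stmt_operands (stmt);
}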


/* Create an operands cache for STMT.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands and
     makes no memory references.  */
  gimple_set_has_volatile_ops (stmt, false);
  gimple_set_references_memory (stmt, false);

  /* Just clear the bitmap so we don't end up reallocating it over and over.  */
  if (gimple_addresses_taken (stmt))
    bitmap_clear (gimple_addresses_taken (stmt));

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_vdefs);
  finalize_ssa_stmt_operands (stmt);

  /* For added safety, assume that statements with volatile operands
     also reference memory.  */
  if (gimple_has_volatile_ops (stmt))
    gimple_set_references_memory (stmt, true);
}


/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  def_optype_p defs = gimple_def_ops (stmt), last_def;
  use_optype_p uses = gimple_use_ops (stmt), last_use;
  voptype_p vuses = gimple_vuse_ops (stmt);
  voptype_p vdefs = gimple_vdef_ops (stmt), vdef, next_vdef;
  unsigned i;

  if (defs)
    {
      for (last_def = defs; last_def->next; last_def = last_def->next)
	continue;
      last_def->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = defs;
      gimple_set_def_ops (stmt, NULL);
    }

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (vuses)
    {
      for (i = 0; i < VUSE_NUM (vuses); i++)
	delink_imm_use (VUSE_OP_PTR (vuses, i));
      add_vop_to_freelist (vuses);
      gimple_set_vuse_ops (stmt, NULL);
    }

  if (vdefs)
    {
      for (vdef = vdefs; vdef; vdef = next_vdef)
	{
	  next_vdef = vdef->next;
	  delink_imm_use (VDEF_OP_PTR (vdef, 0));
	  add_vop_to_freelist (vdef);
	}
      gimple_set_vdef_ops (stmt, NULL);
    }

  if (gimple_has_ops (stmt))
    gimple_set_addresses_taken (stmt, NULL);

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_stored_syms (stmt, NULL, &operands_bitmap_obstack);
      gimple_set_loaded_syms (stmt, NULL, &operands_bitmap_obstack);
    }
}


/* Get the operands of statement STMT.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  timevar_push (TV_TREE_OPS);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}

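
/* Illustrative sketch, not part of the original sources: once the cache
   is up to date, passes normally walk it with the ssa_op_iter macros
   from tree-flow.h rather than poking at the operand vectors directly.
   The helper name is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_dump_real_uses (gimple stmt)
{
  tree use;
  ssa_op_iter iter;

  /* Visit every real (non-virtual) USE operand cached for STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      print_generic_expr (stderr, use, TDF_SLIM);
      fprintf (stderr, "\n");
    }
}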

/* Copy virtual operands from SRC to DEST.  */

void
copy_virtual_operands (gimple dest, gimple src)
{
  unsigned int i, n;
  voptype_p src_vuses, dest_vuses;
  voptype_p src_vdefs, dest_vdefs;
  struct voptype_d vuse;
  struct voptype_d vdef;

  if (!gimple_has_mem_ops (src))
    return;

  gimple_set_vdef_ops (dest, NULL);
  gimple_set_vuse_ops (dest, NULL);

  gimple_set_stored_syms (dest, gimple_stored_syms (src),
			  &operands_bitmap_obstack);
  gimple_set_loaded_syms (dest, gimple_loaded_syms (src),
			  &operands_bitmap_obstack);

  /* Copy all the VUSE operators and corresponding operands.  */
  dest_vuses = &vuse;
  for (src_vuses = gimple_vuse_ops (src);
       src_vuses;
       src_vuses = src_vuses->next)
    {
      n = VUSE_NUM (src_vuses);
      dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
      for (i = 0; i < n; i++)
	SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));

      if (gimple_vuse_ops (dest) == NULL)
	gimple_set_vuse_ops (dest, vuse.next);
    }

  /* Copy all the VDEF operators and corresponding operands.  */
  dest_vdefs = &vdef;
  for (src_vdefs = gimple_vdef_ops (src);
       src_vdefs;
       src_vdefs = src_vdefs->next)
    {
      n = VUSE_NUM (src_vdefs);
      dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
      VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
      for (i = 0; i < n; i++)
	SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));

      if (gimple_vdef_ops (dest) == NULL)
	gimple_set_vdef_ops (dest, vdef.next);
    }
}


/* Specifically for use in DOM's expression analysis.  Given a store,
   create an artificial statement that looks like a load from the store;
   this can be used to eliminate redundant loads.  OLD_STMT is the store
   statement, and NEW_STMT is the new load, which represents a load of
   the values stored.  If DELINK_IMM_USES_P is true, the immediate
   uses of this stmt will be de-linked.  */

void
create_ssa_artificial_load_stmt (gimple new_stmt, gimple old_stmt,
				 bool delink_imm_uses_p)
{
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned i;

  gimple_set_modified (new_stmt, false);

  /* Process NEW_STMT looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
    if (TREE_CODE (op) != SSA_NAME)
      var_ann (op)->in_vuse_list = false;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
    if (TREE_CODE (op) != SSA_NAME)
      var_ann (op)->in_vdef_list = false;

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* Clear the loads and stores bitmaps.  */
  bitmap_clear (build_loads);
  bitmap_clear (build_stores);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the VDEF result operand on the new statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
    append_vuse (op);

  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  if (delink_imm_uses_p)
    FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
      delink_imm_use (use_p);
}


/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* Unless both uses have operand entries, there isn't much we can
	 do at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
	{
	  tree *tmp = USE_OP_PTR (use1)->use;
	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
	  USE_OP_PTR (use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
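
/* Illustrative sketch, not part of the original sources: a
   canonicalization pass that wants to commute the two RHS operands of a
   binary GIMPLE_ASSIGN can do so through swap_tree_operands so that the
   operand cache and immediate-use lists stay consistent.  The helper
   name, and the assumption that operands 1 and 2 of such a tuple are
   the two RHS arguments, are for exposition only.  */

static void ATTRIBUTE_UNUSED
example_commute_rhs (gimple stmt)
{
  /* Swap the two RHS arguments in place, preserving their relative
     positions in the immediate-use lists.  */
  swap_tree_operands (stmt, gimple_op_ptr (stmt, 1), gimple_op_ptr (stmt, 2));
}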

/* Add the base address of REF to SET.  */

void
add_to_addressable_set (tree ref, bitmap *set)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*set == NULL)
	*set = BITMAP_ALLOC (&operands_bitmap_obstack);

      bitmap_set_bit (*set, DECL_UID (var));
      TREE_ADDRESSABLE (var) = 1;
    }
}


/* Add the base address of REF to the set of addresses taken by STMT.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  If the base address of REF is a decl that has sub-variables,
   also add all of its sub-variables.  */

void
gimple_add_to_addresses_taken (gimple stmt, tree ref)
{
  gcc_assert (gimple_has_ops (stmt));
  add_to_addressable_set (ref, gimple_addresses_taken_ptr (stmt));
}


/* Scan the immediate_use list for VAR, making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
	fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
      else
	print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump the def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}


/* Create a new change buffer for the statement pointed to by STMT_P and
   push the buffer into SCB_STACK.  Each change buffer
   records state information needed to determine what changed in the
   statement.  Mainly, this keeps track of symbols that may need to be
   put into SSA form, SSA name replacements and other information
   needed to keep the SSA form up to date.  */

void
push_stmt_changes (gimple *stmt_p)
{
  gimple stmt;
  scb_t buf;

  stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    return;

  buf = XNEW (struct scb_d);
  memset (buf, 0, sizeof *buf);

  buf->stmt_p = stmt_p;

  if (gimple_references_memory_p (stmt))
    {
      tree op;
      ssa_op_iter i;

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
	{
	  tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
	  if (buf->loads == NULL)
	    buf->loads = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (buf->loads, DECL_UID (sym));
	}

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
	{
	  tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
	  if (buf->stores == NULL)
	    buf->stores = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (buf->stores, DECL_UID (sym));
	}
    }

  VEC_safe_push (scb_t, heap, scb_stack, buf);
}


/* Given two sets S1 and S2, mark the symbols that differ in S1 and S2
   for renaming.  The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1).  */

static void
mark_difference_for_renaming (bitmap s1, bitmap s2)
{
  if (s1 == NULL && s2 == NULL)
    return;

  if (s1 && s2 == NULL)
    mark_set_for_renaming (s1);
  else if (s1 == NULL && s2)
    mark_set_for_renaming (s2);
  else if (!bitmap_equal_p (s1, s2))
    {
      bitmap t1 = BITMAP_ALLOC (NULL);
      bitmap_xor (t1, s1, s2);
      mark_set_for_renaming (t1);
      BITMAP_FREE (t1);
    }
}
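
/* A worked example of the symmetric difference above, added for
   exposition only: with S1 = {1, 2} and S2 = {2, 3}, bitmap_xor yields
   {1, 3}, i.e. (S1 & ~S2) | (S2 & ~S1), exactly the elements whose
   membership changed.  The function name is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_symbol_difference (void)
{
  bitmap s1 = BITMAP_ALLOC (NULL);
  bitmap s2 = BITMAP_ALLOC (NULL);
  bitmap t = BITMAP_ALLOC (NULL);

  bitmap_set_bit (s1, 1);
  bitmap_set_bit (s1, 2);
  bitmap_set_bit (s2, 2);
  bitmap_set_bit (s2, 3);

  /* The XOR keeps bits 1 and 3 and drops the shared bit 2.  */
  bitmap_xor (t, s1, s2);
  gcc_assert (bitmap_bit_p (t, 1)
	      && !bitmap_bit_p (t, 2)
	      && bitmap_bit_p (t, 3));

  BITMAP_FREE (t);
  BITMAP_FREE (s2);
  BITMAP_FREE (s1);
}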


/* Pop the top SCB from SCB_STACK and act on the differences between
   what was recorded by push_stmt_changes and the current state of
   the statement.  */

void
pop_stmt_changes (gimple *stmt_p)
{
  tree op;
  gimple stmt;
  ssa_op_iter iter;
  bitmap loads, stores;
  scb_t buf;

  stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    return;

  buf = VEC_pop (scb_t, scb_stack);
  gcc_assert (stmt_p == buf->stmt_p);

  /* Force an operand re-scan on the statement and mark any newly
     exposed variables.  */
  update_stmt (stmt);

  /* Determine whether any memory symbols need to be renamed.  If the
     sets of loads and stores are different after the statement is
     modified, then the affected symbols need to be renamed.

     Note that it may be possible for the statement to not reference
     memory anymore, but we still need to act on the differences in
     the sets of symbols.  */
  loads = stores = NULL;
  if (gimple_references_memory_p (stmt))
    {
      tree op;
      ssa_op_iter i;

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
	{
	  tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
	  if (loads == NULL)
	    loads = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (loads, DECL_UID (sym));
	}

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
	{
	  tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
	  if (stores == NULL)
	    stores = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (stores, DECL_UID (sym));
	}
    }

  /* If LOADS is different from BUF->LOADS, the affected
     symbols need to be marked for renaming.  */
  mark_difference_for_renaming (loads, buf->loads);

  /* Similarly for STORES and BUF->STORES.  */
  mark_difference_for_renaming (stores, buf->stores);

  /* Mark all the naked GIMPLE register operands for renaming.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
    if (DECL_P (op))
      mark_sym_for_renaming (op);

  /* FIXME, need to add more finalizers here.  Cleanup EH info,
     recompute invariants for address expressions, add
     SSA replacement mappings, etc.  For instance, given
     testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
     the form:

	# SMT.4_20 = VDEF <SMT.4_16>
	D.1576_11 = 1.0e+0;

     So, the VDEF will disappear, but instead of marking SMT.4 for
     renaming it would be far more efficient to establish a
     replacement mapping that would replace every reference of
     SMT.4_20 with SMT.4_16.  */

  /* Free memory used by the buffer.  */
  BITMAP_FREE (buf->loads);
  BITMAP_FREE (buf->stores);
  BITMAP_FREE (loads);
  BITMAP_FREE (stores);
  buf->stmt_p = NULL;
  free (buf);
}


/* Discard the topmost change buffer from SCB_STACK.  This is useful
   when the caller realizes that it did not actually modify the
   statement.  It avoids the expensive operand re-scan.  */

void
discard_stmt_changes (gimple *stmt_p)
{
  scb_t buf;
  gimple stmt;

  /* It makes no sense to keep track of PHI nodes.  */
  stmt = *stmt_p;
  if (gimple_code (stmt) == GIMPLE_PHI)
    return;

  buf = VEC_pop (scb_t, scb_stack);
  gcc_assert (stmt_p == buf->stmt_p);

  /* Free memory used by the buffer.  */
  BITMAP_FREE (buf->loads);
  BITMAP_FREE (buf->stores);
  buf->stmt_p = NULL;
  free (buf);
}
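

/* Illustrative sketch, not part of the original sources: the intended
   protocol for the change buffers above.  A pass brackets an in-place
   statement transformation with push_stmt_changes/pop_stmt_changes, or
   backs out with discard_stmt_changes when it turns out nothing was
   touched.  The helper name and the MODIFIED_P parameter are made up
   for exposition.  */

static void ATTRIBUTE_UNUSED
example_transform_stmt (gimple *stmt_p, bool modified_p)
{
  /* Record the statement's loads and stores before touching it.  */
  push_stmt_changes (stmt_p);

  /* ... transform *STMT_P in place here ...  */

  if (modified_p)
    /* Re-scan operands and mark any affected symbols for renaming.  */
    pop_stmt_changes (stmt_p);
  else
    /* Nothing changed; skip the expensive operand re-scan.  */
    discard_stmt_changes (stmt_p);
}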