gcc/tree-ssa-operands.c
1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53 The operand tree is then parsed by the various get_* routines which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
59
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
62
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
66
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had an SSA_NAME version of a
72 variable, and that same variable occurs in the new operands cache, then
73 the new cache vector will also get the same SSA_NAME.
74
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'.
78
79 */
80
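/* A purely illustrative sketch (the variable names below are made up):
   for a statement such as

     a_1 = b_2 + *p_3;

   where '*p_3' may reach the global 'g', the cache would typically hold a
   real DEF pointer for 'a_1', real USE pointers for 'b_2' and 'p_3', and a
   VUSE for 'g' (or for the memory tag associated with 'p_3').  The real
   operands point into the statement tree itself; the VUSE exists only in
   the cache.  */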
81
82 /* Flags to describe operand properties in helpers. */
83
84 /* By default, operands are loaded. */
85 #define opf_none 0
86
87 /* Operand is the target of an assignment expression or a
88 call-clobbered variable. */
89 #define opf_is_def (1 << 0)
90
91 /* Operand is the target of an assignment expression. */
92 #define opf_kill_def (1 << 1)
93
94 /* No virtual operands should be created in the expression. This is used
95 when traversing ADDR_EXPR nodes which have different semantics than
96 other expressions. Inside an ADDR_EXPR node, the only operands that we
97 need to consider are indices into arrays. For instance, &a.b[i] should
98 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
99 VUSE for 'b'. */
100 #define opf_no_vops (1 << 2)
101
102 /* Operand is a "non-specific" kill for call-clobbers and such. This is used
103 to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
104 #define opf_non_specific (1 << 3)
105
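/* For example, the LHS of a plain assignment 'x = y' is scanned with
   (opf_is_def | opf_kill_def), whereas the LHS of 'a[i] = y' is scanned
   with opf_is_def only, since only part of 'a' is modified; see
   parse_ssa_operands and get_expr_operands below.  */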
106
107 /* Array for building all the def operands. */
108 static VEC(tree,heap) *build_defs;
109
110 /* Array for building all the use operands. */
111 static VEC(tree,heap) *build_uses;
112
113 /* Array for building all the v_may_def operands. */
114 static VEC(tree,heap) *build_v_may_defs;
115
116 /* Array for building all the vuse operands. */
117 static VEC(tree,heap) *build_vuses;
118
119 /* Array for building all the v_must_def operands. */
120 static VEC(tree,heap) *build_v_must_defs;
121
122 /* True if the operands for call clobbered vars are cached and valid. */
123 bool ssa_call_clobbered_cache_valid;
124 bool ssa_ro_call_cache_valid;
125
126 /* These arrays are the cached operand vectors for call clobbered calls. */
127 static VEC(tree,heap) *clobbered_v_may_defs;
128 static VEC(tree,heap) *clobbered_vuses;
129 static VEC(tree,heap) *ro_call_vuses;
130 static bool clobbered_aliased_loads;
131 static bool clobbered_aliased_stores;
132 static bool ro_call_aliased_loads;
133 static bool ops_active = false;
134
135 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
136 static unsigned operand_memory_index;
137
138 static void get_expr_operands (tree, tree *, int);
139 static void get_asm_expr_operands (tree);
140 static void get_indirect_ref_operands (tree, tree, int);
141 static void get_tmr_operands (tree, tree, int);
142 static void get_call_expr_operands (tree, tree);
143 static inline void append_def (tree *);
144 static inline void append_use (tree *);
145 static void append_v_may_def (tree);
146 static void append_v_must_def (tree);
147 static void add_call_clobber_ops (tree, tree);
148 static void add_call_read_ops (tree);
149 static void add_stmt_operand (tree *, stmt_ann_t, int);
150 static void build_ssa_operands (tree stmt);
151
152 static def_optype_p free_defs = NULL;
153 static use_optype_p free_uses = NULL;
154 static vuse_optype_p free_vuses = NULL;
155 static maydef_optype_p free_maydefs = NULL;
156 static mustdef_optype_p free_mustdefs = NULL;
157
158
159 /* Return the DECL_UID of the base variable of T. */
160
161 static inline unsigned
162 get_name_decl (tree t)
163 {
164 if (TREE_CODE (t) != SSA_NAME)
165 return DECL_UID (t);
166 else
167 return DECL_UID (SSA_NAME_VAR (t));
168 }
169
170 /* Comparison function for qsort used in operand_build_sort_virtual. */
171
172 static int
173 operand_build_cmp (const void *p, const void *q)
174 {
175 tree e1 = *((const tree *)p);
176 tree e2 = *((const tree *)q);
177 unsigned int u1,u2;
178
179 u1 = get_name_decl (e1);
180 u2 = get_name_decl (e2);
181
182 /* We want to sort in ascending order. They can never be equal. */
183 #ifdef ENABLE_CHECKING
184 gcc_assert (u1 != u2);
185 #endif
186 return (u1 > u2 ? 1 : -1);
187 }
188
189 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
190
191 static inline void
192 operand_build_sort_virtual (VEC(tree,heap) *list)
193 {
194 int num = VEC_length (tree, list);
195 if (num < 2)
196 return;
197 if (num == 2)
198 {
199 if (get_name_decl (VEC_index (tree, list, 0))
200 > get_name_decl (VEC_index (tree, list, 1)))
201 {
202 /* Swap elements if in the wrong order. */
203 tree tmp = VEC_index (tree, list, 0);
204 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
205 VEC_replace (tree, list, 1, tmp);
206 }
207 return;
208 }
209 /* There are 3 or more elements, call qsort. */
210 qsort (VEC_address (tree, list),
211 VEC_length (tree, list),
212 sizeof (tree),
213 operand_build_cmp);
214 }
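/* E.g., a build list whose elements have DECL_UIDs {7, 3, 5} is reordered
   to {3, 5, 7}.  Keeping the virtual operand vectors in this canonical
   order helps the finalize routines match new operands against the old
   cache element by element.  */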
215
216
217
218 /* Return true if the ssa operands cache is active. */
219
220 bool
221 ssa_operands_active (void)
222 {
223 return ops_active;
224 }
225
226
227 /* Initialize the operand cache routines. */
228
229 void
230 init_ssa_operands (void)
231 {
232 build_defs = VEC_alloc (tree, heap, 5);
233 build_uses = VEC_alloc (tree, heap, 10);
234 build_vuses = VEC_alloc (tree, heap, 25);
235 build_v_may_defs = VEC_alloc (tree, heap, 25);
236 build_v_must_defs = VEC_alloc (tree, heap, 25);
237
238 gcc_assert (operand_memory == NULL);
239 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
240 ops_active = true;
241 }
242
243
244 /* Dispose of anything required by the operand routines. */
245
246 void
247 fini_ssa_operands (void)
248 {
249 struct ssa_operand_memory_d *ptr;
250 VEC_free (tree, heap, build_defs);
251 VEC_free (tree, heap, build_uses);
252 VEC_free (tree, heap, build_v_must_defs);
253 VEC_free (tree, heap, build_v_may_defs);
254 VEC_free (tree, heap, build_vuses);
255 free_defs = NULL;
256 free_uses = NULL;
257 free_vuses = NULL;
258 free_maydefs = NULL;
259 free_mustdefs = NULL;
260 while ((ptr = operand_memory) != NULL)
261 {
262 operand_memory = operand_memory->next;
263 ggc_free (ptr);
264 }
265
266 VEC_free (tree, heap, clobbered_v_may_defs);
267 VEC_free (tree, heap, clobbered_vuses);
268 VEC_free (tree, heap, ro_call_vuses);
269 ops_active = false;
270 }
271
272
273 /* Return SIZE bytes of memory for operands from the operand memory pool. */
274
275 static inline void *
276 ssa_operand_alloc (unsigned size)
277 {
278 char *ptr;
279 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
280 {
281 struct ssa_operand_memory_d *ptr;
282 ptr = ggc_alloc (sizeof (struct ssa_operand_memory_d));
283 ptr->next = operand_memory;
284 operand_memory = ptr;
285 operand_memory_index = 0;
286 }
287 ptr = &(operand_memory->mem[operand_memory_index]);
288 operand_memory_index += size;
289 return ptr;
290 }
291
292
293 /* Make sure PTR is in the correct immediate use list. Since uses are simply
294 pointers into the stmt TREE, there is no way of telling if anyone has
295 changed what this pointer points to via TREE_OPERAND (exp, 0) = <...>.
296 The contents are different, but the pointer is still the same. This
297 routine will check to make sure PTR is in the correct list, and if it
298 isn't, put it in the correct list. We cannot simply check the previous
299 node because all nodes in the same stmt might have been changed. */
300
301 static inline void
302 correct_use_link (use_operand_p ptr, tree stmt)
303 {
304 use_operand_p prev;
305 tree root;
306
307 /* Fold_stmt () may have changed the stmt pointers. */
308 if (ptr->stmt != stmt)
309 ptr->stmt = stmt;
310
311 prev = ptr->prev;
312 if (prev)
313 {
314 /* Find the root element, making sure we skip any safe iterators. */
315 while (prev->use != NULL || prev->stmt == NULL)
316 prev = prev->prev;
317
318 /* Get the ssa_name of the list the node is in. */
319 root = prev->stmt;
320 /* If it's the right list, simply return. */
321 if (root == *(ptr->use))
322 return;
323 }
324 /* It's in the wrong list if we reach here. */
325 delink_imm_use (ptr);
326 link_imm_use (ptr, *(ptr->use));
327 }
328
329
330 /* This routine makes sure that PTR is in an immediate use list, and makes
331 sure the stmt pointer is set to the current stmt. Virtual uses do not need
332 the overhead of correct_use_link since they cannot be directly manipulated
333 like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */
334 static inline void
335 set_virtual_use_link (use_operand_p ptr, tree stmt)
336 {
337 /* Fold_stmt () may have changed the stmt pointers. */
338 if (ptr->stmt != stmt)
339 ptr->stmt = stmt;
340
341 /* If this use isn't in a list, add it to the correct list. */
342 if (!ptr->prev)
343 link_imm_use (ptr, *(ptr->use));
344 }
345
346
347
348 #define FINALIZE_OPBUILD build_defs
349 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
350 build_defs, (I))
351 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
352 build_defs, (I))
353 #define FINALIZE_FUNC finalize_ssa_def_ops
354 #define FINALIZE_ALLOC alloc_def
355 #define FINALIZE_FREE free_defs
356 #define FINALIZE_TYPE struct def_optype_d
357 #define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
358 #define FINALIZE_OPS DEF_OPS
359 #define FINALIZE_BASE(VAR) VAR
360 #define FINALIZE_BASE_TYPE tree *
361 #define FINALIZE_BASE_ZERO NULL
362 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
363 #include "tree-ssa-opfinalize.h"
364
365
366 /* This routine will create stmt operands for STMT from the def build list. */
367
368 static void
369 finalize_ssa_defs (tree stmt)
370 {
371 unsigned int num = VEC_length (tree, build_defs);
372 /* There should only be a single real definition per assignment. */
373 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
374
375 /* If there is an old list, the new list is often identical or close, so
376 find the elements at the beginning that are the same as the old vector. */
377
378 finalize_ssa_def_ops (stmt);
379 VEC_truncate (tree, build_defs, 0);
380 }
381
382 #define FINALIZE_OPBUILD build_uses
383 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
384 build_uses, (I))
385 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
386 build_uses, (I))
387 #define FINALIZE_FUNC finalize_ssa_use_ops
388 #define FINALIZE_ALLOC alloc_use
389 #define FINALIZE_FREE free_uses
390 #define FINALIZE_TYPE struct use_optype_d
391 #define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
392 #define FINALIZE_OPS USE_OPS
393 #define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
394 #define FINALIZE_CORRECT_USE correct_use_link
395 #define FINALIZE_BASE(VAR) VAR
396 #define FINALIZE_BASE_TYPE tree *
397 #define FINALIZE_BASE_ZERO NULL
398 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
399 (PTR)->use_ptr.use = (VAL); \
400 link_imm_use_stmt (&((PTR)->use_ptr), \
401 *(VAL), (STMT))
402 #include "tree-ssa-opfinalize.h"
403
404 /* This routine will create the use operands for STMT from the use build list. */
405
406 static void
407 finalize_ssa_uses (tree stmt)
408 {
409 #ifdef ENABLE_CHECKING
410 {
411 unsigned x;
412 unsigned num = VEC_length (tree, build_uses);
413
414 /* If the pointer to the operand is the statement itself, something is
415 wrong. It means that we are pointing to a local variable (the
416 initial call to get_stmt_operands does not pass a pointer to a
417 statement). */
418 for (x = 0; x < num; x++)
419 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
420 }
421 #endif
422 finalize_ssa_use_ops (stmt);
423 VEC_truncate (tree, build_uses, 0);
424 }
425
426
427 /* Create the v_may_def operands for STMT from the v_may_def build list. */
428 #define FINALIZE_OPBUILD build_v_may_defs
429 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_may_defs, (I))
430 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
431 build_v_may_defs, (I)))
432 #define FINALIZE_FUNC finalize_ssa_v_may_def_ops
433 #define FINALIZE_ALLOC alloc_maydef
434 #define FINALIZE_FREE free_maydefs
435 #define FINALIZE_TYPE struct maydef_optype_d
436 #define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
437 #define FINALIZE_OPS MAYDEF_OPS
438 #define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
439 #define FINALIZE_CORRECT_USE set_virtual_use_link
440 #define FINALIZE_BASE_ZERO 0
441 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
442 #define FINALIZE_BASE_TYPE unsigned
443 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
444 (PTR)->def_var = (VAL); \
445 (PTR)->use_var = (VAL); \
446 (PTR)->use_ptr.use = &((PTR)->use_var); \
447 link_imm_use_stmt (&((PTR)->use_ptr), \
448 (VAL), (STMT))
449 #include "tree-ssa-opfinalize.h"
450
451
452 static void
453 finalize_ssa_v_may_defs (tree stmt)
454 {
455 finalize_ssa_v_may_def_ops (stmt);
456 }
457
458
459 /* Clear the in_list bits and empty the build array for v_may_defs. */
460
461 static inline void
462 cleanup_v_may_defs (void)
463 {
464 unsigned x, num;
465 num = VEC_length (tree, build_v_may_defs);
466
467 for (x = 0; x < num; x++)
468 {
469 tree t = VEC_index (tree, build_v_may_defs, x);
470 if (TREE_CODE (t) != SSA_NAME)
471 {
472 var_ann_t ann = var_ann (t);
473 ann->in_v_may_def_list = 0;
474 }
475 }
476 VEC_truncate (tree, build_v_may_defs, 0);
477 }
478
479
480 #define FINALIZE_OPBUILD build_vuses
481 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_vuses, (I))
482 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
483 build_vuses, (I)))
484 #define FINALIZE_FUNC finalize_ssa_vuse_ops
485 #define FINALIZE_ALLOC alloc_vuse
486 #define FINALIZE_FREE free_vuses
487 #define FINALIZE_TYPE struct vuse_optype_d
488 #define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
489 #define FINALIZE_OPS VUSE_OPS
490 #define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
491 #define FINALIZE_CORRECT_USE set_virtual_use_link
492 #define FINALIZE_BASE_ZERO 0
493 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
494 #define FINALIZE_BASE_TYPE unsigned
495 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
496 (PTR)->use_var = (VAL); \
497 (PTR)->use_ptr.use = &((PTR)->use_var); \
498 link_imm_use_stmt (&((PTR)->use_ptr), \
499 (VAL), (STMT))
500 #include "tree-ssa-opfinalize.h"
501
502
503 /* Create the VUSE operands for STMT from the vuse build list. */
504
505 static void
506 finalize_ssa_vuses (tree stmt)
507 {
508 unsigned num, num_v_may_defs;
509 unsigned vuse_index;
510
511 /* Remove superfluous VUSE operands. If the statement already has a
512 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
513 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
514 suppose that variable 'a' is aliased:
515
516 # VUSE <a_2>
517 # a_3 = V_MAY_DEF <a_2>
518 a = a + 1;
519
520 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
521 operation. */
522
523 num = VEC_length (tree, build_vuses);
524 num_v_may_defs = VEC_length (tree, build_v_may_defs);
525
526 if (num > 0 && num_v_may_defs > 0)
527 {
528 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
529 {
530 tree vuse;
531 vuse = VEC_index (tree, build_vuses, vuse_index);
532 if (TREE_CODE (vuse) != SSA_NAME)
533 {
534 var_ann_t ann = var_ann (vuse);
535 ann->in_vuse_list = 0;
536 if (ann->in_v_may_def_list)
537 {
538 VEC_ordered_remove (tree, build_vuses, vuse_index);
539 continue;
540 }
541 }
542 vuse_index++;
543 }
544 }
545 else
546 /* Clear out the in_list bits. */
547 for (vuse_index = 0;
548 vuse_index < VEC_length (tree, build_vuses);
549 vuse_index++)
550 {
551 tree t = VEC_index (tree, build_vuses, vuse_index);
552 if (TREE_CODE (t) != SSA_NAME)
553 {
554 var_ann_t ann = var_ann (t);
555 ann->in_vuse_list = 0;
556 }
557 }
558
559 finalize_ssa_vuse_ops (stmt);
560 /* The v_may_def build vector wasn't cleaned up because we needed it. */
561 cleanup_v_may_defs ();
562
563 /* Free the vuses build vector. */
564 VEC_truncate (tree, build_vuses, 0);
565
566 }
567
568 /* Create the v_must_def operands for STMT from the v_must_def build list. */
569
570 #define FINALIZE_OPBUILD build_v_must_defs
571 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_must_defs, (I))
572 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
573 build_v_must_defs, (I)))
574 #define FINALIZE_FUNC finalize_ssa_v_must_def_ops
575 #define FINALIZE_ALLOC alloc_mustdef
576 #define FINALIZE_FREE free_mustdefs
577 #define FINALIZE_TYPE struct mustdef_optype_d
578 #define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
579 #define FINALIZE_OPS MUSTDEF_OPS
580 #define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
581 #define FINALIZE_CORRECT_USE set_virtual_use_link
582 #define FINALIZE_BASE_ZERO 0
583 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
584 #define FINALIZE_BASE_TYPE unsigned
585 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
586 (PTR)->def_var = (VAL); \
587 (PTR)->kill_var = (VAL); \
588 (PTR)->use_ptr.use = &((PTR)->kill_var);\
589 link_imm_use_stmt (&((PTR)->use_ptr), \
590 (VAL), (STMT))
591 #include "tree-ssa-opfinalize.h"
592
593
594 static void
595 finalize_ssa_v_must_defs (tree stmt)
596 {
597 /* In the presence of subvars, there may be more than one V_MUST_DEF per
598 statement (one for each subvar). It is a bit expensive to verify that
599 all must-defs in a statement belong to subvars if there is more than one
600 MUST-def, so we don't do it. Suffice to say, if you reach here without
601 having subvars, and have num >1, you have hit a bug. */
602
603 finalize_ssa_v_must_def_ops (stmt);
604 VEC_truncate (tree, build_v_must_defs, 0);
605 }
606
607
608 /* Finalize all the build vectors, filling the new operands into STMT's operand cache. */
609
610 static inline void
611 finalize_ssa_stmt_operands (tree stmt)
612 {
613 finalize_ssa_defs (stmt);
614 finalize_ssa_uses (stmt);
615 finalize_ssa_v_must_defs (stmt);
616 finalize_ssa_v_may_defs (stmt);
617 finalize_ssa_vuses (stmt);
618 }
619
620
621 /* Start the process of building up the operand vectors. */
622
623 static inline void
624 start_ssa_stmt_operands (void)
625 {
626 gcc_assert (VEC_length (tree, build_defs) == 0);
627 gcc_assert (VEC_length (tree, build_uses) == 0);
628 gcc_assert (VEC_length (tree, build_vuses) == 0);
629 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
630 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
631 }
632
633
634 /* Add DEF_P to the list of pointers to operands. */
635
636 static inline void
637 append_def (tree *def_p)
638 {
639 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
640 }
641
642
643 /* Add USE_P to the list of pointers to operands. */
644
645 static inline void
646 append_use (tree *use_p)
647 {
648 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
649 }
650
651
652 /* Add a new virtual may def for variable VAR to the build array. */
653
654 static inline void
655 append_v_may_def (tree var)
656 {
657 if (TREE_CODE (var) != SSA_NAME)
658 {
659 var_ann_t ann = get_var_ann (var);
660
661 /* Don't allow duplicate entries. */
662 if (ann->in_v_may_def_list)
663 return;
664 ann->in_v_may_def_list = 1;
665 }
666
667 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
668 }
669
670
671 /* Add VAR to the list of virtual uses. */
672
673 static inline void
674 append_vuse (tree var)
675 {
676
677 /* Don't allow duplicate entries. */
678 if (TREE_CODE (var) != SSA_NAME)
679 {
680 var_ann_t ann = get_var_ann (var);
681
682 if (ann->in_vuse_list || ann->in_v_may_def_list)
683 return;
684 ann->in_vuse_list = 1;
685 }
686
687 VEC_safe_push (tree, heap, build_vuses, (tree)var);
688 }
689
690
691 /* Add VAR to the list of virtual must definitions. */
692
693 static inline void
694 append_v_must_def (tree var)
695 {
696 unsigned i;
697
698 /* Don't allow duplicate entries. */
699 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
700 if (var == VEC_index (tree, build_v_must_defs, i))
701 return;
702
703 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
704 }
705
706
707 /* Parse STMT looking for operands. The statement's previous operand
708 cache, if any, is reused during finalization. When finished, the various
709 build_* operand vectors will have potential operands in them. */
710
711 static void
712 parse_ssa_operands (tree stmt)
713 {
714 enum tree_code code;
715
716 code = TREE_CODE (stmt);
717 switch (code)
718 {
719 case MODIFY_EXPR:
720 /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
721 either only part of LHS is modified or if the RHS might throw,
722 otherwise, use V_MUST_DEF.
723
724 ??? If it might throw, we should represent somehow that it is killed
725 on the fallthrough path. */
726 {
727 tree lhs = TREE_OPERAND (stmt, 0);
728 int lhs_flags = opf_is_def;
729
730 get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
731
732 /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
733 or not the entire LHS is modified; that depends on what's
734 inside the VIEW_CONVERT_EXPR. */
735 if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
736 lhs = TREE_OPERAND (lhs, 0);
737
738 if (TREE_CODE (lhs) != ARRAY_REF
739 && TREE_CODE (lhs) != ARRAY_RANGE_REF
740 && TREE_CODE (lhs) != BIT_FIELD_REF
741 && TREE_CODE (lhs) != REALPART_EXPR
742 && TREE_CODE (lhs) != IMAGPART_EXPR)
743 lhs_flags |= opf_kill_def;
744
745 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
746 }
747 break;
748
749 case COND_EXPR:
750 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
751 break;
752
753 case SWITCH_EXPR:
754 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
755 break;
756
757 case ASM_EXPR:
758 get_asm_expr_operands (stmt);
759 break;
760
761 case RETURN_EXPR:
762 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
763 break;
764
765 case GOTO_EXPR:
766 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
767 break;
768
769 case LABEL_EXPR:
770 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
771 break;
772
773 /* These nodes contain no variable references. */
774 case BIND_EXPR:
775 case CASE_LABEL_EXPR:
776 case TRY_CATCH_EXPR:
777 case TRY_FINALLY_EXPR:
778 case EH_FILTER_EXPR:
779 case CATCH_EXPR:
780 case RESX_EXPR:
781 break;
782
783 default:
784 /* Notice that if get_expr_operands tries to use &STMT as the operand
785 pointer (which may only happen for USE operands), we will fail in
786 append_use. This default will handle statements like empty
787 statements, or CALL_EXPRs that may appear on the RHS of a statement
788 or as statements themselves. */
789 get_expr_operands (stmt, &stmt, opf_none);
790 break;
791 }
792 }
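/* For instance, a statement 'x = foo (y)' is handled by the MODIFY_EXPR
   case above: the RHS CALL_EXPR is scanned with opf_none (reaching
   get_call_expr_operands through get_expr_operands), and the LHS 'x' is
   scanned with opf_is_def | opf_kill_def.  A bare call 'foo (y);' falls
   through to the default case instead.  */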
793
794 /* Create an operands cache for STMT, returning it in NEW_OPS. OLD_OPS are the
795 original operands, and if ANN is non-null, appropriate stmt flags are set
796 in the stmt's annotation. If ANN is NULL, this is not considered a "real"
797 stmt, and none of the operands will be entered into their respective
798 immediate uses tables. This is to allow stmts to be processed when they
799 are not actually in the CFG.
800
801 Note that some fields in old_ops may change to NULL, although none of the
802 memory they originally pointed to will be destroyed. It is appropriate
803 to call free_stmt_operands() on the value returned in old_ops.
804
805 The rationale for this: Certain optimizations wish to examine the difference
806 between new_ops and old_ops after processing. If a set of operands doesn't
807 change, new_ops will simply assume the pointer in old_ops, and the old_ops
808 pointer will be set to NULL, indicating no memory needs to be cleared.
809 Usage might appear something like:
810
811 old_ops_copy = old_ops = stmt_ann(stmt)->operands;
812 build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
813 <* compare old_ops_copy and new_ops *>
814 free_ssa_operands (old_ops); */
815
816 static void
817 build_ssa_operands (tree stmt)
818 {
819 stmt_ann_t ann = get_stmt_ann (stmt);
820
821 /* Initially assume that the statement has no volatile operands, nor
822 makes aliased loads or stores. */
823 if (ann)
824 {
825 ann->has_volatile_ops = false;
826 ann->makes_aliased_stores = false;
827 ann->makes_aliased_loads = false;
828 }
829
830 start_ssa_stmt_operands ();
831
832 parse_ssa_operands (stmt);
833 operand_build_sort_virtual (build_vuses);
834 operand_build_sort_virtual (build_v_may_defs);
835 operand_build_sort_virtual (build_v_must_defs);
836
837 finalize_ssa_stmt_operands (stmt);
838 }
839
840
841 /* Free any operands vectors in OPS. */
842 void
843 free_ssa_operands (stmt_operands_p ops)
844 {
845 ops->def_ops = NULL;
846 ops->use_ops = NULL;
847 ops->maydef_ops = NULL;
848 ops->mustdef_ops = NULL;
849 ops->vuse_ops = NULL;
850 }
851
852
853 /* Get the operands of statement STMT. Note that repeated calls to
854 get_stmt_operands for the same statement will do nothing until the
855 statement is marked modified by a call to mark_stmt_modified(). */
856
857 void
858 update_stmt_operands (tree stmt)
859 {
860 stmt_ann_t ann = get_stmt_ann (stmt);
861 /* If get_stmt_operands is called before SSA is initialized, don't
862 do anything. */
863 if (!ssa_operands_active ())
864 return;
865 /* The optimizers cannot handle statements that are nothing but a
866 _DECL. This indicates a bug in the gimplifier. */
867 gcc_assert (!SSA_VAR_P (stmt));
868
869 gcc_assert (ann->modified);
870
871 timevar_push (TV_TREE_OPS);
872
873 build_ssa_operands (stmt);
874
875 /* Clear the modified bit for STMT. Subsequent calls to
876 get_stmt_operands for this statement will do nothing until the
877 statement is marked modified by a call to mark_stmt_modified(). */
878 ann->modified = 0;
879
880 timevar_pop (TV_TREE_OPS);
881 }
882
883
884 /* Copy the virtual operands from SRC to DEST. */
885
886 void
887 copy_virtual_operands (tree dest, tree src)
888 {
889 tree t;
890 ssa_op_iter iter, old_iter;
891 use_operand_p use_p, u2;
892 def_operand_p def_p, d2;
893
894 build_ssa_operands (dest);
895
896 /* Copy all the virtual fields. */
897 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
898 append_vuse (t);
899 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
900 append_v_may_def (t);
901 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
902 append_v_must_def (t);
903
904 if (VEC_length (tree, build_vuses) == 0
905 && VEC_length (tree, build_v_may_defs) == 0
906 && VEC_length (tree, build_v_must_defs) == 0)
907 return;
908
909 /* Now commit the virtual operands to this stmt. */
910 finalize_ssa_v_must_defs (dest);
911 finalize_ssa_v_may_defs (dest);
912 finalize_ssa_vuses (dest);
913
914 /* Finally, set the fields to the same values as the originals. */
915
916
917 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
918 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
919 {
920 gcc_assert (!op_iter_done (&old_iter));
921 SET_USE (use_p, t);
922 t = op_iter_next_tree (&old_iter);
923 }
924 gcc_assert (op_iter_done (&old_iter));
925
926 op_iter_init_maydef (&old_iter, src, &u2, &d2);
927 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
928 {
929 gcc_assert (!op_iter_done (&old_iter));
930 SET_USE (use_p, USE_FROM_PTR (u2));
931 SET_DEF (def_p, DEF_FROM_PTR (d2));
932 op_iter_next_maymustdef (&u2, &d2, &old_iter);
933 }
934 gcc_assert (op_iter_done (&old_iter));
935
936 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
937 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
938 {
939 gcc_assert (!op_iter_done (&old_iter));
940 SET_USE (use_p, USE_FROM_PTR (u2));
941 SET_DEF (def_p, DEF_FROM_PTR (d2));
942 op_iter_next_maymustdef (&u2, &d2, &old_iter);
943 }
944 gcc_assert (op_iter_done (&old_iter));
945
946 }
947
948
949 /* Specifically for use in DOM's expression analysis. Given a store, we
950 create an artificial stmt which looks like a load from the store; this can
951 be used to eliminate redundant loads. OLD_STMT is the store stmt, and
952 NEW_STMT is the new load which represents a load of the
953 values stored. */
954
955 void
956 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
957 {
958 stmt_ann_t ann;
959 tree op;
960 ssa_op_iter iter;
961 use_operand_p use_p;
962 unsigned x;
963
964 ann = get_stmt_ann (new_stmt);
965
966 /* Process the stmt looking for operands. */
967 start_ssa_stmt_operands ();
968 parse_ssa_operands (new_stmt);
969
970 for (x = 0; x < VEC_length (tree, build_vuses); x++)
971 {
972 tree t = VEC_index (tree, build_vuses, x);
973 if (TREE_CODE (t) != SSA_NAME)
974 {
975 var_ann_t ann = var_ann (t);
976 ann->in_vuse_list = 0;
977 }
978 }
979
980 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
981 {
982 tree t = VEC_index (tree, build_v_may_defs, x);
983 if (TREE_CODE (t) != SSA_NAME)
984 {
985 var_ann_t ann = var_ann (t);
986 ann->in_v_may_def_list = 0;
987 }
988 }
989 /* Remove any virtual operands that were found. */
990 VEC_truncate (tree, build_v_may_defs, 0);
991 VEC_truncate (tree, build_v_must_defs, 0);
992 VEC_truncate (tree, build_vuses, 0);
993
994 /* For each VDEF on the original statement, we want to create a
995 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
996 statement. */
997 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
998 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
999 append_vuse (op);
1000
1001 /* Now build the operands for this new stmt. */
1002 finalize_ssa_stmt_operands (new_stmt);
1003
1004 /* All uses in this fake stmt must not be in the immediate use lists. */
1005 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
1006 delink_imm_use (use_p);
1007 }
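/* For instance, if the store statement carries '# a_4 = V_MAY_DEF <a_2>',
   the artificial load built above ends up with 'VUSE <a_4>', so DOM can
   recognize a later load of the same location as redundant.  */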
1008
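/* Swap the operands pointed to by EXP0 and EXP1 in statement STMT.  When
   the operand cache is active, the corresponding use entries are exchanged
   as well, so that each immediate-use link keeps following the value it
   referred to before the swap.  */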
1009 void
1010 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
1011 {
1012 tree op0, op1;
1013 op0 = *exp0;
1014 op1 = *exp1;
1015
1016 /* If the operand cache is active, attempt to preserve the relative positions
1017 of these two operands in their respective immediate use lists. */
1018 if (ssa_operands_active () && op0 != op1)
1019 {
1020 use_optype_p use0, use1, ptr;
1021 use0 = use1 = NULL;
1022 /* Find the 2 operands in the cache, if they are there. */
1023 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1024 if (USE_OP_PTR (ptr)->use == exp0)
1025 {
1026 use0 = ptr;
1027 break;
1028 }
1029 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1030 if (USE_OP_PTR (ptr)->use == exp1)
1031 {
1032 use1 = ptr;
1033 break;
1034 }
1035 /* If the uses don't both have operand entries, there isn't much we can do
1036 at this point. Presumably we don't need to worry about it. */
1037 if (use0 && use1)
1038 {
1039 tree *tmp = USE_OP_PTR (use1)->use;
1040 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
1041 USE_OP_PTR (use0)->use = tmp;
1042 }
1043 }
1044
1045 /* Now swap the data. */
1046 *exp0 = op1;
1047 *exp1 = op0;
1048 }
1049
1050
1051 /* Recursively scan the expression pointed to by EXPR_P in the statement
1052 STMT. FLAGS is a bitmask of the OPF_* constants modifying how to interpret
1053 the operands found. */
1054
1055 static void
1056 get_expr_operands (tree stmt, tree *expr_p, int flags)
1057 {
1058 enum tree_code code;
1059 enum tree_code_class class;
1060 tree expr = *expr_p;
1061 stmt_ann_t s_ann = stmt_ann (stmt);
1062
1063 if (expr == NULL)
1064 return;
1065
1066 code = TREE_CODE (expr);
1067 class = TREE_CODE_CLASS (code);
1068
1069 switch (code)
1070 {
1071 case ADDR_EXPR:
1072 /* We could have the address of a component, array member,
1073 etc which has interesting variable references. */
1074 /* Taking the address of a variable does not represent a
1075 reference to it, but the fact that the stmt takes its address will be
1076 of interest to some passes (e.g. alias resolution). */
1077 add_stmt_operand (expr_p, s_ann, 0);
1078
1079 /* If the address is invariant, there may be no interesting variable
1080 references inside. */
1081 if (is_gimple_min_invariant (expr))
1082 return;
1083
1084 /* There should be no VUSEs created, since the referenced objects are
1085 not really accessed. The only operands that we should find here
1086 are ARRAY_REF indices which will always be real operands (GIMPLE
1087 does not allow non-registers as array indices). */
1088 flags |= opf_no_vops;
1089
1090 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1091 return;
1092
1093 case SSA_NAME:
1094 case STRUCT_FIELD_TAG:
1095 case TYPE_MEMORY_TAG:
1096 case NAME_MEMORY_TAG:
1097 case VAR_DECL:
1098 case PARM_DECL:
1099 case RESULT_DECL:
1100 case CONST_DECL:
1101 {
1102 subvar_t svars;
1103
1104 /* Add the subvars for a variable if it has subvars, to DEFS or USES.
1105 Otherwise, add the variable itself.
1106 Whether it goes to USES or DEFS depends on the operand flags. */
1107 if (var_can_have_subvars (expr)
1108 && (svars = get_subvars_for_var (expr)))
1109 {
1110 subvar_t sv;
1111 for (sv = svars; sv; sv = sv->next)
1112 add_stmt_operand (&sv->var, s_ann, flags);
1113 }
1114 else
1115 {
1116 add_stmt_operand (expr_p, s_ann, flags);
1117 }
1118 return;
1119 }
1120 case MISALIGNED_INDIRECT_REF:
1121 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1122 /* fall through */
1123
1124 case ALIGN_INDIRECT_REF:
1125 case INDIRECT_REF:
1126 get_indirect_ref_operands (stmt, expr, flags);
1127 return;
1128
1129 case TARGET_MEM_REF:
1130 get_tmr_operands (stmt, expr, flags);
1131 return;
1132
1133 case ARRAY_REF:
1134 case ARRAY_RANGE_REF:
1135 /* Treat array references as references to the virtual variable
1136 representing the array. The virtual variable for an ARRAY_REF
1137 is the VAR_DECL for the array. */
1138
1139 /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
1140 according to the value of IS_DEF. Recurse if the LHS of the
1141 ARRAY_REF node is not a regular variable. */
1142 if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
1143 add_stmt_operand (expr_p, s_ann, flags);
1144 else
1145 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1146
1147 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1148 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1149 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1150 return;
1151
1152 case COMPONENT_REF:
1153 case REALPART_EXPR:
1154 case IMAGPART_EXPR:
1155 {
1156 tree ref;
1157 unsigned HOST_WIDE_INT offset, size;
1158 /* This component ref becomes an access to all of the subvariables
1159 it can touch, if we can determine that, but *NOT* the real one.
1160 If we can't determine which fields we could touch, the recursion
1161 will eventually get to a variable and add *all* of its subvars, or
1162 whatever is the minimum correct subset. */
1163
1164 ref = okay_component_ref_for_subvars (expr, &offset, &size);
1165 if (ref)
1166 {
1167 subvar_t svars = get_subvars_for_var (ref);
1168 subvar_t sv;
1169 for (sv = svars; sv; sv = sv->next)
1170 {
1171 bool exact;
1172 if (overlap_subvar (offset, size, sv, &exact))
1173 {
1174 int subvar_flags = flags;
1175 if (!exact)
1176 subvar_flags &= ~opf_kill_def;
1177 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1178 }
1179 }
1180 }
1181 else
1182 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1183 flags & ~opf_kill_def);
1184
1185 if (code == COMPONENT_REF)
1186 {
1187 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1188 s_ann->has_volatile_ops = true;
1189 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1190 }
1191 return;
1192 }
1193 case WITH_SIZE_EXPR:
1194 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1195 and an rvalue reference to its second argument. */
1196 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1197 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1198 return;
1199
1200 case CALL_EXPR:
1201 get_call_expr_operands (stmt, expr);
1202 return;
1203
1204 case COND_EXPR:
1205 case VEC_COND_EXPR:
1206 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1207 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1208 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1209 return;
1210
1211 case MODIFY_EXPR:
1212 {
1213 int subflags;
1214 tree op;
1215
1216 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1217
1218 op = TREE_OPERAND (expr, 0);
1219 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1220 op = TREE_OPERAND (op, 0);
1221 if (TREE_CODE (op) == ARRAY_REF
1222 || TREE_CODE (op) == ARRAY_RANGE_REF
1223 || TREE_CODE (op) == REALPART_EXPR
1224 || TREE_CODE (op) == IMAGPART_EXPR)
1225 subflags = opf_is_def;
1226 else
1227 subflags = opf_is_def | opf_kill_def;
1228
1229 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
1230 return;
1231 }
1232
1233 case CONSTRUCTOR:
1234 {
1235 /* General aggregate CONSTRUCTORs have been decomposed, but they
1236 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1237 constructor_elt *ce;
1238 unsigned HOST_WIDE_INT idx;
1239
1240 for (idx = 0;
1241 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
1242 idx++)
1243 get_expr_operands (stmt, &ce->value, opf_none);
1244
1245 return;
1246 }
1247
1248 case TRUTH_NOT_EXPR:
1249 case BIT_FIELD_REF:
1250 case VIEW_CONVERT_EXPR:
1251 do_unary:
1252 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1253 return;
1254
1255 case TRUTH_AND_EXPR:
1256 case TRUTH_OR_EXPR:
1257 case TRUTH_XOR_EXPR:
1258 case COMPOUND_EXPR:
1259 case OBJ_TYPE_REF:
1260 case ASSERT_EXPR:
1261 do_binary:
1262 {
1263 tree op0 = TREE_OPERAND (expr, 0);
1264 tree op1 = TREE_OPERAND (expr, 1);
1265
1266 /* If it would be profitable to swap the operands, then do so to
1267 canonicalize the statement, enabling better optimization.
1268
1269 By placing canonicalization of such expressions here we
1270 transparently keep statements in canonical form, even
1271 when the statement is modified. */
1272 if (tree_swap_operands_p (op0, op1, false))
1273 {
1274 /* For relationals we need to swap the operands
1275 and change the code. */
1276 if (code == LT_EXPR
1277 || code == GT_EXPR
1278 || code == LE_EXPR
1279 || code == GE_EXPR)
1280 {
1281 TREE_SET_CODE (expr, swap_tree_comparison (code));
1282 swap_tree_operands (stmt,
1283 &TREE_OPERAND (expr, 0),
1284 &TREE_OPERAND (expr, 1));
1285 }
1286
1287 /* For a commutative operator we can just swap the operands. */
1288 else if (commutative_tree_code (code))
1289 {
1290 swap_tree_operands (stmt,
1291 &TREE_OPERAND (expr, 0),
1292 &TREE_OPERAND (expr, 1));
1293 }
1294 }
1295
1296 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1297 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1298 return;
1299 }
1300
1301 case REALIGN_LOAD_EXPR:
1302 {
1303 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1304 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1305 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
1306 return;
1307 }
1308
1309 case BLOCK:
1310 case FUNCTION_DECL:
1311 case EXC_PTR_EXPR:
1312 case FILTER_EXPR:
1313 case LABEL_DECL:
1314 /* Expressions that make no memory references. */
1315 return;
1316
1317 default:
1318 if (class == tcc_unary)
1319 goto do_unary;
1320 if (class == tcc_binary || class == tcc_comparison)
1321 goto do_binary;
1322 if (class == tcc_constant || class == tcc_type)
1323 return;
1324 }
1325
1326 /* If we get here, something has gone wrong. */
1327 #ifdef ENABLE_CHECKING
1328 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1329 debug_tree (expr);
1330 fputs ("\n", stderr);
1331 internal_error ("internal error");
1332 #endif
1333 gcc_unreachable ();
1334 }
1335
1336
1337 /* Scan operands in the ASM_EXPR statement STMT. */
1338
1339 static void
1340 get_asm_expr_operands (tree stmt)
1341 {
1342 stmt_ann_t s_ann = stmt_ann (stmt);
1343 int noutputs = list_length (ASM_OUTPUTS (stmt));
1344 const char **oconstraints
1345 = (const char **) alloca ((noutputs) * sizeof (const char *));
1346 int i;
1347 tree link;
1348 const char *constraint;
1349 bool allows_mem, allows_reg, is_inout;
1350
1351 for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1352 {
1353 oconstraints[i] = constraint
1354 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1355 parse_output_constraint (&constraint, i, 0, 0,
1356 &allows_mem, &allows_reg, &is_inout);
1357
1358 /* This should have been split in gimplify_asm_expr. */
1359 gcc_assert (!allows_reg || !is_inout);
1360
1361 /* Memory operands are addressable. Note that STMT needs the
1362 address of this operand. */
1363 if (!allows_reg && allows_mem)
1364 {
1365 tree t = get_base_address (TREE_VALUE (link));
1366 if (t && DECL_P (t) && s_ann)
1367 add_to_addressable_set (t, &s_ann->addresses_taken);
1368 }
1369
1370 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1371 }
1372
1373 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1374 {
1375 constraint
1376 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1377 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1378 oconstraints, &allows_mem, &allows_reg);
1379
1380 /* Memory operands are addressable. Note that STMT needs the
1381 address of this operand. */
1382 if (!allows_reg && allows_mem)
1383 {
1384 tree t = get_base_address (TREE_VALUE (link));
1385 if (t && DECL_P (t) && s_ann)
1386 add_to_addressable_set (t, &s_ann->addresses_taken);
1387 }
1388
1389 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1390 }
1391
1392
1393 /* Clobber memory for asm ("" : : : "memory"); */
1394 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1395 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1396 {
1397 unsigned i;
1398 bitmap_iterator bi;
1399
1400 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1401 decided to group them). */
1402 if (global_var)
1403 add_stmt_operand (&global_var, s_ann, opf_is_def);
1404 else
1405 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1406 {
1407 tree var = referenced_var (i);
1408 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1409 }
1410
1411 /* Now clobber all addressables. */
1412 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1413 {
1414 tree var = referenced_var (i);
1415
1416 /* Subvars are explicitly represented in this list, so
1417 we don't need the original to be added to the clobber
1418 ops, but the original *will* be in this list because
1419 we keep the addressability of the original
1420 variable up-to-date so we don't screw up the rest of
1421 the backend. */
1422 if (var_can_have_subvars (var)
1423 && get_subvars_for_var (var) != NULL)
1424 continue;
1425
1426 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1427 }
1428
1429 break;
1430 }
1431 }
1432
1433 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1434 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */
1435
1436 static void
1437 get_indirect_ref_operands (tree stmt, tree expr, int flags)
1438 {
1439 tree *pptr = &TREE_OPERAND (expr, 0);
1440 tree ptr = *pptr;
1441 stmt_ann_t s_ann = stmt_ann (stmt);
1442
1443 /* Stores into INDIRECT_REF operands are never killing definitions. */
1444 flags &= ~opf_kill_def;
1445
1446 if (SSA_VAR_P (ptr))
1447 {
1448 struct ptr_info_def *pi = NULL;
1449
1450 /* If PTR has flow-sensitive points-to information, use it. */
1451 if (TREE_CODE (ptr) == SSA_NAME
1452 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1453 && pi->name_mem_tag)
1454 {
1455 /* PTR has its own memory tag. Use it. */
1456 add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
1457 }
1458 else
1459 {
1460 /* If PTR is not an SSA_NAME or it doesn't have a name
1461 tag, use its type memory tag. */
1462 var_ann_t v_ann;
1463
1464 /* If we are emitting debugging dumps, display a warning if
1465 PTR is an SSA_NAME with no flow-sensitive alias
1466 information. That means that we may need to compute
1467 aliasing again. */
1468 if (dump_file
1469 && TREE_CODE (ptr) == SSA_NAME
1470 && pi == NULL)
1471 {
1472 fprintf (dump_file,
1473 "NOTE: no flow-sensitive alias info for ");
1474 print_generic_expr (dump_file, ptr, dump_flags);
1475 fprintf (dump_file, " in ");
1476 print_generic_stmt (dump_file, stmt, dump_flags);
1477 }
1478
1479 if (TREE_CODE (ptr) == SSA_NAME)
1480 ptr = SSA_NAME_VAR (ptr);
1481 v_ann = var_ann (ptr);
1482 if (v_ann->type_mem_tag)
1483 add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
1484 }
1485 }
1486
1487 /* If a constant is used as a pointer, we can't generate a real
1488 operand for it but we mark the statement volatile to prevent
1489 optimizations from messing things up. */
1490 else if (TREE_CODE (ptr) == INTEGER_CST)
1491 {
1492 if (s_ann)
1493 s_ann->has_volatile_ops = true;
1494 return;
1495 }
1496
1497 /* Everything else *should* have been folded elsewhere, but users
1498 are smarter than we are in finding ways to write invalid code. We
1499 cannot just assert here. If we were absolutely certain that we
1500 do handle all valid cases, then we could just do nothing here.
1501 That seems optimistic, so attempt to do something logical... */
1502 else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
1503 && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
1504 && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
1505 {
1506 /* Make sure we know the object is addressable. */
1507 pptr = &TREE_OPERAND (ptr, 0);
1508 add_stmt_operand (pptr, s_ann, 0);
1509
1510 /* Mark the object itself with a VUSE. */
1511 pptr = &TREE_OPERAND (*pptr, 0);
1512 get_expr_operands (stmt, pptr, flags);
1513 return;
1514 }
1515
1516 /* Ok, this isn't even is_gimple_min_invariant. Something's broken. */
1517 else
1518 gcc_unreachable ();
1519
1520 /* Add a USE operand for the base pointer. */
1521 get_expr_operands (stmt, pptr, opf_none);
1522 }
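/* For example, for a store '*p_1 = x' where p_1 has a name memory tag, the
   routine above adds a V_MAY_DEF of that tag (stores through pointers are
   never killing definitions); if no flow-sensitive information is
   available, the type memory tag of 'p' is used instead, and 'p' itself
   still gets a real USE operand.  */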
1523
1524 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1525
1526 static void
1527 get_tmr_operands (tree stmt, tree expr, int flags)
1528 {
1529 tree tag = TMR_TAG (expr);
1530
1531 /* First record the real operands. */
1532 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1533 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1534
1535 /* MEM_REFs should never be killing. */
1536 flags &= ~opf_kill_def;
1537
1538 if (TMR_SYMBOL (expr))
1539 {
1540 stmt_ann_t ann = stmt_ann (stmt);
1541 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1542 }
1543
1544 if (tag)
1545 get_expr_operands (stmt, &tag, flags);
1546 else
1547 /* Something weird, so ensure that we will be careful. */
1548 stmt_ann (stmt)->has_volatile_ops = true;
1549 }
1550
1551 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1552
1553 static void
1554 get_call_expr_operands (tree stmt, tree expr)
1555 {
1556 tree op;
1557 int call_flags = call_expr_flags (expr);
1558
1559 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1560 operands for all the symbols that have been found to be
1561 call-clobbered.
1562
1563 Note that if aliases have not been computed, the global effects
1564 of calls will not be included in the SSA web. This is fine
1565 because no optimizer should run before aliases have been
1566 computed. By not bothering with virtual operands for CALL_EXPRs
1567 we avoid adding superfluous virtual operands, which can be a
1568 significant compile time sink (See PR 15855). */
1569 if (aliases_computed_p
1570 && !bitmap_empty_p (call_clobbered_vars)
1571 && !(call_flags & ECF_NOVOPS))
1572 {
1573 /* A 'pure' or a 'const' function never call-clobbers anything.
1574 A 'noreturn' function might, but since we don't return anyway
1575 there is no point in recording that. */
1576 if (TREE_SIDE_EFFECTS (expr)
1577 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1578 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1579 else if (!(call_flags & ECF_CONST))
1580 add_call_read_ops (stmt);
1581 }
1582
1583 /* Find uses in the called function. */
1584 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1585
1586 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1587 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1588
1589 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1590
1591 }
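/* As an illustration: a call to a 'const' function produces only real USE
   operands for the callee address and the arguments; a 'pure' call
   additionally gets VUSEs of the call-clobbered variables through
   add_call_read_ops, and an ordinary call gets V_MAY_DEFs for them as well
   through add_call_clobber_ops.  */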
1592
1593
1594 /* Add *VAR_P to the appropriate operand array of S_ANN's statement. FLAGS is as in
1595 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1596 the statement's real operands, otherwise it is added to virtual
1597 operands. */
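/* For example, if VAR is a memory tag whose may-alias set is {a, b}, a
   store appends V_MAY_DEFs for 'a' and 'b' (and for VAR itself when it is
   an alias tag), while a load appends the corresponding VUSEs; the names
   here are only illustrative.  */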
1598
1599 static void
1600 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1601 {
1602 bool is_real_op;
1603 tree var, sym;
1604 var_ann_t v_ann;
1605
1606 var = *var_p;
1607 STRIP_NOPS (var);
1608
1609 /* If the operand is an ADDR_EXPR, add its operand to the list of
1610 variables that have had their address taken in this statement. */
1611 if (TREE_CODE (var) == ADDR_EXPR && s_ann)
1612 {
1613 add_to_addressable_set (TREE_OPERAND (var, 0), &s_ann->addresses_taken);
1614 return;
1615 }
1616
1617 /* If the original variable is not a scalar, it will be added to the list
1618 of virtual operands. In that case, use its base symbol as the virtual
1619 variable representing it. */
1620 is_real_op = is_gimple_reg (var);
1621 if (!is_real_op && !DECL_P (var))
1622 var = get_virtual_var (var);
1623
1624 /* If VAR is not a variable that we care to optimize, do nothing. */
1625 if (var == NULL_TREE || !SSA_VAR_P (var))
1626 return;
1627
1628 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1629 v_ann = var_ann (sym);
1630
1631 /* Mark statements with volatile operands. Optimizers should back
1632 off from statements having volatile operands. */
1633 if (TREE_THIS_VOLATILE (sym) && s_ann)
1634 s_ann->has_volatile_ops = true;
1635
1636 /* If the variable cannot be modified and this is a V_MAY_DEF change
1637 it into a VUSE. This happens when read-only variables are marked
1638 call-clobbered and/or aliased to writable variables. So we only
1639 check for this on non-specific stores.
1640
1641 Note that if this is a specific store, i.e. associated with a
1642 modify_expr, then we can't suppress the V_DEF, lest we run into
1643 validation problems.
1644
1645 This can happen when programs cast away const, leaving us with a
1646 store to read-only memory. If the statement is actually executed
1647 at runtime, then the program is ill formed. If the statement is
1648 not executed then all is well. At the very least, we cannot ICE. */
1649 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1650 {
1651 gcc_assert (!is_real_op);
1652 flags &= ~(opf_is_def | opf_kill_def);
1653 }
1654
1655 if (is_real_op)
1656 {
1657 /* The variable is a GIMPLE register. Add it to real operands. */
1658 if (flags & opf_is_def)
1659 append_def (var_p);
1660 else
1661 append_use (var_p);
1662 }
1663 else
1664 {
1665 varray_type aliases;
1666
1667 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1668 virtual operands, unless the caller has specifically requested
1669 not to add virtual operands (used when adding operands inside an
1670 ADDR_EXPR expression). */
1671 if (flags & opf_no_vops)
1672 return;
1673
1674 aliases = v_ann->may_aliases;
1675
1676 if (aliases == NULL)
1677 {
1678 /* The variable is not aliased or it is an alias tag. */
1679 if (flags & opf_is_def)
1680 {
1681 if (flags & opf_kill_def)
1682 {
1683 /* Only regular variables or struct fields may get a
1684 V_MUST_DEF operand. */
1685 gcc_assert (!MTAG_P (var)
1686 || TREE_CODE (var) == STRUCT_FIELD_TAG);
1687 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1688 variable definitions. */
1689 append_v_must_def (var);
1690 }
1691 else
1692 {
1693 /* Add a V_MAY_DEF for call-clobbered variables and
1694 memory tags. */
1695 append_v_may_def (var);
1696 }
1697 }
1698 else
1699 {
1700 append_vuse (var);
1701 if (s_ann && v_ann->is_alias_tag)
1702 s_ann->makes_aliased_loads = 1;
1703 }
1704 }
1705 else
1706 {
1707 size_t i;
1708
1709 /* The variable is aliased. Add its aliases to the virtual
1710 operands. */
1711 gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);
1712
1713 if (flags & opf_is_def)
1714 {
1715 /* If the variable is also an alias tag, add a virtual
1716 operand for it, otherwise we will miss representing
1717 references to the members of the variable's alias set.
1718 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1719 if (v_ann->is_alias_tag)
1720 append_v_may_def (var);
1721
1722 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1723 append_v_may_def (VARRAY_TREE (aliases, i));
1724
1725 if (s_ann)
1726 s_ann->makes_aliased_stores = 1;
1727 }
1728 else
1729 {
1730 /* Similarly, append a virtual use for VAR itself, when
1731 it is an alias tag. */
1732 if (v_ann->is_alias_tag)
1733 append_vuse (var);
1734
1735 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1736 append_vuse (VARRAY_TREE (aliases, i));
1737
1738 if (s_ann)
1739 s_ann->makes_aliased_loads = 1;
1740 }
1741 }
1742 }
1743 }
1744
1745
1746 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
1747 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
1748 a single variable whose address has been taken or any other valid
1749 GIMPLE memory reference (structure reference, array, etc). If the
1750 base address of REF is a decl that has sub-variables, also add all
1751 of its sub-variables. */
1752
1753 void
1754 add_to_addressable_set (tree ref, bitmap *addresses_taken)
1755 {
1756 tree var;
1757 subvar_t svars;
1758
1759 gcc_assert (addresses_taken);
1760
1761 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
1762 as the only thing we take the address of. If VAR is a structure,
1763 taking the address of a field means that the whole structure may
1764 be referenced using pointer arithmetic. See PR 21407 and the
1765 ensuing mailing list discussion. */
1766 var = get_base_address (ref);
1767 if (var && SSA_VAR_P (var))
1768 {
1769 if (*addresses_taken == NULL)
1770 *addresses_taken = BITMAP_GGC_ALLOC ();
1771
1772 if (var_can_have_subvars (var)
1773 && (svars = get_subvars_for_var (var)))
1774 {
1775 subvar_t sv;
1776 for (sv = svars; sv; sv = sv->next)
1777 {
1778 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
1779 TREE_ADDRESSABLE (sv->var) = 1;
1780 }
1781 }
1782 else
1783 {
1784 bitmap_set_bit (*addresses_taken, DECL_UID (var));
1785 TREE_ADDRESSABLE (var) = 1;
1786 }
1787 }
1788 }
1789
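/* A minimal usage sketch (illustrative; 'addr_expr' names a hypothetical
   ADDR_EXPR node, not a variable from this file): collecting the address
   taken by such a node could look like

       bitmap addresses_taken = NULL;
       add_to_addressable_set (TREE_OPERAND (addr_expr, 0), &addresses_taken);

   The bitmap itself is allocated lazily by add_to_addressable_set the
   first time a bit needs to be set.  */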
1790
1791 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1792 clobbered variables in the function. */
1793
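/* A note on the caching below: when the callee has no IPA-reference
   information restricting what it reads or writes, the set of virtual
   operands added for a clobbering call is the same at every call site,
   so it is computed once and stored in CLOBBERED_VUSES and
   CLOBBERED_V_MAY_DEFS; later calls simply copy from that cache.  Call
   sites that do have not-read/not-written information bypass the cache
   and compute their operands directly.  */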
1794 static void
1795 add_call_clobber_ops (tree stmt, tree callee)
1796 {
1797 unsigned u;
1798 tree t;
1799 bitmap_iterator bi;
1800 stmt_ann_t s_ann = stmt_ann (stmt);
1801 struct stmt_ann_d empty_ann;
1802 bitmap not_read_b, not_written_b;
1803
1804 /* Functions that are not const, pure, or noreturn may clobber
1805 call-clobbered variables. */
1806 if (s_ann)
1807 s_ann->makes_clobbering_call = true;
1808
1809 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1810 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1811 if (global_var)
1812 {
1813 add_stmt_operand (&global_var, s_ann, opf_is_def);
1814 return;
1815 }
1816
1817 /* FIXME - if we have better information from the static vars
1818 analysis, we need to make the cache call-site specific. That
1819 way we can keep the performance benefits of the cache even when
1820 doing good optimization. */
1821
1822 /* Get info for local and module level statics. There is a bit
1823 set for each static if the call being processed does not read
1824 or write that variable. */
1825
1826 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1827 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1828
1829 /* If the cache is valid, copy the elements into the build vectors. */
1830 if (ssa_call_clobbered_cache_valid
1831 && (!not_read_b || bitmap_empty_p (not_read_b))
1832 && (!not_written_b || bitmap_empty_p (not_written_b)))
1833 {
1834 for (u = 0; u < VEC_length (tree, clobbered_vuses); u++)
1835 {
1836 t = VEC_index (tree, clobbered_vuses, u);
1837 gcc_assert (TREE_CODE (t) != SSA_NAME);
1838 var_ann (t)->in_vuse_list = 1;
1839 VEC_safe_push (tree, heap, build_vuses, (tree)t);
1840 }
1841 for (u = 0; u < VEC_length (tree, clobbered_v_may_defs); u++)
1842 {
1843 t = VEC_index (tree, clobbered_v_may_defs, u);
1844 gcc_assert (TREE_CODE (t) != SSA_NAME);
1845 var_ann (t)->in_v_may_def_list = 1;
1846 VEC_safe_push (tree, heap, build_v_may_defs, (tree)t);
1847 }
1848 if (s_ann)
1849 {
1850 s_ann->makes_aliased_loads = clobbered_aliased_loads;
1851 s_ann->makes_aliased_stores = clobbered_aliased_stores;
1852 }
1853 return;
1854 }
1855
1856 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
1857
1858 /* Add a V_MAY_DEF or VUSE operand, as appropriate, for every call-clobbered variable. */
1859 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1860 {
1861 tree var = referenced_var (u);
1862 if (unmodifiable_var_p (var))
1863 add_stmt_operand (&var, &empty_ann, opf_none);
1864 else
1865 {
1866 bool not_read
1867 = not_read_b ? bitmap_bit_p (not_read_b, u) : false;
1868 bool not_written
1869 = not_written_b ? bitmap_bit_p (not_written_b, u) : false;
1870
1871 if ((TREE_READONLY (var)
1872 && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
1873 || not_written)
1874 {
1875 if (!not_read)
1876 add_stmt_operand (&var, &empty_ann, opf_none);
1877 }
1878 else
1879 add_stmt_operand (&var, &empty_ann, opf_is_def);
1880 }
1881 }
1882
1883 if ((!not_read_b || bitmap_empty_p (not_read_b))
1884 && (!not_written_b || bitmap_empty_p (not_written_b)))
1885 {
1886 clobbered_aliased_loads = empty_ann.makes_aliased_loads;
1887 clobbered_aliased_stores = empty_ann.makes_aliased_stores;
1888
1889 /* Set the flags for a stmt's annotation. */
1890 if (s_ann)
1891 {
1892 s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
1893 s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
1894 }
1895
1896 /* Prepare empty cache vectors. */
1897 VEC_truncate (tree, clobbered_vuses, 0);
1898 VEC_truncate (tree, clobbered_v_may_defs, 0);
1899
1900 /* Now fill the clobbered cache with the values that have been found. */
1901 for (u = 0; u < VEC_length (tree, build_vuses); u++)
1902 VEC_safe_push (tree, heap, clobbered_vuses,
1903 VEC_index (tree, build_vuses, u));
1904
1905 gcc_assert (VEC_length (tree, build_vuses)
1906 == VEC_length (tree, clobbered_vuses));
1907
1908 for (u = 0; u < VEC_length (tree, build_v_may_defs); u++)
1909 VEC_safe_push (tree, heap, clobbered_v_may_defs,
1910 VEC_index (tree, build_v_may_defs, u));
1911
1912 gcc_assert (VEC_length (tree, build_v_may_defs)
1913 == VEC_length (tree, clobbered_v_may_defs));
1914
1915 ssa_call_clobbered_cache_valid = true;
1916 }
1917 }
1918
1919
1920 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1921 function. */
1922
1923 static void
1924 add_call_read_ops (tree stmt)
1925 {
1926 unsigned u;
1927 tree t;
1928 bitmap_iterator bi;
1929 stmt_ann_t s_ann = stmt_ann (stmt);
1930 struct stmt_ann_d empty_ann;
1931
1932 /* If the function is not pure, it may reference memory. Add
1933 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1934 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1935 if (global_var)
1936 {
1937 add_stmt_operand (&global_var, s_ann, opf_none);
1938 return;
1939 }
1940
1941 /* If the cache is valid, copy the elements into the build vector. */
1942 if (ssa_ro_call_cache_valid)
1943 {
1944 for (u = 0; u < VEC_length (tree, ro_call_vuses); u++)
1945 {
1946 t = VEC_index (tree, ro_call_vuses, u);
1947 gcc_assert (TREE_CODE (t) != SSA_NAME);
1948 var_ann (t)->in_vuse_list = 1;
1949 VEC_safe_push (tree, heap, build_vuses, (tree)t);
1950 }
1951 if (s_ann)
1952 s_ann->makes_aliased_loads = ro_call_aliased_loads;
1953 return;
1954 }
1955
1956 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
1957
1958 /* Add a VUSE for each call-clobbered variable. */
1959 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1960 {
1961 tree var = referenced_var (u);
1962 add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
1963 }
1964
1965 ro_call_aliased_loads = empty_ann.makes_aliased_loads;
1966 if (s_ann)
1967 s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
1968
1969 /* Prepare empty cache vectors. */
1970 VEC_truncate (tree, ro_call_vuses, 0);
1971
1972 /* Now fill the read-only call cache (ro_call_vuses) with the values found. */
1973 for (u = 0; u < VEC_length (tree, build_vuses); u++)
1974 VEC_safe_push (tree, heap, ro_call_vuses,
1975 VEC_index (tree, build_vuses, u));
1976
1977 gcc_assert (VEC_length (tree, build_vuses)
1978 == VEC_length (tree, ro_call_vuses));
1979
1980 ssa_ro_call_cache_valid = true;
1981 }
1982
1983
1984 /* Scan the immediate_use list for VAR, making sure it is linked properly.
1985 Return TRUE if there is a problem. */
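/* The immediate-use list being checked is a circular, doubly linked
   list whose head is the node embedded in the SSA_NAME itself
   (SSA_NAME_IMM_USE_NODE).  That head is a sentinel: its 'use' field is
   NULL and it represents no real use; an empty list has NULL 'next' and
   'prev' pointers in the sentinel.  The walks below traverse the list
   in both directions, checking that the links are consistent and that
   every node's 'use' still points at VAR.  */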
1986
1987 bool
1988 verify_imm_links (FILE *f, tree var)
1989 {
1990 use_operand_p ptr, prev, list;
1991 int count;
1992
1993 gcc_assert (TREE_CODE (var) == SSA_NAME);
1994
1995 list = &(SSA_NAME_IMM_USE_NODE (var));
1996 gcc_assert (list->use == NULL);
1997
1998 if (list->prev == NULL)
1999 {
2000 gcc_assert (list->next == NULL);
2001 return false;
2002 }
2003
2004 prev = list;
2005 count = 0;
2006 for (ptr = list->next; ptr != list; )
2007 {
2008 if (prev != ptr->prev)
2009 goto error;
2010
2011 if (ptr->use == NULL)
2012 goto error; /* 2 roots, or SAFE guard node. */
2013 else if (*(ptr->use) != var)
2014 goto error;
2015
2016 prev = ptr;
2017 ptr = ptr->next;
2018 /* Avoid infinite loops. 50,000,000 uses probably indicates a problem. */
2019 if (count++ > 50000000)
2020 goto error;
2021 }
2022
2023 /* Verify list in the other direction. */
2024 prev = list;
2025 for (ptr = list->prev; ptr != list; )
2026 {
2027 if (prev != ptr->next)
2028 goto error;
2029 prev = ptr;
2030 ptr = ptr->prev;
2031 if (count-- < 0)
2032 goto error;
2033 }
2034
2035 if (count != 0)
2036 goto error;
2037
2038 return false;
2039
2040 error:
2041 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2042 {
2043 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2044 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2045 }
2046 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2047 (void *)ptr->use);
2048 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2049 fprintf (f, "\n");
2050 return true;
2051 }
2052
2053
2054 /* Dump all the immediate uses for VAR to FILE. */
2055
2056 void
2057 dump_immediate_uses_for (FILE *file, tree var)
2058 {
2059 imm_use_iterator iter;
2060 use_operand_p use_p;
2061
2062 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2063
2064 print_generic_expr (file, var, TDF_SLIM);
2065 fprintf (file, " : -->");
2066 if (has_zero_uses (var))
2067 fprintf (file, " no uses.\n");
2068 else
2069 if (has_single_use (var))
2070 fprintf (file, " single use.\n");
2071 else
2072 fprintf (file, "%d uses.\n", num_imm_uses (var));
2073
2074 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2075 {
2076 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2077 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2078 else
2079 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2080 }
2081 fprintf (file, "\n");
2082 }
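/* For illustration (format derived from the fprintf calls above): for an
   SSA name 'a_5' with three uses the header line printed resembles

       a_5 : -->3 uses.

   and is followed by each use statement.  */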
2083
2084 /* Dump all the immediate uses to FILE. */
2085
2086 void
2087 dump_immediate_uses (FILE *file)
2088 {
2089 tree var;
2090 unsigned int x;
2091
2092 fprintf (file, "Immediate_uses: \n\n");
2093 for (x = 1; x < num_ssa_names; x++)
2094 {
2095 var = ssa_name (x);
2096 if (!var)
2097 continue;
2098 dump_immediate_uses_for (file, var);
2099 }
2100 }
2101
2102
2103 /* Dump def-use edges on stderr. */
2104
2105 void
2106 debug_immediate_uses (void)
2107 {
2108 dump_immediate_uses (stderr);
2109 }
2110
2111 /* Dump def-use edges on stderr. */
2112
2113 void
2114 debug_immediate_uses_for (tree var)
2115 {
2116 dump_immediate_uses_for (stderr, var);
2117 }
2118 #include "gt-tree-ssa-operands.h"