/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "timevar.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   For instance, if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
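/* As an illustrative sketch (not something this file itself does): once the
   cache has been built, client passes normally walk it through the iteration
   macros exported by tree-ssa-operands.h, for example

       use_operand_p use_p;
       ssa_op_iter iter;
       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
         process_use (USE_FROM_PTR (use_p));

   where process_use stands in for whatever the pass wants to do with each
   real use.  The build_* vectors and finalize routines below are what keep
   the operand lists backing those macros up to date.  */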
/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def		(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit	(1 << 2)
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;
/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (const_tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}
/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  /* This function may be invoked from contexts where CFUN is NULL
     (IPA passes), return false for now.  FIXME: operands may be
     active in each individual function, maybe this function should
     take CFUN as a parameter.  */
  if (cfun == NULL)
    return false;

  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}
/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands FUD chain.  */

static void
create_vop_var (void)
{
  tree global_var;

  gcc_assert (cfun->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
                           get_identifier (".MEM"),
                           void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;

  create_var_ann (global_var);
  add_referenced_var (global_var);
  cfun->gimple_df->vop = global_var;
}
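/* For orientation: this ".MEM" decl is the symbol that shows up in
   virtual-operand dumps (e.g. -fdump-tree-*-vops) threading memory state
   through statements, as in the illustrative fragment

       # .MEM_3 = VDEF <.MEM_2>
       *p_1 = 4;

   The fragment only shows the dump notation; it is not produced by this
   function.  */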
/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operands space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	(1024 - sizeof (void *))
#define OP_SIZE_2	(1024 * 4 - sizeof (void *))
#define OP_SIZE_3	(1024 * 16 - sizeof (void *))
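/* A rough sanity check of the figures in the comment above, assuming a
   64-bit host: a def_optype_d is two pointers (about 16 bytes) and a
   use_optype_d roughly five (about 40 bytes), so an OP_SIZE_1 chunk with
   1024 - 8 = 1016 usable bytes holds about 1016/16 = 63 defs or
   1016/40 = 25 uses, matching the numbers quoted there.  */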
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index
    = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var ();
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
        = gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars: %d\n",
               clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided: %d\n",
               clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided: %d\n",
               clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
               clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers: %d\n",
               clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
               clobber_stats.static_readonly_clobbers_avoided);
    }
}
/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d)
              || size == sizeof (struct def_optype_d));

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
        {
        case OP_SIZE_INIT:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
          break;
        case OP_SIZE_1:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
          break;
        case OP_SIZE_2:
        case OP_SIZE_3:
          gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
          break;
        default:
          gcc_unreachable ();
        }

      ptr = (struct ssa_operand_memory_d *)
              ggc_alloc (sizeof (void *)
                         + gimple_ssa_operands (cfun)->ssa_operand_mem_size);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
          ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}
/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
        = gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
          ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}
/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
        = gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
          ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}
/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new_def;

  new_def = alloc_def ();
  DEF_OP_PTR (new_def) = op;
  last->next = new_def;
  new_def->next = NULL;
  return new_def;
}
/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);

  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
          && TREE_CODE (oldvdef) == SSA_NAME)
        oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
        gimple_set_vdef (stmt, build_vdef);
      VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt));
      ++num;
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_def_ops (stmt);

  new_i = 0;

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (gimple_vdef (stmt));
        }
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    mark_sym_for_renaming (gimple_vdef (stmt));

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  gimple_set_def_ops (stmt, new_list.next);
}
/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
          && TREE_CODE (oldvuse) == SSA_NAME)
        oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
                      ? build_vuse : build_vdef))
        gimple_set_vuse (stmt, NULL_TREE);
      VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      mark_sym_for_renaming (gimple_vop (cfun));
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
                       (tree *) VEC_index (tree, build_uses, new_i),
                       last);

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}
/* Clear the in_list bits and empty the build arrays for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
}
/* Finalize all the build vectors and attach the results to STMT.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}
/* Start the process of building up operands vectors for a statement.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}
/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}
/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  gcc_assert ((build_vdef == NULL_TREE
               || build_vdef == var)
              && (build_vuse == NULL_TREE
                  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}
/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  gcc_assert (build_vuse == NULL_TREE
              || build_vuse == var);

  build_vuse = var;
}
/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}
/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var, sym;

  gcc_assert (SSA_VAR_P (*var_p));

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);

  /* Mark statements with volatile operands.  */
  if (TREE_THIS_VOLATILE (sym))
    gimple_set_has_volatile_ops (stmt, true);

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
        append_def (var_p);
      else
        append_use (var_p);
    }
  else
    add_virtual_operand (stmt, flags);
}
/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && DECL_P (var))
    TREE_ADDRESSABLE (var) = 1;
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
   that got us here.

   FLAGS is as in get_expr_operands.

   RECURSE_ON_BASE should be set to true if we want to continue
   calling get_expr_operands on the base pointer, and false if
   something else will do it for us.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags,
                           bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the virtual operand for the access.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr,
                       opf_use | (flags & opf_no_vops));
}
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  if (TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));

  if (TMR_SYMBOL (expr))
    mark_address_taken (TMR_SYMBOL (expr));

  add_virtual_operand (stmt, flags);
}
/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
         A 'noreturn' function might, but since we don't return anyway
         there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_virtual_operand (stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
        add_virtual_operand (stmt, opf_use);
    }
}
/* Scan operands in ASM statement STMT.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                               &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
                              &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree link = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
        {
          add_virtual_operand (stmt, opf_def);
          break;
        }
    }
}
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
         reference to it, but the fact that the statement takes its
         address will be of interest to some passes (e.g. alias
         resolution).  */
      if (!is_gimple_debug (stmt))
        mark_address_taken (TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
         variable references inside.  */
      if (is_gimple_min_invariant (expr))
        return;

      /* Otherwise, there may be variables referenced inside but there
         should be no VUSEs created, since the referenced objects are
         not really accessed.  The only operands that we should find
         here are ARRAY_REF indices which will always be real operands
         (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* FALLTHRU */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        if (TREE_THIS_VOLATILE (expr))
          gimple_set_has_volatile_ops (stmt, true);

        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

        if (code == COMPONENT_REF)
          {
            if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
              gimple_set_has_volatile_ops (stmt, true);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
          }
        else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
          {
            get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
          }

        return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */
        constructor_elt *ce;
        unsigned HOST_WIDE_INT idx;

        for (idx = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
             idx++)
          get_expr_operands (stmt, &ce->value, uflags);

        return;
      }

    case BIT_FIELD_REF:
      if (TREE_THIS_VOLATILE (expr))
        gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
        return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
        goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
        goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASM)
    get_asm_expr_operands (stmt);
  else if (is_gimple_debug (stmt))
    {
      if (gimple_debug_bind_p (stmt)
          && gimple_debug_bind_has_value_p (stmt))
        get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
                           opf_use | opf_no_vops);
    }
  else
    {
      size_t i, start = 0;

      if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
        {
          get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
          start = 1;
        }

      for (i = start; i < gimple_num_ops (stmt); i++)
        get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);

      /* Add call-clobbered operands, if needed.  */
      if (code == GIMPLE_CALL)
        maybe_add_call_vops (stmt);
    }
}
/* Create an operands cache for STMT.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}
/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  def_optype_p defs = gimple_def_ops (stmt), last_def;
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (defs)
    {
      for (last_def = defs; last_def->next; last_def = last_def->next)
        continue;
      last_def->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = defs;
      gimple_set_def_ops (stmt, NULL);
    }

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
        delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}
/* Get the operands of statement STMT.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  timevar_push (TV_TREE_OPS);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
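/* Note for readers: passes do not normally call update_stmt_operands
   directly.  They typically mark a statement modified and call update_stmt
   (or rely on the gsi_* statement-insertion helpers), which ends up here
   whenever the operand cache is active.  */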
/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
            break;
          }

      /* If both uses don't have operand entries, there isn't much we can do
         at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
        {
          tree *tmp = USE_OP_PTR (use1)->use;
          USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
          USE_OP_PTR (use0)->use = tmp;
        }
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
/* Scan the immediate_use list for VAR making sure it's linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
         problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
        print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
      else
        print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}
/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}
/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}
/* Unlink STMT's virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (stmt))
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
        SET_USE (use_p, gimple_vuse (stmt));
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vuse (stmt)) = 1;
}
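/* An illustrative example of the propagation performed above: given

       # .MEM_5 = VDEF <.MEM_4>
       *p_1 = 0;                      <-- STMT whose vdef is being unlinked
       ...
       # VUSE <.MEM_5>
       x_2 = *q_3;

   every immediate use of .MEM_5 is redirected to .MEM_4, so the later load
   reads "# VUSE <.MEM_4>" and STMT's virtual definition becomes dead.  The
   fragment only illustrates the dump notation; the exact statements are
   hypothetical.  */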