/* Alias analysis for trees.
   Copyright (C) 2004-2020 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "langhooks.h"
#include "ipa-reference.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "attr-fnspec.h"
/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

     bool stmt_may_clobber_ref_p (gimple *, tree)

       This function queries if a statement may invalidate (parts of)
       the memory designated by the reference tree argument.

     bool ref_maybe_used_by_stmt_p (gimple *, tree)

       This function queries if a statement may need (parts of) the
       memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

     bool refs_may_alias_p (tree, tree)

       This function tries to disambiguate two reference trees.

     bool ptr_deref_may_alias_global_p (tree)

       This function queries if dereferencing a pointer variable may
       alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
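/* Illustrative only: a minimal sketch of how a pass might consult the
   entry points documented above for a gimple statement STMT and a memory
   reference tree REF it already has at hand (STMT and REF are placeholder
   names, not part of this file):

     bool reads = ref_maybe_used_by_stmt_p (stmt, ref);
     bool writes = stmt_may_clobber_ref_p (stmt, ref);

   If neither is true, STMT provably neither reads nor writes (parts of)
   the memory designated by REF.  */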
static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);
/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
  unsigned HOST_WIDE_INT modref_use_may_alias;
  unsigned HOST_WIDE_INT modref_use_no_alias;
  unsigned HOST_WIDE_INT modref_clobber_may_alias;
  unsigned HOST_WIDE_INT modref_clobber_no_alias;
  unsigned HOST_WIDE_INT modref_tests;
  unsigned HOST_WIDE_INT modref_baseptr_tests;
} alias_stats;
void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  fprintf (s, "  nonoverlapping_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
  fprintf (s, "  nonoverlapping_refs_since_match_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" must overlaps, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias,
	   alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_may_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
  fprintf (s, "  aliasing_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.aliasing_component_refs_p_no_alias,
	   alias_stats.aliasing_component_refs_p_no_alias
	   + alias_stats.aliasing_component_refs_p_may_alias);
  dump_alias_stats_in_alias_c (s);
  fprintf (s, "\nModref stats:\n");
  fprintf (s, "  modref use: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.modref_use_no_alias,
	   alias_stats.modref_use_no_alias
	   + alias_stats.modref_use_may_alias);
  fprintf (s, "  modref clobber: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n"
	   "  " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
	   "  " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
	   alias_stats.modref_clobber_no_alias,
	   alias_stats.modref_clobber_no_alias
	   + alias_stats.modref_clobber_may_alias,
	   alias_stats.modref_tests,
	   ((double)alias_stats.modref_tests)
	   / (alias_stats.modref_clobber_no_alias
	      + alias_stats.modref_clobber_may_alias),
	   alias_stats.modref_baseptr_tests,
	   ((double)alias_stats.modref_baseptr_tests)
	   / (alias_stats.modref_clobber_no_alias
	      + alias_stats.modref_clobber_may_alias));
}
/* Return true if dereferencing PTR may alias with a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here that may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}
/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
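/* Worked example (illustrative only, not part of the implementation): for
   PTR == &a.x and DECL == b, with a and b distinct variables, the ADDR_EXPR
   case above reduces the query to compare_base_decls (a, b) != 0, which is
   false, so the dereference is known not to touch DECL.  */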
/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions if PTR1 and PTR2 are
     the same pointer.  In this case we still want to say both pointers
     alias, so shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
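/* Illustrative example (not part of the implementation): given

     p_1 = &a;
     q_2 = &b;

   the flow-insensitive solutions are pt(p_1) = { a } and pt(q_2) = { b },
   so pt_solutions_intersect returns false and *p_1 and *q_2 are known not
   to alias.  */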
/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}
/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (!tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (!tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that restrict-pointed-to
	 may be in fact obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */
  return false;
}
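/* Worked example (illustrative only): for a comparison p_1 != &b, where b
   is a local whose address is taken, the code above can answer true only
   when b is not in p_1's points-to set and that set contains no restrict
   or interposable variables, i.e. pt_solution_includes (&pi->pt, b) is
   false.  */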
/* Returns whether reference REF to BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}
/* Return whether STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}
/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}
/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
579 dump_points_to_solution (FILE *file
, struct pt_solution
*pt
)
582 fprintf (file
, ", points-to anything");
585 fprintf (file
, ", points-to non-local");
588 fprintf (file
, ", points-to escaped");
591 fprintf (file
, ", points-to unit escaped");
594 fprintf (file
, ", points-to NULL");
598 fprintf (file
, ", points-to vars: ");
599 dump_decl_set (file
, pt
->vars
);
600 if (pt
->vars_contains_nonlocal
601 || pt
->vars_contains_escaped
602 || pt
->vars_contains_escaped_heap
603 || pt
->vars_contains_restrict
)
605 const char *comma
= "";
606 fprintf (file
, " (");
607 if (pt
->vars_contains_nonlocal
)
609 fprintf (file
, "nonlocal");
612 if (pt
->vars_contains_escaped
)
614 fprintf (file
, "%sescaped", comma
);
617 if (pt
->vars_contains_escaped_heap
)
619 fprintf (file
, "%sescaped heap", comma
);
622 if (pt
->vars_contains_restrict
)
624 fprintf (file
, "%srestrict", comma
);
627 if (pt
->vars_contains_interposable
)
628 fprintf (file
, "%sinterposable", comma
);
/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
  fprintf (stderr, "\n");
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}
/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}
/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}
/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}
/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  if (!ref->ref)
    return 0;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a range specified by OFFSET, SIZE and MAX_SIZE under the
   assumption that RANGE_KNOWN is set.

   The access is assumed to be only to or after the pointer target adjusted
   by the offset, not before it (even in the case RANGE_KNOWN is false).  */

void
ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
				bool range_known,
				poly_int64 offset,
				poly_int64 size,
				poly_int64 max_size)
{
  poly_int64 t, extra_offset = 0;

  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  range_known = false;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset + offset;
  if (range_known)
    {
      ref->max_size = max_size;
      ref->size = size;
    }
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 size_hwi;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    {
      size_hwi = size_hwi * BITS_PER_UNIT;
      ao_ref_init_from_ptr_and_range (ref, ptr, true, 0, size_hwi, size_hwi);
    }
  else
    ao_ref_init_from_ptr_and_range (ref, ptr, false, 0, -1, -1);
}
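/* Usage sketch (illustrative only): a caller that has a pointer DEST and a
   byte-size tree LEN, e.g. the destination of a memory-transfer builtin,
   can build a reference for the oracle as

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, dest, len);

   and then pass &dref to the ao_ref based disambiguators.  DEST and LEN
   are placeholder names for the caller's trees.  */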
/* S1 and S2 are TYPE_SIZE or DECL_SIZE.  Compare them:
   Return -1 if S1 < S2
   Return 1 if S1 > S2
   Return 0 if equal or incomparable.  */

static int
compare_sizes (tree s1, tree s2)
{
  if (!s1 || !s2)
    return 0;

  poly_uint64 size1;
  poly_uint64 size2;

  if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
    return 0;
  if (known_lt (size1, size2))
    return -1;
  if (known_lt (size2, size1))
    return 1;
  return 0;
}
/* Compare TYPE1 and TYPE2 by their sizes.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if types are of equal sizes or we cannot compare them.  */

static int
compare_type_sizes (tree type1, tree type2)
{
  /* Be conservative for arrays and vectors.  We want to support partial
     overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c.  */
  while (TREE_CODE (type1) == ARRAY_TYPE
	 || TREE_CODE (type1) == VECTOR_TYPE)
    type1 = TREE_TYPE (type1);
  while (TREE_CODE (type2) == ARRAY_TYPE
	 || TREE_CODE (type2) == VECTOR_TYPE)
    type2 = TREE_TYPE (type2);
  return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
}
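/* Worked example (illustrative only): comparing int[3] against int strips
   the array layer from the first type, so the call reduces to comparing two
   int sizes and returns 0.  This is what keeps the partial-overlap handling
   for arrays mentioned above conservative.  */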
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */
870 same_type_for_tbaa (tree type1
, tree type2
)
872 type1
= TYPE_MAIN_VARIANT (type1
);
873 type2
= TYPE_MAIN_VARIANT (type2
);
875 /* Handle the most common case first. */
879 /* If we would have to do structural comparison bail out. */
880 if (TYPE_STRUCTURAL_EQUALITY_P (type1
)
881 || TYPE_STRUCTURAL_EQUALITY_P (type2
))
884 /* Compare the canonical types. */
885 if (TYPE_CANONICAL (type1
) == TYPE_CANONICAL (type2
))
888 /* ??? Array types are not properly unified in all cases as we have
889 spurious changes in the index types for example. Removing this
890 causes all sorts of problems with the Fortran frontend. */
891 if (TREE_CODE (type1
) == ARRAY_TYPE
892 && TREE_CODE (type2
) == ARRAY_TYPE
)
895 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
896 object of one of its constrained subtypes, e.g. when a function with an
897 unconstrained parameter passed by reference is called on an object and
898 inlined. But, even in the case of a fixed size, type and subtypes are
899 not equivalent enough as to share the same TYPE_CANONICAL, since this
900 would mean that conversions between them are useless, whereas they are
901 not (e.g. type and subtypes can have different modes). So, in the end,
902 they are only guaranteed to have the same alias set. */
903 alias_set_type set1
= get_alias_set (type1
);
904 alias_set_type set2
= get_alias_set (type2
);
908 /* Pointers to void are considered compatible with all other pointers,
909 so for two pointers see what the alias set resolution thinks. */
910 if (POINTER_TYPE_P (type1
)
911 && POINTER_TYPE_P (type2
)
912 && alias_sets_conflict_p (set1
, set2
))
915 /* The types are known to be not equal. */
/* Return true if TYPE is a composite type (i.e. one of the handled
   components may be applied to it).  */
923 type_has_components_p (tree type
)
925 return AGGREGATE_TYPE_P (type
) || VECTOR_TYPE_P (type
)
926 || TREE_CODE (type
) == COMPLEX_TYPE
;
/* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
   respectively, either point to the same address or are completely
   disjoint.  If PARTIAL_OVERLAP is true, assume that outermost arrays may
   actually overlap by an exact multiple of their element size.

   Try to disambiguate using the access path starting from the match
   and return false if there is no conflict.

   Helper for aliasing_component_refs_p.  */
940 aliasing_matching_component_refs_p (tree match1
, tree ref1
,
941 poly_int64 offset1
, poly_int64 max_size1
,
942 tree match2
, tree ref2
,
943 poly_int64 offset2
, poly_int64 max_size2
,
944 bool partial_overlap
)
946 poly_int64 offadj
, sztmp
, msztmp
;
949 if (!partial_overlap
)
951 get_ref_base_and_extent (match2
, &offadj
, &sztmp
, &msztmp
, &reverse
);
953 get_ref_base_and_extent (match1
, &offadj
, &sztmp
, &msztmp
, &reverse
);
955 if (!ranges_maybe_overlap_p (offset1
, max_size1
, offset2
, max_size2
))
957 ++alias_stats
.aliasing_component_refs_p_no_alias
;
962 int cmp
= nonoverlapping_refs_since_match_p (match1
, ref1
, match2
, ref2
,
965 || (cmp
== -1 && nonoverlapping_component_refs_p (ref1
, ref2
)))
967 ++alias_stats
.aliasing_component_refs_p_no_alias
;
970 ++alias_stats
.aliasing_component_refs_p_may_alias
;
/* Return true if REF is a reference to a zero-sized trailing array, i.e.
     struct foo {int bar; int array[0];} *fooptr;
     fooptr->array.  */
979 component_ref_to_zero_sized_trailing_array_p (tree ref
)
981 return (TREE_CODE (ref
) == COMPONENT_REF
982 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 1))) == ARRAY_TYPE
983 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref
, 1)))
984 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref
, 1)))))
985 && array_at_struct_end_p (ref
));
/* Worker for aliasing_component_refs_p.  Most parameters match parameters of
   aliasing_component_refs_p.

   Walk access path REF2 and try to find a type matching TYPE1
   (which is the start of the possibly aliasing access path REF1).
   If a match is found, try to disambiguate.

   Return 0 for successful disambiguation.
   Return 1 if a match was found but disambiguation failed.
   Return -1 if there is no match.
   In this case MAYBE_MATCH is set to 0 if there is no type matching TYPE1
   in access path REF2 and -1 if we are not sure.  */
1002 aliasing_component_refs_walk (tree ref1
, tree type1
, tree base1
,
1003 poly_int64 offset1
, poly_int64 max_size1
,
1004 tree end_struct_ref1
,
1005 tree ref2
, tree base2
,
1006 poly_int64 offset2
, poly_int64 max_size2
,
      /* We walk from the inner type to the outer types.  If the type we see
	 is already too large to be part of TYPE1, terminate the search.  */
1016 int cmp
= compare_type_sizes (type1
, TREE_TYPE (ref
));
1019 && (!end_struct_ref1
1020 || compare_type_sizes (TREE_TYPE (end_struct_ref1
),
1021 TREE_TYPE (ref
)) < 0))
	  /* If the types may be of the same size, see if we can decide about
	     their equality.  */
1027 same_p
= same_type_for_tbaa (TREE_TYPE (ref
), type1
);
	  /* In case we can't decide whether the types are the same, keep
	     looking for an exact match.
	     Remember, however, that we possibly saw a match, so that we can
	     bypass the access path continuation tests done later.  */
1035 *maybe_match
= true;
1037 if (!handled_component_p (ref
))
1039 ref
= TREE_OPERAND (ref
, 0);
1043 bool partial_overlap
= false;
	  /* We assume that arrays can overlap by a multiple of their element
	     size as tested in gcc.dg/torture/alias-2.c.
	     This partial overlap happens only when both arrays are bases of
	     the access and not contained within another component ref.
	     To be safe we also assume partial overlap for VLAs.  */
1050 if (TREE_CODE (TREE_TYPE (base1
)) == ARRAY_TYPE
1051 && (!TYPE_SIZE (TREE_TYPE (base1
))
1052 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1
))) != INTEGER_CST
1055 /* Setting maybe_match to true triggers
1056 nonoverlapping_component_refs_p test later that still may do
1057 useful disambiguation. */
1058 *maybe_match
= true;
1059 partial_overlap
= true;
1061 return aliasing_matching_component_refs_p (base1
, ref1
,
/* Consider access path1 base1....ref1 and access path2 base2...ref2.
   Return true if they can be composed into a single access path
   base1...ref1...base2...ref2.

   REF_TYPE1 is the type of REF1.  END_STRUCT_PAST_END1 is true if there is
   a trailing array access after REF1 in the non-TBAA part of the access.
   REF1_ALIAS_SET is the alias set of REF1.

   BASE_TYPE2 is the type of BASE2.  END_STRUCT_REF2 is non-NULL if there is
   a trailing array access in the TBAA part of access path2.
   BASE2_ALIAS_SET is the alias set of BASE2.  */
1083 access_path_may_continue_p (tree ref_type1
, bool end_struct_past_end1
,
1084 alias_set_type ref1_alias_set
,
1085 tree base_type2
, tree end_struct_ref2
,
1086 alias_set_type base2_alias_set
)
  /* Access paths cannot continue past types with no components.  */
  if (!type_has_components_p (ref_type1))
    return false;

  /* If the first access path ends in a type too small to hold the base of
     the second access path, the paths typically cannot continue.

     Punt if end_struct_past_end1 is true.  We want to support arbitrary
     type punning past the first COMPONENT_REF to union because redundant
     store elimination depends on this, see PR92152.  For this reason we
     cannot check the size of the reference because types may partially
     overlap.  */
1099 if (!end_struct_past_end1
)
1101 if (compare_type_sizes (ref_type1
, base_type2
) < 0)
      /* If path2 contains a trailing array access we can strengthen the
	 check to verify that the size of the trailing array's element fits
	 too.  In fact we could check for offset + type_size, but we do not
	 track offsets and this is quite a corner case.  */
1108 && compare_type_sizes (ref_type1
, TREE_TYPE (end_struct_ref2
)) < 0)
1111 return (base2_alias_set
== ref1_alias_set
1112 || alias_set_subset_of (base2_alias_set
, ref1_alias_set
));
1115 /* Determine if the two component references REF1 and REF2 which are
1116 based on access types TYPE1 and TYPE2 and of which at least one is based
1117 on an indirect reference may alias.
1118 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
1119 are the respective alias sets. */
1122 aliasing_component_refs_p (tree ref1
,
1123 alias_set_type ref1_alias_set
,
1124 alias_set_type base1_alias_set
,
1125 poly_int64 offset1
, poly_int64 max_size1
,
1127 alias_set_type ref2_alias_set
,
1128 alias_set_type base2_alias_set
,
1129 poly_int64 offset2
, poly_int64 max_size2
)
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
1139 bool maybe_match
= false;
1140 tree end_struct_ref1
= NULL
, end_struct_ref2
= NULL
;
1141 bool end_struct_past_end1
= false;
1142 bool end_struct_past_end2
= false;
  /* Choose bases and base types to search for.
     The access path is as follows:
       base....end_of_tbaa_ref...actual_ref
     At one place in the access path there may be a reference to a zero-sized
     or trailing array.

     We generally discard the segment after end_of_tbaa_ref, however
     we need to be careful in case it contains a zero-sized or trailing
     array.  These may happen after a reference to a union and in this case
     we must not disambiguate type punning scenarios.

     We set:

	base1 to point to base

	ref1 to point to end_of_tbaa_ref

	end_struct_ref1 to point to the trailing reference (if it exists
	in range base....end_of_tbaa_ref)

	end_struct_past_end1 is true if this trailing reference occurs in
	end_of_tbaa_ref...actual_ref.  */
1166 while (handled_component_p (base1
))
      /* Generally access paths are monotone in the size of the object.  The
	 exception are trailing arrays of structures.  I.e.
	    struct a {int array[0];};
	 or
	    struct a {int array1[0]; int array[];};
	 Such a struct has size 0 but accesses to a.array may have non-zero
	 size.  In this case the size of TREE_TYPE (base1) is smaller than
	 the size of TREE_TYPE (TREE_OPERAND (base1, 0)).

	 Because we compare sizes of arrays just by sizes of their elements,
	 we only need to care about zero sized array fields here.  */
1179 if (component_ref_to_zero_sized_trailing_array_p (base1
))
1181 gcc_checking_assert (!end_struct_ref1
);
1182 end_struct_ref1
= base1
;
1184 if (ends_tbaa_access_path_p (base1
))
1186 ref1
= TREE_OPERAND (base1
, 0);
1187 if (end_struct_ref1
)
1189 end_struct_past_end1
= true;
1190 end_struct_ref1
= NULL
;
1193 base1
= TREE_OPERAND (base1
, 0);
1195 type1
= TREE_TYPE (base1
);
1197 while (handled_component_p (base2
))
1199 if (component_ref_to_zero_sized_trailing_array_p (base2
))
1201 gcc_checking_assert (!end_struct_ref2
);
1202 end_struct_ref2
= base2
;
1204 if (ends_tbaa_access_path_p (base2
))
1206 ref2
= TREE_OPERAND (base2
, 0);
1207 if (end_struct_ref2
)
1209 end_struct_past_end2
= true;
1210 end_struct_ref2
= NULL
;
1213 base2
= TREE_OPERAND (base2
, 0);
1215 type2
= TREE_TYPE (base2
);
1217 /* Now search for the type1 in the access path of ref2. This
1218 would be a common base for doing offset based disambiguation on.
1219 This however only makes sense if type2 is big enough to hold type1. */
1220 int cmp_outer
= compare_type_sizes (type2
, type1
);
1222 /* If type2 is big enough to contain type1 walk its access path.
1223 We also need to care of arrays at the end of structs that may extend
1224 beyond the end of structure. If this occurs in the TBAA part of the
1225 access path, we need to consider the increased type as well. */
1228 && compare_type_sizes (TREE_TYPE (end_struct_ref2
), type1
) >= 0))
1230 int res
= aliasing_component_refs_walk (ref1
, type1
, base1
,
1233 ref2
, base2
, offset2
, max_size2
,
1239 /* If we didn't find a common base, try the other way around. */
1242 && compare_type_sizes (TREE_TYPE (end_struct_ref1
), type1
) <= 0))
1244 int res
= aliasing_component_refs_walk (ref2
, type2
, base2
,
1247 ref1
, base1
, offset1
, max_size1
,
  /* In the following code we make an assumption that the types in the access
     paths do not overlap and thus accesses alias only if one path can be
     a continuation of another.  If we were not able to decide about
     equivalence, we need to give up.  */
1259 if (!nonoverlapping_component_refs_p (ref1
, ref2
))
1261 ++alias_stats
.aliasing_component_refs_p_may_alias
;
1264 ++alias_stats
.aliasing_component_refs_p_no_alias
;
1268 if (access_path_may_continue_p (TREE_TYPE (ref1
), end_struct_past_end1
,
1270 type2
, end_struct_ref2
,
1272 || access_path_may_continue_p (TREE_TYPE (ref2
), end_struct_past_end2
,
1274 type1
, end_struct_ref1
,
1277 ++alias_stats
.aliasing_component_refs_p_may_alias
;
1280 ++alias_stats
.aliasing_component_refs_p_no_alias
;
/* FIELD1 and FIELD2 are two fields of component refs.  We assume
   that the bases of both component refs are either equivalent or
   nonoverlapping.  We do not assume that the containers of FIELD1 and
   FIELD2 are of the same type or size.

   Return 0 if, assuming the base addresses of the component refs are
   the same, FIELD1 and FIELD2 have the same address as well.  Note that
   FIELD1 and FIELD2 may not be of the same type or size.

   Return 1 if FIELD1 and FIELD2 are non-overlapping.

   Return -1 otherwise.

   The main difference between 0 and -1 is to let
   nonoverlapping_component_refs_since_match_p discover the semantically
   equivalent part of the access path.

   Note that this function is used even with -fno-strict-aliasing
   and makes use of no TBAA assumptions.  */
1305 nonoverlapping_component_refs_p_1 (const_tree field1
, const_tree field2
)
1307 /* If both fields are of the same type, we could save hard work of
1308 comparing offsets. */
1309 tree type1
= DECL_CONTEXT (field1
);
1310 tree type2
= DECL_CONTEXT (field2
);
1312 if (TREE_CODE (type1
) == RECORD_TYPE
1313 && DECL_BIT_FIELD_REPRESENTATIVE (field1
))
1314 field1
= DECL_BIT_FIELD_REPRESENTATIVE (field1
);
1315 if (TREE_CODE (type2
) == RECORD_TYPE
1316 && DECL_BIT_FIELD_REPRESENTATIVE (field2
))
1317 field2
= DECL_BIT_FIELD_REPRESENTATIVE (field2
);
1319 /* ??? Bitfields can overlap at RTL level so punt on them.
1320 FIXME: RTL expansion should be fixed by adjusting the access path
1321 when producing MEM_ATTRs for MEMs which are wider than
1322 the bitfields similarly as done in set_mem_attrs_minus_bitpos. */
1323 if (DECL_BIT_FIELD (field1
) && DECL_BIT_FIELD (field2
))
1326 /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE. */
1327 if (type1
== type2
&& TREE_CODE (type1
) == RECORD_TYPE
)
1328 return field1
!= field2
;
  /* In the common case the offsets and bit offsets will be the same.
     However if frontends do not agree on the alignment, they may be
     different even if they actually represent the same address.
     Try the common case first and if that fails calculate the
     actual bit offset.  */
1335 if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1
),
1336 DECL_FIELD_OFFSET (field2
))
1337 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1
),
1338 DECL_FIELD_BIT_OFFSET (field2
)))
  /* Note that it may be possible to use component_ref_field_offset
     which would provide offsets as trees.  However constructing and folding
     trees is expensive and does not seem to be worth the compile time
     cost.  */
1346 poly_uint64 offset1
, offset2
;
1347 poly_uint64 bit_offset1
, bit_offset2
;
1349 if (poly_int_tree_p (DECL_FIELD_OFFSET (field1
), &offset1
)
1350 && poly_int_tree_p (DECL_FIELD_OFFSET (field2
), &offset2
)
1351 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1
), &bit_offset1
)
1352 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2
), &bit_offset2
))
1354 offset1
= (offset1
<< LOG2_BITS_PER_UNIT
) + bit_offset1
;
1355 offset2
= (offset2
<< LOG2_BITS_PER_UNIT
) + bit_offset2
;
1357 if (known_eq (offset1
, offset2
))
1360 poly_uint64 size1
, size2
;
1362 if (poly_int_tree_p (DECL_SIZE (field1
), &size1
)
1363 && poly_int_tree_p (DECL_SIZE (field2
), &size2
)
1364 && !ranges_maybe_overlap_p (offset1
, size1
, offset2
, size2
))
1367 /* Resort to slower overlap checking by looking for matching types in
1368 the middle of access path. */
/* Return the low bound of an array ref.  Do not produce new trees
   and thus do not care about the particular type of integer constants
   and placeholder exprs.  */
1377 cheap_array_ref_low_bound (tree ref
)
1379 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref
, 0)));
  /* Avoid expensive array_ref_low_bound.
     The low bound is either stored in operand 2, or it is TYPE_MIN_VALUE of
     the domain type, or it is zero.  */
1384 if (TREE_OPERAND (ref
, 2))
1385 return TREE_OPERAND (ref
, 2);
1386 else if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
1387 return TYPE_MIN_VALUE (domain_type
);
1389 return integer_zero_node
;
1392 /* REF1 and REF2 are ARRAY_REFs with either same base address or which are
1393 completely disjoint.
1395 Return 1 if the refs are non-overlapping.
1396 Return 0 if they are possibly overlapping but if so the overlap again
1397 starts on the same address.
1398 Return -1 otherwise. */
1401 nonoverlapping_array_refs_p (tree ref1
, tree ref2
)
1403 tree index1
= TREE_OPERAND (ref1
, 1);
1404 tree index2
= TREE_OPERAND (ref2
, 1);
1405 tree low_bound1
= cheap_array_ref_low_bound (ref1
);
1406 tree low_bound2
= cheap_array_ref_low_bound (ref2
);
  /* Handle zero offsets first: we do not need to match type size in this
     case.  */
1410 if (operand_equal_p (index1
, low_bound1
, 0)
1411 && operand_equal_p (index2
, low_bound2
, 0))
  /* If type sizes are different, give up.

     Avoid expensive array_ref_element_size.
     If operand 3 is present it denotes the size in alignment units.
     Otherwise the size is TYPE_SIZE of the element type.
     Handle only the common cases where the types are of the same "kind".  */
1420 if ((TREE_OPERAND (ref1
, 3) == NULL
) != (TREE_OPERAND (ref2
, 3) == NULL
))
1423 tree elmt_type1
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1
, 0)));
1424 tree elmt_type2
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2
, 0)));
1426 if (TREE_OPERAND (ref1
, 3))
1428 if (TYPE_ALIGN (elmt_type1
) != TYPE_ALIGN (elmt_type2
)
1429 || !operand_equal_p (TREE_OPERAND (ref1
, 3),
1430 TREE_OPERAND (ref2
, 3), 0))
1435 if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1
),
1436 TYPE_SIZE_UNIT (elmt_type2
), 0))
  /* Since we know that the type sizes are the same, there is no need to
     return -1 after this point.  Partial overlap cannot be introduced.  */

  /* We may need to fold trees in this case.
     TODO: Handle the integer constant case at least.  */
1445 if (!operand_equal_p (low_bound1
, low_bound2
, 0))
1448 if (TREE_CODE (index1
) == INTEGER_CST
&& TREE_CODE (index2
) == INTEGER_CST
)
1450 if (tree_int_cst_equal (index1
, index2
))
1454 /* TODO: We can use VRP to further disambiguate here. */
/* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
   MATCH2 either point to the same address or are disjoint.
   MATCH1 and MATCH2 are assumed to be refs in the access paths of REF1 and
   REF2 respectively, or NULL in the case we established equivalence of
   bases.  If PARTIAL_OVERLAP is true assume that the toplevel arrays may
   actually overlap by an exact multiple of their element size.

   This test works by matching the initial segment of the access path
   and does not rely on TBAA, thus it is safe for !flag_strict_aliasing if
   the match was determined without use of the TBAA oracle.

   Return 1 if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.

   Return 0 if the paths are the same and thus there is nothing more to
   disambiguate (i.e. there is must alias assuming there is must alias
   between MATCH1 and MATCH2).

   Return -1 if we cannot determine 0 or 1; this happens when non-matching
   types were met in the path.
   In this case it may make sense to continue with other disambiguation
   oracles.  */
1482 nonoverlapping_refs_since_match_p (tree match1
, tree ref1
,
1483 tree match2
, tree ref2
,
1484 bool partial_overlap
)
1486 int ntbaa1
= 0, ntbaa2
= 0;
  /* Early return if there are no references to match; we do not need
     to walk the access paths.

     Do not consider this as may-alias for stats - it is more useful
     to have information on how many disambiguations happened provided that
     the query was meaningful.  */
1494 if (match1
== ref1
|| !handled_component_p (ref1
)
1495 || match2
== ref2
|| !handled_component_p (ref2
))
1498 auto_vec
<tree
, 16> component_refs1
;
1499 auto_vec
<tree
, 16> component_refs2
;
1501 /* Create the stack of handled components for REF1. */
1502 while (handled_component_p (ref1
) && ref1
!= match1
)
1504 /* We use TBAA only to re-synchronize after mismatched refs. So we
1505 do not need to truncate access path after TBAA part ends. */
1506 if (ends_tbaa_access_path_p (ref1
))
1510 component_refs1
.safe_push (ref1
);
1511 ref1
= TREE_OPERAND (ref1
, 0);
1514 /* Create the stack of handled components for REF2. */
1515 while (handled_component_p (ref2
) && ref2
!= match2
)
1517 if (ends_tbaa_access_path_p (ref2
))
1521 component_refs2
.safe_push (ref2
);
1522 ref2
= TREE_OPERAND (ref2
, 0);
1525 if (!flag_strict_aliasing
)
1531 bool mem_ref1
= TREE_CODE (ref1
) == MEM_REF
&& ref1
!= match1
;
1532 bool mem_ref2
= TREE_CODE (ref2
) == MEM_REF
&& ref2
!= match2
;
  /* If only one of the access paths starts with a MEM_REF check that the
     offset is 0 so the addresses stay the same after stripping it.
     TODO: In this case we may walk the other access path until we get to
     the same offset.

     If both start with a MEM_REF, the offsets have to be the same.  */
1540 if ((mem_ref1
&& !mem_ref2
&& !integer_zerop (TREE_OPERAND (ref1
, 1)))
1541 || (mem_ref2
&& !mem_ref1
&& !integer_zerop (TREE_OPERAND (ref2
, 1)))
1542 || (mem_ref1
&& mem_ref2
1543 && !tree_int_cst_equal (TREE_OPERAND (ref1
, 1),
1544 TREE_OPERAND (ref2
, 1))))
1546 ++alias_stats
.nonoverlapping_refs_since_match_p_may_alias
;
1550 /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
1551 to handle them here at all. */
1552 gcc_checking_assert (TREE_CODE (ref1
) != TARGET_MEM_REF
1553 && TREE_CODE (ref2
) != TARGET_MEM_REF
);
1555 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1556 rank. This is sufficient because we start from the same DECL and you
1557 cannot reference several fields at a time with COMPONENT_REFs (unlike
1558 with ARRAY_RANGE_REFs for arrays) so you always need the same number
1559 of them to access a sub-component, unless you're in a union, in which
1560 case the return value will precisely be false. */
  /* Track whether we have seen an unmatched ref with a non-zero offset.  In
     this case we must look for partial overlaps.  */
1565 bool seen_unmatched_ref_p
= false;
  /* First match ARRAY_REFs and try to disambiguate.  */
1568 if (!component_refs1
.is_empty ()
1569 && !component_refs2
.is_empty ())
1571 unsigned int narray_refs1
=0, narray_refs2
=0;
      /* We generally assume that both access paths start with the same
	 sequence of refs.  However if the number of array refs is not in
	 sync, try to recover and pop elts until the numbers match.  This
	 helps the case where one access path starts with an array and the
	 other with an element.  */
1577 for (narray_refs1
= 0; narray_refs1
< component_refs1
.length ();
1579 if (TREE_CODE (component_refs1
[component_refs1
.length()
1580 - 1 - narray_refs1
]) != ARRAY_REF
)
1583 for (narray_refs2
= 0; narray_refs2
< component_refs2
.length ();
1585 if (TREE_CODE (component_refs2
[component_refs2
.length()
1586 - 1 - narray_refs2
]) != ARRAY_REF
)
1588 for (; narray_refs1
> narray_refs2
; narray_refs1
--)
1590 ref1
= component_refs1
.pop ();
	  /* If the index is non-zero we need to check whether the reference
	     does not break the main invariant that bases are either
	     disjoint or equal.  Consider the example:

		unsigned char out[][1];

	     Here the bases out and out are the same, but after removing the
	     [i] index, this invariant no longer holds, because
	     out[i] points to the middle of array out.

	     TODO: If the size of the type of the skipped reference is an
	     integer multiple of the size of the type of the other reference
	     this invariant can be verified, but even then it is not
	     completely safe with !flag_strict_aliasing if the other
	     reference contains unbounded array accesses.  */
1612 if (!operand_equal_p (TREE_OPERAND (ref1
, 1),
1613 cheap_array_ref_low_bound (ref1
), 0))
1616 for (; narray_refs2
> narray_refs1
; narray_refs2
--)
1618 ref2
= component_refs2
.pop ();
1620 if (!operand_equal_p (TREE_OPERAND (ref2
, 1),
1621 cheap_array_ref_low_bound (ref2
), 0))
1624 /* Try to disambiguate matched arrays. */
1625 for (unsigned int i
= 0; i
< narray_refs1
; i
++)
1627 int cmp
= nonoverlapping_array_refs_p (component_refs1
.pop (),
1628 component_refs2
.pop ());
1631 if (cmp
== 1 && !partial_overlap
)
1634 .nonoverlapping_refs_since_match_p_no_alias
;
1639 seen_unmatched_ref_p
= true;
	  /* We cannot maintain the invariant that bases are either
	     the same or completely disjoint.  However we can still recover
	     from type based alias analysis if we reach references of
	     the same size.  We do not attempt to match array sizes, so
	     just finish array walking and look for component refs.  */
1645 if (ntbaa1
< 0 || ntbaa2
< 0)
1647 ++alias_stats
.nonoverlapping_refs_since_match_p_may_alias
;
1650 for (i
++; i
< narray_refs1
; i
++)
1652 component_refs1
.pop ();
1653 component_refs2
.pop ();
1659 partial_overlap
= false;
1663 /* Next look for component_refs. */
1666 if (component_refs1
.is_empty ())
1669 .nonoverlapping_refs_since_match_p_must_overlap
;
1672 ref1
= component_refs1
.pop ();
1674 if (TREE_CODE (ref1
) != COMPONENT_REF
)
1676 seen_unmatched_ref_p
= true;
1677 if (ntbaa1
< 0 || ntbaa2
< 0)
1679 ++alias_stats
.nonoverlapping_refs_since_match_p_may_alias
;
1684 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1
, 0))));
1688 if (component_refs2
.is_empty ())
1691 .nonoverlapping_refs_since_match_p_must_overlap
;
1694 ref2
= component_refs2
.pop ();
1696 if (TREE_CODE (ref2
) != COMPONENT_REF
)
1698 if (ntbaa1
< 0 || ntbaa2
< 0)
1700 ++alias_stats
.nonoverlapping_refs_since_match_p_may_alias
;
1703 seen_unmatched_ref_p
= true;
1706 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2
, 0))));
1708 /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
1710 gcc_checking_assert (TREE_CODE (ref1
) == COMPONENT_REF
1711 && TREE_CODE (ref2
) == COMPONENT_REF
);
1713 tree field1
= TREE_OPERAND (ref1
, 1);
1714 tree field2
= TREE_OPERAND (ref2
, 1);
1716 /* ??? We cannot simply use the type of operand #0 of the refs here
1717 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1718 for common blocks instead of using unions like everyone else. */
1719 tree type1
= DECL_CONTEXT (field1
);
1720 tree type2
= DECL_CONTEXT (field2
);
1722 partial_overlap
= false;
      /* If we skipped array refs on types of different sizes, we can
	 no longer be sure that there are no partial overlaps.  */
1726 if (seen_unmatched_ref_p
&& ntbaa1
>= 0 && ntbaa2
>= 0
1727 && !operand_equal_p (TYPE_SIZE (type1
), TYPE_SIZE (type2
), 0))
1730 .nonoverlapping_refs_since_match_p_may_alias
;
1734 int cmp
= nonoverlapping_component_refs_p_1 (field1
, field2
);
1738 .nonoverlapping_refs_since_match_p_may_alias
;
1744 .nonoverlapping_refs_since_match_p_no_alias
;
1749 ++alias_stats
.nonoverlapping_refs_since_match_p_must_overlap
;
/* Return TYPE_UID which can be used to match record types we consider
   the same for TBAA purposes.  */
1757 ncr_type_uid (const_tree field
)
1759 /* ??? We cannot simply use the type of operand #0 of the refs here
1760 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1761 for common blocks instead of using unions like everyone else. */
1762 tree type
= DECL_FIELD_CONTEXT (field
);
1763 /* With LTO types considered same_type_for_tbaa_p
1764 from different translation unit may not have same
1765 main variant. They however have same TYPE_CANONICAL. */
1766 if (TYPE_CANONICAL (type
))
1767 return TYPE_UID (TYPE_CANONICAL (type
));
1768 return TYPE_UID (type
);
1771 /* qsort compare function to sort FIELD_DECLs after their
1772 DECL_FIELD_CONTEXT TYPE_UID. */
1775 ncr_compar (const void *field1_
, const void *field2_
)
1777 const_tree field1
= *(const_tree
*) const_cast <void *>(field1_
);
1778 const_tree field2
= *(const_tree
*) const_cast <void *>(field2_
);
1779 unsigned int uid1
= ncr_type_uid (field1
);
1780 unsigned int uid2
= ncr_type_uid (field2
);
1784 else if (uid1
> uid2
)
1789 /* Return true if we can determine that the fields referenced cannot
1790 overlap for any pair of objects. This relies on TBAA. */
1793 nonoverlapping_component_refs_p (const_tree x
, const_tree y
)
  /* Early return if we have nothing to do.

     Do not consider this as may-alias for stats - it is more useful
     to have information on how many disambiguations happened provided that
     the query was meaningful.  */
1800 if (!flag_strict_aliasing
1802 || !handled_component_p (x
)
1803 || !handled_component_p (y
))
1806 auto_vec
<const_tree
, 16> fieldsx
;
1807 while (handled_component_p (x
))
1809 if (TREE_CODE (x
) == COMPONENT_REF
)
1811 tree field
= TREE_OPERAND (x
, 1);
1812 tree type
= DECL_FIELD_CONTEXT (field
);
1813 if (TREE_CODE (type
) == RECORD_TYPE
)
1814 fieldsx
.safe_push (field
);
1816 else if (ends_tbaa_access_path_p (x
))
1817 fieldsx
.truncate (0);
1818 x
= TREE_OPERAND (x
, 0);
1820 if (fieldsx
.length () == 0)
1822 auto_vec
<const_tree
, 16> fieldsy
;
1823 while (handled_component_p (y
))
1825 if (TREE_CODE (y
) == COMPONENT_REF
)
1827 tree field
= TREE_OPERAND (y
, 1);
1828 tree type
= DECL_FIELD_CONTEXT (field
);
1829 if (TREE_CODE (type
) == RECORD_TYPE
)
1830 fieldsy
.safe_push (TREE_OPERAND (y
, 1));
1832 else if (ends_tbaa_access_path_p (y
))
1833 fieldsy
.truncate (0);
1834 y
= TREE_OPERAND (y
, 0);
1836 if (fieldsy
.length () == 0)
1838 ++alias_stats
.nonoverlapping_component_refs_p_may_alias
;
1842 /* Most common case first. */
1843 if (fieldsx
.length () == 1
1844 && fieldsy
.length () == 1)
1846 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx
[0]),
1847 DECL_FIELD_CONTEXT (fieldsy
[0])) == 1
1848 && nonoverlapping_component_refs_p_1 (fieldsx
[0], fieldsy
[0]) == 1)
1850 ++alias_stats
.nonoverlapping_component_refs_p_no_alias
;
1855 ++alias_stats
.nonoverlapping_component_refs_p_may_alias
;
1860 if (fieldsx
.length () == 2)
1862 if (ncr_compar (&fieldsx
[0], &fieldsx
[1]) == 1)
1863 std::swap (fieldsx
[0], fieldsx
[1]);
1866 fieldsx
.qsort (ncr_compar
);
1868 if (fieldsy
.length () == 2)
1870 if (ncr_compar (&fieldsy
[0], &fieldsy
[1]) == 1)
1871 std::swap (fieldsy
[0], fieldsy
[1]);
1874 fieldsy
.qsort (ncr_compar
);
1876 unsigned i
= 0, j
= 0;
1879 const_tree fieldx
= fieldsx
[i
];
1880 const_tree fieldy
= fieldsy
[j
];
1882 /* We're left with accessing different fields of a structure,
1883 no possible overlap. */
1884 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx
),
1885 DECL_FIELD_CONTEXT (fieldy
)) == 1
1886 && nonoverlapping_component_refs_p_1 (fieldx
, fieldy
) == 1)
1888 ++alias_stats
.nonoverlapping_component_refs_p_no_alias
;
1892 if (ncr_type_uid (fieldx
) < ncr_type_uid (fieldy
))
1895 if (i
== fieldsx
.length ())
1901 if (j
== fieldsy
.length ())
1907 ++alias_stats
.nonoverlapping_component_refs_p_may_alias
;
1912 /* Return true if two memory references based on the variables BASE1
1913 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1914 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1915 if non-NULL are the complete memory reference trees. */
1918 decl_refs_may_alias_p (tree ref1
, tree base1
,
1919 poly_int64 offset1
, poly_int64 max_size1
,
1921 tree ref2
, tree base2
,
1922 poly_int64 offset2
, poly_int64 max_size2
,
1925 gcc_checking_assert (DECL_P (base1
) && DECL_P (base2
));
1927 /* If both references are based on different variables, they cannot alias. */
1928 if (compare_base_decls (base1
, base2
) == 0)
1931 /* If both references are based on the same variable, they cannot alias if
1932 the accesses do not overlap. */
1933 if (!ranges_maybe_overlap_p (offset1
, max_size1
, offset2
, max_size2
))
1936 /* If there is must alias, there is no use disambiguating further. */
1937 if (known_eq (size1
, max_size1
) && known_eq (size2
, max_size2
))
1940 /* For components with variable position, the above test isn't sufficient,
1941 so we disambiguate component references manually. */
1943 && handled_component_p (ref1
) && handled_component_p (ref2
)
1944 && nonoverlapping_refs_since_match_p (NULL
, ref1
, NULL
, ref2
, false) == 1)
1950 /* Return true if an indirect reference based on *PTR1 constrained
1951 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1952 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
1953 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1954 in which case they are computed on-demand. REF1 and REF2
1955 if non-NULL are the complete memory reference trees. */
1958 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED
, tree base1
,
1959 poly_int64 offset1
, poly_int64 max_size1
,
1961 alias_set_type ref1_alias_set
,
1962 alias_set_type base1_alias_set
,
1963 tree ref2 ATTRIBUTE_UNUSED
, tree base2
,
1964 poly_int64 offset2
, poly_int64 max_size2
,
1966 alias_set_type ref2_alias_set
,
1967 alias_set_type base2_alias_set
, bool tbaa_p
)
1970 tree ptrtype1
, dbase2
;
1972 gcc_checking_assert ((TREE_CODE (base1
) == MEM_REF
1973 || TREE_CODE (base1
) == TARGET_MEM_REF
)
1976 ptr1
= TREE_OPERAND (base1
, 0);
1977 poly_offset_int moff
= mem_ref_offset (base1
) << LOG2_BITS_PER_UNIT
;
  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access
     (the pointer base cannot validly point to an offset less than zero
     of the decl).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
1985 if (TREE_CODE (base1
) != TARGET_MEM_REF
1986 && !ranges_maybe_overlap_p (offset1
+ moff
, -1, offset2
, max_size2
))
1988 /* They also cannot alias if the pointer may not point to the decl. */
1989 if (!ptr_deref_may_alias_decl_p (ptr1
, base2
))
1992 /* Disambiguations that rely on strict aliasing rules follow. */
1993 if (!flag_strict_aliasing
|| !tbaa_p
)
1996 /* If the alias set for a pointer access is zero all bets are off. */
1997 if (base1_alias_set
== 0 || base2_alias_set
== 0)
2000 /* When we are trying to disambiguate an access with a pointer dereference
2001 as base versus one with a decl as base we can use both the size
2002 of the decl and its dynamic type for extra disambiguation.
2003 ??? We do not know anything about the dynamic type of the decl
2004 other than that its alias-set contains base2_alias_set as a subset
2005 which does not help us here. */
2006 /* As we know nothing useful about the dynamic type of the decl just
2007 use the usual conflict check rather than a subset test.
2008 ??? We could introduce -fvery-strict-aliasing when the language
2009 does not allow decls to have a dynamic type that differs from their
2010 static type. Then we can check
2011 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
2012 if (base1_alias_set
!= base2_alias_set
2013 && !alias_sets_conflict_p (base1_alias_set
, base2_alias_set
))
2016 ptrtype1
= TREE_TYPE (TREE_OPERAND (base1
, 1));
2018 /* If the size of the access relevant for TBAA through the pointer
2019 is bigger than the size of the decl we can't possibly access the
2020 decl via that pointer. */
2021 if (/* ??? This in turn may run afoul when a decl of type T which is
2022 a member of union type U is accessed through a pointer to
2023 type U and sizeof T is smaller than sizeof U. */
2024 TREE_CODE (TREE_TYPE (ptrtype1
)) != UNION_TYPE
2025 && TREE_CODE (TREE_TYPE (ptrtype1
)) != QUAL_UNION_TYPE
2026 && compare_sizes (DECL_SIZE (base2
),
2027 TYPE_SIZE (TREE_TYPE (ptrtype1
))) < 0)
2033 /* If the decl is accessed via a MEM_REF, reconstruct the base
2034 we can use for TBAA and an appropriately adjusted offset. */
2036 while (handled_component_p (dbase2
))
2037 dbase2
= TREE_OPERAND (dbase2
, 0);
2038 poly_int64 doffset1
= offset1
;
2039 poly_offset_int doffset2
= offset2
;
2040 if (TREE_CODE (dbase2
) == MEM_REF
2041 || TREE_CODE (dbase2
) == TARGET_MEM_REF
)
2043 doffset2
-= mem_ref_offset (dbase2
) << LOG2_BITS_PER_UNIT
;
2044 tree ptrtype2
= TREE_TYPE (TREE_OPERAND (dbase2
, 1));
2045 /* If second reference is view-converted, give up now. */
2046 if (same_type_for_tbaa (TREE_TYPE (dbase2
), TREE_TYPE (ptrtype2
)) != 1)
2050 /* If first reference is view-converted, give up now. */
2051 if (same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (ptrtype1
)) != 1)
2054 /* If both references are through the same type, they do not alias
2055 if the accesses do not overlap. This does extra disambiguation
2056 for mixed/pointer accesses but requires strict aliasing.
2057 For MEM_REFs we require that the component-ref offset we computed
2058 is relative to the start of the type which we ensure by
2059 comparing rvalue and access type and disregarding the constant
2062 But avoid treating variable length arrays as "objects", instead assume they
2063 can overlap by an exact multiple of their element size.
2064 See gcc.dg/torture/alias-2.c. */
2065 if (((TREE_CODE (base1
) != TARGET_MEM_REF
2066 || (!TMR_INDEX (base1
) && !TMR_INDEX2 (base1
)))
2067 && (TREE_CODE (dbase2
) != TARGET_MEM_REF
2068 || (!TMR_INDEX (dbase2
) && !TMR_INDEX2 (dbase2
))))
2069 && same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (dbase2
)) == 1)
2071 bool partial_overlap
= (TREE_CODE (TREE_TYPE (base1
)) == ARRAY_TYPE
2072 && (TYPE_SIZE (TREE_TYPE (base1
))
2073 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1
)))
2075 if (!partial_overlap
2076 && !ranges_maybe_overlap_p (doffset1
, max_size1
, doffset2
, max_size2
))
2079 /* If there is must alias, there is no use disambiguating further. */
2080 || (!partial_overlap
2081 && known_eq (size1
, max_size1
) && known_eq (size2
, max_size2
)))
2083 int res
= nonoverlapping_refs_since_match_p (base1
, ref1
, base2
, ref2
,
2086 return !nonoverlapping_component_refs_p (ref1
, ref2
);
2090 /* Do access-path based disambiguation. */
2092 && (handled_component_p (ref1
) || handled_component_p (ref2
)))
2093 return aliasing_component_refs_p (ref1
,
2094 ref1_alias_set
, base1_alias_set
,
2097 ref2_alias_set
, base2_alias_set
,
2098 offset2
, max_size2
);

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   poly_int64 offset1, poly_int64 max_size1,
			   poly_int64 size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   poly_int64 offset2, poly_int64 max_size2,
			   poly_int64 size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
      poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
      if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
				   offset2 + moff2, max_size2))
	return false;
      /* If there is must alias, there is no use disambiguating further.  */
      if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
	return true;
      if (ref1 && ref2)
	{
	  int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
						       false);
	  if (res != -1)
	    return !res;
	}
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0
      || base2_alias_set == 0)
    return true;

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1)
    {
      /* But avoid treating arrays as "objects", instead assume they
	 can overlap by an exact multiple of their element size.
	 See gcc.dg/torture/alias-2.c.  */
      bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;

      if (!partial_overlap
	  && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	return false;
      if (!ref1 || !ref2
	  || (!partial_overlap
	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
	return true;
      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
						   partial_overlap);
      if (res == -1)
	return !nonoverlapping_component_refs_p (ref1, ref2);
      return !res;
    }

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2);

  return true;
}
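
/* Illustrative sketch: two accesses through the same SSA pointer, such as

     MEM[p_1, 0]   with a 4-byte access, range [0, 32)
     MEM[p_1, 4]   with a 4-byte access, range [32, 64)

   have disjoint ranges once the constant MEM_REF offsets are folded in,
   so they do not alias no matter what p_1 points to.  The bit ranges
   shown assume a 4-byte access type.  */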

/* Return true, if the two memory references REF1 and REF2 may alias.  */

static bool
refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  poly_int64 offset1 = 0, offset2 = 0;
  poly_int64 max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before defering to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref1->size,
				  ref2->ref, base2, offset2, max_size2,
				  ref2->size);

  /* Handle restrict based accesses.
     ???  ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1->ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2->ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
      && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique... */
      && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      std::swap (offset1, offset2);
      std::swap (max_size1, max_size2);
      std::swap (base1, base2);
      std::swap (ref1, ref2);
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* If the reference is based on a pointer that points to memory
     that may not be written to then the other reference cannot possibly
     clobber it.  */
  if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
       && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
      || (ind1_p
	  && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
	  && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2, ref2->size,
					  ao_ref_alias_set (ref2),
					  ao_ref_base_alias_set (ref2),
					  ref1->ref, base1,
					  offset1, max_size1, ref1->size,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1, ref1->size,
				      ao_ref_alias_set (ref1),
				      ao_ref_base_alias_set (ref1),
				      ref2->ref, base2,
				      offset2, max_size2, ref2->size,
				      ao_ref_alias_set (ref2),
				      ao_ref_base_alias_set (ref2),
				      tbaa_p);

  gcc_unreachable ();
}

/* Return true, if the two memory references REF1 and REF2 may alias
   and update statistics.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}

bool
refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
{
  ao_ref r1;
  ao_ref_init (&r1, ref1);
  return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
}

bool
refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
}
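
/* Typical use from an optimization pass (illustrative sketch only):

     ao_ref r1, r2;
     ao_ref_init (&r1, lhs);
     ao_ref_init (&r2, rhs);
     if (!refs_may_alias_p_1 (&r1, &r2, true))
       ... the two accesses are independent ...

   Passing false for TBAA_P restricts the query to disambiguations that
   do not rely on type-based rules, as done for the anti- and output
   dependence queries below.  */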

/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if and only if REF may alias any access stored in TT.
   If TBAA_P is true, use the TBAA oracle.  */

static bool
modref_may_conflict (const gimple *stmt,
		     modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
{
  alias_set_type base_set, ref_set;
  modref_base_node <alias_set_type> *base_node;
  modref_ref_node <alias_set_type> *ref_node;
  size_t i, j, k;

  if (tt->every_base)
    return true;

  base_set = ao_ref_base_alias_set (ref);

  ref_set = ao_ref_alias_set (ref);

  int num_tests = 0, max_tests = param_modref_max_tests;
  FOR_EACH_VEC_SAFE_ELT (tt->bases, i, base_node)
    {
      if (tbaa_p && flag_strict_aliasing)
	{
	  if (num_tests >= max_tests)
	    return true;
	  alias_stats.modref_tests++;
	  if (!alias_sets_conflict_p (base_set, base_node->base))
	    continue;
	  num_tests++;
	}

      if (base_node->every_ref)
	return true;

      FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	{
	  /* Do not repeat same test as before.  */
	  if ((ref_set != base_set || base_node->base != ref_node->ref)
	      && tbaa_p && flag_strict_aliasing)
	    {
	      if (num_tests >= max_tests)
		return true;
	      alias_stats.modref_tests++;
	      if (!alias_sets_conflict_p (ref_set, ref_node->ref))
		continue;
	      num_tests++;
	    }

	  /* TBAA checks did not disambiguate, try to use base pointer, for
	     that we however need to have ref->ref.  */
	  if (ref_node->every_access || !ref->ref)
	    return true;

	  modref_access_node *access_node;
	  FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
	    {
	      if (num_tests >= max_tests)
		return true;

	      if (access_node->parm_index == -1
		  || (unsigned)access_node->parm_index
		     >= gimple_call_num_args (stmt))
		return true;

	      alias_stats.modref_baseptr_tests++;

	      if (ptr_deref_may_alias_ref_p_1
		    (gimple_call_arg (stmt, access_node->parm_index), ref))
		return true;
	      num_tests++;
	    }
	}
    }
  return false;
}

/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  callee = gimple_call_fndecl (call);

  if (!gimple_call_chain (call) && callee != NULL_TREE)
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      /* We can not safely optimize based on summary of calle if it does
	 not always bind to current def: it is possible that memory load
	 was optimized out earlier and the interposed variant may not be
	 optimized this way.  */
      if (node && node->binds_to_current_def_p ())
	{
	  modref_summary *summary = get_modref_function_summary (node);
	  if (summary)
	    {
	      if (!modref_may_conflict (call, summary->loads, ref, tbaa_p))
		{
		  alias_stats.modref_use_no_alias++;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "ipa-modref: in %s,"
			       " call to %s does not use ",
			       cgraph_node::get
				 (current_function_decl)->dump_name (),
			       node->dump_name ());
		      print_generic_expr (dump_file, ref->ref);
		      fprintf (dump_file, " %i->%i\n",
			       ao_ref_base_alias_set (ref),
			       ao_ref_alias_set (ref));
		    }
		  goto process_args;
		}
	      alias_stats.modref_use_may_alias++;
	    }
	}
    }

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions read memory pointed to by
	   their second argument.  strcat/strncat additionally
	   reads memory pointed to by the first argument.  */
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 3)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 4)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following functions read memory pointed to by their
	   first argument.  */
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	case BUILT_IN_REALLOC:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 2)
	      size = gimple_call_arg (call, 1);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_INDEX:
	case BUILT_IN_STRCHR:
	case BUILT_IN_STRRCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument with size
	   in the third argument.  */
	case BUILT_IN_MEMCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   gimple_call_arg (call, 2));
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first and second arguments.  */
	case BUILT_IN_STRSTR:
	case BUILT_IN_STRPBRK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following builtins do not read from memory.  */
	case BUILT_IN_FREE:
	case BUILT_IN_MALLOC:
	case BUILT_IN_POSIX_MEMALIGN:
	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	CASE_BUILT_IN_ALLOCA:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	case BUILT_IN_ASSUME_ALIGNED:
	case BUILT_IN_VA_END:
	  return false;
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;

	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap read;
      int id;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in the
	 IL and remove this check instead.  */
      if (node
	  && (id = ipa_reference_var_uid (base)) != -1
	  && (read = ipa_reference_get_read_global (node))
	  && !bitmap_bit_p (read, id))
	goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      if (flags & EAF_UNUSED)
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, tbaa_p))
	    return true;
	}
    }

  return false;
}

static bool
ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
{
  bool res;
  res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
  if (res)
    ++alias_stats.ref_maybe_used_by_call_p_may_alias;
  else
    ++alias_stats.ref_maybe_used_by_call_p_no_alias;
  return res;
}

/* If the statement STMT may use the memory reference REF return
   true, otherwise return false.  */

bool
ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
{
  if (is_gimple_assign (stmt))
    {
      tree rhs;

      /* All memory assign statements are single.  */
      if (!gimple_assign_single_p (stmt))
	return false;

      rhs = gimple_assign_rhs1 (stmt);
      if (is_gimple_reg (rhs)
	  || is_gimple_min_invariant (rhs)
	  || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
	return false;

      return refs_may_alias_p (rhs, ref, tbaa_p);
    }
  else if (is_gimple_call (stmt))
    return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    {
      tree retval = gimple_return_retval (return_stmt);
      if (retval
	  && TREE_CODE (retval) != SSA_NAME
	  && !is_gimple_min_invariant (retval)
	  && refs_may_alias_p (retval, ref, tbaa_p))
	return true;
      /* If ref escapes the function then the return acts as a use.  */
      tree base = ao_ref_base (ref);
      if (!base)
	;
      else if (DECL_P (base))
	return is_global_var (base);
      else if (TREE_CODE (base) == MEM_REF
	       || TREE_CODE (base) == TARGET_MEM_REF)
	return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
      return false;
    }

  return true;
}

bool
ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
}

/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
{
  tree base;
  tree callee;

  /* If the call is pure or const it cannot clobber anything.  */
  if (gimple_call_flags (call)
      & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
    return false;
  if (gimple_call_internal_p (call))
    switch (gimple_call_internal_fn (call))
      {
	/* Treat these internal calls like ECF_PURE for aliasing,
	   they don't write to any memory the program should care about.
	   They have important other side-effects, and read memory,
	   so can't be ECF_NOVOPS.  */
      case IFN_UBSAN_NULL:
      case IFN_UBSAN_BOUNDS:
      case IFN_UBSAN_VPTR:
      case IFN_UBSAN_OBJECT_SIZE:
      case IFN_UBSAN_PTR:
      case IFN_ASAN_CHECK:
	return false;
      default:
	break;
      }

  callee = gimple_call_fndecl (call);

  if (callee != NULL_TREE && !ref->volatile_p)
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      if (node && node->binds_to_current_def_p ())
	{
	  modref_summary *summary = get_modref_function_summary (node);
	  if (summary)
	    {
	      if (!modref_may_conflict (call, summary->stores, ref, tbaa_p))
		{
		  alias_stats.modref_clobber_no_alias++;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       "ipa-modref: in %s, "
			       "call to %s does not clobber ",
			       cgraph_node::get
				 (current_function_decl)->dump_name (),
			       node->dump_name ());
		      print_generic_expr (dump_file, ref->ref);
		      fprintf (dump_file, " %i->%i\n",
			       ao_ref_base_alias_set (ref),
			       ao_ref_alias_set (ref));
		    }
		  return false;
		}
	      alias_stats.modref_clobber_may_alias++;
	    }
	}
    }

  base = ao_ref_base (ref);
  if (!base)
    return true;

  if (TREE_CODE (base) == SSA_NAME
      || CONSTANT_CLASS_P (base))
    return false;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly clobber it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local non-readonly statics can be modified through recursion
	 or the call may implement a threading barrier which we must
	 treat as may-def.  */
      && (TREE_READONLY (base)
	  || !is_global_var (base)))
    return false;

  /* If the reference is based on a pointer that points to memory
     that may not be written to then the call cannot possibly clobber it.  */
  if ((TREE_CODE (base) == MEM_REF
       || TREE_CODE (base) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
      && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
    return false;

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions clobber memory pointed to by
	   their first argument.  */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Don't pass in size for strncat, as the maximum size
	       is strlen (dest) + n + 1 instead of n, resp.
	       n + 1 at dest + strlen (dest), but strlen (dest) isn't
	       known.  */
	    if (gimple_call_num_args (call) == 3
		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	case BUILT_IN_MEMSET_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Don't pass in size for __strncat_chk, as the maximum size
	       is strlen (dest) + n + 1 instead of n, resp.
	       n + 1 at dest + strlen (dest), but strlen (dest) isn't
	       known.  */
	    if (gimple_call_num_args (call) == 4
		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* Allocating memory does not have any side-effects apart from
	   being the definition point for the pointer.  */
	case BUILT_IN_MALLOC:
	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	  /* Unix98 specifies that errno is set on allocation failure.  */
	  if (flag_errno_math
	      && targetm.ref_may_alias_errno (ref))
	    return true;
	  return false;
	case BUILT_IN_STACK_SAVE:
	CASE_BUILT_IN_ALLOCA:
	case BUILT_IN_ASSUME_ALIGNED:
	  return false;
	/* But posix_memalign stores a pointer into the memory pointed to
	   by its first argument.  */
	case BUILT_IN_POSIX_MEMALIGN:
	  {
	    tree ptrptr = gimple_call_arg (call, 0);
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref, ptrptr,
					   TYPE_SIZE_UNIT (ptr_type_node));
	    return (refs_may_alias_p_1 (&dref, ref, false)
		    || (flag_errno_math
			&& targetm.ref_may_alias_errno (ref)));
	  }
	/* Freeing memory kills the pointed-to memory.  More importantly
	   the call has to serve as a barrier for moving loads and stores
	   across it.  */
	case BUILT_IN_FREE:
	case BUILT_IN_VA_END:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    return ptr_deref_may_alias_ref_p_1 (ptr, ref);
	  }
	/* Realloc serves both as allocation point and deallocation point.  */
	case BUILT_IN_REALLOC:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    /* Unix98 specifies that errno is set on allocation failure.  */
	    return ((flag_errno_math
		     && targetm.ref_may_alias_errno (ref))
		    || ptr_deref_may_alias_ref_p_1 (ptr, ref));
	  }
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	  {
	    tree out = gimple_call_arg (call, 1);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	  {
	    tree out = gimple_call_arg (call, 1);
	    return ptr_deref_may_alias_ref_p_1 (out, ref);
	  }
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	  {
	    tree out = gimple_call_arg (call, 2);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	  {
	    tree sin = gimple_call_arg (call, 1);
	    tree cos = gimple_call_arg (call, 2);
	    return (ptr_deref_may_alias_ref_p_1 (sin, ref)
		    || ptr_deref_may_alias_ref_p_1 (cos, ref));
	  }
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;
	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not written
     by the function.  */
  if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap written;
      int id;

      if (node
	  && (id = ipa_reference_var_uid (base)) != -1
	  && (written = ipa_reference_get_written_global (node))
	  && !bitmap_bit_p (written, id))
	return false;
    }

  /* Check if the base variable is call-clobbered.  */
  if (DECL_P (base))
    return pt_solution_includes (gimple_call_clobber_set (call), base);
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    }

  return true;
}

/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p (gcall *call, tree ref)
{
  bool res;
  ao_ref r;
  ao_ref_init (&r, ref);
  res = call_may_clobber_ref_p_1 (call, &r, true);
  if (res)
    ++alias_stats.call_may_clobber_ref_p_may_alias;
  else
    ++alias_stats.call_may_clobber_ref_p_no_alias;
  return res;
}

/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  */

bool
stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
{
  if (is_gimple_call (stmt))
    {
      tree lhs = gimple_call_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  if (refs_may_alias_p_1 (ref, &r, tbaa_p))
	    return true;
	}

      return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref, tbaa_p);
    }
  else if (gimple_assign_single_p (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  return refs_may_alias_p_1 (ref, &r, tbaa_p);
	}
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return true;

  return false;
}

bool
stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
}
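
/* Illustrative sketch: given

     *p_1 = x_2;    clobbers anything p_1 may point to
     bar ();        clobbers per call_may_clobber_ref_p_1
     y_3 = z_4;     scalar SSA copy, clobbers nothing

   stmt_may_clobber_ref_p answers the question per statement and is
   the workhorse behind the virtual use-def chain walkers below.  */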

/* Return true if store1 and store2 described by corresponding tuples
   <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the
   same address.  */

static bool
same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
			 poly_int64 max_size1,
			 tree base2, poly_int64 offset2, poly_int64 size2,
			 poly_int64 max_size2)
{
  /* Offsets need to be 0.  */
  if (maybe_ne (offset1, 0)
      || maybe_ne (offset2, 0))
    return false;

  bool base1_obj_p = SSA_VAR_P (base1);
  bool base2_obj_p = SSA_VAR_P (base2);

  /* We need one object.  */
  if (base1_obj_p == base2_obj_p)
    return false;
  tree obj = base1_obj_p ? base1 : base2;

  /* And we need one MEM_REF.  */
  bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
  bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
  if (base1_memref_p == base2_memref_p)
    return false;
  tree memref = base1_memref_p ? base1 : base2;

  /* Sizes need to be valid.  */
  if (!known_size_p (max_size1)
      || !known_size_p (max_size2)
      || !known_size_p (size1)
      || !known_size_p (size2))
    return false;

  /* Max_size needs to match size.  */
  if (maybe_ne (max_size1, size1)
      || maybe_ne (max_size2, size2))
    return false;

  /* Sizes need to match.  */
  if (maybe_ne (size1, size2))
    return false;

  /* Check that memref is a store to pointer with singleton points-to info.  */
  if (!integer_zerop (TREE_OPERAND (memref, 1)))
    return false;
  tree ptr = TREE_OPERAND (memref, 0);
  if (TREE_CODE (ptr) != SSA_NAME)
    return false;
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
  unsigned int pt_uid;
  if (!pi
      || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
    return false;

  /* Be conservative with non-call exceptions when the address might
     be NULL.  */
  if (cfun->can_throw_non_call_exceptions && pi->pt.null)
    return false;

  /* Check that ptr points relative to obj.  */
  unsigned int obj_uid = DECL_PT_UID (obj);
  if (obj_uid != pt_uid)
    return false;

  /* Check that the object size is the same as the store size.  That ensures us
     that ptr points to the start of obj.  */
  return (DECL_SIZE (obj)
	  && poly_int_tree_p (DECL_SIZE (obj))
	  && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
}

/* If STMT kills the memory reference REF return true, otherwise
   return false.  */

bool
stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
{
  if (!ao_ref_base (ref))
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ???  We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (cfun, stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      /* If LHS is literally a base of the access we are done.  */
      if (ref->ref)
	{
	  tree base = ref->ref;
	  tree innermost_dropped_array_ref = NULL_TREE;
	  if (handled_component_p (base))
	    {
	      tree saved_lhs0 = NULL_TREE;
	      if (handled_component_p (lhs))
		{
		  saved_lhs0 = TREE_OPERAND (lhs, 0);
		  TREE_OPERAND (lhs, 0) = integer_zero_node;
		}
	      do
		{
		  /* Just compare the outermost handled component, if
		     they are equal we have found a possible common
		     base.  */
		  tree saved_base0 = TREE_OPERAND (base, 0);
		  TREE_OPERAND (base, 0) = integer_zero_node;
		  bool res = operand_equal_p (lhs, base, 0);
		  TREE_OPERAND (base, 0) = saved_base0;
		  if (res)
		    break;
		  /* Remember if we drop an array-ref that we need to
		     double-check not being at struct end.  */
		  if (TREE_CODE (base) == ARRAY_REF
		      || TREE_CODE (base) == ARRAY_RANGE_REF)
		    innermost_dropped_array_ref = base;
		  /* Otherwise drop handled components of the access.  */
		  base = saved_base0;
		}
	      while (handled_component_p (base));
	      if (saved_lhs0)
		TREE_OPERAND (lhs, 0) = saved_lhs0;
	    }
	  /* Finally check if the lhs has the same address and size as the
	     base candidate of the access.  Watch out if we have dropped
	     an array-ref that was at struct end, this means ref->ref may
	     be outside of the TYPE_SIZE of its base.  */
	  if ((! innermost_dropped_array_ref
	       || ! array_at_struct_end_p (innermost_dropped_array_ref))
	      && (lhs == base
		  || (((TYPE_SIZE (TREE_TYPE (lhs))
			== TYPE_SIZE (TREE_TYPE (base)))
		       || (TYPE_SIZE (TREE_TYPE (lhs))
			   && TYPE_SIZE (TREE_TYPE (base))
			   && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
					       TYPE_SIZE (TREE_TYPE (base)),
					       0)))
		      && operand_equal_p (lhs, base,
					  OEP_ADDRESS_OF
					  | OEP_MATCH_SIDE_EFFECTS))))
	    return true;
	}

      /* Now look for non-literal equal bases with the restriction of
	 handling constant offset and size.  */
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (!ref->max_size_known_p ())
	return false;
      poly_int64 size, offset, max_size, ref_offset = ref->offset;
      bool reverse;
      tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
					   &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base != ref->base)
	{
	  /* Try using points-to info.  */
	  if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
				       ref->offset, ref->size, ref->max_size))
	    return true;

	  /* If both base and ref->base are MEM_REFs, only compare the
	     first operand, and if the second operand isn't equal constant,
	     try to add the offsets into offset and ref_offset.  */
	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
	    {
	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
				       TREE_OPERAND (ref->base, 1)))
		{
		  poly_offset_int off1 = mem_ref_offset (base);
		  off1 <<= LOG2_BITS_PER_UNIT;
		  off1 += offset;
		  poly_offset_int off2 = mem_ref_offset (ref->base);
		  off2 <<= LOG2_BITS_PER_UNIT;
		  off2 += ref_offset;
		  if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
		    size = -1;
		}
	    }
	  else
	    size = -1;
	}
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (known_eq (size, max_size)
	  && known_subrange_p (ref_offset, ref->max_size, offset, size))
	return true;
    }

  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
	  && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_FREE:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      tree base = ao_ref_base (ref);
	      if (base && TREE_CODE (base) == MEM_REF
		  && TREE_OPERAND (base, 0) == ptr)
		return true;
	      break;
	    }

	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMSET_CHK:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_CALLOC:
	    {
	      /* For a must-alias check we need to be able to constrain
		 the access properly.  */
	      if (!ref->max_size_known_p ())
		return false;
	      tree dest;
	      tree len;

	      /* In execution order a calloc call will never kill
		 anything.  However, DSE will (ab)use this interface
		 to ask if a calloc call writes the same memory locations
		 as a later assignment, memset, etc.  So handle calloc
		 in the expected way.  */
	      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree arg1 = gimple_call_arg (stmt, 1);
		  if (TREE_CODE (arg0) != INTEGER_CST
		      || TREE_CODE (arg1) != INTEGER_CST)
		    return false;

		  dest = gimple_call_lhs (stmt);
		  if (!dest)
		    return false;
		  len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
		}
	      else
		{
		  dest = gimple_call_arg (stmt, 0);
		  len = gimple_call_arg (stmt, 2);
		}
	      if (!poly_int_tree_p (len))
		return false;
	      tree rbase = ref->base;
	      poly_offset_int roffset = ref->offset;
	      ao_ref dref;
	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
	      tree base = ao_ref_base (&dref);
	      poly_offset_int offset = dref.offset;
	      if (!base || !known_size_p (dref.size))
		return false;
	      if (TREE_CODE (base) == MEM_REF)
		{
		  if (TREE_CODE (rbase) != MEM_REF)
		    return false;
		  // Compare pointers.
		  offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
		  roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
		  base = TREE_OPERAND (base, 0);
		  rbase = TREE_OPERAND (rbase, 0);
		}
	      if (base == rbase
		  && known_subrange_p (roffset, ref->max_size, offset,
				       wi::to_poly_offset (len)
				       << LOG2_BITS_PER_UNIT))
		return true;
	      break;
	    }

	  case BUILT_IN_VA_END:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == ADDR_EXPR)
		{
		  tree base = ao_ref_base (ref);
		  if (TREE_OPERAND (ptr, 0) == base)
		    return true;
		}
	      break;
	    }

	  default:;
	  }
    }
  return false;
}

bool
stmt_kills_ref_p (gimple *stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p (stmt, &r);
}

/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET or a statement clobbering the memory reference REF in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
		  ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
		  bitmap *visited, bool abort_on_visited,
		  void *(*translate)(ao_ref *, tree, void *, translate_flags *),
		  translate_flags disambiguate_only,
		  void *data)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* If we are searching for the target VUSE by walking up to
	 TARGET_BB dominating the original PHI we are finished once
	 we reach a default def or a definition in a block dominating
	 that block.  Update TARGET and return.  */
      if (target_bb
	  && (gimple_nop_p (def_stmt)
	      || dominated_by_p (CDI_DOMINATORS,
				 target_bb, gimple_bb (def_stmt))))
	{
	  target = vuse;
	  return true;
	}

      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* An already visited PHI node ends the walk successfully.  */
	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
	    return !abort_on_visited;
	  vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
					   visited, abort_on_visited,
					   translate, data, disambiguate_only);
	  if (!vuse)
	    return false;
	  continue;
	}
      else if (gimple_nop_p (def_stmt))
	return false;
      else
	{
	  /* A clobbering statement or the end of the IL ends it failing.  */
	  if ((int)limit <= 0)
	    return false;
	  --limit;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
	    {
	      translate_flags tf = disambiguate_only;
	      if (translate
		  && (*translate) (ref, vuse, data, &tf) == NULL)
		;
	      else
		return false;
	    }
	}
      /* If we reach a new basic-block see if we already skipped it
	 in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}

/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows to continue walking
   statements dominating PHI skipping only statements that cannot possibly
   clobber REF.  Decrements LIMIT for each alias disambiguation done
   and aborts the walk, returning NULL_TREE if it reaches zero.
   Returns NULL_TREE if no suitable virtual operand can be found.  */

tree
get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
			  unsigned int &limit, bitmap *visited,
			  bool abort_on_visited,
			  void *(*translate)(ao_ref *, tree, void *,
					     translate_flags *),
			  void *data,
			  translate_flags disambiguate_only)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* Through a single-argument PHI we can simply look through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  basic_block phi_bb = gimple_bb (phi);
  tree arg0, arg1;
  unsigned i;

  /* Find a candidate for the virtual operand which definition
     dominates those of all others.  */
  /* First look if any of the args themselves satisfy this.  */
  for (i = 0; i < nargs; ++i)
    {
      arg0 = PHI_ARG_DEF (phi, i);
      if (SSA_NAME_IS_DEFAULT_DEF (arg0))
	break;
      basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
      if (def_bb != phi_bb
	  && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
	break;
      arg0 = NULL_TREE;
    }
  /* If not, look if we can reach such candidate by walking defs
     until we hit the immediate dominator.  maybe_skip_until will
     do that for us.  */
  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);

  /* Then check against the (to be) found candidate.  */
  for (i = 0; i < nargs; ++i)
    {
      arg1 = PHI_ARG_DEF (phi, i);
      if (arg1 == arg0)
	;
      else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
				   limit, visited,
				   abort_on_visited,
				   translate,
				   /* Do not valueize when walking over
				      backedges.  */
				   dominated_by_p
				     (CDI_DOMINATORS,
				      gimple_bb (SSA_NAME_DEF_STMT (arg1)),
				      phi_bb)
				   ? TR_DISAMBIGUATE
				   : disambiguate_only, data))
	return NULL_TREE;
    }

  return arg0;
}

/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use for which its defining statement
   does not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE if non-NULL is called with a pointer to REF, the virtual
   use which definition is a statement that may clobber REF and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   VALUEIZE if non-NULL is called with the next VUSE that is considered
   and return value is substituted for that.  This can be used to
   implement optimistic value-numbering for example.  Note that the
   VUSE argument is assumed to be valueized already.

   LIMIT specifies the number of alias queries we are allowed to do,
   the walk stops when it reaches zero and NULL is returned.  LIMIT
   is decremented by the number of alias queries (plus adjustments
   done by the callbacks) upon return.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
			void *(*walker)(ao_ref *, tree, void *),
			void *(*translate)(ao_ref *, tree, void *,
					   translate_flags *),
			tree (*valueize)(tree),
			unsigned &limit, void *data)
{
  bitmap visited = NULL;
  void *res;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple *def_stmt;

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      if (valueize)
	{
	  vuse = valueize (vuse);
	  if (!vuse)
	    {
	      res = NULL;
	      break;
	    }
	}
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
					 &visited, translated, translate, data);
      else
	{
	  if ((int)limit <= 0)
	    {
	      res = NULL;
	      break;
	    }
	  --limit;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
	    {
	      if (!translate)
		break;
	      translate_flags disambiguate_only = TR_TRANSLATE;
	      res = (*translate) (ref, vuse, data, &disambiguate_only);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = translated || disambiguate_only == TR_TRANSLATE;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
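
/* Illustrative sketch of a walker callback (the callback name is
   hypothetical, not part of this file): a pass looking for an earlier
   dominating store that fully covers REF could use

     static void *
     find_killing_def (ao_ref *ref, tree vuse, void *data ATTRIBUTE_UNUSED)
     {
       gimple *def = SSA_NAME_DEF_STMT (vuse);
       if (stmt_kills_ref_p (def, ref))
	 return def;        stop the walk, result is the statement
       return NULL;         keep walking non-clobbering defs
     }

     walk_non_aliased_vuses (&r, gimple_vuse (stmt), true,
			     find_killing_def, NULL, NULL, limit, NULL);

   Returning (void *)-1 from the callback aborts the walk.  */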

/* Based on the memory reference REF call WALKER for each vdef whose
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
   The pointer may be NULL and then we do not track this information.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues at merge points), the
   return value is true if any of the walks was successful.

   The function returns the number of statements walked or -1 if
   LIMIT stmts were walked and the walk was aborted at this point.
   If LIMIT is zero the walk is not aborted.  */

static int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
		      bool (*walker)(ao_ref *, tree, void *), void *data,
		      bitmap *visited, unsigned int cnt,
		      bool *function_entry_reached, unsigned limit)
{
  do
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
	return cnt;

      if (gimple_nop_p (def_stmt))
	{
	  if (function_entry_reached)
	    *function_entry_reached = true;
	  return cnt;
	}
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  unsigned i;
	  if (!*visited)
	    *visited = BITMAP_ALLOC (NULL);
	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
	    {
	      int res = walk_aliased_vdefs_1 (ref,
					      gimple_phi_arg_def (def_stmt, i),
					      walker, data, visited, cnt,
					      function_entry_reached, limit);
	      if (res == -1)
		return -1;
	      cnt = res;
	    }
	  return cnt;
	}

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if (cnt == limit)
	return -1;
      if ((!ref
	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
	  && (*walker) (ref, vdef, data))
	return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}

int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
		    bool (*walker)(ao_ref *, tree, void *), void *data,
		    bitmap *visited,
		    bool *function_entry_reached, unsigned int limit)
{
  bitmap local_visited = NULL;
  int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  if (function_entry_reached)
    *function_entry_reached = false;

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
			      visited ? visited : &local_visited, 0,
			      function_entry_reached, limit);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}
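
/* Illustrative sketch of a vdef walker (the callback name is
   hypothetical): counting the may-clobbering definitions reaching
   a statement could look like

     static bool
     count_clobbers (ao_ref *ref ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
		     void *data)
     {
       unsigned *count = (unsigned *) data;
       ++*count;
       return false;       returning false continues the walk
     }

     unsigned count = 0;
     walk_aliased_vdefs (&r, gimple_vuse (stmt), count_clobbers, &count,
			 NULL, NULL, 100);  */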

/* Verify validity of the fnspec string.
   See attr-fnspec.h for details.  */

void
attr_fnspec::verify ()
{
  bool err = false;

  /* Check return value specifier.  */
  if (len < return_desc_size)
    err = true;
  else if ((len - return_desc_size) % arg_desc_size)
    err = true;
  else if ((str[0] < '1' || str[0] > '4')
	   && str[0] != '.' && str[0] != 'm'
	   /* FIXME: Fortran trans-decl.c contains multiple wrong fnspec
	      strings.  The following characters have no meaning.  */
	   && str[0] != 'R' && str[0] != 'W')
    err = true;

  /* Now check all parameters.  */
  for (unsigned int i = 0; arg_specified_p (i); i++)
    {
      unsigned int idx = arg_idx (i);
      if (str[idx + 1] != ' ')
	err = true;
    }
  if (err)
    internal_error ("invalid fn spec attribute \"%s\"", str);
}