4ee9c684 1/* Alias analysis for trees.
711789cc 2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
4ee9c684 3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
8c4c00c1 9the Free Software Foundation; either version 3, or (at your option)
4ee9c684 10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
4ee9c684 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
4ee9c684 26#include "tm_p.h"
be97d4b6 27#include "target.h"
4ee9c684 28#include "basic-block.h"
b9ed1410 29#include "timevar.h" /* for TV_ALIAS_STMT_WALK */
4ee9c684 30#include "ggc.h"
31#include "langhooks.h"
32#include "flags.h"
33#include "function.h"
ce084dfc 34#include "tree-pretty-print.h"
b9ed1410 35#include "dumpfile.h"
75a70cf9 36#include "gimple.h"
4ee9c684 37#include "tree-flow.h"
38#include "tree-inline.h"
4ee9c684 39#include "params.h"
2be14d8b 40#include "vec.h"
a55dc2cd 41#include "bitmap.h"
4fb5e5ca 42#include "pointer-set.h"
6cc9bd27 43#include "alloc-pool.h"
dd277d48 44#include "tree-ssa-alias.h"
a55dc2cd 45
dd277d48 46/* Broad overview of how alias analysis on gimple works:
4fb5e5ca 47
dd277d48 48 Statements clobbering or using memory are linked through the
49 virtual operand factored use-def chain. The virtual operand
 50 is unique per function; its symbol is accessible via gimple_vop (cfun).
51 Virtual operands are used for efficiently walking memory statements
52 in the gimple IL and are useful for things like value-numbering as
53 a generation count for memory references.
339d7079 54
dd277d48 55 SSA_NAME pointers may have associated points-to information
56 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
57 points-to information is (re-)computed by the TODO_rebuild_alias
58 pass manager todo. Points-to information is also used for more
59 precise tracking of call-clobbered and call-used variables and
60 related disambiguations.
339d7079 61
dd277d48 62 This file contains functions for disambiguating memory references,
 63 the so-called alias-oracle, and tools for walking the gimple IL.
339d7079 64
dd277d48 65 The main alias-oracle entry-points are
339d7079 66
dd277d48 67 bool stmt_may_clobber_ref_p (gimple, tree)
339d7079 68
dd277d48 69 This function queries if a statement may invalidate (parts of)
70 the memory designated by the reference tree argument.
339d7079 71
dd277d48 72 bool ref_maybe_used_by_stmt_p (gimple, tree)
339d7079 73
dd277d48 74 This function queries if a statement may need (parts of) the
75 memory designated by the reference tree argument.
339d7079 76
dd277d48 77 There are variants of these functions that only handle the call
78 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
79 Note that these do not disambiguate against a possible call lhs.
339d7079 80
dd277d48 81 bool refs_may_alias_p (tree, tree)
339d7079 82
dd277d48 83 This function tries to disambiguate two reference trees.
05931b00 84
dd277d48 85 bool ptr_deref_may_alias_global_p (tree)
339d7079 86
dd277d48 87 This function queries if dereferencing a pointer variable may
88 alias global memory.
339d7079 89
dd277d48 90 More low-level disambiguators are available and documented in
91 this file. Low-level disambiguators dealing with points-to
92 information are in tree-ssa-structalias.c. */
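/* Illustrative sketch (not part of the oracle API itself): a pass walking
   statements can combine the high-level predicates above, assuming STMT and
   REF come from the caller's own analysis:

     if (!stmt_may_clobber_ref_p (stmt, ref)
	 && !ref_maybe_used_by_stmt_p (stmt, ref))
       ...   then STMT neither writes nor reads the memory denoted by REF.  */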
339d7079 93
339d7079 94
dd277d48 95/* Query statistics for the different low-level disambiguators.
96 A high-level query may trigger multiple of them. */
339d7079 97
dd277d48 98static struct {
99 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
100 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
101 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
102 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
103 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
104 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
105} alias_stats;
339d7079 106
dd277d48 107void
108dump_alias_stats (FILE *s)
109{
110 fprintf (s, "\nAlias oracle query stats:\n");
111 fprintf (s, " refs_may_alias_p: "
112 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
113 HOST_WIDE_INT_PRINT_DEC" queries\n",
114 alias_stats.refs_may_alias_p_no_alias,
115 alias_stats.refs_may_alias_p_no_alias
116 + alias_stats.refs_may_alias_p_may_alias);
117 fprintf (s, " ref_maybe_used_by_call_p: "
118 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
119 HOST_WIDE_INT_PRINT_DEC" queries\n",
120 alias_stats.ref_maybe_used_by_call_p_no_alias,
 121 alias_stats.ref_maybe_used_by_call_p_no_alias
122 + alias_stats.ref_maybe_used_by_call_p_may_alias);
123 fprintf (s, " call_may_clobber_ref_p: "
124 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
125 HOST_WIDE_INT_PRINT_DEC" queries\n",
126 alias_stats.call_may_clobber_ref_p_no_alias,
127 alias_stats.call_may_clobber_ref_p_no_alias
128 + alias_stats.call_may_clobber_ref_p_may_alias);
129}
130
131
132/* Return true, if dereferencing PTR may alias with a global variable. */
4ee9c684 133
dd277d48 134bool
135ptr_deref_may_alias_global_p (tree ptr)
4ee9c684 136{
dd277d48 137 struct ptr_info_def *pi;
4fb5e5ca 138
dd277d48 139 /* If we end up with a pointer constant here, that may point
140 to global memory. */
141 if (TREE_CODE (ptr) != SSA_NAME)
142 return true;
4ee9c684 143
dd277d48 144 pi = SSA_NAME_PTR_INFO (ptr);
4ee9c684 145
dd277d48 146 /* If we do not have points-to information for this variable,
147 we have to punt. */
148 if (!pi)
149 return true;
4ee9c684 150
2a3ebafa 151 /* ??? This does not use TBAA to prune globals ptr may not access. */
dd277d48 152 return pt_solution_includes_global (&pi->pt);
4ee9c684 153}
154
2a3ebafa 155/* Return true if dereferencing PTR may alias DECL.
156 The caller is responsible for applying TBAA to see if PTR
157 may access DECL at all. */
4ee9c684 158
dd277d48 159static bool
160ptr_deref_may_alias_decl_p (tree ptr, tree decl)
4ee9c684 161{
dd277d48 162 struct ptr_info_def *pi;
f7d118a9 163
2760c0e3 164 /* Conversions are irrelevant for points-to information and
165 data-dependence analysis can feed us those. */
166 STRIP_NOPS (ptr);
167
 168 /* Anything we do not explicitly handle aliases. */
169 if ((TREE_CODE (ptr) != SSA_NAME
170 && TREE_CODE (ptr) != ADDR_EXPR
171 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
172 || !POINTER_TYPE_P (TREE_TYPE (ptr))
173 || (TREE_CODE (decl) != VAR_DECL
174 && TREE_CODE (decl) != PARM_DECL
175 && TREE_CODE (decl) != RESULT_DECL))
9c01641d 176 return true;
2fd0b5dc 177
2760c0e3 178 /* Disregard pointer offsetting. */
179 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
180 {
181 do
182 {
183 ptr = TREE_OPERAND (ptr, 0);
184 }
185 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
186 return ptr_deref_may_alias_decl_p (ptr, decl);
187 }
188
047fdd47 189 /* ADDR_EXPR pointers either just offset another pointer or directly
190 specify the pointed-to set. */
191 if (TREE_CODE (ptr) == ADDR_EXPR)
192 {
193 tree base = get_base_address (TREE_OPERAND (ptr, 0));
194 if (base
2622064f 195 && (TREE_CODE (base) == MEM_REF
196 || TREE_CODE (base) == TARGET_MEM_REF))
047fdd47 197 ptr = TREE_OPERAND (base, 0);
198 else if (base
2622064f 199 && DECL_P (base))
7f2d9047 200 return base == decl;
047fdd47 201 else if (base
202 && CONSTANT_CLASS_P (base))
203 return false;
204 else
205 return true;
206 }
207
2760c0e3 208 /* Non-aliased variables cannot be pointed to. */
209 if (!may_be_aliased (decl))
210 return false;
047fdd47 211
dd277d48 212 /* If we do not have useful points-to information for this pointer
213 we cannot disambiguate anything else. */
214 pi = SSA_NAME_PTR_INFO (ptr);
215 if (!pi)
2fd0b5dc 216 return true;
217
dd277d48 218 return pt_solution_includes (&pi->pt, decl);
2fd0b5dc 219}
4ee9c684 220
2a3ebafa 221/* Return true if dereferenced PTR1 and PTR2 may alias.
222 The caller is responsible for applying TBAA to see if accesses
223 through PTR1 and PTR2 may conflict at all. */
4ee9c684 224
2760c0e3 225bool
dd277d48 226ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
4ee9c684 227{
dd277d48 228 struct ptr_info_def *pi1, *pi2;
4ee9c684 229
2760c0e3 230 /* Conversions are irrelevant for points-to information and
231 data-dependence analysis can feed us those. */
232 STRIP_NOPS (ptr1);
233 STRIP_NOPS (ptr2);
234
2760c0e3 235 /* Disregard pointer offsetting. */
236 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
237 {
238 do
239 {
240 ptr1 = TREE_OPERAND (ptr1, 0);
241 }
242 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
243 return ptr_derefs_may_alias_p (ptr1, ptr2);
244 }
245 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
246 {
247 do
248 {
249 ptr2 = TREE_OPERAND (ptr2, 0);
250 }
251 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
252 return ptr_derefs_may_alias_p (ptr1, ptr2);
253 }
f85fb819 254
047fdd47 255 /* ADDR_EXPR pointers either just offset another pointer or directly
256 specify the pointed-to set. */
257 if (TREE_CODE (ptr1) == ADDR_EXPR)
258 {
259 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
260 if (base
2622064f 261 && (TREE_CODE (base) == MEM_REF
262 || TREE_CODE (base) == TARGET_MEM_REF))
80b4d93e 263 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
047fdd47 264 else if (base
2622064f 265 && DECL_P (base))
047fdd47 266 return ptr_deref_may_alias_decl_p (ptr2, base);
267 else
268 return true;
269 }
270 if (TREE_CODE (ptr2) == ADDR_EXPR)
271 {
272 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
273 if (base
2622064f 274 && (TREE_CODE (base) == MEM_REF
275 || TREE_CODE (base) == TARGET_MEM_REF))
80b4d93e 276 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
047fdd47 277 else if (base
2622064f 278 && DECL_P (base))
047fdd47 279 return ptr_deref_may_alias_decl_p (ptr1, base);
280 else
281 return true;
282 }
283
80b4d93e 284 /* From here we require SSA name pointers. Anything else aliases. */
285 if (TREE_CODE (ptr1) != SSA_NAME
286 || TREE_CODE (ptr2) != SSA_NAME
287 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
288 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
289 return true;
290
dd277d48 291 /* We may end up with two empty points-to solutions for two identical pointers.
292 In this case we still want to say both pointers alias, so shortcut
293 that here. */
294 if (ptr1 == ptr2)
295 return true;
81d08033 296
dd277d48 297 /* If we do not have useful points-to information for either pointer
298 we cannot disambiguate anything else. */
299 pi1 = SSA_NAME_PTR_INFO (ptr1);
300 pi2 = SSA_NAME_PTR_INFO (ptr2);
301 if (!pi1 || !pi2)
302 return true;
81d08033 303
2a3ebafa 304 /* ??? This does not use TBAA to prune decls from the intersection
305 that not both pointers may access. */
dd277d48 306 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
81d08033 307}
308
047fdd47 309/* Return true if dereferencing PTR may alias *REF.
310 The caller is responsible for applying TBAA to see if PTR
311 may access *REF at all. */
312
313static bool
314ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
315{
316 tree base = ao_ref_base (ref);
317
2622064f 318 if (TREE_CODE (base) == MEM_REF
319 || TREE_CODE (base) == TARGET_MEM_REF)
047fdd47 320 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
2622064f 321 else if (DECL_P (base))
047fdd47 322 return ptr_deref_may_alias_decl_p (ptr, base);
323
324 return true;
325}
326
8763c223 327/* Return true if REF may refer to global memory. */
328
329bool
330ref_may_alias_global_p (tree ref)
331{
332 tree base = get_base_address (ref);
333 if (DECL_P (base))
334 return is_global_var (base);
335 else if (TREE_CODE (base) == MEM_REF
336 || TREE_CODE (base) == TARGET_MEM_REF)
337 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
338 return true;
339}
340
 341/* Return true if STMT may clobber global memory. */
342
343bool
344stmt_may_clobber_global_p (gimple stmt)
345{
346 tree lhs;
347
348 if (!gimple_vdef (stmt))
349 return false;
350
351 /* ??? We can ask the oracle whether an artificial pointer
352 dereference with a pointer with points-to information covering
 353 all global memory (what about non-address-taken memory?) may be
 354 clobbered by this call. As there is at the moment no convenient
 355 way of doing that without generating garbage, do some manual
356 checking instead.
357 ??? We could make a NULL ao_ref argument to the various
358 predicates special, meaning any global memory. */
359
360 switch (gimple_code (stmt))
361 {
362 case GIMPLE_ASSIGN:
363 lhs = gimple_assign_lhs (stmt);
364 return (TREE_CODE (lhs) != SSA_NAME
365 && ref_may_alias_global_p (lhs));
366 case GIMPLE_CALL:
367 return true;
368 default:
369 return true;
370 }
371}
372
4ee9c684 373
374/* Dump alias information on FILE. */
375
376void
377dump_alias_info (FILE *file)
378{
2aac21c5 379 unsigned i;
4ee9c684 380 const char *funcname
5135beeb 381 = lang_hooks.decl_printable_name (current_function_decl, 2);
a55dc2cd 382 tree var;
4ee9c684 383
dd277d48 384 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
81d08033 385
386 fprintf (file, "Aliased symbols\n\n");
48e1416a 387
2aac21c5 388 FOR_EACH_LOCAL_DECL (cfun, i, var)
81d08033 389 {
81d08033 390 if (may_be_aliased (var))
391 dump_variable (file, var);
392 }
393
7f81b5ee 394 fprintf (file, "\nCall clobber information\n");
395
396 fprintf (file, "\nESCAPED");
397 dump_points_to_solution (file, &cfun->gimple_df->escaped);
7f81b5ee 398
399 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
81d08033 400
81d08033 401 for (i = 1; i < num_ssa_names; i++)
402 {
403 tree ptr = ssa_name (i);
a48b1470 404 struct ptr_info_def *pi;
48e1416a 405
dd277d48 406 if (ptr == NULL_TREE
407 || SSA_NAME_IN_FREE_LIST (ptr))
a48b1470 408 continue;
409
410 pi = SSA_NAME_PTR_INFO (ptr);
dd277d48 411 if (pi)
81d08033 412 dump_points_to_info_for (file, ptr);
413 }
4ee9c684 414
4ee9c684 415 fprintf (file, "\n");
416}
417
418
419/* Dump alias information on stderr. */
420
4b987fac 421DEBUG_FUNCTION void
4ee9c684 422debug_alias_info (void)
423{
424 dump_alias_info (stderr);
425}
426
427
7f81b5ee 428/* Dump the points-to set *PT into FILE. */
4ee9c684 429
d793732c 430void
7f81b5ee 431dump_points_to_solution (FILE *file, struct pt_solution *pt)
4ee9c684 432{
7f81b5ee 433 if (pt->anything)
434 fprintf (file, ", points-to anything");
4ee9c684 435
7f81b5ee 436 if (pt->nonlocal)
437 fprintf (file, ", points-to non-local");
4ee9c684 438
7f81b5ee 439 if (pt->escaped)
440 fprintf (file, ", points-to escaped");
441
1a981e1a 442 if (pt->ipa_escaped)
443 fprintf (file, ", points-to unit escaped");
444
7f81b5ee 445 if (pt->null)
446 fprintf (file, ", points-to NULL");
447
448 if (pt->vars)
4ee9c684 449 {
7f81b5ee 450 fprintf (file, ", points-to vars: ");
451 dump_decl_set (file, pt->vars);
452 if (pt->vars_contains_global)
453 fprintf (file, " (includes global vars)");
454 }
455}
4ee9c684 456
c7d89805 457
458/* Unified dump function for pt_solution. */
459
460DEBUG_FUNCTION void
461debug (pt_solution &ref)
462{
463 dump_points_to_solution (stderr, &ref);
464}
465
466DEBUG_FUNCTION void
467debug (pt_solution *ptr)
468{
469 if (ptr)
470 debug (*ptr);
471 else
472 fprintf (stderr, "<nil>\n");
473}
474
475
7f81b5ee 476/* Dump points-to information for SSA_NAME PTR into FILE. */
4ee9c684 477
7f81b5ee 478void
479dump_points_to_info_for (FILE *file, tree ptr)
480{
481 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
4ee9c684 482
7f81b5ee 483 print_generic_expr (file, ptr, dump_flags);
1e1a4c8c 484
7f81b5ee 485 if (pi)
486 dump_points_to_solution (file, &pi->pt);
487 else
488 fprintf (file, ", points-to anything");
4ee9c684 489
490 fprintf (file, "\n");
491}
492
493
d793732c 494/* Dump points-to information for VAR into stderr. */
495
4b987fac 496DEBUG_FUNCTION void
d793732c 497debug_points_to_info_for (tree var)
498{
499 dump_points_to_info_for (stderr, var);
500}
501
3918bd18 502
503/* Initializes the alias-oracle reference representation *R from REF. */
504
505void
506ao_ref_init (ao_ref *r, tree ref)
507{
508 r->ref = ref;
509 r->base = NULL_TREE;
510 r->offset = 0;
511 r->size = -1;
512 r->max_size = -1;
513 r->ref_alias_set = -1;
514 r->base_alias_set = -1;
3787db52 515 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
3918bd18 516}
517
518/* Returns the base object of the memory reference *REF. */
519
520tree
521ao_ref_base (ao_ref *ref)
522{
523 if (ref->base)
524 return ref->base;
525 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
526 &ref->max_size);
527 return ref->base;
528}
529
530/* Returns the base object alias set of the memory reference *REF. */
531
182cf5a9 532static alias_set_type
3918bd18 533ao_ref_base_alias_set (ao_ref *ref)
534{
182cf5a9 535 tree base_ref;
3918bd18 536 if (ref->base_alias_set != -1)
537 return ref->base_alias_set;
182cf5a9 538 if (!ref->ref)
539 return 0;
540 base_ref = ref->ref;
541 while (handled_component_p (base_ref))
542 base_ref = TREE_OPERAND (base_ref, 0);
543 ref->base_alias_set = get_alias_set (base_ref);
3918bd18 544 return ref->base_alias_set;
545}
546
547/* Returns the reference alias set of the memory reference *REF. */
548
549alias_set_type
550ao_ref_alias_set (ao_ref *ref)
551{
552 if (ref->ref_alias_set != -1)
553 return ref->ref_alias_set;
554 ref->ref_alias_set = get_alias_set (ref->ref);
555 return ref->ref_alias_set;
556}
557
134899ac 558/* Init an alias-oracle reference representation from a gimple pointer
 559 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
 560 size is assumed to be unknown. The access is assumed to be only
 561 to or after the pointer target, not before it. */
562
563void
564ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
565{
566 HOST_WIDE_INT t1, t2;
567 ref->ref = NULL_TREE;
568 if (TREE_CODE (ptr) == ADDR_EXPR)
569 ref->base = get_ref_base_and_extent (TREE_OPERAND (ptr, 0),
570 &ref->offset, &t1, &t2);
571 else
572 {
182cf5a9 573 ref->base = build2 (MEM_REF, char_type_node,
2512209b 574 ptr, null_pointer_node);
134899ac 575 ref->offset = 0;
576 }
577 if (size
578 && host_integerp (size, 0)
579 && TREE_INT_CST_LOW (size) * 8 / 8 == TREE_INT_CST_LOW (size))
580 ref->max_size = ref->size = TREE_INT_CST_LOW (size) * 8;
581 else
582 ref->max_size = ref->size = -1;
583 ref->ref_alias_set = 0;
584 ref->base_alias_set = 0;
3787db52 585 ref->volatile_p = false;
134899ac 586}
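/* Usage sketch (illustrative only): callers typically build ao_refs once and
   reuse them for several oracle queries, mirroring refs_may_alias_p further
   down in this file:

     ao_ref r1, r2;
     ao_ref_init (&r1, ref1);
     ao_ref_init (&r2, ref2);
     if (refs_may_alias_p_1 (&r1, &r2, true))
       ...   the two references may access overlapping memory.

   REF1 and REF2 stand for arbitrary memory reference trees supplied by the
   caller.  */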
587
dd277d48 588/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
589 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
590 decide. */
d793732c 591
dd277d48 592static inline int
593same_type_for_tbaa (tree type1, tree type2)
594{
595 type1 = TYPE_MAIN_VARIANT (type1);
596 type2 = TYPE_MAIN_VARIANT (type2);
4ee9c684 597
dd277d48 598 /* If we would have to do structural comparison bail out. */
599 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
600 || TYPE_STRUCTURAL_EQUALITY_P (type2))
601 return -1;
602
603 /* Compare the canonical types. */
604 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
605 return 1;
606
4b4d1513 607 /* ??? Array types are not properly unified in all cases as we have
dd277d48 608 spurious changes in the index types for example. Removing this
609 causes all sorts of problems with the Fortran frontend. */
610 if (TREE_CODE (type1) == ARRAY_TYPE
611 && TREE_CODE (type2) == ARRAY_TYPE)
612 return -1;
613
102bfc9c 614 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
615 object of one of its constrained subtypes, e.g. when a function with an
616 unconstrained parameter passed by reference is called on an object and
617 inlined. But, even in the case of a fixed size, type and subtypes are
618 not equivalent enough as to share the same TYPE_CANONICAL, since this
619 would mean that conversions between them are useless, whereas they are
620 not (e.g. type and subtypes can have different modes). So, in the end,
621 they are only guaranteed to have the same alias set. */
622 if (get_alias_set (type1) == get_alias_set (type2))
4b4d1513 623 return -1;
624
dd277d48 625 /* The types are known to be not equal. */
626 return 0;
627}
628
629/* Determine if the two component references REF1 and REF2 which are
630 based on access types TYPE1 and TYPE2 and of which at least one is based
ac30e3b2 631 on an indirect reference may alias. REF2 is the only one that can
632 be a decl in which case REF2_IS_DECL is true.
633 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
634 are the respective alias sets. */
dd277d48 635
636static bool
0d2f7431 637aliasing_component_refs_p (tree ref1,
ac30e3b2 638 alias_set_type ref1_alias_set,
639 alias_set_type base1_alias_set,
a42ce2cc 640 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
0d2f7431 641 tree ref2,
ac30e3b2 642 alias_set_type ref2_alias_set,
643 alias_set_type base2_alias_set,
644 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
645 bool ref2_is_decl)
dd277d48 646{
 647 /* If one reference is a component reference through pointers, try to find a
648 common base and apply offset based disambiguation. This handles
649 for example
650 struct A { int i; int j; } *q;
651 struct B { struct A a; int k; } *p;
652 disambiguating q->i and p->a.j. */
0d2f7431 653 tree base1, base2;
654 tree type1, type2;
dd277d48 655 tree *refp;
656 int same_p;
657
0d2f7431 658 /* Choose bases and base types to search for. */
659 base1 = ref1;
660 while (handled_component_p (base1))
661 base1 = TREE_OPERAND (base1, 0);
662 type1 = TREE_TYPE (base1);
663 base2 = ref2;
664 while (handled_component_p (base2))
665 base2 = TREE_OPERAND (base2, 0);
666 type2 = TREE_TYPE (base2);
667
dd277d48 668 /* Now search for type1 in the access path of ref2. This
669 would be a common base for doing offset based disambiguation on. */
bc3c318e 670 refp = &ref2;
dd277d48 671 while (handled_component_p (*refp)
672 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
673 refp = &TREE_OPERAND (*refp, 0);
674 same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
675 /* If we couldn't compare types we have to bail out. */
676 if (same_p == -1)
677 return true;
678 else if (same_p == 1)
679 {
680 HOST_WIDE_INT offadj, sztmp, msztmp;
681 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
682 offset2 -= offadj;
0d2f7431 683 get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
684 offset1 -= offadj;
dd277d48 685 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
686 }
687 /* If we didn't find a common base, try the other way around. */
bc3c318e 688 refp = &ref1;
dd277d48 689 while (handled_component_p (*refp)
690 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
691 refp = &TREE_OPERAND (*refp, 0);
692 same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
693 /* If we couldn't compare types we have to bail out. */
694 if (same_p == -1)
695 return true;
696 else if (same_p == 1)
697 {
698 HOST_WIDE_INT offadj, sztmp, msztmp;
699 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
700 offset1 -= offadj;
0d2f7431 701 get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
702 offset2 -= offadj;
dd277d48 703 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
704 }
ac30e3b2 705
4b4d1513 706 /* If we have two type access paths B1.path1 and B2.path2 they may
ac30e3b2 707 only alias if either B1 is in B2.path2 or B2 is in B1.path1.
708 But we can still have a path that goes B1.path1...B2.path2 with
709 a part that we do not see. So we can only disambiguate now
710 if there is no B2 in the tail of path1 and no B1 on the
711 tail of path2. */
712 if (base1_alias_set == ref2_alias_set
713 || alias_set_subset_of (base1_alias_set, ref2_alias_set))
714 return true;
715 /* If this is ptr vs. decl then we know there is no ptr ... decl path. */
716 if (!ref2_is_decl)
717 return (base2_alias_set == ref1_alias_set
718 || alias_set_subset_of (base2_alias_set, ref1_alias_set));
4b4d1513 719 return false;
dd277d48 720}
721
740a2cb6 722/* Return true if we can determine that component references REF1 and REF2,
723 that are within a common DECL, cannot overlap. */
724
725static bool
726nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
727{
728 vec<tree, va_stack> component_refs1;
729 vec<tree, va_stack> component_refs2;
730
731 vec_stack_alloc (tree, component_refs1, 16);
732 vec_stack_alloc (tree, component_refs2, 16);
733
734 /* Create the stack of handled components for REF1. */
735 while (handled_component_p (ref1))
736 {
737 component_refs1.safe_push (ref1);
738 ref1 = TREE_OPERAND (ref1, 0);
739 }
740 if (TREE_CODE (ref1) == MEM_REF)
741 {
742 if (!integer_zerop (TREE_OPERAND (ref1, 1)))
743 goto may_overlap;
744 ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
745 }
746
747 /* Create the stack of handled components for REF2. */
748 while (handled_component_p (ref2))
749 {
750 component_refs2.safe_push (ref2);
751 ref2 = TREE_OPERAND (ref2, 0);
752 }
753 if (TREE_CODE (ref2) == MEM_REF)
754 {
755 if (!integer_zerop (TREE_OPERAND (ref2, 1)))
756 goto may_overlap;
757 ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
758 }
759
760 /* We must have the same base DECL. */
761 gcc_assert (ref1 == ref2);
762
763 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
764 rank. This is sufficient because we start from the same DECL and you
765 cannot reference several fields at a time with COMPONENT_REFs (unlike
766 with ARRAY_RANGE_REFs for arrays) so you always need the same number
767 of them to access a sub-component, unless you're in a union, in which
768 case the return value will precisely be false. */
769 while (true)
770 {
771 do
772 {
773 if (component_refs1.is_empty ())
774 goto may_overlap;
775 ref1 = component_refs1.pop ();
776 }
777 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
778
779 do
780 {
781 if (component_refs2.is_empty ())
782 goto may_overlap;
783 ref2 = component_refs2.pop ();
784 }
785 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
786
787 /* Beware of BIT_FIELD_REF. */
788 if (TREE_CODE (ref1) != COMPONENT_REF
789 || TREE_CODE (ref2) != COMPONENT_REF)
790 goto may_overlap;
791
792 tree field1 = TREE_OPERAND (ref1, 1);
793 tree field2 = TREE_OPERAND (ref2, 1);
794
795 /* ??? We cannot simply use the type of operand #0 of the refs here
796 as the Fortran compiler smuggles type punning into COMPONENT_REFs
797 for common blocks instead of using unions like everyone else. */
798 tree type1 = TYPE_MAIN_VARIANT (DECL_CONTEXT (field1));
799 tree type2 = TYPE_MAIN_VARIANT (DECL_CONTEXT (field2));
800
801 /* We cannot disambiguate fields in a union or qualified union. */
802 if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
803 goto may_overlap;
804
805 /* Different fields of the same record type cannot overlap. */
806 if (field1 != field2)
807 {
808 component_refs1.release ();
809 component_refs2.release ();
810 return true;
811 }
812 }
813
814may_overlap:
815 component_refs1.release ();
816 component_refs2.release ();
817 return false;
818}
819
dd277d48 820/* Return true if two memory references based on the variables BASE1
077ad5ad 821 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
740a2cb6 822 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
823 if non-NULL are the complete memory reference trees. */
dd277d48 824
825static bool
740a2cb6 826decl_refs_may_alias_p (tree ref1, tree base1,
dd277d48 827 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
740a2cb6 828 tree ref2, tree base2,
dd277d48 829 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
4ee9c684 830{
2622064f 831 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
4ee9c684 832
dd277d48 833 /* If both references are based on different variables, they cannot alias. */
7f2d9047 834 if (base1 != base2)
dd277d48 835 return false;
4ee9c684 836
dd277d48 837 /* If both references are based on the same variable, they cannot alias if
838 the accesses do not overlap. */
740a2cb6 839 if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
840 return false;
841
842 /* For components with variable position, the above test isn't sufficient,
843 so we disambiguate component references manually. */
844 if (ref1 && ref2
845 && handled_component_p (ref1) && handled_component_p (ref2)
846 && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
847 return false;
848
849 return true;
dd277d48 850}
851
852/* Return true if an indirect reference based on *PTR1 constrained
077ad5ad 853 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
854 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
dd277d48 855 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
856 in which case they are computed on-demand. REF1 and REF2
857 if non-NULL are the complete memory reference trees. */
858
859static bool
182cf5a9 860indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
861 HOST_WIDE_INT offset1,
862 HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
ac30e3b2 863 alias_set_type ref1_alias_set,
dd277d48 864 alias_set_type base1_alias_set,
182cf5a9 865 tree ref2 ATTRIBUTE_UNUSED, tree base2,
dd277d48 866 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
ac30e3b2 867 alias_set_type ref2_alias_set,
182cf5a9 868 alias_set_type base2_alias_set, bool tbaa_p)
dd277d48 869{
9a14ba4f 870 tree ptr1;
061b1263 871 tree ptrtype1, dbase2;
7749ee1c 872 HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
061b1263 873 HOST_WIDE_INT doffset1, doffset2;
2622064f 874 double_int moff;
875
876 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
877 || TREE_CODE (base1) == TARGET_MEM_REF)
878 && DECL_P (base2));
182cf5a9 879
28daba6f 880 ptr1 = TREE_OPERAND (base1, 0);
9a14ba4f 881
7749ee1c 882 /* The offset embedded in MEM_REFs can be negative. Bias them
883 so that the resulting offset adjustment is positive. */
2622064f 884 moff = mem_ref_offset (base1);
9a8b9e26 885 moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
cf8f0e63 886 if (moff.is_negative ())
887 offset2p += (-moff).low;
2622064f 888 else
889 offset1p += moff.low;
182cf5a9 890
dd277d48 891 /* If only one reference is based on a variable, they cannot alias if
892 the pointer access is beyond the extent of the variable access.
893 (the pointer base cannot validly point to an offset less than zero
894 of the variable).
a07f6629 895 ??? IVOPTs creates bases that do not honor this restriction,
896 so do not apply this optimization for TARGET_MEM_REFs. */
897 if (TREE_CODE (base1) != TARGET_MEM_REF
9a14ba4f 898 && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
dd277d48 899 return false;
a07f6629 900 /* They also cannot alias if the pointer may not point to the decl. */
dd277d48 901 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
902 return false;
903
904 /* Disambiguations that rely on strict aliasing rules follow. */
182cf5a9 905 if (!flag_strict_aliasing || !tbaa_p)
dd277d48 906 return true;
907
2622064f 908 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
182cf5a9 909
dd277d48 910 /* If the alias set for a pointer access is zero all bets are off. */
911 if (base1_alias_set == -1)
182cf5a9 912 base1_alias_set = get_deref_alias_set (ptrtype1);
dd277d48 913 if (base1_alias_set == 0)
914 return true;
915 if (base2_alias_set == -1)
916 base2_alias_set = get_alias_set (base2);
917
182cf5a9 918 /* When we are trying to disambiguate an access with a pointer dereference
919 as base versus one with a decl as base we can use both the size
920 of the decl and its dynamic type for extra disambiguation.
921 ??? We do not know anything about the dynamic type of the decl
922 other than that its alias-set contains base2_alias_set as a subset
923 which does not help us here. */
924 /* As we know nothing useful about the dynamic type of the decl just
925 use the usual conflict check rather than a subset test.
926 ??? We could introduce -fvery-strict-aliasing when the language
927 does not allow decls to have a dynamic type that differs from their
928 static type. Then we can check
929 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
dd277d48 930 if (base1_alias_set != base2_alias_set
182cf5a9 931 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
932 return false;
933 /* If the size of the access relevant for TBAA through the pointer
934 is bigger than the size of the decl we can't possibly access the
935 decl via that pointer. */
936 if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
937 && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
938 && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
939 /* ??? This in turn may run afoul when a decl of type T which is
940 a member of union type U is accessed through a pointer to
941 type U and sizeof T is smaller than sizeof U. */
942 && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
943 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
944 && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
dd277d48 945 return false;
946
061b1263 947 if (!ref2)
948 return true;
949
a07f6629 950 /* If the decl is accessed via a MEM_REF, reconstruct the base
061b1263 951 we can use for TBAA and an appropriately adjusted offset. */
952 dbase2 = ref2;
953 while (handled_component_p (dbase2))
954 dbase2 = TREE_OPERAND (dbase2, 0);
955 doffset1 = offset1;
956 doffset2 = offset2;
957 if (TREE_CODE (dbase2) == MEM_REF
958 || TREE_CODE (dbase2) == TARGET_MEM_REF)
959 {
960 double_int moff = mem_ref_offset (dbase2);
9a8b9e26 961 moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
cf8f0e63 962 if (moff.is_negative ())
963 doffset1 -= (-moff).low;
061b1263 964 else
965 doffset2 -= moff.low;
966 }
967
968 /* If either reference is view-converted, give up now. */
969 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
5ef4302d 970 || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
061b1263 971 return true;
972
973 /* If both references are through the same type, they do not alias
974 if the accesses do not overlap. This does extra disambiguation
975 for mixed/pointer accesses but requires strict aliasing.
976 For MEM_REFs we require that the component-ref offset we computed
977 is relative to the start of the type which we ensure by
978 comparing rvalue and access type and disregarding the constant
979 pointer offset. */
980 if ((TREE_CODE (base1) != TARGET_MEM_REF
981 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
982 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
983 return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);
984
dd277d48 985 /* Do access-path based disambiguation. */
986 if (ref1 && ref2
061b1263 987 && (handled_component_p (ref1) || handled_component_p (ref2)))
0d2f7431 988 return aliasing_component_refs_p (ref1,
ac30e3b2 989 ref1_alias_set, base1_alias_set,
a42ce2cc 990 offset1, max_size1,
0d2f7431 991 ref2,
ac30e3b2 992 ref2_alias_set, base2_alias_set,
993 offset2, max_size2, true);
dd277d48 994
995 return true;
996}
997
998/* Return true if two indirect references based on *PTR1
077ad5ad 999 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1000 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
dd277d48 1001 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1002 in which case they are computed on-demand. REF1 and REF2
1003 if non-NULL are the complete memory reference trees. */
1004
1005static bool
182cf5a9 1006indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
dd277d48 1007 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
ac30e3b2 1008 alias_set_type ref1_alias_set,
dd277d48 1009 alias_set_type base1_alias_set,
182cf5a9 1010 tree ref2 ATTRIBUTE_UNUSED, tree base2,
dd277d48 1011 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
ac30e3b2 1012 alias_set_type ref2_alias_set,
182cf5a9 1013 alias_set_type base2_alias_set, bool tbaa_p)
dd277d48 1014{
9a14ba4f 1015 tree ptr1;
1016 tree ptr2;
182cf5a9 1017 tree ptrtype1, ptrtype2;
1018
2622064f 1019 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1020 || TREE_CODE (base1) == TARGET_MEM_REF)
1021 && (TREE_CODE (base2) == MEM_REF
1022 || TREE_CODE (base2) == TARGET_MEM_REF));
1023
28daba6f 1024 ptr1 = TREE_OPERAND (base1, 0);
1025 ptr2 = TREE_OPERAND (base2, 0);
9a14ba4f 1026
dd277d48 1027 /* If both bases are based on pointers they cannot alias if they may not
1028 point to the same memory object or if they point to the same object
1029 and the accesses do not overlap. */
b5c3c805 1030 if ((!cfun || gimple_in_ssa_p (cfun))
9a14ba4f 1031 && operand_equal_p (ptr1, ptr2, 0)
1032 && (((TREE_CODE (base1) != TARGET_MEM_REF
88be3999 1033 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
9a14ba4f 1034 && (TREE_CODE (base2) != TARGET_MEM_REF
88be3999 1035 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
9a14ba4f 1036 || (TREE_CODE (base1) == TARGET_MEM_REF
1037 && TREE_CODE (base2) == TARGET_MEM_REF
1038 && (TMR_STEP (base1) == TMR_STEP (base2)
1039 || (TMR_STEP (base1) && TMR_STEP (base2)
1040 && operand_equal_p (TMR_STEP (base1),
1041 TMR_STEP (base2), 0)))
88be3999 1042 && (TMR_INDEX (base1) == TMR_INDEX (base2)
1043 || (TMR_INDEX (base1) && TMR_INDEX (base2)
1044 && operand_equal_p (TMR_INDEX (base1),
1045 TMR_INDEX (base2), 0)))
1046 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
1047 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
1048 && operand_equal_p (TMR_INDEX2 (base1),
1049 TMR_INDEX2 (base2), 0))))))
182cf5a9 1050 {
2622064f 1051 double_int moff;
7749ee1c 1052 /* The offset embedded in MEM_REFs can be negative. Bias them
1053 so that the resulting offset adjustment is positive. */
2622064f 1054 moff = mem_ref_offset (base1);
9a8b9e26 1055 moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
cf8f0e63 1056 if (moff.is_negative ())
1057 offset2 += (-moff).low;
2622064f 1058 else
1059 offset1 += moff.low;
1060 moff = mem_ref_offset (base2);
9a8b9e26 1061 moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
cf8f0e63 1062 if (moff.is_negative ())
1063 offset1 += (-moff).low;
2622064f 1064 else
1065 offset2 += moff.low;
182cf5a9 1066 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
1067 }
dd277d48 1068 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
1069 return false;
1070
1071 /* Disambiguations that rely on strict aliasing rules follow. */
182cf5a9 1072 if (!flag_strict_aliasing || !tbaa_p)
dd277d48 1073 return true;
1074
2622064f 1075 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1076 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
182cf5a9 1077
dd277d48 1078 /* If the alias set for a pointer access is zero all bets are off. */
1079 if (base1_alias_set == -1)
182cf5a9 1080 base1_alias_set = get_deref_alias_set (ptrtype1);
dd277d48 1081 if (base1_alias_set == 0)
1082 return true;
1083 if (base2_alias_set == -1)
182cf5a9 1084 base2_alias_set = get_deref_alias_set (ptrtype2);
dd277d48 1085 if (base2_alias_set == 0)
1086 return true;
1087
1088 /* If both references are through the same type, they do not alias
1089 if the accesses do not overlap. This does extra disambiguation
1090 for mixed/pointer accesses but requires strict aliasing. */
9c455d1e 1091 if ((TREE_CODE (base1) != TARGET_MEM_REF
1092 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1093 && (TREE_CODE (base2) != TARGET_MEM_REF
1094 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
1095 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
1096 && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
182cf5a9 1097 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
1098 TREE_TYPE (ptrtype2)) == 1)
dd277d48 1099 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
1100
1101 /* Do type-based disambiguation. */
1102 if (base1_alias_set != base2_alias_set
1103 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1104 return false;
1105
1106 /* Do access-path based disambiguation. */
1107 if (ref1 && ref2
9c455d1e 1108 && (handled_component_p (ref1) || handled_component_p (ref2))
1109 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
1110 && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1)
0d2f7431 1111 return aliasing_component_refs_p (ref1,
ac30e3b2 1112 ref1_alias_set, base1_alias_set,
a42ce2cc 1113 offset1, max_size1,
0d2f7431 1114 ref2,
ac30e3b2 1115 ref2_alias_set, base2_alias_set,
1116 offset2, max_size2, false);
dd277d48 1117
1118 return true;
1119}
1120
1121/* Return true, if the two memory references REF1 and REF2 may alias. */
1122
3a443843 1123bool
3918bd18 1124refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
dd277d48 1125{
1126 tree base1, base2;
1127 HOST_WIDE_INT offset1 = 0, offset2 = 0;
dd277d48 1128 HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
1129 bool var1_p, var2_p, ind1_p, ind2_p;
1130
c78d5cec 1131 gcc_checking_assert ((!ref1->ref
8e7c2b9c 1132 || TREE_CODE (ref1->ref) == SSA_NAME
c78d5cec 1133 || DECL_P (ref1->ref)
2760c0e3 1134 || TREE_CODE (ref1->ref) == STRING_CST
c78d5cec 1135 || handled_component_p (ref1->ref)
182cf5a9 1136 || TREE_CODE (ref1->ref) == MEM_REF
c78d5cec 1137 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
1138 && (!ref2->ref
8e7c2b9c 1139 || TREE_CODE (ref2->ref) == SSA_NAME
c78d5cec 1140 || DECL_P (ref2->ref)
2760c0e3 1141 || TREE_CODE (ref2->ref) == STRING_CST
c78d5cec 1142 || handled_component_p (ref2->ref)
182cf5a9 1143 || TREE_CODE (ref2->ref) == MEM_REF
c78d5cec 1144 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
dd277d48 1145
dd277d48 1146 /* Decompose the references into their base objects and the access. */
3918bd18 1147 base1 = ao_ref_base (ref1);
1148 offset1 = ref1->offset;
3918bd18 1149 max_size1 = ref1->max_size;
1150 base2 = ao_ref_base (ref2);
1151 offset2 = ref2->offset;
3918bd18 1152 max_size2 = ref2->max_size;
dd277d48 1153
1154 /* We can end up with registers or constants as bases for example from
1155 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1156 which is seen as a struct copy. */
1157 if (TREE_CODE (base1) == SSA_NAME
119147d1 1158 || TREE_CODE (base1) == CONST_DECL
66b86a74 1159 || TREE_CODE (base1) == CONSTRUCTOR
1160 || TREE_CODE (base1) == ADDR_EXPR
1161 || CONSTANT_CLASS_P (base1)
1162 || TREE_CODE (base2) == SSA_NAME
119147d1 1163 || TREE_CODE (base2) == CONST_DECL
66b86a74 1164 || TREE_CODE (base2) == CONSTRUCTOR
1165 || TREE_CODE (base2) == ADDR_EXPR
1166 || CONSTANT_CLASS_P (base2))
dd277d48 1167 return false;
1168
04ec15fa 1169 /* We can end up referring to code via function and label decls.
c78d5cec 1170 As we likely do not properly track code aliases, conservatively
 1171 bail out. */
ceefbccc 1172 if (TREE_CODE (base1) == FUNCTION_DECL
c78d5cec 1173 || TREE_CODE (base1) == LABEL_DECL
66b86a74 1174 || TREE_CODE (base2) == FUNCTION_DECL
c78d5cec 1175 || TREE_CODE (base2) == LABEL_DECL)
ceefbccc 1176 return true;
1177
3787db52 1178 /* Two volatile accesses always conflict. */
1179 if (ref1->volatile_p
1180 && ref2->volatile_p)
1181 return true;
1182
090a8c65 1183 /* Defer to simple offset based disambiguation if we have
 1184 references based on two decls. Do this before deferring to
1185 TBAA to handle must-alias cases in conformance with the
1186 GCC extension of allowing type-punning through unions. */
2622064f 1187 var1_p = DECL_P (base1);
1188 var2_p = DECL_P (base2);
dd277d48 1189 if (var1_p && var2_p)
740a2cb6 1190 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
1191 ref2->ref, base2, offset2, max_size2);
090a8c65 1192
2622064f 1193 ind1_p = (TREE_CODE (base1) == MEM_REF
1194 || TREE_CODE (base1) == TARGET_MEM_REF);
1195 ind2_p = (TREE_CODE (base2) == MEM_REF
1196 || TREE_CODE (base2) == TARGET_MEM_REF);
182cf5a9 1197
67817f0f 1198 /* Canonicalize the pointer-vs-decl case. */
1199 if (ind1_p && var2_p)
1200 {
1201 HOST_WIDE_INT tmp1;
1202 tree tmp2;
1203 ao_ref *tmp3;
1204 tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
1205 tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
1206 tmp2 = base1; base1 = base2; base2 = tmp2;
1207 tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
1208 var1_p = true;
1209 ind1_p = false;
1210 var2_p = false;
1211 ind2_p = true;
1212 }
1213
090a8c65 1214 /* First defer to TBAA if possible. */
2a3ebafa 1215 if (tbaa_p
1216 && flag_strict_aliasing
3918bd18 1217 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
1218 ao_ref_alias_set (ref2)))
090a8c65 1219 return false;
1220
090a8c65 1221 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
090a8c65 1222 if (var1_p && ind2_p)
182cf5a9 1223 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
ac30e3b2 1224 offset2, max_size2,
182cf5a9 1225 ao_ref_alias_set (ref2), -1,
3918bd18 1226 ref1->ref, base1,
ac30e3b2 1227 offset1, max_size1,
182cf5a9 1228 ao_ref_alias_set (ref1),
1229 ao_ref_base_alias_set (ref1),
1230 tbaa_p);
dd277d48 1231 else if (ind1_p && ind2_p)
182cf5a9 1232 return indirect_refs_may_alias_p (ref1->ref, base1,
ac30e3b2 1233 offset1, max_size1,
182cf5a9 1234 ao_ref_alias_set (ref1), -1,
1235 ref2->ref, base2,
ac30e3b2 1236 offset2, max_size2,
182cf5a9 1237 ao_ref_alias_set (ref2), -1,
1238 tbaa_p);
dd277d48 1239
9c01641d 1240 /* We really do not want to end up here, but returning true is safe. */
1241#ifdef ENABLE_CHECKING
dd277d48 1242 gcc_unreachable ();
9c01641d 1243#else
1244 return true;
1245#endif
dd277d48 1246}
1247
1248bool
1249refs_may_alias_p (tree ref1, tree ref2)
1250{
3918bd18 1251 ao_ref r1, r2;
1252 bool res;
1253 ao_ref_init (&r1, ref1);
1254 ao_ref_init (&r2, ref2);
1255 res = refs_may_alias_p_1 (&r1, &r2, true);
dd277d48 1256 if (res)
1257 ++alias_stats.refs_may_alias_p_may_alias;
1258 else
1259 ++alias_stats.refs_may_alias_p_no_alias;
1260 return res;
1261}
1262
2a3ebafa 1263/* Returns true if there is an anti-dependence for the STORE that
1264 executes after the LOAD. */
1265
1266bool
1267refs_anti_dependent_p (tree load, tree store)
1268{
3918bd18 1269 ao_ref r1, r2;
1270 ao_ref_init (&r1, load);
1271 ao_ref_init (&r2, store);
1272 return refs_may_alias_p_1 (&r1, &r2, false);
2a3ebafa 1273}
1274
 1275/* Returns true if there is an output dependence for the stores
1276 STORE1 and STORE2. */
1277
1278bool
1279refs_output_dependent_p (tree store1, tree store2)
1280{
3918bd18 1281 ao_ref r1, r2;
1282 ao_ref_init (&r1, store1);
1283 ao_ref_init (&r2, store2);
1284 return refs_may_alias_p_1 (&r1, &r2, false);
2a3ebafa 1285}
dd277d48 1286
1287/* If the call CALL may use the memory reference REF return true,
1288 otherwise return false. */
1289
1290static bool
134899ac 1291ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
dd277d48 1292{
047fdd47 1293 tree base, callee;
dd277d48 1294 unsigned i;
1295 int flags = gimple_call_flags (call);
1296
1297 /* Const functions without a static chain do not implicitly use memory. */
1298 if (!gimple_call_chain (call)
1299 && (flags & (ECF_CONST|ECF_NOVOPS)))
1300 goto process_args;
1301
134899ac 1302 base = ao_ref_base (ref);
917f08fa 1303 if (!base)
dd277d48 1304 return true;
1305
3787db52 1306 /* A call that is not without side-effects might involve volatile
1307 accesses and thus conflicts with all other volatile accesses. */
1308 if (ref->volatile_p)
1309 return true;
1310
09347e88 1311 /* If the reference is based on a decl that is not aliased the call
1312 cannot possibly use it. */
1313 if (DECL_P (base)
1314 && !may_be_aliased (base)
7f7f16d4 1315 /* But local statics can be used through recursion. */
1316 && !is_global_var (base))
09347e88 1317 goto process_args;
1318
047fdd47 1319 callee = gimple_call_fndecl (call);
1320
1321 /* Handle those builtin functions explicitly that do not act as
1322 escape points. See tree-ssa-structalias.c:find_func_aliases
1323 for the list of builtins we might need to handle here. */
1324 if (callee != NULL_TREE
1325 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1326 switch (DECL_FUNCTION_CODE (callee))
1327 {
77efe819 1328 /* All the following functions read memory pointed to by
1329 their second argument. strcat/strncat additionally
1330 reads memory pointed to by the first argument. */
1331 case BUILT_IN_STRCAT:
1332 case BUILT_IN_STRNCAT:
1333 {
1334 ao_ref dref;
1335 ao_ref_init_from_ptr_and_size (&dref,
1336 gimple_call_arg (call, 0),
1337 NULL_TREE);
1338 if (refs_may_alias_p_1 (&dref, ref, false))
1339 return true;
1340 }
1341 /* FALLTHRU */
047fdd47 1342 case BUILT_IN_STRCPY:
1343 case BUILT_IN_STRNCPY:
047fdd47 1344 case BUILT_IN_MEMCPY:
1345 case BUILT_IN_MEMMOVE:
1346 case BUILT_IN_MEMPCPY:
1347 case BUILT_IN_STPCPY:
1348 case BUILT_IN_STPNCPY:
4c0315d0 1349 case BUILT_IN_TM_MEMCPY:
1350 case BUILT_IN_TM_MEMMOVE:
047fdd47 1351 {
134899ac 1352 ao_ref dref;
1353 tree size = NULL_TREE;
1354 if (gimple_call_num_args (call) == 3)
1355 size = gimple_call_arg (call, 2);
1356 ao_ref_init_from_ptr_and_size (&dref,
1357 gimple_call_arg (call, 1),
1358 size);
1359 return refs_may_alias_p_1 (&dref, ref, false);
047fdd47 1360 }
77efe819 1361 case BUILT_IN_STRCAT_CHK:
1362 case BUILT_IN_STRNCAT_CHK:
1363 {
1364 ao_ref dref;
1365 ao_ref_init_from_ptr_and_size (&dref,
1366 gimple_call_arg (call, 0),
1367 NULL_TREE);
1368 if (refs_may_alias_p_1 (&dref, ref, false))
1369 return true;
1370 }
1371 /* FALLTHRU */
939514e9 1372 case BUILT_IN_STRCPY_CHK:
1373 case BUILT_IN_STRNCPY_CHK:
1374 case BUILT_IN_MEMCPY_CHK:
1375 case BUILT_IN_MEMMOVE_CHK:
1376 case BUILT_IN_MEMPCPY_CHK:
1377 case BUILT_IN_STPCPY_CHK:
1063acde 1378 case BUILT_IN_STPNCPY_CHK:
939514e9 1379 {
1380 ao_ref dref;
1381 tree size = NULL_TREE;
1382 if (gimple_call_num_args (call) == 4)
1383 size = gimple_call_arg (call, 2);
1384 ao_ref_init_from_ptr_and_size (&dref,
1385 gimple_call_arg (call, 1),
1386 size);
1387 return refs_may_alias_p_1 (&dref, ref, false);
1388 }
119147d1 1389 case BUILT_IN_BCOPY:
1390 {
1391 ao_ref dref;
1392 tree size = gimple_call_arg (call, 2);
1393 ao_ref_init_from_ptr_and_size (&dref,
1394 gimple_call_arg (call, 0),
1395 size);
1396 return refs_may_alias_p_1 (&dref, ref, false);
1397 }
4c0315d0 1398
1399 /* The following functions read memory pointed to by their
1400 first argument. */
1401 CASE_BUILT_IN_TM_LOAD (1):
1402 CASE_BUILT_IN_TM_LOAD (2):
1403 CASE_BUILT_IN_TM_LOAD (4):
1404 CASE_BUILT_IN_TM_LOAD (8):
1405 CASE_BUILT_IN_TM_LOAD (FLOAT):
1406 CASE_BUILT_IN_TM_LOAD (DOUBLE):
1407 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
1408 CASE_BUILT_IN_TM_LOAD (M64):
1409 CASE_BUILT_IN_TM_LOAD (M128):
1410 CASE_BUILT_IN_TM_LOAD (M256):
1411 case BUILT_IN_TM_LOG:
1412 case BUILT_IN_TM_LOG_1:
1413 case BUILT_IN_TM_LOG_2:
1414 case BUILT_IN_TM_LOG_4:
1415 case BUILT_IN_TM_LOG_8:
1416 case BUILT_IN_TM_LOG_FLOAT:
1417 case BUILT_IN_TM_LOG_DOUBLE:
1418 case BUILT_IN_TM_LOG_LDOUBLE:
1419 case BUILT_IN_TM_LOG_M64:
1420 case BUILT_IN_TM_LOG_M128:
1421 case BUILT_IN_TM_LOG_M256:
1422 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
1423
77efe819 1424 /* These read memory pointed to by the first argument. */
1425 case BUILT_IN_STRDUP:
1426 case BUILT_IN_STRNDUP:
1427 {
1428 ao_ref dref;
1429 tree size = NULL_TREE;
1430 if (gimple_call_num_args (call) == 2)
1431 size = gimple_call_arg (call, 1);
1432 ao_ref_init_from_ptr_and_size (&dref,
1433 gimple_call_arg (call, 0),
1434 size);
1435 return refs_may_alias_p_1 (&dref, ref, false);
1436 }
4953672f 1437 /* These read memory pointed to by the first argument. */
1438 case BUILT_IN_INDEX:
1439 case BUILT_IN_STRCHR:
1440 case BUILT_IN_STRRCHR:
1441 {
1442 ao_ref dref;
1443 ao_ref_init_from_ptr_and_size (&dref,
1444 gimple_call_arg (call, 0),
1445 NULL_TREE);
1446 return refs_may_alias_p_1 (&dref, ref, false);
1447 }
1448 /* These read memory pointed to by the first argument with size
1449 in the third argument. */
1450 case BUILT_IN_MEMCHR:
1451 {
1452 ao_ref dref;
1453 ao_ref_init_from_ptr_and_size (&dref,
1454 gimple_call_arg (call, 0),
1455 gimple_call_arg (call, 2));
1456 return refs_may_alias_p_1 (&dref, ref, false);
1457 }
1458 /* These read memory pointed to by the first and second arguments. */
1459 case BUILT_IN_STRSTR:
1460 case BUILT_IN_STRPBRK:
1461 {
1462 ao_ref dref;
1463 ao_ref_init_from_ptr_and_size (&dref,
1464 gimple_call_arg (call, 0),
1465 NULL_TREE);
1466 if (refs_may_alias_p_1 (&dref, ref, false))
1467 return true;
1468 ao_ref_init_from_ptr_and_size (&dref,
1469 gimple_call_arg (call, 1),
1470 NULL_TREE);
1471 return refs_may_alias_p_1 (&dref, ref, false);
1472 }
1473
09d9c774 1474 /* The following builtins do not read from memory. */
1475 case BUILT_IN_FREE:
0d3ca08e 1476 case BUILT_IN_MALLOC:
4ef43bbd 1477 case BUILT_IN_CALLOC:
ca793672 1478 case BUILT_IN_ALLOCA:
581bf1c2 1479 case BUILT_IN_ALLOCA_WITH_ALIGN:
ca793672 1480 case BUILT_IN_STACK_SAVE:
1481 case BUILT_IN_STACK_RESTORE:
09d9c774 1482 case BUILT_IN_MEMSET:
4c0315d0 1483 case BUILT_IN_TM_MEMSET:
939514e9 1484 case BUILT_IN_MEMSET_CHK:
09d9c774 1485 case BUILT_IN_FREXP:
1486 case BUILT_IN_FREXPF:
1487 case BUILT_IN_FREXPL:
1488 case BUILT_IN_GAMMA_R:
1489 case BUILT_IN_GAMMAF_R:
1490 case BUILT_IN_GAMMAL_R:
1491 case BUILT_IN_LGAMMA_R:
1492 case BUILT_IN_LGAMMAF_R:
1493 case BUILT_IN_LGAMMAL_R:
1494 case BUILT_IN_MODF:
1495 case BUILT_IN_MODFF:
1496 case BUILT_IN_MODFL:
1497 case BUILT_IN_REMQUO:
1498 case BUILT_IN_REMQUOF:
1499 case BUILT_IN_REMQUOL:
1500 case BUILT_IN_SINCOS:
1501 case BUILT_IN_SINCOSF:
1502 case BUILT_IN_SINCOSL:
fca0886c 1503 case BUILT_IN_ASSUME_ALIGNED:
9d17b4a8 1504 case BUILT_IN_VA_END:
09d9c774 1505 return false;
a44814b9 1506 /* __sync_* builtins and some OpenMP builtins act as threading
1507 barriers. */
1508#undef DEF_SYNC_BUILTIN
1509#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1510#include "sync-builtins.def"
1511#undef DEF_SYNC_BUILTIN
1512 case BUILT_IN_GOMP_ATOMIC_START:
1513 case BUILT_IN_GOMP_ATOMIC_END:
1514 case BUILT_IN_GOMP_BARRIER:
1515 case BUILT_IN_GOMP_TASKWAIT:
1516 case BUILT_IN_GOMP_CRITICAL_START:
1517 case BUILT_IN_GOMP_CRITICAL_END:
1518 case BUILT_IN_GOMP_CRITICAL_NAME_START:
1519 case BUILT_IN_GOMP_CRITICAL_NAME_END:
1520 case BUILT_IN_GOMP_LOOP_END:
1521 case BUILT_IN_GOMP_ORDERED_START:
1522 case BUILT_IN_GOMP_ORDERED_END:
1523 case BUILT_IN_GOMP_PARALLEL_END:
1524 case BUILT_IN_GOMP_SECTIONS_END:
1525 case BUILT_IN_GOMP_SINGLE_COPY_START:
1526 case BUILT_IN_GOMP_SINGLE_COPY_END:
1527 return true;
09d9c774 1528
047fdd47 1529 default:
1530 /* Fallthru to general call handling. */;
1531 }
1532
dd277d48 1533 /* Check if base is a global static variable that is not read
1534 by the function. */
924de091 1535 if (callee != NULL_TREE
1536 && TREE_CODE (base) == VAR_DECL
d97be713 1537 && TREE_STATIC (base))
4ee9c684 1538 {
924de091 1539 struct cgraph_node *node = cgraph_get_node (callee);
dd277d48 1540 bitmap not_read;
1541
924de091 1542 /* FIXME: Callee can be an OMP builtin that does not have a call graph
1543 node yet. We should enforce that there are nodes for all decls in the
1544 IL and remove this check instead. */
1545 if (node
1546 && (not_read = ipa_reference_get_not_read_global (node))
dd277d48 1547 && bitmap_bit_p (not_read, DECL_UID (base)))
1548 goto process_args;
4ee9c684 1549 }
1550
cb245216 1551 /* Check if the base variable is call-used. */
1552 if (DECL_P (base))
4ee9c684 1553 {
cb245216 1554 if (pt_solution_includes (gimple_call_use_set (call), base))
dd277d48 1555 return true;
1556 }
2622064f 1557 else if ((TREE_CODE (base) == MEM_REF
1558 || TREE_CODE (base) == TARGET_MEM_REF)
cb245216 1559 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
dd277d48 1560 {
cb245216 1561 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1562 if (!pi)
1563 return true;
917f08fa 1564
cb245216 1565 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
dd277d48 1566 return true;
1567 }
cb245216 1568 else
1569 return true;
dd277d48 1570
1571 /* Inspect call arguments for passed-by-value aliases. */
1572process_args:
1573 for (i = 0; i < gimple_call_num_args (call); ++i)
1574 {
1575 tree op = gimple_call_arg (call, i);
8ce86007 1576 int flags = gimple_call_arg_flags (call, i);
1577
1578 if (flags & EAF_UNUSED)
1579 continue;
dd277d48 1580
dd277d48 1581 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1582 op = TREE_OPERAND (op, 0);
4ee9c684 1583
dd277d48 1584 if (TREE_CODE (op) != SSA_NAME
134899ac 1585 && !is_gimple_min_invariant (op))
1586 {
1587 ao_ref r;
1588 ao_ref_init (&r, op);
1589 if (refs_may_alias_p_1 (&r, ref, true))
1590 return true;
1591 }
4ee9c684 1592 }
1593
dd277d48 1594 return false;
1595}
1596
1597static bool
1598ref_maybe_used_by_call_p (gimple call, tree ref)
1599{
134899ac 1600 ao_ref r;
1601 bool res;
1602 ao_ref_init (&r, ref);
1603 res = ref_maybe_used_by_call_p_1 (call, &r);
dd277d48 1604 if (res)
1605 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1606 else
1607 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1608 return res;
4ee9c684 1609}
1610
1611
dd277d48 1612/* If the statement STMT may use the memory reference REF return
1613 true, otherwise return false. */
4ee9c684 1614
dd277d48 1615bool
1616ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
4ee9c684 1617{
dd277d48 1618 if (is_gimple_assign (stmt))
1619 {
1620 tree rhs;
1621
 1622      /* Assignments that access memory are always single (have a single RHS). */
1623 if (!gimple_assign_single_p (stmt))
1624 return false;
1625
1626 rhs = gimple_assign_rhs1 (stmt);
1627 if (is_gimple_reg (rhs)
1628 || is_gimple_min_invariant (rhs)
1629 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1630 return false;
1631
1632 return refs_may_alias_p (rhs, ref);
1633 }
1634 else if (is_gimple_call (stmt))
1635 return ref_maybe_used_by_call_p (stmt, ref);
91234829 1636 else if (gimple_code (stmt) == GIMPLE_RETURN)
1637 {
1638 tree retval = gimple_return_retval (stmt);
1639 tree base;
1640 if (retval
1641 && TREE_CODE (retval) != SSA_NAME
1642 && !is_gimple_min_invariant (retval)
1643 && refs_may_alias_p (retval, ref))
1644 return true;
1645 /* If ref escapes the function then the return acts as a use. */
1646 base = get_base_address (ref);
1647 if (!base)
1648 ;
1649 else if (DECL_P (base))
1650 return is_global_var (base);
1651 else if (TREE_CODE (base) == MEM_REF
1652 || TREE_CODE (base) == TARGET_MEM_REF)
1653 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1654 return false;
1655 }
dd277d48 1656
1657 return true;
4ee9c684 1658}
1659
dd277d48 1660/* If the call in statement CALL may clobber the memory reference REF
1661 return true, otherwise return false. */
4ee9c684 1662
dd277d48 1663static bool
3918bd18 1664call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
4ee9c684 1665{
7f7f16d4 1666 tree base;
047fdd47 1667 tree callee;
dd277d48 1668
1669 /* If the call is pure or const it cannot clobber anything. */
1670 if (gimple_call_flags (call)
1671 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1672 return false;
1673
3918bd18 1674 base = ao_ref_base (ref);
dd277d48 1675 if (!base)
1676 return true;
1677
1678 if (TREE_CODE (base) == SSA_NAME
1679 || CONSTANT_CLASS_P (base))
1680 return false;
1681
3787db52 1682  /* A call that is not free of side-effects might involve volatile
 1683     accesses and thus conflict with all other volatile accesses. */
1684 if (ref->volatile_p)
1685 return true;
1686
09347e88 1687 /* If the reference is based on a decl that is not aliased the call
1688 cannot possibly clobber it. */
1689 if (DECL_P (base)
1690 && !may_be_aliased (base)
7f7f16d4 1691 /* But local non-readonly statics can be modified through recursion
1692 or the call may implement a threading barrier which we must
1693 treat as may-def. */
09347e88 1694 && (TREE_READONLY (base)
7f7f16d4 1695 || !is_global_var (base)))
09347e88 1696 return false;
1697
047fdd47 1698 callee = gimple_call_fndecl (call);
1699
 1700  /* Explicitly handle those builtin functions that do not act as
1701 escape points. See tree-ssa-structalias.c:find_func_aliases
1702 for the list of builtins we might need to handle here. */
1703 if (callee != NULL_TREE
1704 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1705 switch (DECL_FUNCTION_CODE (callee))
1706 {
1707 /* All the following functions clobber memory pointed to by
1708 their first argument. */
1709 case BUILT_IN_STRCPY:
1710 case BUILT_IN_STRNCPY:
047fdd47 1711 case BUILT_IN_MEMCPY:
1712 case BUILT_IN_MEMMOVE:
1713 case BUILT_IN_MEMPCPY:
1714 case BUILT_IN_STPCPY:
1715 case BUILT_IN_STPNCPY:
1716 case BUILT_IN_STRCAT:
1717 case BUILT_IN_STRNCAT:
134899ac 1718 case BUILT_IN_MEMSET:
4c0315d0 1719 case BUILT_IN_TM_MEMSET:
1720 CASE_BUILT_IN_TM_STORE (1):
1721 CASE_BUILT_IN_TM_STORE (2):
1722 CASE_BUILT_IN_TM_STORE (4):
1723 CASE_BUILT_IN_TM_STORE (8):
1724 CASE_BUILT_IN_TM_STORE (FLOAT):
1725 CASE_BUILT_IN_TM_STORE (DOUBLE):
1726 CASE_BUILT_IN_TM_STORE (LDOUBLE):
1727 CASE_BUILT_IN_TM_STORE (M64):
1728 CASE_BUILT_IN_TM_STORE (M128):
1729 CASE_BUILT_IN_TM_STORE (M256):
1730 case BUILT_IN_TM_MEMCPY:
1731 case BUILT_IN_TM_MEMMOVE:
047fdd47 1732 {
134899ac 1733 ao_ref dref;
1734 tree size = NULL_TREE;
77efe819 1735	/* Don't pass in a size for strncat: the maximum size affected
 1736	   is strlen (dest) + n + 1 bytes from dest, i.e.
 1737	   n + 1 bytes at dest + strlen (dest), but strlen (dest) isn't
 1738	   known. */
1739 if (gimple_call_num_args (call) == 3
1740 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
134899ac 1741 size = gimple_call_arg (call, 2);
1742 ao_ref_init_from_ptr_and_size (&dref,
1743 gimple_call_arg (call, 0),
1744 size);
1745 return refs_may_alias_p_1 (&dref, ref, false);
047fdd47 1746 }
939514e9 1747 case BUILT_IN_STRCPY_CHK:
1748 case BUILT_IN_STRNCPY_CHK:
1749 case BUILT_IN_MEMCPY_CHK:
1750 case BUILT_IN_MEMMOVE_CHK:
1751 case BUILT_IN_MEMPCPY_CHK:
1752 case BUILT_IN_STPCPY_CHK:
1063acde 1753 case BUILT_IN_STPNCPY_CHK:
939514e9 1754 case BUILT_IN_STRCAT_CHK:
1755 case BUILT_IN_STRNCAT_CHK:
1756 case BUILT_IN_MEMSET_CHK:
1757 {
1758 ao_ref dref;
1759 tree size = NULL_TREE;
77efe819 1760	/* Don't pass in a size for __strncat_chk: the maximum size affected
 1761	   is strlen (dest) + n + 1 bytes from dest, i.e.
 1762	   n + 1 bytes at dest + strlen (dest), but strlen (dest) isn't
 1763	   known. */
1764 if (gimple_call_num_args (call) == 4
1765 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
939514e9 1766 size = gimple_call_arg (call, 2);
1767 ao_ref_init_from_ptr_and_size (&dref,
1768 gimple_call_arg (call, 0),
1769 size);
1770 return refs_may_alias_p_1 (&dref, ref, false);
1771 }
119147d1 1772 case BUILT_IN_BCOPY:
1773 {
1774 ao_ref dref;
1775 tree size = gimple_call_arg (call, 2);
1776 ao_ref_init_from_ptr_and_size (&dref,
1777 gimple_call_arg (call, 1),
1778 size);
1779 return refs_may_alias_p_1 (&dref, ref, false);
1780 }
0d3ca08e 1781 /* Allocating memory does not have any side-effects apart from
1782 being the definition point for the pointer. */
1783 case BUILT_IN_MALLOC:
4ef43bbd 1784 case BUILT_IN_CALLOC:
77efe819 1785 case BUILT_IN_STRDUP:
1786 case BUILT_IN_STRNDUP:
be97d4b6 1787 /* Unix98 specifies that errno is set on allocation failure. */
a29c5f9c 1788 if (flag_errno_math
be97d4b6 1789 && targetm.ref_may_alias_errno (ref))
1790 return true;
0d3ca08e 1791 return false;
ca793672 1792 case BUILT_IN_STACK_SAVE:
1793 case BUILT_IN_ALLOCA:
581bf1c2 1794 case BUILT_IN_ALLOCA_WITH_ALIGN:
fca0886c 1795 case BUILT_IN_ASSUME_ALIGNED:
ca793672 1796 return false;
047fdd47 1797 /* Freeing memory kills the pointed-to memory. More importantly
1798 the call has to serve as a barrier for moving loads and stores
134899ac 1799 across it. */
047fdd47 1800 case BUILT_IN_FREE:
9d17b4a8 1801 case BUILT_IN_VA_END:
047fdd47 1802 {
1803 tree ptr = gimple_call_arg (call, 0);
1804 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
1805 }
047fdd47 1806 case BUILT_IN_GAMMA_R:
1807 case BUILT_IN_GAMMAF_R:
1808 case BUILT_IN_GAMMAL_R:
1809 case BUILT_IN_LGAMMA_R:
1810 case BUILT_IN_LGAMMAF_R:
1811 case BUILT_IN_LGAMMAL_R:
09d9c774 1812 {
1813 tree out = gimple_call_arg (call, 1);
1814 if (ptr_deref_may_alias_ref_p_1 (out, ref))
1815 return true;
1816 if (flag_errno_math)
1817 break;
1818 return false;
1819 }
1820 case BUILT_IN_FREXP:
1821 case BUILT_IN_FREXPF:
1822 case BUILT_IN_FREXPL:
047fdd47 1823 case BUILT_IN_MODF:
1824 case BUILT_IN_MODFF:
1825 case BUILT_IN_MODFL:
1826 {
1827 tree out = gimple_call_arg (call, 1);
1828 return ptr_deref_may_alias_ref_p_1 (out, ref);
1829 }
1830 case BUILT_IN_REMQUO:
1831 case BUILT_IN_REMQUOF:
1832 case BUILT_IN_REMQUOL:
1833 {
1834 tree out = gimple_call_arg (call, 2);
09d9c774 1835 if (ptr_deref_may_alias_ref_p_1 (out, ref))
1836 return true;
1837 if (flag_errno_math)
1838 break;
1839 return false;
047fdd47 1840 }
1841 case BUILT_IN_SINCOS:
1842 case BUILT_IN_SINCOSF:
1843 case BUILT_IN_SINCOSL:
1844 {
1845 tree sin = gimple_call_arg (call, 1);
1846 tree cos = gimple_call_arg (call, 2);
1847 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
1848 || ptr_deref_may_alias_ref_p_1 (cos, ref));
1849 }
a44814b9 1850 /* __sync_* builtins and some OpenMP builtins act as threading
1851 barriers. */
1852#undef DEF_SYNC_BUILTIN
1853#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1854#include "sync-builtins.def"
1855#undef DEF_SYNC_BUILTIN
1856 case BUILT_IN_GOMP_ATOMIC_START:
1857 case BUILT_IN_GOMP_ATOMIC_END:
1858 case BUILT_IN_GOMP_BARRIER:
1859 case BUILT_IN_GOMP_TASKWAIT:
1860 case BUILT_IN_GOMP_CRITICAL_START:
1861 case BUILT_IN_GOMP_CRITICAL_END:
1862 case BUILT_IN_GOMP_CRITICAL_NAME_START:
1863 case BUILT_IN_GOMP_CRITICAL_NAME_END:
1864 case BUILT_IN_GOMP_LOOP_END:
1865 case BUILT_IN_GOMP_ORDERED_START:
1866 case BUILT_IN_GOMP_ORDERED_END:
1867 case BUILT_IN_GOMP_PARALLEL_END:
1868 case BUILT_IN_GOMP_SECTIONS_END:
1869 case BUILT_IN_GOMP_SINGLE_COPY_START:
1870 case BUILT_IN_GOMP_SINGLE_COPY_END:
1871 return true;
047fdd47 1872 default:
1873 /* Fallthru to general call handling. */;
1874 }
1875
dd277d48 1876 /* Check if base is a global static variable that is not written
1877 by the function. */
047fdd47 1878 if (callee != NULL_TREE
1879 && TREE_CODE (base) == VAR_DECL
d97be713 1880 && TREE_STATIC (base))
4ee9c684 1881 {
924de091 1882 struct cgraph_node *node = cgraph_get_node (callee);
dd277d48 1883 bitmap not_written;
0eaf9bd7 1884
924de091 1885 if (node
1886 && (not_written = ipa_reference_get_not_written_global (node))
dd277d48 1887 && bitmap_bit_p (not_written, DECL_UID (base)))
1888 return false;
4ee9c684 1889 }
4ee9c684 1890
cb245216 1891 /* Check if the base variable is call-clobbered. */
dd277d48 1892 if (DECL_P (base))
cb245216 1893 return pt_solution_includes (gimple_call_clobber_set (call), base);
2622064f 1894 else if ((TREE_CODE (base) == MEM_REF
1895 || TREE_CODE (base) == TARGET_MEM_REF)
917f08fa 1896 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1897 {
1898 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1899 if (!pi)
1900 return true;
1901
cb245216 1902 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
917f08fa 1903 }
4ee9c684 1904
dd277d48 1905 return true;
1906}
4ee9c684 1907
4debfcfc 1908/* If the call in statement CALL may clobber the memory reference REF
1909 return true, otherwise return false. */
1910
1911bool
dd277d48 1912call_may_clobber_ref_p (gimple call, tree ref)
4ee9c684 1913{
3918bd18 1914 bool res;
1915 ao_ref r;
1916 ao_ref_init (&r, ref);
1917 res = call_may_clobber_ref_p_1 (call, &r);
dd277d48 1918 if (res)
1919 ++alias_stats.call_may_clobber_ref_p_may_alias;
1920 else
1921 ++alias_stats.call_may_clobber_ref_p_no_alias;
1922 return res;
4ee9c684 1923}
94e6573f 1924
dd277d48 1925
1926/* If the statement STMT may clobber the memory reference REF return true,
1927 otherwise return false. */
94e6573f 1928
1929bool
3918bd18 1930stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
94e6573f 1931{
dd277d48 1932 if (is_gimple_call (stmt))
1933 {
1934 tree lhs = gimple_call_lhs (stmt);
1935 if (lhs
66b86a74 1936 && TREE_CODE (lhs) != SSA_NAME)
3918bd18 1937 {
1938 ao_ref r;
1939 ao_ref_init (&r, lhs);
1940 if (refs_may_alias_p_1 (ref, &r, true))
1941 return true;
1942 }
94e6573f 1943
3918bd18 1944 return call_may_clobber_ref_p_1 (stmt, ref);
dd277d48 1945 }
8e7c2b9c 1946 else if (gimple_assign_single_p (stmt))
3918bd18 1947 {
8e7c2b9c 1948 tree lhs = gimple_assign_lhs (stmt);
66b86a74 1949 if (TREE_CODE (lhs) != SSA_NAME)
8e7c2b9c 1950 {
1951 ao_ref r;
66b86a74 1952 ao_ref_init (&r, lhs);
8e7c2b9c 1953 return refs_may_alias_p_1 (ref, &r, true);
1954 }
3918bd18 1955 }
dd277d48 1956 else if (gimple_code (stmt) == GIMPLE_ASM)
94e6573f 1957 return true;
1958
6329636b 1959 return false;
94e6573f 1960}
2be14d8b 1961
3918bd18 1962bool
1963stmt_may_clobber_ref_p (gimple stmt, tree ref)
1964{
1965 ao_ref r;
1966 ao_ref_init (&r, ref);
1967 return stmt_may_clobber_ref_p_1 (stmt, &r);
1968}
1969
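
/* Illustrative usage sketch, not part of the original file: how a pass
   might query the two oracle entry points above for a reference REF
   against every statement of a basic block.  The function name and the
   overall setup are hypothetical; only ref_maybe_used_by_stmt_p and
   stmt_may_clobber_ref_p are taken from this file.  */

static bool
example_bb_touches_ref_p (basic_block bb, tree ref)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      /* A statement conflicts with REF if it may read or write it.  */
      if (ref_maybe_used_by_stmt_p (stmt, ref)
	  || stmt_may_clobber_ref_p (stmt, ref))
	return true;
    }
  return false;
}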
ab450cc6 1970/* If STMT kills the memory reference REF return true, otherwise
1971 return false. */
1972
1973static bool
1974stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref)
1975{
07ff6a65 1976 /* For a must-alias check we need to be able to constrain
1977 the access properly. */
1978 ao_ref_base (ref);
1979 if (ref->max_size == -1)
1980 return false;
1981
ab450cc6 1982 if (gimple_has_lhs (stmt)
e54d36c4 1983 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
1984 /* The assignment is not necessarily carried out if it can throw
1985 and we can catch it in the current function where we could inspect
1986 the previous value.
1987 ??? We only need to care about the RHS throwing. For aggregate
1988 assignments or similar calls and non-call exceptions the LHS
1989 might throw as well. */
1990 && !stmt_can_throw_internal (stmt))
ab450cc6 1991 {
1992 tree base, lhs = gimple_get_lhs (stmt);
137eddca 1993 HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
ab450cc6 1994 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
1995 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
1996 so base == ref->base does not always hold. */
137eddca 1997 if (base != ref->base)
ab450cc6 1998 {
137eddca 1999 /* If both base and ref->base are MEM_REFs, only compare the
 2000	   first operand, and if the second operands aren't equal constants,
2001 try to add the offsets into offset and ref_offset. */
2002 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2003 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
ab450cc6 2004 {
83d18d6d 2005 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2006 TREE_OPERAND (ref->base, 1)))
137eddca 2007 {
2008 double_int off1 = mem_ref_offset (base);
9a8b9e26 2009 off1 = off1.lshift (BITS_PER_UNIT == 8
2010 ? 3 : exact_log2 (BITS_PER_UNIT));
137eddca 2011 off1 = off1 + double_int::from_shwi (offset);
2012 double_int off2 = mem_ref_offset (ref->base);
9a8b9e26 2013 off2 = off2.lshift (BITS_PER_UNIT == 8
2014 ? 3 : exact_log2 (BITS_PER_UNIT));
137eddca 2015 off2 = off2 + double_int::from_shwi (ref_offset);
2016 if (off1.fits_shwi () && off2.fits_shwi ())
2017 {
2018 offset = off1.to_shwi ();
2019 ref_offset = off2.to_shwi ();
2020 }
2021 else
2022 size = -1;
2023 }
ab450cc6 2024 }
137eddca 2025 else
2026 size = -1;
2027 }
2028 /* For a must-alias check we need to be able to constrain
2029 the access properly. */
2030 if (size != -1 && size == max_size)
2031 {
2032 if (offset <= ref_offset
2033 && offset + size >= ref_offset + ref->max_size)
2034 return true;
ab450cc6 2035 }
2036 }
07ff6a65 2037
2038 if (is_gimple_call (stmt))
2039 {
2040 tree callee = gimple_call_fndecl (stmt);
2041 if (callee != NULL_TREE
2042 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2043 switch (DECL_FUNCTION_CODE (callee))
2044 {
2045 case BUILT_IN_MEMCPY:
2046 case BUILT_IN_MEMPCPY:
2047 case BUILT_IN_MEMMOVE:
2048 case BUILT_IN_MEMSET:
939514e9 2049 case BUILT_IN_MEMCPY_CHK:
2050 case BUILT_IN_MEMPCPY_CHK:
2051 case BUILT_IN_MEMMOVE_CHK:
2052 case BUILT_IN_MEMSET_CHK:
07ff6a65 2053 {
2054 tree dest = gimple_call_arg (stmt, 0);
2055 tree len = gimple_call_arg (stmt, 2);
2056 tree base = NULL_TREE;
2057 HOST_WIDE_INT offset = 0;
2058 if (!host_integerp (len, 0))
2059 return false;
2060 if (TREE_CODE (dest) == ADDR_EXPR)
2061 base = get_addr_base_and_unit_offset (TREE_OPERAND (dest, 0),
2062 &offset);
2063 else if (TREE_CODE (dest) == SSA_NAME)
2064 base = dest;
2065 if (base
2066 && base == ao_ref_base (ref))
2067 {
2068 HOST_WIDE_INT size = TREE_INT_CST_LOW (len);
2069 if (offset <= ref->offset / BITS_PER_UNIT
2070 && (offset + size
2071 >= ((ref->offset + ref->max_size + BITS_PER_UNIT - 1)
2072 / BITS_PER_UNIT)))
2073 return true;
2074 }
9d17b4a8 2075 break;
2076 }
2077
2078 case BUILT_IN_VA_END:
2079 {
2080 tree ptr = gimple_call_arg (stmt, 0);
2081 if (TREE_CODE (ptr) == ADDR_EXPR)
2082 {
2083 tree base = ao_ref_base (ref);
2084 if (TREE_OPERAND (ptr, 0) == base)
2085 return true;
2086 }
2087 break;
07ff6a65 2088 }
9d17b4a8 2089
07ff6a65 2090 default:;
2091 }
07ff6a65 2092 }
ab450cc6 2093 return false;
2094}
2095
2096bool
2097stmt_kills_ref_p (gimple stmt, tree ref)
2098{
2099 ao_ref r;
2100 ao_ref_init (&r, ref);
2101 return stmt_kills_ref_p_1 (stmt, &r);
2102}
2103
3918bd18 2104
dd277d48 2105/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2106 TARGET or a statement clobbering the memory reference REF in which
2107 case false is returned. The walk starts with VUSE, one argument of PHI. */
2108
2109static bool
3918bd18 2110maybe_skip_until (gimple phi, tree target, ao_ref *ref,
4fd7a6de 2111 tree vuse, unsigned int *cnt, bitmap *visited,
2112 bool abort_on_visited)
ac926412 2113{
0afe880d 2114 basic_block bb = gimple_bb (phi);
2115
dd277d48 2116 if (!*visited)
2117 *visited = BITMAP_ALLOC (NULL);
ac926412 2118
dd277d48 2119 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2120
2121 /* Walk until we hit the target. */
2122 while (vuse != target)
ac926412 2123 {
dd277d48 2124 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
2125 /* Recurse for PHI nodes. */
2126 if (gimple_code (def_stmt) == GIMPLE_PHI)
2127 {
2128 /* An already visited PHI node ends the walk successfully. */
2129 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
4fd7a6de 2130 return !abort_on_visited;
2131 vuse = get_continuation_for_phi (def_stmt, ref, cnt,
2132 visited, abort_on_visited);
dd277d48 2133 if (!vuse)
2134 return false;
2135 continue;
2136 }
297a2110 2137 else if (gimple_nop_p (def_stmt))
dd277d48 2138 return false;
297a2110 2139 else
2140 {
 2141	  /* A clobbering statement or the end of the IL ends the walk with failure. */
2142 ++*cnt;
2143 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2144 return false;
2145 }
0afe880d 2146 /* If we reach a new basic-block see if we already skipped it
2147 in a previous walk that ended successfully. */
2148 if (gimple_bb (def_stmt) != bb)
2149 {
2150 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
4fd7a6de 2151 return !abort_on_visited;
0afe880d 2152 bb = gimple_bb (def_stmt);
2153 }
dd277d48 2154 vuse = gimple_vuse (def_stmt);
ac926412 2155 }
dd277d48 2156 return true;
2157}
ac926412 2158
d7f69727 2159/* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
2160 until we hit the phi argument definition that dominates the other one.
2161 Return that, or NULL_TREE if there is no such definition. */
2162
2163static tree
2164get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
4fd7a6de 2165 ao_ref *ref, unsigned int *cnt,
2166 bitmap *visited, bool abort_on_visited)
d7f69727 2167{
2168 gimple def0 = SSA_NAME_DEF_STMT (arg0);
2169 gimple def1 = SSA_NAME_DEF_STMT (arg1);
2170 tree common_vuse;
2171
2172 if (arg0 == arg1)
2173 return arg0;
2174 else if (gimple_nop_p (def0)
2175 || (!gimple_nop_p (def1)
2176 && dominated_by_p (CDI_DOMINATORS,
2177 gimple_bb (def1), gimple_bb (def0))))
2178 {
4fd7a6de 2179 if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
2180 visited, abort_on_visited))
d7f69727 2181 return arg0;
2182 }
2183 else if (gimple_nop_p (def1)
2184 || dominated_by_p (CDI_DOMINATORS,
2185 gimple_bb (def0), gimple_bb (def1)))
2186 {
4fd7a6de 2187 if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
2188 visited, abort_on_visited))
d7f69727 2189 return arg1;
2190 }
2191 /* Special case of a diamond:
2192 MEM_1 = ...
2193 goto (cond) ? L1 : L2
2194 L1: store1 = ... #MEM_2 = vuse(MEM_1)
2195 goto L3
2196 L2: store2 = ... #MEM_3 = vuse(MEM_1)
2197 L3: MEM_4 = PHI<MEM_2, MEM_3>
2198 We were called with the PHI at L3, MEM_2 and MEM_3 don't
2199 dominate each other, but still we can easily skip this PHI node
2200 if we recognize that the vuse MEM operand is the same for both,
2201 and that we can skip both statements (they don't clobber us).
2202 This is still linear. Don't use maybe_skip_until, that might
2203 potentially be slow. */
2204 else if ((common_vuse = gimple_vuse (def0))
2205 && common_vuse == gimple_vuse (def1))
2206 {
297a2110 2207 *cnt += 2;
d7f69727 2208 if (!stmt_may_clobber_ref_p_1 (def0, ref)
2209 && !stmt_may_clobber_ref_p_1 (def1, ref))
2210 return common_vuse;
2211 }
2212
2213 return NULL_TREE;
2214}
2215
2216
dd277d48 2217/* Starting from a PHI node for the virtual operand of the memory reference
 2218   REF, find a continuation virtual operand that allows us to continue walking
 2219   statements dominating PHI, skipping only statements that cannot possibly
297a2110 2220 clobber REF. Increments *CNT for each alias disambiguation done.
2221 Returns NULL_TREE if no suitable virtual operand can be found. */
dd277d48 2222
7814d418 2223tree
297a2110 2224get_continuation_for_phi (gimple phi, ao_ref *ref,
4fd7a6de 2225 unsigned int *cnt, bitmap *visited,
2226 bool abort_on_visited)
dd277d48 2227{
2228 unsigned nargs = gimple_phi_num_args (phi);
2229
2230 /* Through a single-argument PHI we can simply look through. */
2231 if (nargs == 1)
2232 return PHI_ARG_DEF (phi, 0);
2233
d7f69727 2234 /* For two or more arguments try to pairwise skip non-aliasing code
 2235   until we hit the phi argument definition that dominates all the others. */
2236 else if (nargs >= 2)
ac926412 2237 {
0afe880d 2238 tree arg0, arg1;
2239 unsigned i;
2240
 2241	 /* Find a candidate for the virtual operand whose definition
2242 dominates those of all others. */
2243 arg0 = PHI_ARG_DEF (phi, 0);
2244 if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
2245 for (i = 1; i < nargs; ++i)
2246 {
2247 arg1 = PHI_ARG_DEF (phi, i);
2248 if (SSA_NAME_IS_DEFAULT_DEF (arg1))
2249 {
2250 arg0 = arg1;
2251 break;
2252 }
2253 if (dominated_by_p (CDI_DOMINATORS,
2254 gimple_bb (SSA_NAME_DEF_STMT (arg0)),
2255 gimple_bb (SSA_NAME_DEF_STMT (arg1))))
2256 arg0 = arg1;
2257 }
2258
2259 /* Then pairwise reduce against the found candidate. */
2260 for (i = 0; i < nargs; ++i)
7814d418 2261 {
d7f69727 2262 arg1 = PHI_ARG_DEF (phi, i);
297a2110 2263 arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
4fd7a6de 2264 cnt, visited, abort_on_visited);
d7f69727 2265 if (!arg0)
2266 return NULL_TREE;
7814d418 2267 }
d7f69727 2268
2269 return arg0;
ac926412 2270 }
dd277d48 2271
2272 return NULL_TREE;
ac926412 2273}
516849c7 2274
dd277d48 2275/* Based on the memory reference REF and its virtual use VUSE call
2276 WALKER for each virtual use that is equivalent to VUSE, including VUSE
 2277   itself.  That is, for each virtual use whose defining statement
2278 does not clobber REF.
e683de6d 2279
dd277d48 2280   WALKER is called with REF, the current virtual use, the walk counter and DATA.  If
2281 WALKER returns non-NULL the walk stops and its result is returned.
2282 At the end of a non-successful walk NULL is returned.
3857274c 2283
d8021dea 2284   TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
 2285   use whose definition is a statement that may clobber REF, and DATA.
2286 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2287 If TRANSLATE returns non-NULL the walk stops and its result is returned.
2288 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2289 to adjust REF and *DATA to make that valid.
2290
dd277d48 2291 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
2292
2293void *
3918bd18 2294walk_non_aliased_vuses (ao_ref *ref, tree vuse,
297a2110 2295 void *(*walker)(ao_ref *, tree, unsigned int, void *),
3918bd18 2296 void *(*translate)(ao_ref *, tree, void *), void *data)
3857274c 2297{
dd277d48 2298 bitmap visited = NULL;
2299 void *res;
297a2110 2300 unsigned int cnt = 0;
4fd7a6de 2301 bool translated = false;
dd277d48 2302
02c74b9b 2303 timevar_push (TV_ALIAS_STMT_WALK);
2304
dd277d48 2305 do
2306 {
2307 gimple def_stmt;
3857274c 2308
02c74b9b 2309 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
297a2110 2310 res = (*walker) (ref, vuse, cnt, data);
2311 /* Abort walk. */
2312 if (res == (void *)-1)
2313 {
2314 res = NULL;
2315 break;
2316 }
2317 /* Lookup succeeded. */
2318 else if (res != NULL)
dd277d48 2319 break;
3857274c 2320
dd277d48 2321 def_stmt = SSA_NAME_DEF_STMT (vuse);
2322 if (gimple_nop_p (def_stmt))
2323 break;
2324 else if (gimple_code (def_stmt) == GIMPLE_PHI)
4fd7a6de 2325 vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
2326 &visited, translated);
dd277d48 2327 else
2328 {
297a2110 2329 cnt++;
3918bd18 2330 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
d8021dea 2331 {
2332 if (!translate)
2333 break;
3918bd18 2334 res = (*translate) (ref, vuse, data);
d8021dea 2335 /* Failed lookup and translation. */
2336 if (res == (void *)-1)
2337 {
2338 res = NULL;
2339 break;
2340 }
2341 /* Lookup succeeded. */
2342 else if (res != NULL)
2343 break;
2344 /* Translation succeeded, continue walking. */
4fd7a6de 2345 translated = true;
d8021dea 2346 }
dd277d48 2347 vuse = gimple_vuse (def_stmt);
2348 }
2349 }
2350 while (vuse);
ac926412 2351
dd277d48 2352 if (visited)
2353 BITMAP_FREE (visited);
ac926412 2354
02c74b9b 2355 timevar_pop (TV_ALIAS_STMT_WALK);
2356
dd277d48 2357 return res;
3857274c 2358}
2359
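/* Illustrative sketch, not part of the original file: a minimal WALKER
   callback for walk_non_aliased_vuses.  It stops the walk at the first
   virtual use that is reached, returning it as the result.  The name is
   hypothetical; the callback signature follows the walker type above.  */

static void *
example_first_equivalent_vuse (ao_ref *ref ATTRIBUTE_UNUSED, tree vuse,
			       unsigned int cnt ATTRIBUTE_UNUSED,
			       void *data ATTRIBUTE_UNUSED)
{
  /* Returning non-NULL stops the walk with that result, (void *)-1
     aborts it, and NULL lets the walk continue past this virtual use.  */
  return vuse;
}

/* It could be invoked, for an already initialized ao_ref R and a
   starting virtual operand VUSE, roughly as
     walk_non_aliased_vuses (&r, vuse, example_first_equivalent_vuse,
			     NULL, NULL);
   passing no translation callback and no user data.  */
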
604eef2c 2360
dd277d48 2361/* Based on the memory reference REF call WALKER for each vdef whose
2362 defining statement may clobber REF, starting with VDEF. If REF
2363 is NULL_TREE, each defining statement is visited.
2364
2365 WALKER is called with REF, the current vdef and DATA. If WALKER
2366 returns true the walk is stopped, otherwise it continues.
2367
 2368   At PHI nodes walk_aliased_vdefs forks into one walk for each
 2369   PHI argument (but only one walk continues on merge points); the
 2370   walk succeeds if any of the forked walks succeeds.
2371
2372 The function returns the number of statements walked. */
604eef2c 2373
2374static unsigned int
1daead67 2375walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2376 bool (*walker)(ao_ref *, tree, void *), void *data,
dd277d48 2377 bitmap *visited, unsigned int cnt)
604eef2c 2378{
dd277d48 2379 do
2380 {
2381 gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
604eef2c 2382
dd277d48 2383 if (*visited
2384 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2385 return cnt;
2386
2387 if (gimple_nop_p (def_stmt))
2388 return cnt;
2389 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2390 {
2391 unsigned i;
2392 if (!*visited)
2393 *visited = BITMAP_ALLOC (NULL);
2394 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2395 cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
2396 walker, data, visited, 0);
2397 return cnt;
2398 }
2399
02c74b9b 2400 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
dd277d48 2401 cnt++;
2402 if ((!ref
1daead67 2403 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
dd277d48 2404 && (*walker) (ref, vdef, data))
2405 return cnt;
2406
2407 vdef = gimple_vuse (def_stmt);
2408 }
2409 while (1);
604eef2c 2410}
2411
dd277d48 2412unsigned int
1daead67 2413walk_aliased_vdefs (ao_ref *ref, tree vdef,
2414 bool (*walker)(ao_ref *, tree, void *), void *data,
dd277d48 2415 bitmap *visited)
df3f0669 2416{
dd277d48 2417 bitmap local_visited = NULL;
2418 unsigned int ret;
2419
02c74b9b 2420 timevar_push (TV_ALIAS_STMT_WALK);
2421
dd277d48 2422 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2423 visited ? visited : &local_visited, 0);
2424 if (local_visited)
2425 BITMAP_FREE (local_visited);
2426
02c74b9b 2427 timevar_pop (TV_ALIAS_STMT_WALK);
2428
dd277d48 2429 return ret;
2430}
2431
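
/* Illustrative sketch, not part of the original file: a WALKER callback
   for walk_aliased_vdefs that merely counts the possibly clobbering
   virtual definitions it is handed, using DATA as the counter.  The
   name is hypothetical; the signature follows the walker type above.  */

static bool
example_count_clobbering_vdefs (ao_ref *ref ATTRIBUTE_UNUSED,
				tree vdef ATTRIBUTE_UNUSED, void *data)
{
  unsigned int *counter = (unsigned int *) data;
  ++*counter;
  /* Returning false continues the walk; returning true would stop it.  */
  return false;
}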