]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-ssa-alias.c
2014-12-09 Richard Biener <rguenther@suse.de>
[thirdparty/gcc.git] / gcc / tree-ssa-alias.c
CommitLineData
4ee9c684 1/* Alias analysis for trees.
3aea1f79 2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
4ee9c684 3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
8c4c00c1 9the Free Software Foundation; either version 3, or (at your option)
4ee9c684 10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
4ee9c684 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
4ee9c684 26#include "tm_p.h"
be97d4b6 27#include "target.h"
94ea8568 28#include "predict.h"
29#include "vec.h"
a3020f2f 30#include "hashtab.h"
31#include "hash-set.h"
a3020f2f 32#include "machmode.h"
33#include "hard-reg-set.h"
34#include "input.h"
4ee9c684 35#include "function.h"
94ea8568 36#include "dominance.h"
37#include "basic-block.h"
38#include "timevar.h" /* for TV_ALIAS_STMT_WALK */
39#include "langhooks.h"
40#include "flags.h"
ce084dfc 41#include "tree-pretty-print.h"
b9ed1410 42#include "dumpfile.h"
bc61cadb 43#include "tree-ssa-alias.h"
44#include "internal-fn.h"
45#include "tree-eh.h"
46#include "gimple-expr.h"
47#include "is-a.h"
75a70cf9 48#include "gimple.h"
073c1fd5 49#include "gimple-ssa.h"
9ed99284 50#include "stringpool.h"
073c1fd5 51#include "tree-ssanames.h"
9ed99284 52#include "expr.h"
073c1fd5 53#include "tree-dfa.h"
4ee9c684 54#include "tree-inline.h"
4ee9c684 55#include "params.h"
6cc9bd27 56#include "alloc-pool.h"
dd277d48 57#include "tree-ssa-alias.h"
1140c305 58#include "bitmap.h"
59#include "hash-map.h"
60#include "plugin-api.h"
61#include "ipa-ref.h"
62#include "cgraph.h"
dd9784d4 63#include "ipa-reference.h"
a55dc2cd 64
dd277d48 65/* Broad overview of how alias analysis on gimple works:
4fb5e5ca 66
dd277d48 67 Statements clobbering or using memory are linked through the
68 virtual operand factored use-def chain. The virtual operand
69 is unique per function, its symbol is accessible via gimple_vop (cfun).
70 Virtual operands are used for efficiently walking memory statements
71 in the gimple IL and are useful for things like value-numbering as
72 a generation count for memory references.
339d7079 73
dd277d48 74 SSA_NAME pointers may have associated points-to information
75 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
76 points-to information is (re-)computed by the TODO_rebuild_alias
77 pass manager todo. Points-to information is also used for more
78 precise tracking of call-clobbered and call-used variables and
79 related disambiguations.
339d7079 80
dd277d48 81 This file contains functions for disambiguating memory references,
82 the so called alias-oracle and tools for walking of the gimple IL.
339d7079 83
dd277d48 84 The main alias-oracle entry-points are
339d7079 85
dd277d48 86 bool stmt_may_clobber_ref_p (gimple, tree)
339d7079 87
dd277d48 88 This function queries if a statement may invalidate (parts of)
89 the memory designated by the reference tree argument.
339d7079 90
dd277d48 91 bool ref_maybe_used_by_stmt_p (gimple, tree)
339d7079 92
dd277d48 93 This function queries if a statement may need (parts of) the
94 memory designated by the reference tree argument.
339d7079 95
dd277d48 96 There are variants of these functions that only handle the call
97 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
98 Note that these do not disambiguate against a possible call lhs.
339d7079 99
dd277d48 100 bool refs_may_alias_p (tree, tree)
339d7079 101
dd277d48 102 This function tries to disambiguate two reference trees.
05931b00 103
dd277d48 104 bool ptr_deref_may_alias_global_p (tree)
339d7079 105
dd277d48 106 This function queries if dereferencing a pointer variable may
107 alias global memory.
339d7079 108
dd277d48 109 More low-level disambiguators are available and documented in
110 this file. Low-level disambiguators dealing with points-to
111 information are in tree-ssa-structalias.c. */
339d7079 112
339d7079 113
dd277d48 114/* Query statistics for the different low-level disambiguators.
115 A high-level query may trigger multiple of them. */
339d7079 116
dd277d48 117static struct {
118 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
119 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
120 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
121 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
122 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
123 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
124} alias_stats;
339d7079 125
dd277d48 126void
127dump_alias_stats (FILE *s)
128{
129 fprintf (s, "\nAlias oracle query stats:\n");
130 fprintf (s, " refs_may_alias_p: "
131 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
132 HOST_WIDE_INT_PRINT_DEC" queries\n",
133 alias_stats.refs_may_alias_p_no_alias,
134 alias_stats.refs_may_alias_p_no_alias
135 + alias_stats.refs_may_alias_p_may_alias);
136 fprintf (s, " ref_maybe_used_by_call_p: "
137 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
138 HOST_WIDE_INT_PRINT_DEC" queries\n",
139 alias_stats.ref_maybe_used_by_call_p_no_alias,
140 alias_stats.refs_may_alias_p_no_alias
141 + alias_stats.ref_maybe_used_by_call_p_may_alias);
142 fprintf (s, " call_may_clobber_ref_p: "
143 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
144 HOST_WIDE_INT_PRINT_DEC" queries\n",
145 alias_stats.call_may_clobber_ref_p_no_alias,
146 alias_stats.call_may_clobber_ref_p_no_alias
147 + alias_stats.call_may_clobber_ref_p_may_alias);
148}
149
150
151/* Return true, if dereferencing PTR may alias with a global variable. */
4ee9c684 152
dd277d48 153bool
154ptr_deref_may_alias_global_p (tree ptr)
4ee9c684 155{
dd277d48 156 struct ptr_info_def *pi;
4fb5e5ca 157
dd277d48 158 /* If we end up with a pointer constant here that may point
159 to global memory. */
160 if (TREE_CODE (ptr) != SSA_NAME)
161 return true;
4ee9c684 162
dd277d48 163 pi = SSA_NAME_PTR_INFO (ptr);
4ee9c684 164
dd277d48 165 /* If we do not have points-to information for this variable,
166 we have to punt. */
167 if (!pi)
168 return true;
4ee9c684 169
2a3ebafa 170 /* ??? This does not use TBAA to prune globals ptr may not access. */
dd277d48 171 return pt_solution_includes_global (&pi->pt);
4ee9c684 172}
173
2a3ebafa 174/* Return true if dereferencing PTR may alias DECL.
175 The caller is responsible for applying TBAA to see if PTR
176 may access DECL at all. */
4ee9c684 177
dd277d48 178static bool
179ptr_deref_may_alias_decl_p (tree ptr, tree decl)
4ee9c684 180{
dd277d48 181 struct ptr_info_def *pi;
f7d118a9 182
2760c0e3 183 /* Conversions are irrelevant for points-to information and
184 data-dependence analysis can feed us those. */
185 STRIP_NOPS (ptr);
186
187 /* Anything we do not explicilty handle aliases. */
188 if ((TREE_CODE (ptr) != SSA_NAME
189 && TREE_CODE (ptr) != ADDR_EXPR
190 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
191 || !POINTER_TYPE_P (TREE_TYPE (ptr))
192 || (TREE_CODE (decl) != VAR_DECL
193 && TREE_CODE (decl) != PARM_DECL
194 && TREE_CODE (decl) != RESULT_DECL))
9c01641d 195 return true;
2fd0b5dc 196
2760c0e3 197 /* Disregard pointer offsetting. */
198 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
199 {
200 do
201 {
202 ptr = TREE_OPERAND (ptr, 0);
203 }
204 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
205 return ptr_deref_may_alias_decl_p (ptr, decl);
206 }
207
047fdd47 208 /* ADDR_EXPR pointers either just offset another pointer or directly
209 specify the pointed-to set. */
210 if (TREE_CODE (ptr) == ADDR_EXPR)
211 {
212 tree base = get_base_address (TREE_OPERAND (ptr, 0));
213 if (base
2622064f 214 && (TREE_CODE (base) == MEM_REF
215 || TREE_CODE (base) == TARGET_MEM_REF))
047fdd47 216 ptr = TREE_OPERAND (base, 0);
217 else if (base
2622064f 218 && DECL_P (base))
7f2d9047 219 return base == decl;
047fdd47 220 else if (base
221 && CONSTANT_CLASS_P (base))
222 return false;
223 else
224 return true;
225 }
226
2760c0e3 227 /* Non-aliased variables can not be pointed to. */
228 if (!may_be_aliased (decl))
229 return false;
047fdd47 230
dd277d48 231 /* If we do not have useful points-to information for this pointer
232 we cannot disambiguate anything else. */
233 pi = SSA_NAME_PTR_INFO (ptr);
234 if (!pi)
2fd0b5dc 235 return true;
236
dd277d48 237 return pt_solution_includes (&pi->pt, decl);
2fd0b5dc 238}
4ee9c684 239
2a3ebafa 240/* Return true if dereferenced PTR1 and PTR2 may alias.
241 The caller is responsible for applying TBAA to see if accesses
242 through PTR1 and PTR2 may conflict at all. */
4ee9c684 243
2760c0e3 244bool
dd277d48 245ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
4ee9c684 246{
dd277d48 247 struct ptr_info_def *pi1, *pi2;
4ee9c684 248
2760c0e3 249 /* Conversions are irrelevant for points-to information and
250 data-dependence analysis can feed us those. */
251 STRIP_NOPS (ptr1);
252 STRIP_NOPS (ptr2);
253
2760c0e3 254 /* Disregard pointer offsetting. */
255 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
256 {
257 do
258 {
259 ptr1 = TREE_OPERAND (ptr1, 0);
260 }
261 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
262 return ptr_derefs_may_alias_p (ptr1, ptr2);
263 }
264 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
265 {
266 do
267 {
268 ptr2 = TREE_OPERAND (ptr2, 0);
269 }
270 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
271 return ptr_derefs_may_alias_p (ptr1, ptr2);
272 }
f85fb819 273
047fdd47 274 /* ADDR_EXPR pointers either just offset another pointer or directly
275 specify the pointed-to set. */
276 if (TREE_CODE (ptr1) == ADDR_EXPR)
277 {
278 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
279 if (base
2622064f 280 && (TREE_CODE (base) == MEM_REF
281 || TREE_CODE (base) == TARGET_MEM_REF))
80b4d93e 282 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
047fdd47 283 else if (base
2622064f 284 && DECL_P (base))
047fdd47 285 return ptr_deref_may_alias_decl_p (ptr2, base);
286 else
287 return true;
288 }
289 if (TREE_CODE (ptr2) == ADDR_EXPR)
290 {
291 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
292 if (base
2622064f 293 && (TREE_CODE (base) == MEM_REF
294 || TREE_CODE (base) == TARGET_MEM_REF))
80b4d93e 295 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
047fdd47 296 else if (base
2622064f 297 && DECL_P (base))
047fdd47 298 return ptr_deref_may_alias_decl_p (ptr1, base);
299 else
300 return true;
301 }
302
80b4d93e 303 /* From here we require SSA name pointers. Anything else aliases. */
304 if (TREE_CODE (ptr1) != SSA_NAME
305 || TREE_CODE (ptr2) != SSA_NAME
306 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
307 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
308 return true;
309
dd277d48 310 /* We may end up with two empty points-to solutions for two same pointers.
311 In this case we still want to say both pointers alias, so shortcut
312 that here. */
313 if (ptr1 == ptr2)
314 return true;
81d08033 315
dd277d48 316 /* If we do not have useful points-to information for either pointer
317 we cannot disambiguate anything else. */
318 pi1 = SSA_NAME_PTR_INFO (ptr1);
319 pi2 = SSA_NAME_PTR_INFO (ptr2);
320 if (!pi1 || !pi2)
321 return true;
81d08033 322
2a3ebafa 323 /* ??? This does not use TBAA to prune decls from the intersection
324 that not both pointers may access. */
dd277d48 325 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
81d08033 326}
327
047fdd47 328/* Return true if dereferencing PTR may alias *REF.
329 The caller is responsible for applying TBAA to see if PTR
330 may access *REF at all. */
331
332static bool
333ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
334{
335 tree base = ao_ref_base (ref);
336
2622064f 337 if (TREE_CODE (base) == MEM_REF
338 || TREE_CODE (base) == TARGET_MEM_REF)
047fdd47 339 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
2622064f 340 else if (DECL_P (base))
047fdd47 341 return ptr_deref_may_alias_decl_p (ptr, base);
342
343 return true;
344}
345
258bd648 346/* Returns whether reference REF to BASE may refer to global memory. */
8763c223 347
258bd648 348static bool
349ref_may_alias_global_p_1 (tree base)
8763c223 350{
8763c223 351 if (DECL_P (base))
352 return is_global_var (base);
353 else if (TREE_CODE (base) == MEM_REF
354 || TREE_CODE (base) == TARGET_MEM_REF)
355 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
356 return true;
357}
358
258bd648 359bool
360ref_may_alias_global_p (ao_ref *ref)
361{
362 tree base = ao_ref_base (ref);
363 return ref_may_alias_global_p_1 (base);
364}
365
366bool
367ref_may_alias_global_p (tree ref)
368{
369 tree base = get_base_address (ref);
370 return ref_may_alias_global_p_1 (base);
371}
372
8763c223 373/* Return true whether STMT may clobber global memory. */
374
375bool
376stmt_may_clobber_global_p (gimple stmt)
377{
378 tree lhs;
379
380 if (!gimple_vdef (stmt))
381 return false;
382
383 /* ??? We can ask the oracle whether an artificial pointer
384 dereference with a pointer with points-to information covering
385 all global memory (what about non-address taken memory?) maybe
386 clobbered by this call. As there is at the moment no convenient
387 way of doing that without generating garbage do some manual
388 checking instead.
389 ??? We could make a NULL ao_ref argument to the various
390 predicates special, meaning any global memory. */
391
392 switch (gimple_code (stmt))
393 {
394 case GIMPLE_ASSIGN:
395 lhs = gimple_assign_lhs (stmt);
396 return (TREE_CODE (lhs) != SSA_NAME
397 && ref_may_alias_global_p (lhs));
398 case GIMPLE_CALL:
399 return true;
400 default:
401 return true;
402 }
403}
404
4ee9c684 405
406/* Dump alias information on FILE. */
407
408void
409dump_alias_info (FILE *file)
410{
2aac21c5 411 unsigned i;
4ee9c684 412 const char *funcname
5135beeb 413 = lang_hooks.decl_printable_name (current_function_decl, 2);
a55dc2cd 414 tree var;
4ee9c684 415
dd277d48 416 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
81d08033 417
418 fprintf (file, "Aliased symbols\n\n");
48e1416a 419
2aac21c5 420 FOR_EACH_LOCAL_DECL (cfun, i, var)
81d08033 421 {
81d08033 422 if (may_be_aliased (var))
423 dump_variable (file, var);
424 }
425
7f81b5ee 426 fprintf (file, "\nCall clobber information\n");
427
428 fprintf (file, "\nESCAPED");
429 dump_points_to_solution (file, &cfun->gimple_df->escaped);
7f81b5ee 430
431 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
81d08033 432
81d08033 433 for (i = 1; i < num_ssa_names; i++)
434 {
435 tree ptr = ssa_name (i);
a48b1470 436 struct ptr_info_def *pi;
48e1416a 437
dd277d48 438 if (ptr == NULL_TREE
0cf78115 439 || !POINTER_TYPE_P (TREE_TYPE (ptr))
dd277d48 440 || SSA_NAME_IN_FREE_LIST (ptr))
a48b1470 441 continue;
442
443 pi = SSA_NAME_PTR_INFO (ptr);
dd277d48 444 if (pi)
81d08033 445 dump_points_to_info_for (file, ptr);
446 }
4ee9c684 447
4ee9c684 448 fprintf (file, "\n");
449}
450
451
452/* Dump alias information on stderr. */
453
4b987fac 454DEBUG_FUNCTION void
4ee9c684 455debug_alias_info (void)
456{
457 dump_alias_info (stderr);
458}
459
460
7f81b5ee 461/* Dump the points-to set *PT into FILE. */
4ee9c684 462
d793732c 463void
7f81b5ee 464dump_points_to_solution (FILE *file, struct pt_solution *pt)
4ee9c684 465{
7f81b5ee 466 if (pt->anything)
467 fprintf (file, ", points-to anything");
4ee9c684 468
7f81b5ee 469 if (pt->nonlocal)
470 fprintf (file, ", points-to non-local");
4ee9c684 471
7f81b5ee 472 if (pt->escaped)
473 fprintf (file, ", points-to escaped");
474
1a981e1a 475 if (pt->ipa_escaped)
476 fprintf (file, ", points-to unit escaped");
477
7f81b5ee 478 if (pt->null)
479 fprintf (file, ", points-to NULL");
480
481 if (pt->vars)
4ee9c684 482 {
7f81b5ee 483 fprintf (file, ", points-to vars: ");
484 dump_decl_set (file, pt->vars);
420582bc 485 if (pt->vars_contains_nonlocal
486 && pt->vars_contains_escaped_heap)
487 fprintf (file, " (nonlocal, escaped heap)");
488 else if (pt->vars_contains_nonlocal
489 && pt->vars_contains_escaped)
490 fprintf (file, " (nonlocal, escaped)");
491 else if (pt->vars_contains_nonlocal)
492 fprintf (file, " (nonlocal)");
493 else if (pt->vars_contains_escaped_heap)
494 fprintf (file, " (escaped heap)");
495 else if (pt->vars_contains_escaped)
496 fprintf (file, " (escaped)");
7f81b5ee 497 }
498}
4ee9c684 499
c7d89805 500
501/* Unified dump function for pt_solution. */
502
503DEBUG_FUNCTION void
504debug (pt_solution &ref)
505{
506 dump_points_to_solution (stderr, &ref);
507}
508
509DEBUG_FUNCTION void
510debug (pt_solution *ptr)
511{
512 if (ptr)
513 debug (*ptr);
514 else
515 fprintf (stderr, "<nil>\n");
516}
517
518
7f81b5ee 519/* Dump points-to information for SSA_NAME PTR into FILE. */
4ee9c684 520
7f81b5ee 521void
522dump_points_to_info_for (FILE *file, tree ptr)
523{
524 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
4ee9c684 525
7f81b5ee 526 print_generic_expr (file, ptr, dump_flags);
1e1a4c8c 527
7f81b5ee 528 if (pi)
529 dump_points_to_solution (file, &pi->pt);
530 else
531 fprintf (file, ", points-to anything");
4ee9c684 532
533 fprintf (file, "\n");
534}
535
536
d793732c 537/* Dump points-to information for VAR into stderr. */
538
4b987fac 539DEBUG_FUNCTION void
d793732c 540debug_points_to_info_for (tree var)
541{
542 dump_points_to_info_for (stderr, var);
543}
544
3918bd18 545
546/* Initializes the alias-oracle reference representation *R from REF. */
547
548void
549ao_ref_init (ao_ref *r, tree ref)
550{
551 r->ref = ref;
552 r->base = NULL_TREE;
553 r->offset = 0;
554 r->size = -1;
555 r->max_size = -1;
556 r->ref_alias_set = -1;
557 r->base_alias_set = -1;
3787db52 558 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
3918bd18 559}
560
561/* Returns the base object of the memory reference *REF. */
562
563tree
564ao_ref_base (ao_ref *ref)
565{
566 if (ref->base)
567 return ref->base;
568 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
569 &ref->max_size);
570 return ref->base;
571}
572
573/* Returns the base object alias set of the memory reference *REF. */
574
8e92a5ee 575alias_set_type
3918bd18 576ao_ref_base_alias_set (ao_ref *ref)
577{
182cf5a9 578 tree base_ref;
3918bd18 579 if (ref->base_alias_set != -1)
580 return ref->base_alias_set;
182cf5a9 581 if (!ref->ref)
582 return 0;
583 base_ref = ref->ref;
584 while (handled_component_p (base_ref))
585 base_ref = TREE_OPERAND (base_ref, 0);
586 ref->base_alias_set = get_alias_set (base_ref);
3918bd18 587 return ref->base_alias_set;
588}
589
590/* Returns the reference alias set of the memory reference *REF. */
591
592alias_set_type
593ao_ref_alias_set (ao_ref *ref)
594{
595 if (ref->ref_alias_set != -1)
596 return ref->ref_alias_set;
597 ref->ref_alias_set = get_alias_set (ref->ref);
598 return ref->ref_alias_set;
599}
600
134899ac 601/* Init an alias-oracle reference representation from a gimple pointer
234f1f79 602 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
134899ac 603 size is assumed to be unknown. The access is assumed to be only
604 to or after of the pointer target, not before it. */
605
606void
607ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
608{
dd49bf5f 609 HOST_WIDE_INT t, size_hwi, extra_offset = 0;
134899ac 610 ref->ref = NULL_TREE;
4e9e79c0 611 if (TREE_CODE (ptr) == SSA_NAME)
612 {
613 gimple stmt = SSA_NAME_DEF_STMT (ptr);
614 if (gimple_assign_single_p (stmt)
615 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
616 ptr = gimple_assign_rhs1 (stmt);
e3ac92d2 617 else if (is_gimple_assign (stmt)
618 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
234f1f79 619 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
e3ac92d2 620 {
621 ptr = gimple_assign_rhs1 (stmt);
234f1f79 622 extra_offset = BITS_PER_UNIT
623 * int_cst_value (gimple_assign_rhs2 (stmt));
e3ac92d2 624 }
4e9e79c0 625 }
626
134899ac 627 if (TREE_CODE (ptr) == ADDR_EXPR)
d1603825 628 {
629 ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
630 if (ref->base)
631 ref->offset = BITS_PER_UNIT * t;
632 else
633 {
634 size = NULL_TREE;
635 ref->offset = 0;
636 ref->base = get_base_address (TREE_OPERAND (ptr, 0));
637 }
638 }
134899ac 639 else
640 {
182cf5a9 641 ref->base = build2 (MEM_REF, char_type_node,
2512209b 642 ptr, null_pointer_node);
134899ac 643 ref->offset = 0;
644 }
e3ac92d2 645 ref->offset += extra_offset;
134899ac 646 if (size
e913b5cd 647 && tree_fits_shwi_p (size)
dd49bf5f 648 && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
649 ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
134899ac 650 else
651 ref->max_size = ref->size = -1;
652 ref->ref_alias_set = 0;
653 ref->base_alias_set = 0;
3787db52 654 ref->volatile_p = false;
134899ac 655}
656
dd277d48 657/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
658 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
659 decide. */
d793732c 660
dd277d48 661static inline int
662same_type_for_tbaa (tree type1, tree type2)
663{
664 type1 = TYPE_MAIN_VARIANT (type1);
665 type2 = TYPE_MAIN_VARIANT (type2);
4ee9c684 666
dd277d48 667 /* If we would have to do structural comparison bail out. */
668 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
669 || TYPE_STRUCTURAL_EQUALITY_P (type2))
670 return -1;
671
672 /* Compare the canonical types. */
673 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
674 return 1;
675
4b4d1513 676 /* ??? Array types are not properly unified in all cases as we have
dd277d48 677 spurious changes in the index types for example. Removing this
678 causes all sorts of problems with the Fortran frontend. */
679 if (TREE_CODE (type1) == ARRAY_TYPE
680 && TREE_CODE (type2) == ARRAY_TYPE)
681 return -1;
682
102bfc9c 683 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
684 object of one of its constrained subtypes, e.g. when a function with an
685 unconstrained parameter passed by reference is called on an object and
686 inlined. But, even in the case of a fixed size, type and subtypes are
687 not equivalent enough as to share the same TYPE_CANONICAL, since this
688 would mean that conversions between them are useless, whereas they are
689 not (e.g. type and subtypes can have different modes). So, in the end,
690 they are only guaranteed to have the same alias set. */
691 if (get_alias_set (type1) == get_alias_set (type2))
4b4d1513 692 return -1;
693
dd277d48 694 /* The types are known to be not equal. */
695 return 0;
696}
697
698/* Determine if the two component references REF1 and REF2 which are
699 based on access types TYPE1 and TYPE2 and of which at least one is based
ac30e3b2 700 on an indirect reference may alias. REF2 is the only one that can
701 be a decl in which case REF2_IS_DECL is true.
702 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
703 are the respective alias sets. */
dd277d48 704
705static bool
0d2f7431 706aliasing_component_refs_p (tree ref1,
ac30e3b2 707 alias_set_type ref1_alias_set,
708 alias_set_type base1_alias_set,
a42ce2cc 709 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
0d2f7431 710 tree ref2,
ac30e3b2 711 alias_set_type ref2_alias_set,
712 alias_set_type base2_alias_set,
713 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
714 bool ref2_is_decl)
dd277d48 715{
716 /* If one reference is a component references through pointers try to find a
717 common base and apply offset based disambiguation. This handles
718 for example
719 struct A { int i; int j; } *q;
720 struct B { struct A a; int k; } *p;
721 disambiguating q->i and p->a.j. */
0d2f7431 722 tree base1, base2;
723 tree type1, type2;
dd277d48 724 tree *refp;
725 int same_p;
726
0d2f7431 727 /* Choose bases and base types to search for. */
728 base1 = ref1;
729 while (handled_component_p (base1))
730 base1 = TREE_OPERAND (base1, 0);
731 type1 = TREE_TYPE (base1);
732 base2 = ref2;
733 while (handled_component_p (base2))
734 base2 = TREE_OPERAND (base2, 0);
735 type2 = TREE_TYPE (base2);
736
dd277d48 737 /* Now search for the type1 in the access path of ref2. This
738 would be a common base for doing offset based disambiguation on. */
bc3c318e 739 refp = &ref2;
dd277d48 740 while (handled_component_p (*refp)
741 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
742 refp = &TREE_OPERAND (*refp, 0);
743 same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
744 /* If we couldn't compare types we have to bail out. */
745 if (same_p == -1)
746 return true;
747 else if (same_p == 1)
748 {
749 HOST_WIDE_INT offadj, sztmp, msztmp;
750 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
751 offset2 -= offadj;
0d2f7431 752 get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
753 offset1 -= offadj;
dd277d48 754 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
755 }
756 /* If we didn't find a common base, try the other way around. */
bc3c318e 757 refp = &ref1;
dd277d48 758 while (handled_component_p (*refp)
759 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
760 refp = &TREE_OPERAND (*refp, 0);
761 same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
762 /* If we couldn't compare types we have to bail out. */
763 if (same_p == -1)
764 return true;
765 else if (same_p == 1)
766 {
767 HOST_WIDE_INT offadj, sztmp, msztmp;
768 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
769 offset1 -= offadj;
0d2f7431 770 get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
771 offset2 -= offadj;
dd277d48 772 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
773 }
ac30e3b2 774
4b4d1513 775 /* If we have two type access paths B1.path1 and B2.path2 they may
ac30e3b2 776 only alias if either B1 is in B2.path2 or B2 is in B1.path1.
777 But we can still have a path that goes B1.path1...B2.path2 with
778 a part that we do not see. So we can only disambiguate now
779 if there is no B2 in the tail of path1 and no B1 on the
780 tail of path2. */
781 if (base1_alias_set == ref2_alias_set
782 || alias_set_subset_of (base1_alias_set, ref2_alias_set))
783 return true;
784 /* If this is ptr vs. decl then we know there is no ptr ... decl path. */
785 if (!ref2_is_decl)
786 return (base2_alias_set == ref1_alias_set
787 || alias_set_subset_of (base2_alias_set, ref1_alias_set));
4b4d1513 788 return false;
dd277d48 789}
790
740a2cb6 791/* Return true if we can determine that component references REF1 and REF2,
792 that are within a common DECL, cannot overlap. */
793
794static bool
795nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
796{
4997014d 797 auto_vec<tree, 16> component_refs1;
798 auto_vec<tree, 16> component_refs2;
740a2cb6 799
800 /* Create the stack of handled components for REF1. */
801 while (handled_component_p (ref1))
802 {
803 component_refs1.safe_push (ref1);
804 ref1 = TREE_OPERAND (ref1, 0);
805 }
806 if (TREE_CODE (ref1) == MEM_REF)
807 {
808 if (!integer_zerop (TREE_OPERAND (ref1, 1)))
809 goto may_overlap;
810 ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
811 }
812
813 /* Create the stack of handled components for REF2. */
814 while (handled_component_p (ref2))
815 {
816 component_refs2.safe_push (ref2);
817 ref2 = TREE_OPERAND (ref2, 0);
818 }
819 if (TREE_CODE (ref2) == MEM_REF)
820 {
821 if (!integer_zerop (TREE_OPERAND (ref2, 1)))
822 goto may_overlap;
823 ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
824 }
825
826 /* We must have the same base DECL. */
827 gcc_assert (ref1 == ref2);
828
829 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
830 rank. This is sufficient because we start from the same DECL and you
831 cannot reference several fields at a time with COMPONENT_REFs (unlike
832 with ARRAY_RANGE_REFs for arrays) so you always need the same number
833 of them to access a sub-component, unless you're in a union, in which
834 case the return value will precisely be false. */
835 while (true)
836 {
837 do
838 {
839 if (component_refs1.is_empty ())
840 goto may_overlap;
841 ref1 = component_refs1.pop ();
842 }
843 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
844
845 do
846 {
847 if (component_refs2.is_empty ())
848 goto may_overlap;
849 ref2 = component_refs2.pop ();
850 }
851 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
852
853 /* Beware of BIT_FIELD_REF. */
854 if (TREE_CODE (ref1) != COMPONENT_REF
855 || TREE_CODE (ref2) != COMPONENT_REF)
856 goto may_overlap;
857
858 tree field1 = TREE_OPERAND (ref1, 1);
859 tree field2 = TREE_OPERAND (ref2, 1);
860
861 /* ??? We cannot simply use the type of operand #0 of the refs here
862 as the Fortran compiler smuggles type punning into COMPONENT_REFs
863 for common blocks instead of using unions like everyone else. */
608927af 864 tree type1 = DECL_CONTEXT (field1);
865 tree type2 = DECL_CONTEXT (field2);
740a2cb6 866
867 /* We cannot disambiguate fields in a union or qualified union. */
868 if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
869 goto may_overlap;
870
11ad02c6 871 /* Different fields of the same record type cannot overlap.
872 ??? Bitfields can overlap at RTL level so punt on them. */
740a2cb6 873 if (field1 != field2)
874 {
875 component_refs1.release ();
876 component_refs2.release ();
11ad02c6 877 return !(DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2));
740a2cb6 878 }
879 }
880
881may_overlap:
882 component_refs1.release ();
883 component_refs2.release ();
884 return false;
885}
886
f270d2a0 887/* qsort compare function to sort FIELD_DECLs after their
888 DECL_FIELD_CONTEXT TYPE_UID. */
889
890static inline int
891ncr_compar (const void *field1_, const void *field2_)
892{
893 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
894 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
608927af 895 unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
896 unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
f270d2a0 897 if (uid1 < uid2)
898 return -1;
899 else if (uid1 > uid2)
900 return 1;
901 return 0;
902}
903
904/* Return true if we can determine that the fields referenced cannot
905 overlap for any pair of objects. */
906
907static bool
908nonoverlapping_component_refs_p (const_tree x, const_tree y)
909{
910 if (!flag_strict_aliasing
911 || !x || !y
912 || TREE_CODE (x) != COMPONENT_REF
913 || TREE_CODE (y) != COMPONENT_REF)
914 return false;
915
916 auto_vec<const_tree, 16> fieldsx;
917 while (TREE_CODE (x) == COMPONENT_REF)
918 {
919 tree field = TREE_OPERAND (x, 1);
608927af 920 tree type = DECL_FIELD_CONTEXT (field);
f270d2a0 921 if (TREE_CODE (type) == RECORD_TYPE)
922 fieldsx.safe_push (field);
923 x = TREE_OPERAND (x, 0);
924 }
925 if (fieldsx.length () == 0)
926 return false;
927 auto_vec<const_tree, 16> fieldsy;
928 while (TREE_CODE (y) == COMPONENT_REF)
929 {
930 tree field = TREE_OPERAND (y, 1);
608927af 931 tree type = DECL_FIELD_CONTEXT (field);
f270d2a0 932 if (TREE_CODE (type) == RECORD_TYPE)
933 fieldsy.safe_push (TREE_OPERAND (y, 1));
934 y = TREE_OPERAND (y, 0);
935 }
936 if (fieldsy.length () == 0)
937 return false;
938
939 /* Most common case first. */
940 if (fieldsx.length () == 1
941 && fieldsy.length () == 1)
608927af 942 return ((DECL_FIELD_CONTEXT (fieldsx[0])
943 == DECL_FIELD_CONTEXT (fieldsy[0]))
f270d2a0 944 && fieldsx[0] != fieldsy[0]
945 && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));
946
947 if (fieldsx.length () == 2)
948 {
949 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
950 {
951 const_tree tem = fieldsx[0];
952 fieldsx[0] = fieldsx[1];
953 fieldsx[1] = tem;
954 }
955 }
956 else
957 fieldsx.qsort (ncr_compar);
958
959 if (fieldsy.length () == 2)
960 {
961 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
962 {
963 const_tree tem = fieldsy[0];
964 fieldsy[0] = fieldsy[1];
965 fieldsy[1] = tem;
966 }
967 }
968 else
969 fieldsy.qsort (ncr_compar);
970
971 unsigned i = 0, j = 0;
972 do
973 {
974 const_tree fieldx = fieldsx[i];
975 const_tree fieldy = fieldsy[j];
608927af 976 tree typex = DECL_FIELD_CONTEXT (fieldx);
977 tree typey = DECL_FIELD_CONTEXT (fieldy);
f270d2a0 978 if (typex == typey)
979 {
980 /* We're left with accessing different fields of a structure,
981 no possible overlap, unless they are both bitfields. */
982 if (fieldx != fieldy)
983 return !(DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy));
984 }
985 if (TYPE_UID (typex) < TYPE_UID (typey))
986 {
987 i++;
988 if (i == fieldsx.length ())
989 break;
990 }
991 else
992 {
993 j++;
994 if (j == fieldsy.length ())
995 break;
996 }
997 }
998 while (1);
999
1000 return false;
1001}
1002
1003
dd277d48 1004/* Return true if two memory references based on the variables BASE1
077ad5ad 1005 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
740a2cb6 1006 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1007 if non-NULL are the complete memory reference trees. */
dd277d48 1008
1009static bool
740a2cb6 1010decl_refs_may_alias_p (tree ref1, tree base1,
dd277d48 1011 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
740a2cb6 1012 tree ref2, tree base2,
dd277d48 1013 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
4ee9c684 1014{
2622064f 1015 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
4ee9c684 1016
dd277d48 1017 /* If both references are based on different variables, they cannot alias. */
7f2d9047 1018 if (base1 != base2)
dd277d48 1019 return false;
4ee9c684 1020
dd277d48 1021 /* If both references are based on the same variable, they cannot alias if
1022 the accesses do not overlap. */
740a2cb6 1023 if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
1024 return false;
1025
1026 /* For components with variable position, the above test isn't sufficient,
1027 so we disambiguate component references manually. */
1028 if (ref1 && ref2
1029 && handled_component_p (ref1) && handled_component_p (ref2)
1030 && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
1031 return false;
1032
1033 return true;
dd277d48 1034}
1035
/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.
   Offsets are in bits (MEM_REF offsets get scaled by LOG2_BITS_PER_UNIT
   below before use).  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       HOST_WIDE_INT offset1,
			       HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  */
  offset_int moff = mem_ref_offset (base1);
  moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
  if (wi::neg_p (moff))
    offset2p += (-moff).to_short_addr ();
  else
    offset1p += moff.to_short_addr ();

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  /* The declared pointed-to type of the access; operand 1 of a MEM_REF
     carries the pointer type used for TBAA purposes.  */
  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_alias_set (base2);

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  /* Without the full second reference tree no further disambiguation
     is possible; conservatively report a possible alias.  */
  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      /* Undo the embedded MEM_REF offset so both offsets are relative
	 to the same base; mirror of the bias applied above.  */
      offset_int moff = mem_ref_offset (dbase2);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	doffset1 -= (-moff).to_short_addr ();
      else
	doffset2 -= moff.to_short_addr ();
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}
1184
1185/* Return true if two indirect references based on *PTR1
077ad5ad 1186 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1187 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
dd277d48 1188 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1189 in which case they are computed on-demand. REF1 and REF2
1190 if non-NULL are the complete memory reference trees. */
1191
1192static bool
182cf5a9 1193indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
dd277d48 1194 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
ac30e3b2 1195 alias_set_type ref1_alias_set,
dd277d48 1196 alias_set_type base1_alias_set,
182cf5a9 1197 tree ref2 ATTRIBUTE_UNUSED, tree base2,
dd277d48 1198 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
ac30e3b2 1199 alias_set_type ref2_alias_set,
182cf5a9 1200 alias_set_type base2_alias_set, bool tbaa_p)
dd277d48 1201{
9a14ba4f 1202 tree ptr1;
1203 tree ptr2;
182cf5a9 1204 tree ptrtype1, ptrtype2;
1205
2622064f 1206 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1207 || TREE_CODE (base1) == TARGET_MEM_REF)
1208 && (TREE_CODE (base2) == MEM_REF
1209 || TREE_CODE (base2) == TARGET_MEM_REF));
1210
28daba6f 1211 ptr1 = TREE_OPERAND (base1, 0);
1212 ptr2 = TREE_OPERAND (base2, 0);
9a14ba4f 1213
dd277d48 1214 /* If both bases are based on pointers they cannot alias if they may not
1215 point to the same memory object or if they point to the same object
1216 and the accesses do not overlap. */
b5c3c805 1217 if ((!cfun || gimple_in_ssa_p (cfun))
9a14ba4f 1218 && operand_equal_p (ptr1, ptr2, 0)
1219 && (((TREE_CODE (base1) != TARGET_MEM_REF
88be3999 1220 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
9a14ba4f 1221 && (TREE_CODE (base2) != TARGET_MEM_REF
88be3999 1222 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
9a14ba4f 1223 || (TREE_CODE (base1) == TARGET_MEM_REF
1224 && TREE_CODE (base2) == TARGET_MEM_REF
1225 && (TMR_STEP (base1) == TMR_STEP (base2)
1226 || (TMR_STEP (base1) && TMR_STEP (base2)
1227 && operand_equal_p (TMR_STEP (base1),
1228 TMR_STEP (base2), 0)))
88be3999 1229 && (TMR_INDEX (base1) == TMR_INDEX (base2)
1230 || (TMR_INDEX (base1) && TMR_INDEX (base2)
1231 && operand_equal_p (TMR_INDEX (base1),
1232 TMR_INDEX (base2), 0)))
1233 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
1234 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
1235 && operand_equal_p (TMR_INDEX2 (base1),
1236 TMR_INDEX2 (base2), 0))))))
182cf5a9 1237 {
5de9d3ed 1238 offset_int moff;
7749ee1c 1239 /* The offset embedded in MEM_REFs can be negative. Bias them
1240 so that the resulting offset adjustment is positive. */
2622064f 1241 moff = mem_ref_offset (base1);
885a2694 1242 moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
796b6678 1243 if (wi::neg_p (moff))
e913b5cd 1244 offset2 += (-moff).to_short_addr ();
2622064f 1245 else
e913b5cd 1246 offset1 += moff.to_shwi ();
2622064f 1247 moff = mem_ref_offset (base2);
885a2694 1248 moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
796b6678 1249 if (wi::neg_p (moff))
e913b5cd 1250 offset1 += (-moff).to_short_addr ();
2622064f 1251 else
e913b5cd 1252 offset2 += moff.to_short_addr ();
182cf5a9 1253 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
1254 }
dd277d48 1255 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
1256 return false;
1257
1258 /* Disambiguations that rely on strict aliasing rules follow. */
182cf5a9 1259 if (!flag_strict_aliasing || !tbaa_p)
dd277d48 1260 return true;
1261
2622064f 1262 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1263 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
182cf5a9 1264
dd277d48 1265 /* If the alias set for a pointer access is zero all bets are off. */
1266 if (base1_alias_set == -1)
182cf5a9 1267 base1_alias_set = get_deref_alias_set (ptrtype1);
dd277d48 1268 if (base1_alias_set == 0)
1269 return true;
1270 if (base2_alias_set == -1)
182cf5a9 1271 base2_alias_set = get_deref_alias_set (ptrtype2);
dd277d48 1272 if (base2_alias_set == 0)
1273 return true;
1274
1275 /* If both references are through the same type, they do not alias
1276 if the accesses do not overlap. This does extra disambiguation
1277 for mixed/pointer accesses but requires strict aliasing. */
9c455d1e 1278 if ((TREE_CODE (base1) != TARGET_MEM_REF
1279 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1280 && (TREE_CODE (base2) != TARGET_MEM_REF
1281 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
1282 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
1283 && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
182cf5a9 1284 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
1285 TREE_TYPE (ptrtype2)) == 1)
dd277d48 1286 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
1287
1288 /* Do type-based disambiguation. */
1289 if (base1_alias_set != base2_alias_set
1290 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1291 return false;
1292
f270d2a0 1293 /* If either reference is view-converted, give up now. */
1294 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
1295 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
1296 return true;
1297
1298 if (ref1 && ref2
1299 && nonoverlapping_component_refs_p (ref1, ref2))
1300 return false;
1301
dd277d48 1302 /* Do access-path based disambiguation. */
1303 if (ref1 && ref2
f270d2a0 1304 && (handled_component_p (ref1) || handled_component_p (ref2)))
0d2f7431 1305 return aliasing_component_refs_p (ref1,
ac30e3b2 1306 ref1_alias_set, base1_alias_set,
a42ce2cc 1307 offset1, max_size1,
0d2f7431 1308 ref2,
ac30e3b2 1309 ref2_alias_set, base2_alias_set,
1310 offset2, max_size2, false);
dd277d48 1311
1312 return true;
1313}
1314
1315/* Return true, if the two memory references REF1 and REF2 may alias. */
1316
3a443843 1317bool
3918bd18 1318refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
dd277d48 1319{
1320 tree base1, base2;
1321 HOST_WIDE_INT offset1 = 0, offset2 = 0;
dd277d48 1322 HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
1323 bool var1_p, var2_p, ind1_p, ind2_p;
1324
c78d5cec 1325 gcc_checking_assert ((!ref1->ref
8e7c2b9c 1326 || TREE_CODE (ref1->ref) == SSA_NAME
c78d5cec 1327 || DECL_P (ref1->ref)
2760c0e3 1328 || TREE_CODE (ref1->ref) == STRING_CST
c78d5cec 1329 || handled_component_p (ref1->ref)
182cf5a9 1330 || TREE_CODE (ref1->ref) == MEM_REF
c78d5cec 1331 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
1332 && (!ref2->ref
8e7c2b9c 1333 || TREE_CODE (ref2->ref) == SSA_NAME
c78d5cec 1334 || DECL_P (ref2->ref)
2760c0e3 1335 || TREE_CODE (ref2->ref) == STRING_CST
c78d5cec 1336 || handled_component_p (ref2->ref)
182cf5a9 1337 || TREE_CODE (ref2->ref) == MEM_REF
c78d5cec 1338 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
dd277d48 1339
dd277d48 1340 /* Decompose the references into their base objects and the access. */
3918bd18 1341 base1 = ao_ref_base (ref1);
1342 offset1 = ref1->offset;
3918bd18 1343 max_size1 = ref1->max_size;
1344 base2 = ao_ref_base (ref2);
1345 offset2 = ref2->offset;
3918bd18 1346 max_size2 = ref2->max_size;
dd277d48 1347
1348 /* We can end up with registers or constants as bases for example from
1349 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1350 which is seen as a struct copy. */
1351 if (TREE_CODE (base1) == SSA_NAME
119147d1 1352 || TREE_CODE (base1) == CONST_DECL
66b86a74 1353 || TREE_CODE (base1) == CONSTRUCTOR
1354 || TREE_CODE (base1) == ADDR_EXPR
1355 || CONSTANT_CLASS_P (base1)
1356 || TREE_CODE (base2) == SSA_NAME
119147d1 1357 || TREE_CODE (base2) == CONST_DECL
66b86a74 1358 || TREE_CODE (base2) == CONSTRUCTOR
1359 || TREE_CODE (base2) == ADDR_EXPR
1360 || CONSTANT_CLASS_P (base2))
dd277d48 1361 return false;
1362
04ec15fa 1363 /* We can end up referring to code via function and label decls.
c78d5cec 1364 As we likely do not properly track code aliases conservatively
1365 bail out. */
ceefbccc 1366 if (TREE_CODE (base1) == FUNCTION_DECL
c78d5cec 1367 || TREE_CODE (base1) == LABEL_DECL
66b86a74 1368 || TREE_CODE (base2) == FUNCTION_DECL
c78d5cec 1369 || TREE_CODE (base2) == LABEL_DECL)
ceefbccc 1370 return true;
1371
3787db52 1372 /* Two volatile accesses always conflict. */
1373 if (ref1->volatile_p
1374 && ref2->volatile_p)
1375 return true;
1376
090a8c65 1377 /* Defer to simple offset based disambiguation if we have
1378 references based on two decls. Do this before defering to
1379 TBAA to handle must-alias cases in conformance with the
1380 GCC extension of allowing type-punning through unions. */
2622064f 1381 var1_p = DECL_P (base1);
1382 var2_p = DECL_P (base2);
dd277d48 1383 if (var1_p && var2_p)
740a2cb6 1384 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
1385 ref2->ref, base2, offset2, max_size2);
090a8c65 1386
62b0a610 1387 /* Handle restrict based accesses.
1388 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
1389 here. */
1390 tree rbase1 = base1;
1391 tree rbase2 = base2;
1392 if (var1_p)
1393 {
1394 rbase1 = ref1->ref;
1395 if (rbase1)
1396 while (handled_component_p (rbase1))
1397 rbase1 = TREE_OPERAND (rbase1, 0);
1398 }
1399 if (var2_p)
1400 {
1401 rbase2 = ref2->ref;
1402 if (rbase2)
1403 while (handled_component_p (rbase2))
1404 rbase2 = TREE_OPERAND (rbase2, 0);
1405 }
1406 if (rbase1 && rbase2
1407 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
1408 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
1409 /* If the accesses are in the same restrict clique... */
1410 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
1411 /* But based on different pointers they do not alias. */
1412 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
1413 return false;
1414
2622064f 1415 ind1_p = (TREE_CODE (base1) == MEM_REF
1416 || TREE_CODE (base1) == TARGET_MEM_REF);
1417 ind2_p = (TREE_CODE (base2) == MEM_REF
1418 || TREE_CODE (base2) == TARGET_MEM_REF);
182cf5a9 1419
67817f0f 1420 /* Canonicalize the pointer-vs-decl case. */
1421 if (ind1_p && var2_p)
1422 {
1423 HOST_WIDE_INT tmp1;
1424 tree tmp2;
1425 ao_ref *tmp3;
1426 tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
1427 tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
1428 tmp2 = base1; base1 = base2; base2 = tmp2;
1429 tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
1430 var1_p = true;
1431 ind1_p = false;
1432 var2_p = false;
1433 ind2_p = true;
1434 }
1435
090a8c65 1436 /* First defer to TBAA if possible. */
2a3ebafa 1437 if (tbaa_p
1438 && flag_strict_aliasing
3918bd18 1439 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
1440 ao_ref_alias_set (ref2)))
090a8c65 1441 return false;
1442
090a8c65 1443 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
090a8c65 1444 if (var1_p && ind2_p)
182cf5a9 1445 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
ac30e3b2 1446 offset2, max_size2,
182cf5a9 1447 ao_ref_alias_set (ref2), -1,
3918bd18 1448 ref1->ref, base1,
ac30e3b2 1449 offset1, max_size1,
182cf5a9 1450 ao_ref_alias_set (ref1),
1451 ao_ref_base_alias_set (ref1),
1452 tbaa_p);
dd277d48 1453 else if (ind1_p && ind2_p)
182cf5a9 1454 return indirect_refs_may_alias_p (ref1->ref, base1,
ac30e3b2 1455 offset1, max_size1,
182cf5a9 1456 ao_ref_alias_set (ref1), -1,
1457 ref2->ref, base2,
ac30e3b2 1458 offset2, max_size2,
182cf5a9 1459 ao_ref_alias_set (ref2), -1,
1460 tbaa_p);
dd277d48 1461
9c01641d 1462 /* We really do not want to end up here, but returning true is safe. */
1463#ifdef ENABLE_CHECKING
dd277d48 1464 gcc_unreachable ();
9c01641d 1465#else
1466 return true;
1467#endif
dd277d48 1468}
1469
258bd648 1470static bool
1471refs_may_alias_p (tree ref1, ao_ref *ref2)
1472{
1473 ao_ref r1;
1474 ao_ref_init (&r1, ref1);
1475 return refs_may_alias_p_1 (&r1, ref2, true);
1476}
1477
dd277d48 1478bool
1479refs_may_alias_p (tree ref1, tree ref2)
1480{
3918bd18 1481 ao_ref r1, r2;
1482 bool res;
1483 ao_ref_init (&r1, ref1);
1484 ao_ref_init (&r2, ref2);
1485 res = refs_may_alias_p_1 (&r1, &r2, true);
dd277d48 1486 if (res)
1487 ++alias_stats.refs_may_alias_p_may_alias;
1488 else
1489 ++alias_stats.refs_may_alias_p_no_alias;
1490 return res;
1491}
1492
2a3ebafa 1493/* Returns true if there is a anti-dependence for the STORE that
1494 executes after the LOAD. */
1495
1496bool
1497refs_anti_dependent_p (tree load, tree store)
1498{
3918bd18 1499 ao_ref r1, r2;
1500 ao_ref_init (&r1, load);
1501 ao_ref_init (&r2, store);
1502 return refs_may_alias_p_1 (&r1, &r2, false);
2a3ebafa 1503}
1504
1505/* Returns true if there is a output dependence for the stores
1506 STORE1 and STORE2. */
1507
1508bool
1509refs_output_dependent_p (tree store1, tree store2)
1510{
3918bd18 1511 ao_ref r1, r2;
1512 ao_ref_init (&r1, store1);
1513 ao_ref_init (&r2, store2);
1514 return refs_may_alias_p_1 (&r1, &r2, false);
2a3ebafa 1515}
dd277d48 1516
/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  Handles well-known builtins specially,
   consults IPA reference information for static variables, and
   finally checks the call's points-to use set and by-value
   arguments.  */

static bool
ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions read memory pointed to by
	   their second argument.  strcat/strncat additionally
	   reads memory pointed to by the first argument.  */
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* The size argument, when present, bounds the read.  */
	    if (gimple_call_num_args (call) == 3)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* The _CHK variants carry an extra object-size argument, so
	       the size of the copy is argument 2 of 4.  */
	    if (gimple_call_num_args (call) == 4)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    /* bcopy reads from its first argument (src, dest order is
	       swapped relative to memcpy).  */
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following functions read memory pointed to by their
	   first argument.  */
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	case BUILT_IN_REALLOC:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 2)
	      size = gimple_call_arg (call, 1);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_INDEX:
	case BUILT_IN_STRCHR:
	case BUILT_IN_STRRCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument with size
	   in the third argument.  */
	case BUILT_IN_MEMCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   gimple_call_arg (call, 2));
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first and second arguments.  */
	case BUILT_IN_STRSTR:
	case BUILT_IN_STRPBRK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following builtins do not read from memory.  */
	case BUILT_IN_FREE:
	case BUILT_IN_MALLOC:
	case BUILT_IN_POSIX_MEMALIGN:
	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	case BUILT_IN_ASSUME_ALIGNED:
	case BUILT_IN_VA_END:
	  return false;
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;

	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap not_read;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in the
	 IL and remove this check instead.  */
      if (node
	  && (not_read = ipa_reference_get_not_read_global (node))
	  && bitmap_bit_p (not_read, DECL_UID (base)))
	goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      /* Without points-to information we have to be conservative.  */
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      /* NOTE(review): this local deliberately shadows the function-scope
	 FLAGS; it holds the per-argument EAF flags only.  */
      int flags = gimple_call_arg_flags (call, i);

      if (flags & EAF_UNUSED)
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      /* An aggregate passed by value is read at the call site.  */
      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, true))
	    return true;
	}
    }

  return false;
}
1832
1833static bool
1a91d914 1834ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
dd277d48 1835{
134899ac 1836 bool res;
258bd648 1837 res = ref_maybe_used_by_call_p_1 (call, ref);
dd277d48 1838 if (res)
1839 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1840 else
1841 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1842 return res;
4ee9c684 1843}
1844
1845
dd277d48 1846/* If the statement STMT may use the memory reference REF return
1847 true, otherwise return false. */
4ee9c684 1848
dd277d48 1849bool
258bd648 1850ref_maybe_used_by_stmt_p (gimple stmt, ao_ref *ref)
4ee9c684 1851{
dd277d48 1852 if (is_gimple_assign (stmt))
1853 {
1854 tree rhs;
1855
1856 /* All memory assign statements are single. */
1857 if (!gimple_assign_single_p (stmt))
1858 return false;
1859
1860 rhs = gimple_assign_rhs1 (stmt);
1861 if (is_gimple_reg (rhs)
1862 || is_gimple_min_invariant (rhs)
1863 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1864 return false;
1865
1866 return refs_may_alias_p (rhs, ref);
1867 }
1868 else if (is_gimple_call (stmt))
1a91d914 1869 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
1870 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
91234829 1871 {
1a91d914 1872 tree retval = gimple_return_retval (return_stmt);
91234829 1873 if (retval
1874 && TREE_CODE (retval) != SSA_NAME
1875 && !is_gimple_min_invariant (retval)
1876 && refs_may_alias_p (retval, ref))
1877 return true;
1878 /* If ref escapes the function then the return acts as a use. */
258bd648 1879 tree base = ao_ref_base (ref);
91234829 1880 if (!base)
1881 ;
1882 else if (DECL_P (base))
1883 return is_global_var (base);
1884 else if (TREE_CODE (base) == MEM_REF
1885 || TREE_CODE (base) == TARGET_MEM_REF)
1886 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1887 return false;
1888 }
dd277d48 1889
1890 return true;
4ee9c684 1891}
1892
258bd648 1893bool
1894ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
1895{
1896 ao_ref r;
1897 ao_ref_init (&r, ref);
1898 return ref_maybe_used_by_stmt_p (stmt, &r);
1899}
1900
dd277d48 1901/* If the call in statement CALL may clobber the memory reference REF
1902 return true, otherwise return false. */
4ee9c684 1903
38168b16 1904bool
1a91d914 1905call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
4ee9c684 1906{
7f7f16d4 1907 tree base;
047fdd47 1908 tree callee;
dd277d48 1909
1910 /* If the call is pure or const it cannot clobber anything. */
1911 if (gimple_call_flags (call)
1912 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1913 return false;
1914
3918bd18 1915 base = ao_ref_base (ref);
dd277d48 1916 if (!base)
1917 return true;
1918
1919 if (TREE_CODE (base) == SSA_NAME
1920 || CONSTANT_CLASS_P (base))
1921 return false;
1922
3787db52 1923 /* A call that is not without side-effects might involve volatile
1924 accesses and thus conflicts with all other volatile accesses. */
1925 if (ref->volatile_p)
1926 return true;
1927
09347e88 1928 /* If the reference is based on a decl that is not aliased the call
1929 cannot possibly clobber it. */
1930 if (DECL_P (base)
1931 && !may_be_aliased (base)
7f7f16d4 1932 /* But local non-readonly statics can be modified through recursion
1933 or the call may implement a threading barrier which we must
1934 treat as may-def. */
09347e88 1935 && (TREE_READONLY (base)
7f7f16d4 1936 || !is_global_var (base)))
09347e88 1937 return false;
1938
047fdd47 1939 callee = gimple_call_fndecl (call);
1940
1941 /* Handle those builtin functions explicitly that do not act as
1942 escape points. See tree-ssa-structalias.c:find_func_aliases
1943 for the list of builtins we might need to handle here. */
1944 if (callee != NULL_TREE
1945 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1946 switch (DECL_FUNCTION_CODE (callee))
1947 {
1948 /* All the following functions clobber memory pointed to by
1949 their first argument. */
1950 case BUILT_IN_STRCPY:
1951 case BUILT_IN_STRNCPY:
047fdd47 1952 case BUILT_IN_MEMCPY:
1953 case BUILT_IN_MEMMOVE:
1954 case BUILT_IN_MEMPCPY:
1955 case BUILT_IN_STPCPY:
1956 case BUILT_IN_STPNCPY:
1957 case BUILT_IN_STRCAT:
1958 case BUILT_IN_STRNCAT:
134899ac 1959 case BUILT_IN_MEMSET:
4c0315d0 1960 case BUILT_IN_TM_MEMSET:
1961 CASE_BUILT_IN_TM_STORE (1):
1962 CASE_BUILT_IN_TM_STORE (2):
1963 CASE_BUILT_IN_TM_STORE (4):
1964 CASE_BUILT_IN_TM_STORE (8):
1965 CASE_BUILT_IN_TM_STORE (FLOAT):
1966 CASE_BUILT_IN_TM_STORE (DOUBLE):
1967 CASE_BUILT_IN_TM_STORE (LDOUBLE):
1968 CASE_BUILT_IN_TM_STORE (M64):
1969 CASE_BUILT_IN_TM_STORE (M128):
1970 CASE_BUILT_IN_TM_STORE (M256):
1971 case BUILT_IN_TM_MEMCPY:
1972 case BUILT_IN_TM_MEMMOVE:
047fdd47 1973 {
134899ac 1974 ao_ref dref;
1975 tree size = NULL_TREE;
77efe819 1976 /* Don't pass in size for strncat, as the maximum size
1977 is strlen (dest) + n + 1 instead of n, resp.
1978 n + 1 at dest + strlen (dest), but strlen (dest) isn't
1979 known. */
1980 if (gimple_call_num_args (call) == 3
1981 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
134899ac 1982 size = gimple_call_arg (call, 2);
1983 ao_ref_init_from_ptr_and_size (&dref,
1984 gimple_call_arg (call, 0),
1985 size);
1986 return refs_may_alias_p_1 (&dref, ref, false);
047fdd47 1987 }
939514e9 1988 case BUILT_IN_STRCPY_CHK:
1989 case BUILT_IN_STRNCPY_CHK:
1990 case BUILT_IN_MEMCPY_CHK:
1991 case BUILT_IN_MEMMOVE_CHK:
1992 case BUILT_IN_MEMPCPY_CHK:
1993 case BUILT_IN_STPCPY_CHK:
1063acde 1994 case BUILT_IN_STPNCPY_CHK:
939514e9 1995 case BUILT_IN_STRCAT_CHK:
1996 case BUILT_IN_STRNCAT_CHK:
1997 case BUILT_IN_MEMSET_CHK:
1998 {
1999 ao_ref dref;
2000 tree size = NULL_TREE;
77efe819 2001 /* Don't pass in size for __strncat_chk, as the maximum size
2002 is strlen (dest) + n + 1 instead of n, resp.
2003 n + 1 at dest + strlen (dest), but strlen (dest) isn't
2004 known. */
2005 if (gimple_call_num_args (call) == 4
2006 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
939514e9 2007 size = gimple_call_arg (call, 2);
2008 ao_ref_init_from_ptr_and_size (&dref,
2009 gimple_call_arg (call, 0),
2010 size);
2011 return refs_may_alias_p_1 (&dref, ref, false);
2012 }
119147d1 2013 case BUILT_IN_BCOPY:
2014 {
2015 ao_ref dref;
2016 tree size = gimple_call_arg (call, 2);
2017 ao_ref_init_from_ptr_and_size (&dref,
2018 gimple_call_arg (call, 1),
2019 size);
2020 return refs_may_alias_p_1 (&dref, ref, false);
2021 }
0d3ca08e 2022 /* Allocating memory does not have any side-effects apart from
2023 being the definition point for the pointer. */
2024 case BUILT_IN_MALLOC:
060fc206 2025 case BUILT_IN_ALIGNED_ALLOC:
4ef43bbd 2026 case BUILT_IN_CALLOC:
77efe819 2027 case BUILT_IN_STRDUP:
2028 case BUILT_IN_STRNDUP:
be97d4b6 2029 /* Unix98 specifies that errno is set on allocation failure. */
a29c5f9c 2030 if (flag_errno_math
be97d4b6 2031 && targetm.ref_may_alias_errno (ref))
2032 return true;
0d3ca08e 2033 return false;
ca793672 2034 case BUILT_IN_STACK_SAVE:
2035 case BUILT_IN_ALLOCA:
581bf1c2 2036 case BUILT_IN_ALLOCA_WITH_ALIGN:
fca0886c 2037 case BUILT_IN_ASSUME_ALIGNED:
ca793672 2038 return false;
be2c7f8f 2039 /* But posix_memalign stores a pointer into the memory pointed to
2040 by its first argument. */
2041 case BUILT_IN_POSIX_MEMALIGN:
2042 {
2043 tree ptrptr = gimple_call_arg (call, 0);
2044 ao_ref dref;
2045 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2046 TYPE_SIZE_UNIT (ptr_type_node));
2047 return (refs_may_alias_p_1 (&dref, ref, false)
2048 || (flag_errno_math
2049 && targetm.ref_may_alias_errno (ref)));
2050 }
047fdd47 2051 /* Freeing memory kills the pointed-to memory. More importantly
2052 the call has to serve as a barrier for moving loads and stores
134899ac 2053 across it. */
047fdd47 2054 case BUILT_IN_FREE:
9d17b4a8 2055 case BUILT_IN_VA_END:
047fdd47 2056 {
2057 tree ptr = gimple_call_arg (call, 0);
2058 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2059 }
ee890734 2060 /* Realloc serves both as allocation point and deallocation point. */
2061 case BUILT_IN_REALLOC:
2062 {
2063 tree ptr = gimple_call_arg (call, 0);
2064 /* Unix98 specifies that errno is set on allocation failure. */
2065 return ((flag_errno_math
2066 && targetm.ref_may_alias_errno (ref))
2067 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2068 }
047fdd47 2069 case BUILT_IN_GAMMA_R:
2070 case BUILT_IN_GAMMAF_R:
2071 case BUILT_IN_GAMMAL_R:
2072 case BUILT_IN_LGAMMA_R:
2073 case BUILT_IN_LGAMMAF_R:
2074 case BUILT_IN_LGAMMAL_R:
09d9c774 2075 {
2076 tree out = gimple_call_arg (call, 1);
2077 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2078 return true;
2079 if (flag_errno_math)
2080 break;
2081 return false;
2082 }
2083 case BUILT_IN_FREXP:
2084 case BUILT_IN_FREXPF:
2085 case BUILT_IN_FREXPL:
047fdd47 2086 case BUILT_IN_MODF:
2087 case BUILT_IN_MODFF:
2088 case BUILT_IN_MODFL:
2089 {
2090 tree out = gimple_call_arg (call, 1);
2091 return ptr_deref_may_alias_ref_p_1 (out, ref);
2092 }
2093 case BUILT_IN_REMQUO:
2094 case BUILT_IN_REMQUOF:
2095 case BUILT_IN_REMQUOL:
2096 {
2097 tree out = gimple_call_arg (call, 2);
09d9c774 2098 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2099 return true;
2100 if (flag_errno_math)
2101 break;
2102 return false;
047fdd47 2103 }
2104 case BUILT_IN_SINCOS:
2105 case BUILT_IN_SINCOSF:
2106 case BUILT_IN_SINCOSL:
2107 {
2108 tree sin = gimple_call_arg (call, 1);
2109 tree cos = gimple_call_arg (call, 2);
2110 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2111 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2112 }
a44814b9 2113 /* __sync_* builtins and some OpenMP builtins act as threading
2114 barriers. */
2115#undef DEF_SYNC_BUILTIN
2116#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2117#include "sync-builtins.def"
2118#undef DEF_SYNC_BUILTIN
2119 case BUILT_IN_GOMP_ATOMIC_START:
2120 case BUILT_IN_GOMP_ATOMIC_END:
2121 case BUILT_IN_GOMP_BARRIER:
bc7bff74 2122 case BUILT_IN_GOMP_BARRIER_CANCEL:
a44814b9 2123 case BUILT_IN_GOMP_TASKWAIT:
bc7bff74 2124 case BUILT_IN_GOMP_TASKGROUP_END:
a44814b9 2125 case BUILT_IN_GOMP_CRITICAL_START:
2126 case BUILT_IN_GOMP_CRITICAL_END:
2127 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2128 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2129 case BUILT_IN_GOMP_LOOP_END:
bc7bff74 2130 case BUILT_IN_GOMP_LOOP_END_CANCEL:
a44814b9 2131 case BUILT_IN_GOMP_ORDERED_START:
2132 case BUILT_IN_GOMP_ORDERED_END:
a44814b9 2133 case BUILT_IN_GOMP_SECTIONS_END:
bc7bff74 2134 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
a44814b9 2135 case BUILT_IN_GOMP_SINGLE_COPY_START:
2136 case BUILT_IN_GOMP_SINGLE_COPY_END:
2137 return true;
047fdd47 2138 default:
2139 /* Fallthru to general call handling. */;
2140 }
2141
dd277d48 2142 /* Check if base is a global static variable that is not written
2143 by the function. */
047fdd47 2144 if (callee != NULL_TREE
2145 && TREE_CODE (base) == VAR_DECL
d97be713 2146 && TREE_STATIC (base))
4ee9c684 2147 {
415d1b9a 2148 struct cgraph_node *node = cgraph_node::get (callee);
dd277d48 2149 bitmap not_written;
0eaf9bd7 2150
924de091 2151 if (node
2152 && (not_written = ipa_reference_get_not_written_global (node))
dd277d48 2153 && bitmap_bit_p (not_written, DECL_UID (base)))
2154 return false;
4ee9c684 2155 }
4ee9c684 2156
cb245216 2157 /* Check if the base variable is call-clobbered. */
dd277d48 2158 if (DECL_P (base))
cb245216 2159 return pt_solution_includes (gimple_call_clobber_set (call), base);
2622064f 2160 else if ((TREE_CODE (base) == MEM_REF
2161 || TREE_CODE (base) == TARGET_MEM_REF)
917f08fa 2162 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2163 {
2164 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2165 if (!pi)
2166 return true;
2167
cb245216 2168 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
917f08fa 2169 }
4ee9c684 2170
dd277d48 2171 return true;
2172}
4ee9c684 2173
4debfcfc 2174/* If the call in statement CALL may clobber the memory reference REF
2175 return true, otherwise return false. */
2176
2177bool
1a91d914 2178call_may_clobber_ref_p (gcall *call, tree ref)
4ee9c684 2179{
3918bd18 2180 bool res;
2181 ao_ref r;
2182 ao_ref_init (&r, ref);
2183 res = call_may_clobber_ref_p_1 (call, &r);
dd277d48 2184 if (res)
2185 ++alias_stats.call_may_clobber_ref_p_may_alias;
2186 else
2187 ++alias_stats.call_may_clobber_ref_p_no_alias;
2188 return res;
4ee9c684 2189}
94e6573f 2190
dd277d48 2191
2192/* If the statement STMT may clobber the memory reference REF return true,
2193 otherwise return false. */
94e6573f 2194
2195bool
3918bd18 2196stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
94e6573f 2197{
dd277d48 2198 if (is_gimple_call (stmt))
2199 {
2200 tree lhs = gimple_call_lhs (stmt);
2201 if (lhs
66b86a74 2202 && TREE_CODE (lhs) != SSA_NAME)
3918bd18 2203 {
2204 ao_ref r;
2205 ao_ref_init (&r, lhs);
2206 if (refs_may_alias_p_1 (ref, &r, true))
2207 return true;
2208 }
94e6573f 2209
1a91d914 2210 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
dd277d48 2211 }
8e7c2b9c 2212 else if (gimple_assign_single_p (stmt))
3918bd18 2213 {
8e7c2b9c 2214 tree lhs = gimple_assign_lhs (stmt);
66b86a74 2215 if (TREE_CODE (lhs) != SSA_NAME)
8e7c2b9c 2216 {
2217 ao_ref r;
66b86a74 2218 ao_ref_init (&r, lhs);
8e7c2b9c 2219 return refs_may_alias_p_1 (ref, &r, true);
2220 }
3918bd18 2221 }
dd277d48 2222 else if (gimple_code (stmt) == GIMPLE_ASM)
94e6573f 2223 return true;
2224
6329636b 2225 return false;
94e6573f 2226}
2be14d8b 2227
3918bd18 2228bool
2229stmt_may_clobber_ref_p (gimple stmt, tree ref)
2230{
2231 ao_ref r;
2232 ao_ref_init (&r, ref);
2233 return stmt_may_clobber_ref_p_1 (stmt, &r);
2234}
2235
ab450cc6 2236/* If STMT kills the memory reference REF return true, otherwise
2237 return false. */
2238
258bd648 2239bool
2240stmt_kills_ref_p (gimple stmt, ao_ref *ref)
ab450cc6 2241{
2e333543 2242 if (!ao_ref_base (ref))
07ff6a65 2243 return false;
2244
ab450cc6 2245 if (gimple_has_lhs (stmt)
e54d36c4 2246 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2247 /* The assignment is not necessarily carried out if it can throw
2248 and we can catch it in the current function where we could inspect
2249 the previous value.
2250 ??? We only need to care about the RHS throwing. For aggregate
2251 assignments or similar calls and non-call exceptions the LHS
2252 might throw as well. */
2253 && !stmt_can_throw_internal (stmt))
ab450cc6 2254 {
2e333543 2255 tree lhs = gimple_get_lhs (stmt);
2256 /* If LHS is literally a base of the access we are done. */
2257 if (ref->ref)
2258 {
2259 tree base = ref->ref;
2260 if (handled_component_p (base))
2261 {
2262 tree saved_lhs0 = NULL_TREE;
2263 if (handled_component_p (lhs))
2264 {
2265 saved_lhs0 = TREE_OPERAND (lhs, 0);
2266 TREE_OPERAND (lhs, 0) = integer_zero_node;
2267 }
2268 do
2269 {
2270 /* Just compare the outermost handled component, if
2271 they are equal we have found a possible common
2272 base. */
2273 tree saved_base0 = TREE_OPERAND (base, 0);
2274 TREE_OPERAND (base, 0) = integer_zero_node;
2275 bool res = operand_equal_p (lhs, base, 0);
2276 TREE_OPERAND (base, 0) = saved_base0;
2277 if (res)
2278 break;
2279 /* Otherwise drop handled components of the access. */
2280 base = saved_base0;
2281 }
2282 while (handled_component_p (base));
2283 if (saved_lhs0)
2284 TREE_OPERAND (lhs, 0) = saved_lhs0;
2285 }
2286 /* Finally check if lhs is equal or equal to the base candidate
2287 of the access. */
2288 if (operand_equal_p (lhs, base, 0))
2289 return true;
2290 }
2291
2292 /* Now look for non-literal equal bases with the restriction of
2293 handling constant offset and size. */
2294 /* For a must-alias check we need to be able to constrain
2295 the access properly. */
2296 if (ref->max_size == -1)
2297 return false;
137eddca 2298 HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
2e333543 2299 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
ab450cc6 2300 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2301 so base == ref->base does not always hold. */
137eddca 2302 if (base != ref->base)
ab450cc6 2303 {
137eddca 2304 /* If both base and ref->base are MEM_REFs, only compare the
2305 first operand, and if the second operand isn't equal constant,
2306 try to add the offsets into offset and ref_offset. */
2307 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2308 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
ab450cc6 2309 {
83d18d6d 2310 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2311 TREE_OPERAND (ref->base, 1)))
137eddca 2312 {
5de9d3ed 2313 offset_int off1 = mem_ref_offset (base);
885a2694 2314 off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT);
e913b5cd 2315 off1 += offset;
5de9d3ed 2316 offset_int off2 = mem_ref_offset (ref->base);
885a2694 2317 off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT);
e913b5cd 2318 off2 += ref_offset;
796b6678 2319 if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
137eddca 2320 {
2321 offset = off1.to_shwi ();
2322 ref_offset = off2.to_shwi ();
2323 }
2324 else
2325 size = -1;
2326 }
ab450cc6 2327 }
137eddca 2328 else
2329 size = -1;
2330 }
2331 /* For a must-alias check we need to be able to constrain
2332 the access properly. */
2333 if (size != -1 && size == max_size)
2334 {
2335 if (offset <= ref_offset
2336 && offset + size >= ref_offset + ref->max_size)
2337 return true;
ab450cc6 2338 }
2339 }
07ff6a65 2340
2341 if (is_gimple_call (stmt))
2342 {
2343 tree callee = gimple_call_fndecl (stmt);
2344 if (callee != NULL_TREE
2345 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2346 switch (DECL_FUNCTION_CODE (callee))
2347 {
3d0ea943 2348 case BUILT_IN_FREE:
2349 {
2350 tree ptr = gimple_call_arg (stmt, 0);
2351 tree base = ao_ref_base (ref);
2352 if (base && TREE_CODE (base) == MEM_REF
2353 && TREE_OPERAND (base, 0) == ptr)
2354 return true;
2355 break;
2356 }
2357
07ff6a65 2358 case BUILT_IN_MEMCPY:
2359 case BUILT_IN_MEMPCPY:
2360 case BUILT_IN_MEMMOVE:
2361 case BUILT_IN_MEMSET:
939514e9 2362 case BUILT_IN_MEMCPY_CHK:
2363 case BUILT_IN_MEMPCPY_CHK:
2364 case BUILT_IN_MEMMOVE_CHK:
2365 case BUILT_IN_MEMSET_CHK:
07ff6a65 2366 {
2e333543 2367 /* For a must-alias check we need to be able to constrain
2368 the access properly. */
2369 if (ref->max_size == -1)
2370 return false;
07ff6a65 2371 tree dest = gimple_call_arg (stmt, 0);
2372 tree len = gimple_call_arg (stmt, 2);
e913b5cd 2373 if (!tree_fits_shwi_p (len))
07ff6a65 2374 return false;
e6c705fb 2375 tree rbase = ref->base;
652505ad 2376 offset_int roffset = ref->offset;
e6c705fb 2377 ao_ref dref;
2378 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2379 tree base = ao_ref_base (&dref);
652505ad 2380 offset_int offset = dref.offset;
e6c705fb 2381 if (!base || dref.size == -1)
2382 return false;
2383 if (TREE_CODE (base) == MEM_REF)
2384 {
2385 if (TREE_CODE (rbase) != MEM_REF)
2386 return false;
2387 // Compare pointers.
2315e038 2388 offset += wi::lshift (mem_ref_offset (base),
2389 LOG2_BITS_PER_UNIT);
2390 roffset += wi::lshift (mem_ref_offset (rbase),
2391 LOG2_BITS_PER_UNIT);
e6c705fb 2392 base = TREE_OPERAND (base, 0);
2393 rbase = TREE_OPERAND (rbase, 0);
2394 }
2315e038 2395 if (base == rbase
2396 && wi::les_p (offset, roffset)
2397 && wi::les_p (roffset + ref->max_size,
2398 offset + wi::lshift (wi::to_offset (len),
2399 LOG2_BITS_PER_UNIT)))
2400 return true;
9d17b4a8 2401 break;
2402 }
2403
2404 case BUILT_IN_VA_END:
2405 {
2406 tree ptr = gimple_call_arg (stmt, 0);
2407 if (TREE_CODE (ptr) == ADDR_EXPR)
2408 {
2409 tree base = ao_ref_base (ref);
2410 if (TREE_OPERAND (ptr, 0) == base)
2411 return true;
2412 }
2413 break;
07ff6a65 2414 }
9d17b4a8 2415
07ff6a65 2416 default:;
2417 }
07ff6a65 2418 }
ab450cc6 2419 return false;
2420}
2421
2422bool
2423stmt_kills_ref_p (gimple stmt, tree ref)
2424{
2425 ao_ref r;
2426 ao_ref_init (&r, ref);
258bd648 2427 return stmt_kills_ref_p (stmt, &r);
ab450cc6 2428}
2429
3918bd18 2430
dd277d48 2431/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2432 TARGET or a statement clobbering the memory reference REF in which
2433 case false is returned. The walk starts with VUSE, one argument of PHI. */
2434
2435static bool
3918bd18 2436maybe_skip_until (gimple phi, tree target, ao_ref *ref,
4fd7a6de 2437 tree vuse, unsigned int *cnt, bitmap *visited,
38168b16 2438 bool abort_on_visited,
2439 void *(*translate)(ao_ref *, tree, void *, bool),
2440 void *data)
ac926412 2441{
0afe880d 2442 basic_block bb = gimple_bb (phi);
2443
dd277d48 2444 if (!*visited)
2445 *visited = BITMAP_ALLOC (NULL);
ac926412 2446
dd277d48 2447 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2448
2449 /* Walk until we hit the target. */
2450 while (vuse != target)
ac926412 2451 {
dd277d48 2452 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
2453 /* Recurse for PHI nodes. */
2454 if (gimple_code (def_stmt) == GIMPLE_PHI)
2455 {
2456 /* An already visited PHI node ends the walk successfully. */
2457 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
4fd7a6de 2458 return !abort_on_visited;
2459 vuse = get_continuation_for_phi (def_stmt, ref, cnt,
38168b16 2460 visited, abort_on_visited,
2461 translate, data);
dd277d48 2462 if (!vuse)
2463 return false;
2464 continue;
2465 }
297a2110 2466 else if (gimple_nop_p (def_stmt))
dd277d48 2467 return false;
297a2110 2468 else
2469 {
2470 /* A clobbering statement or the end of the IL ends it failing. */
2471 ++*cnt;
2472 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
38168b16 2473 {
2474 if (translate
2475 && (*translate) (ref, vuse, data, true) == NULL)
2476 ;
2477 else
2478 return false;
2479 }
297a2110 2480 }
0afe880d 2481 /* If we reach a new basic-block see if we already skipped it
2482 in a previous walk that ended successfully. */
2483 if (gimple_bb (def_stmt) != bb)
2484 {
2485 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
4fd7a6de 2486 return !abort_on_visited;
0afe880d 2487 bb = gimple_bb (def_stmt);
2488 }
dd277d48 2489 vuse = gimple_vuse (def_stmt);
ac926412 2490 }
dd277d48 2491 return true;
2492}
ac926412 2493
d7f69727 2494/* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
2495 until we hit the phi argument definition that dominates the other one.
2496 Return that, or NULL_TREE if there is no such definition. */
2497
2498static tree
2499get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
4fd7a6de 2500 ao_ref *ref, unsigned int *cnt,
38168b16 2501 bitmap *visited, bool abort_on_visited,
2502 void *(*translate)(ao_ref *, tree, void *, bool),
2503 void *data)
d7f69727 2504{
2505 gimple def0 = SSA_NAME_DEF_STMT (arg0);
2506 gimple def1 = SSA_NAME_DEF_STMT (arg1);
2507 tree common_vuse;
2508
2509 if (arg0 == arg1)
2510 return arg0;
2511 else if (gimple_nop_p (def0)
2512 || (!gimple_nop_p (def1)
2513 && dominated_by_p (CDI_DOMINATORS,
2514 gimple_bb (def1), gimple_bb (def0))))
2515 {
4fd7a6de 2516 if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
38168b16 2517 visited, abort_on_visited, translate, data))
d7f69727 2518 return arg0;
2519 }
2520 else if (gimple_nop_p (def1)
2521 || dominated_by_p (CDI_DOMINATORS,
2522 gimple_bb (def0), gimple_bb (def1)))
2523 {
4fd7a6de 2524 if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
38168b16 2525 visited, abort_on_visited, translate, data))
d7f69727 2526 return arg1;
2527 }
2528 /* Special case of a diamond:
2529 MEM_1 = ...
2530 goto (cond) ? L1 : L2
2531 L1: store1 = ... #MEM_2 = vuse(MEM_1)
2532 goto L3
2533 L2: store2 = ... #MEM_3 = vuse(MEM_1)
2534 L3: MEM_4 = PHI<MEM_2, MEM_3>
2535 We were called with the PHI at L3, MEM_2 and MEM_3 don't
2536 dominate each other, but still we can easily skip this PHI node
2537 if we recognize that the vuse MEM operand is the same for both,
2538 and that we can skip both statements (they don't clobber us).
2539 This is still linear. Don't use maybe_skip_until, that might
2540 potentially be slow. */
2541 else if ((common_vuse = gimple_vuse (def0))
2542 && common_vuse == gimple_vuse (def1))
2543 {
297a2110 2544 *cnt += 2;
38168b16 2545 if ((!stmt_may_clobber_ref_p_1 (def0, ref)
2546 || (translate
2547 && (*translate) (ref, arg0, data, true) == NULL))
2548 && (!stmt_may_clobber_ref_p_1 (def1, ref)
2549 || (translate
2550 && (*translate) (ref, arg1, data, true) == NULL)))
d7f69727 2551 return common_vuse;
2552 }
2553
2554 return NULL_TREE;
2555}
2556
2557
dd277d48 2558/* Starting from a PHI node for the virtual operand of the memory reference
2559 REF find a continuation virtual operand that allows to continue walking
2560 statements dominating PHI skipping only statements that cannot possibly
297a2110 2561 clobber REF. Increments *CNT for each alias disambiguation done.
2562 Returns NULL_TREE if no suitable virtual operand can be found. */
dd277d48 2563
7814d418 2564tree
297a2110 2565get_continuation_for_phi (gimple phi, ao_ref *ref,
4fd7a6de 2566 unsigned int *cnt, bitmap *visited,
38168b16 2567 bool abort_on_visited,
2568 void *(*translate)(ao_ref *, tree, void *, bool),
2569 void *data)
dd277d48 2570{
2571 unsigned nargs = gimple_phi_num_args (phi);
2572
2573 /* Through a single-argument PHI we can simply look through. */
2574 if (nargs == 1)
2575 return PHI_ARG_DEF (phi, 0);
2576
d7f69727 2577 /* For two or more arguments try to pairwise skip non-aliasing code
2578 until we hit the phi argument definition that dominates the other one. */
2579 else if (nargs >= 2)
ac926412 2580 {
0afe880d 2581 tree arg0, arg1;
2582 unsigned i;
2583
2584 /* Find a candidate for the virtual operand which definition
2585 dominates those of all others. */
2586 arg0 = PHI_ARG_DEF (phi, 0);
2587 if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
2588 for (i = 1; i < nargs; ++i)
2589 {
2590 arg1 = PHI_ARG_DEF (phi, i);
2591 if (SSA_NAME_IS_DEFAULT_DEF (arg1))
2592 {
2593 arg0 = arg1;
2594 break;
2595 }
2596 if (dominated_by_p (CDI_DOMINATORS,
2597 gimple_bb (SSA_NAME_DEF_STMT (arg0)),
2598 gimple_bb (SSA_NAME_DEF_STMT (arg1))))
2599 arg0 = arg1;
2600 }
2601
2602 /* Then pairwise reduce against the found candidate. */
2603 for (i = 0; i < nargs; ++i)
7814d418 2604 {
d7f69727 2605 arg1 = PHI_ARG_DEF (phi, i);
297a2110 2606 arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
38168b16 2607 cnt, visited, abort_on_visited,
2608 translate, data);
d7f69727 2609 if (!arg0)
2610 return NULL_TREE;
7814d418 2611 }
d7f69727 2612
2613 return arg0;
ac926412 2614 }
dd277d48 2615
2616 return NULL_TREE;
ac926412 2617}
516849c7 2618
dd277d48 2619/* Based on the memory reference REF and its virtual use VUSE call
2620 WALKER for each virtual use that is equivalent to VUSE, including VUSE
2621 itself. That is, for each virtual use for which its defining statement
2622 does not clobber REF.
e683de6d 2623
dd277d48 2624 WALKER is called with REF, the current virtual use and DATA. If
2625 WALKER returns non-NULL the walk stops and its result is returned.
2626 At the end of a non-successful walk NULL is returned.
3857274c 2627
d8021dea 2628 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
2629 use which definition is a statement that may clobber REF and DATA.
2630 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2631 If TRANSLATE returns non-NULL the walk stops and its result is returned.
2632 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2633 to adjust REF and *DATA to make that valid.
2634
46816709 2635 VALUEIZE if non-NULL is called with the next VUSE that is considered
2636 and return value is substituted for that. This can be used to
2637 implement optimistic value-numbering for example. Note that the
2638 VUSE argument is assumed to be valueized already.
2639
dd277d48 2640 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
2641
2642void *
3918bd18 2643walk_non_aliased_vuses (ao_ref *ref, tree vuse,
297a2110 2644 void *(*walker)(ao_ref *, tree, unsigned int, void *),
38168b16 2645 void *(*translate)(ao_ref *, tree, void *, bool),
46816709 2646 tree (*valueize)(tree),
38168b16 2647 void *data)
3857274c 2648{
dd277d48 2649 bitmap visited = NULL;
2650 void *res;
297a2110 2651 unsigned int cnt = 0;
4fd7a6de 2652 bool translated = false;
dd277d48 2653
02c74b9b 2654 timevar_push (TV_ALIAS_STMT_WALK);
2655
dd277d48 2656 do
2657 {
2658 gimple def_stmt;
3857274c 2659
02c74b9b 2660 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
297a2110 2661 res = (*walker) (ref, vuse, cnt, data);
2662 /* Abort walk. */
2663 if (res == (void *)-1)
2664 {
2665 res = NULL;
2666 break;
2667 }
2668 /* Lookup succeeded. */
2669 else if (res != NULL)
dd277d48 2670 break;
3857274c 2671
46816709 2672 if (valueize)
2673 vuse = valueize (vuse);
dd277d48 2674 def_stmt = SSA_NAME_DEF_STMT (vuse);
2675 if (gimple_nop_p (def_stmt))
2676 break;
2677 else if (gimple_code (def_stmt) == GIMPLE_PHI)
4fd7a6de 2678 vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
38168b16 2679 &visited, translated, translate, data);
dd277d48 2680 else
2681 {
297a2110 2682 cnt++;
3918bd18 2683 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
d8021dea 2684 {
2685 if (!translate)
2686 break;
38168b16 2687 res = (*translate) (ref, vuse, data, false);
d8021dea 2688 /* Failed lookup and translation. */
2689 if (res == (void *)-1)
2690 {
2691 res = NULL;
2692 break;
2693 }
2694 /* Lookup succeeded. */
2695 else if (res != NULL)
2696 break;
2697 /* Translation succeeded, continue walking. */
4fd7a6de 2698 translated = true;
d8021dea 2699 }
dd277d48 2700 vuse = gimple_vuse (def_stmt);
2701 }
2702 }
2703 while (vuse);
ac926412 2704
dd277d48 2705 if (visited)
2706 BITMAP_FREE (visited);
ac926412 2707
02c74b9b 2708 timevar_pop (TV_ALIAS_STMT_WALK);
2709
dd277d48 2710 return res;
3857274c 2711}
2712
604eef2c 2713
dd277d48 2714/* Based on the memory reference REF call WALKER for each vdef which
2715 defining statement may clobber REF, starting with VDEF. If REF
2716 is NULL_TREE, each defining statement is visited.
2717
2718 WALKER is called with REF, the current vdef and DATA. If WALKER
2719 returns true the walk is stopped, otherwise it continues.
2720
f91737f9 2721 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
2722 The pointer may be NULL and then we do not track this information.
2723
dd277d48 2724 At PHI nodes walk_aliased_vdefs forks into one walk for reach
2725 PHI argument (but only one walk continues on merge points), the
2726 return value is true if any of the walks was successful.
2727
2728 The function returns the number of statements walked. */
604eef2c 2729
2730static unsigned int
1daead67 2731walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2732 bool (*walker)(ao_ref *, tree, void *), void *data,
f91737f9 2733 bitmap *visited, unsigned int cnt,
2734 bool *function_entry_reached)
604eef2c 2735{
dd277d48 2736 do
2737 {
2738 gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
604eef2c 2739
dd277d48 2740 if (*visited
2741 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2742 return cnt;
2743
2744 if (gimple_nop_p (def_stmt))
f91737f9 2745 {
2746 if (function_entry_reached)
2747 *function_entry_reached = true;
2748 return cnt;
2749 }
dd277d48 2750 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2751 {
2752 unsigned i;
2753 if (!*visited)
2754 *visited = BITMAP_ALLOC (NULL);
2755 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2756 cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
f91737f9 2757 walker, data, visited, 0,
2758 function_entry_reached);
dd277d48 2759 return cnt;
2760 }
2761
02c74b9b 2762 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
dd277d48 2763 cnt++;
2764 if ((!ref
1daead67 2765 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
dd277d48 2766 && (*walker) (ref, vdef, data))
2767 return cnt;
2768
2769 vdef = gimple_vuse (def_stmt);
2770 }
2771 while (1);
604eef2c 2772}
2773
dd277d48 2774unsigned int
1daead67 2775walk_aliased_vdefs (ao_ref *ref, tree vdef,
2776 bool (*walker)(ao_ref *, tree, void *), void *data,
f91737f9 2777 bitmap *visited,
2778 bool *function_entry_reached)
df3f0669 2779{
dd277d48 2780 bitmap local_visited = NULL;
2781 unsigned int ret;
2782
02c74b9b 2783 timevar_push (TV_ALIAS_STMT_WALK);
2784
325396bf 2785 if (function_entry_reached)
2786 *function_entry_reached = false;
2787
dd277d48 2788 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
f91737f9 2789 visited ? visited : &local_visited, 0,
2790 function_entry_reached);
dd277d48 2791 if (local_visited)
2792 BITMAP_FREE (local_visited);
2793
02c74b9b 2794 timevar_pop (TV_ALIAS_STMT_WALK);
2795
dd277d48 2796 return ret;
2797}
2798