]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-ssa-alias.c
2014-04-15 Richard Biener <rguenther@suse.de>
[thirdparty/gcc.git] / gcc / tree-ssa-alias.c
CommitLineData
4ee9c684 1/* Alias analysis for trees.
3aea1f79 2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
4ee9c684 3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
8c4c00c1 9the Free Software Foundation; either version 3, or (at your option)
4ee9c684 10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
4ee9c684 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
4ee9c684 26#include "tm_p.h"
be97d4b6 27#include "target.h"
4ee9c684 28#include "basic-block.h"
b9ed1410 29#include "timevar.h" /* for TV_ALIAS_STMT_WALK */
4ee9c684 30#include "langhooks.h"
31#include "flags.h"
32#include "function.h"
ce084dfc 33#include "tree-pretty-print.h"
b9ed1410 34#include "dumpfile.h"
bc61cadb 35#include "tree-ssa-alias.h"
36#include "internal-fn.h"
37#include "tree-eh.h"
38#include "gimple-expr.h"
39#include "is-a.h"
75a70cf9 40#include "gimple.h"
073c1fd5 41#include "gimple-ssa.h"
9ed99284 42#include "stringpool.h"
073c1fd5 43#include "tree-ssanames.h"
9ed99284 44#include "expr.h"
073c1fd5 45#include "tree-dfa.h"
4ee9c684 46#include "tree-inline.h"
4ee9c684 47#include "params.h"
6cc9bd27 48#include "alloc-pool.h"
dd277d48 49#include "tree-ssa-alias.h"
dd9784d4 50#include "ipa-reference.h"
a55dc2cd 51
dd277d48 52/* Broad overview of how alias analysis on gimple works:
4fb5e5ca 53
dd277d48 54 Statements clobbering or using memory are linked through the
55 virtual operand factored use-def chain. The virtual operand
56 is unique per function, its symbol is accessible via gimple_vop (cfun).
57 Virtual operands are used for efficiently walking memory statements
58 in the gimple IL and are useful for things like value-numbering as
59 a generation count for memory references.
339d7079 60
dd277d48 61 SSA_NAME pointers may have associated points-to information
62 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
63 points-to information is (re-)computed by the TODO_rebuild_alias
64 pass manager todo. Points-to information is also used for more
65 precise tracking of call-clobbered and call-used variables and
66 related disambiguations.
339d7079 67
dd277d48 68 This file contains functions for disambiguating memory references,
69 the so called alias-oracle and tools for walking of the gimple IL.
339d7079 70
dd277d48 71 The main alias-oracle entry-points are
339d7079 72
dd277d48 73 bool stmt_may_clobber_ref_p (gimple, tree)
339d7079 74
dd277d48 75 This function queries if a statement may invalidate (parts of)
76 the memory designated by the reference tree argument.
339d7079 77
dd277d48 78 bool ref_maybe_used_by_stmt_p (gimple, tree)
339d7079 79
dd277d48 80 This function queries if a statement may need (parts of) the
81 memory designated by the reference tree argument.
339d7079 82
dd277d48 83 There are variants of these functions that only handle the call
84 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
85 Note that these do not disambiguate against a possible call lhs.
339d7079 86
dd277d48 87 bool refs_may_alias_p (tree, tree)
339d7079 88
dd277d48 89 This function tries to disambiguate two reference trees.
05931b00 90
dd277d48 91 bool ptr_deref_may_alias_global_p (tree)
339d7079 92
dd277d48 93 This function queries if dereferencing a pointer variable may
94 alias global memory.
339d7079 95
dd277d48 96 More low-level disambiguators are available and documented in
97 this file. Low-level disambiguators dealing with points-to
98 information are in tree-ssa-structalias.c. */
339d7079 99
339d7079 100
dd277d48 101/* Query statistics for the different low-level disambiguators.
102 A high-level query may trigger multiple of them. */
339d7079 103
dd277d48 104static struct {
105 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
106 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
107 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
108 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
109 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
110 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
111} alias_stats;
339d7079 112
dd277d48 113void
114dump_alias_stats (FILE *s)
115{
116 fprintf (s, "\nAlias oracle query stats:\n");
117 fprintf (s, " refs_may_alias_p: "
118 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
119 HOST_WIDE_INT_PRINT_DEC" queries\n",
120 alias_stats.refs_may_alias_p_no_alias,
121 alias_stats.refs_may_alias_p_no_alias
122 + alias_stats.refs_may_alias_p_may_alias);
123 fprintf (s, " ref_maybe_used_by_call_p: "
124 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
125 HOST_WIDE_INT_PRINT_DEC" queries\n",
126 alias_stats.ref_maybe_used_by_call_p_no_alias,
127 alias_stats.refs_may_alias_p_no_alias
128 + alias_stats.ref_maybe_used_by_call_p_may_alias);
129 fprintf (s, " call_may_clobber_ref_p: "
130 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
131 HOST_WIDE_INT_PRINT_DEC" queries\n",
132 alias_stats.call_may_clobber_ref_p_no_alias,
133 alias_stats.call_may_clobber_ref_p_no_alias
134 + alias_stats.call_may_clobber_ref_p_may_alias);
135}
136
137
138/* Return true, if dereferencing PTR may alias with a global variable. */
4ee9c684 139
dd277d48 140bool
141ptr_deref_may_alias_global_p (tree ptr)
4ee9c684 142{
dd277d48 143 struct ptr_info_def *pi;
4fb5e5ca 144
dd277d48 145 /* If we end up with a pointer constant here that may point
146 to global memory. */
147 if (TREE_CODE (ptr) != SSA_NAME)
148 return true;
4ee9c684 149
dd277d48 150 pi = SSA_NAME_PTR_INFO (ptr);
4ee9c684 151
dd277d48 152 /* If we do not have points-to information for this variable,
153 we have to punt. */
154 if (!pi)
155 return true;
4ee9c684 156
2a3ebafa 157 /* ??? This does not use TBAA to prune globals ptr may not access. */
dd277d48 158 return pt_solution_includes_global (&pi->pt);
4ee9c684 159}
160
/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  Only SSA names,
     ADDR_EXPRs and POINTER_PLUS_EXPRs of pointer type against plain
     variable-like decls are analyzed below.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting: strip all POINTER_PLUS_EXPR wrappers
     and re-run the analysis on the underlying pointer.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	/* &(*p)[i] etc. -- fall through to the points-to query on P.  */
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	/* &var aliases DECL exactly when it is the same decl.  */
	return base == decl;
      else if (base
	       && CONSTANT_CLASS_P (base))
	/* The address of a constant cannot alias a variable.  */
	return false;
      else
	return true;
    }

  /* Non-aliased variables can not be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
4ee9c684 226
/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting: strip POINTER_PLUS_EXPR wrappers
     from either side and recurse on the underlying pointers.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  When the base is a decl, delegate to
     the pointer-vs-decl disambiguator.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two same pointers.
     In this case we still want to say both pointers alias, so shortcut
     that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
314
047fdd47 315/* Return true if dereferencing PTR may alias *REF.
316 The caller is responsible for applying TBAA to see if PTR
317 may access *REF at all. */
318
319static bool
320ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
321{
322 tree base = ao_ref_base (ref);
323
2622064f 324 if (TREE_CODE (base) == MEM_REF
325 || TREE_CODE (base) == TARGET_MEM_REF)
047fdd47 326 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
2622064f 327 else if (DECL_P (base))
047fdd47 328 return ptr_deref_may_alias_decl_p (ptr, base);
329
330 return true;
331}
332
8763c223 333/* Return true whether REF may refer to global memory. */
334
335bool
336ref_may_alias_global_p (tree ref)
337{
338 tree base = get_base_address (ref);
339 if (DECL_P (base))
340 return is_global_var (base);
341 else if (TREE_CODE (base) == MEM_REF
342 || TREE_CODE (base) == TARGET_MEM_REF)
343 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
344 return true;
345}
346
347/* Return true whether STMT may clobber global memory. */
348
349bool
350stmt_may_clobber_global_p (gimple stmt)
351{
352 tree lhs;
353
354 if (!gimple_vdef (stmt))
355 return false;
356
357 /* ??? We can ask the oracle whether an artificial pointer
358 dereference with a pointer with points-to information covering
359 all global memory (what about non-address taken memory?) maybe
360 clobbered by this call. As there is at the moment no convenient
361 way of doing that without generating garbage do some manual
362 checking instead.
363 ??? We could make a NULL ao_ref argument to the various
364 predicates special, meaning any global memory. */
365
366 switch (gimple_code (stmt))
367 {
368 case GIMPLE_ASSIGN:
369 lhs = gimple_assign_lhs (stmt);
370 return (TREE_CODE (lhs) != SSA_NAME
371 && ref_may_alias_global_p (lhs));
372 case GIMPLE_CALL:
373 return true;
374 default:
375 return true;
376 }
377}
378
4ee9c684 379
/* Dump alias information on FILE: aliased local symbols, the ESCAPED
   call-clobber solution and per-SSA-name flow-insensitive points-to
   sets for the current function.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  /* Dump every local decl whose address may be taken.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  /* SSA name 0 is unused; walk the rest, skipping released names and
     non-pointers.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE
	  || !POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}
424
425
/* Dump alias information on stderr.  Convenience wrapper around
   dump_alias_info for use from a debugger.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
433
434
/* Dump the points-to set *PT into FILE.  Each set flag is printed as a
   ", points-to ..." clause; the vars bitmap, if any, is printed last
   followed by a parenthesized summary of its contents flags.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      /* Print the most specific applicable combination of the
	 vars_contains_* flags, from strongest to weakest.  */
      if (pt->vars_contains_nonlocal
	  && pt->vars_contains_escaped_heap)
	fprintf (file, " (nonlocal, escaped heap)");
      else if (pt->vars_contains_nonlocal
	       && pt->vars_contains_escaped)
	fprintf (file, " (nonlocal, escaped)");
      else if (pt->vars_contains_nonlocal)
	fprintf (file, " (nonlocal)");
      else if (pt->vars_contains_escaped_heap)
	fprintf (file, " (escaped heap)");
      else if (pt->vars_contains_escaped)
	fprintf (file, " (escaped)");
    }
}
4ee9c684 473
c7d89805 474
/* Unified dump function for pt_solution.  Debugger entry point that
   dumps REF to stderr.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}
482
483DEBUG_FUNCTION void
484debug (pt_solution *ptr)
485{
486 if (ptr)
487 debug (*ptr);
488 else
489 fprintf (stderr, "<nil>\n");
490}
491
492
7f81b5ee 493/* Dump points-to information for SSA_NAME PTR into FILE. */
4ee9c684 494
7f81b5ee 495void
496dump_points_to_info_for (FILE *file, tree ptr)
497{
498 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
4ee9c684 499
7f81b5ee 500 print_generic_expr (file, ptr, dump_flags);
1e1a4c8c 501
7f81b5ee 502 if (pi)
503 dump_points_to_solution (file, &pi->pt);
504 else
505 fprintf (file, ", points-to anything");
4ee9c684 506
507 fprintf (file, "\n");
508}
509
510
/* Dump points-to information for VAR into stderr.  Debugger
   convenience wrapper around dump_points_to_info_for.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}
518
3918bd18 519
520/* Initializes the alias-oracle reference representation *R from REF. */
521
522void
523ao_ref_init (ao_ref *r, tree ref)
524{
525 r->ref = ref;
526 r->base = NULL_TREE;
527 r->offset = 0;
528 r->size = -1;
529 r->max_size = -1;
530 r->ref_alias_set = -1;
531 r->base_alias_set = -1;
3787db52 532 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
3918bd18 533}
534
535/* Returns the base object of the memory reference *REF. */
536
537tree
538ao_ref_base (ao_ref *ref)
539{
540 if (ref->base)
541 return ref->base;
542 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
543 &ref->max_size);
544 return ref->base;
545}
546
547/* Returns the base object alias set of the memory reference *REF. */
548
182cf5a9 549static alias_set_type
3918bd18 550ao_ref_base_alias_set (ao_ref *ref)
551{
182cf5a9 552 tree base_ref;
3918bd18 553 if (ref->base_alias_set != -1)
554 return ref->base_alias_set;
182cf5a9 555 if (!ref->ref)
556 return 0;
557 base_ref = ref->ref;
558 while (handled_component_p (base_ref))
559 base_ref = TREE_OPERAND (base_ref, 0);
560 ref->base_alias_set = get_alias_set (base_ref);
3918bd18 561 return ref->base_alias_set;
562}
563
564/* Returns the reference alias set of the memory reference *REF. */
565
566alias_set_type
567ao_ref_alias_set (ao_ref *ref)
568{
569 if (ref->ref_alias_set != -1)
570 return ref->ref_alias_set;
571 ref->ref_alias_set = get_alias_set (ref->ref);
572 return ref->ref_alias_set;
573}
574
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after of the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  /* Look through a defining ADDR_EXPR or a pointer-plus with constant
     offset to get at a more precise base.  */
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  /* Remember the byte offset, converted to bits, to add to the
	     base offset computed below.  */
	  extra_offset = BITS_PER_UNIT
			 * int_cst_value (gimple_assign_rhs2 (stmt));
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	/* Offsets in an ao_ref are measured in bits.  */
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  /* Variable offset within the base: drop the size to keep the
	     access conservative over the whole base object.  */
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      /* Build an artificial MEM_REF base for a plain pointer.  */
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  /* Only use SIZE when it fits a signed HWI and the bit size does not
     overflow.  */
  if (size
      && tree_fits_shwi_p (size)
      && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
630
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  /* Qualified variants share the TBAA behavior of their main variant.  */
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
671
/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component references through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for: strip all handled
     components to get at the innermost access and its type.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      /* Found a common base: rebase both offsets relative to it and
	 compare the access ranges.  */
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}
764
/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      /* Only a zero-offset MEM_REF over &decl can be looked through.  */
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	goto may_overlap;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	goto may_overlap;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* We must have the same base DECL.  */
  gcc_assert (ref1 == ref2);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      /* Skip non-record components (e.g. ARRAY_REFs) on both sides.  */
      do
	{
	  if (component_refs1.is_empty ())
	    goto may_overlap;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    goto may_overlap;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	goto may_overlap;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = TYPE_MAIN_VARIANT (DECL_CONTEXT (field1));
      tree type2 = TYPE_MAIN_VARIANT (DECL_CONTEXT (field2));

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	goto may_overlap;

      /* Different fields of the same record type cannot overlap.
	 ??? Bitfields can overlap at RTL level so punt on them.  */
      if (field1 != field2)
	{
	  component_refs1.release ();
	  component_refs2.release ();
	  return !(DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2));
	}
    }

may_overlap:
  component_refs1.release ();
  component_refs2.release ();
  return false;
}
860
f270d2a0 861/* qsort compare function to sort FIELD_DECLs after their
862 DECL_FIELD_CONTEXT TYPE_UID. */
863
864static inline int
865ncr_compar (const void *field1_, const void *field2_)
866{
867 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
868 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
869 unsigned int uid1
870 = TYPE_UID (TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (field1)));
871 unsigned int uid2
872 = TYPE_UID (TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (field2)));
873 if (uid1 < uid2)
874 return -1;
875 else if (uid1 > uid2)
876 return 1;
877 return 0;
878}
879
/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  /* Only meaningful with strict aliasing and two COMPONENT_REF chains.  */
  if (!flag_strict_aliasing
      || !x || !y
      || TREE_CODE (x) != COMPONENT_REF
      || TREE_CODE (y) != COMPONENT_REF)
    return false;

  /* Collect all fields of X whose containing context is a RECORD_TYPE
     (union members cannot be disambiguated).  */
  auto_vec<const_tree, 16> fieldsx;
  while (TREE_CODE (x) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (x, 1);
      tree type = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (field));
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsx.safe_push (field);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (TREE_CODE (y) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (y, 1);
      tree type = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (field));
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsy.safe_push (TREE_OPERAND (y, 1));
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    return false;

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    return ((TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldsx[0]))
	     == TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldsy[0])))
	    && fieldsx[0] != fieldsy[0]
	    && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));

  /* Sort both vectors by context TYPE_UID; a two-element vector is
     handled with an inline swap to avoid the qsort call.  */
  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	{
	  const_tree tem = fieldsx[0];
	  fieldsx[0] = fieldsx[1];
	  fieldsx[1] = tem;
	}
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	{
	  const_tree tem = fieldsy[0];
	  fieldsy[0] = fieldsy[1];
	  fieldsy[1] = tem;
	}
    }
  else
    fieldsy.qsort (ncr_compar);

  /* Merge-walk the two sorted vectors looking for a pair of fields with
     the same containing record type.  */
  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldx));
      tree typey = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldy));
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap, unless they are both bitfields.  */
	  if (fieldx != fieldy)
	    return !(DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy));
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  return false;
}
978
979
dd277d48 980/* Return true if two memory references based on the variables BASE1
077ad5ad 981 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
740a2cb6 982 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
983 if non-NULL are the complete memory reference trees. */
dd277d48 984
985static bool
740a2cb6 986decl_refs_may_alias_p (tree ref1, tree base1,
dd277d48 987 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
740a2cb6 988 tree ref2, tree base2,
dd277d48 989 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
4ee9c684 990{
2622064f 991 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
4ee9c684 992
dd277d48 993 /* If both references are based on different variables, they cannot alias. */
7f2d9047 994 if (base1 != base2)
dd277d48 995 return false;
4ee9c684 996
dd277d48 997 /* If both references are based on the same variable, they cannot alias if
998 the accesses do not overlap. */
740a2cb6 999 if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
1000 return false;
1001
1002 /* For components with variable position, the above test isn't sufficient,
1003 so we disambiguate component references manually. */
1004 if (ref1 && ref2
1005 && handled_component_p (ref1) && handled_component_p (ref2)
1006 && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
1007 return false;
1008
1009 return true;
dd277d48 1010}
1011
/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       HOST_WIDE_INT offset1,
			       HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;
  double_int moff;

  /* BASE1 must be a dereference, BASE2 a plain decl.  */
  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  The shift
     converts the byte offset into bits to match OFFSET1/OFFSET2.  */
  moff = mem_ref_offset (base1);
  moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
  if (moff.is_negative ())
    offset2p += (-moff).low;
  else
    offset1p += moff.low;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  /* Operand 1 of a MEM_REF carries the pointer type used for the
     access; it determines the TBAA alias set of the dereference.  */
  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.
     -1 means the caller asked us to compute the set on demand.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_alias_set (base2);

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  /* Without the full reference tree no further disambiguation is
     possible; conservatively report a possible alias.  */
  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      double_int moff = mem_ref_offset (dbase2);
      moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
      if (moff.is_negative ())
	doffset1 -= (-moff).low;
      else
	doffset2 -= moff.low;
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}
1161
/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  /* Both bases must be (TARGET_)MEM_REF dereferences here.  */
  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  The TARGET_MEM_REF arms below only
     accept the equal-pointer shortcut when the step/index operands of the
     two references are pairwise identical, so that the constant offsets
     fully describe their relative position.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      double_int moff;
      /* The offset embedded in MEM_REFs can be negative.  Bias them
	 so that the resulting offset adjustment is positive.  */
      moff = mem_ref_offset (base1);
      moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
      if (moff.is_negative ())
	offset2 += (-moff).low;
      else
	offset1 += moff.low;
      moff = mem_ref_offset (base2);
      moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
      if (moff.is_negative ())
	offset1 += (-moff).low;
      else
	offset2 += moff.low;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  /* Operand 1 of a (TARGET_)MEM_REF carries the pointer type used
     for the access.  */
  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.
     -1 means the caller asked us to compute the set on demand.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_deref_alias_set (ptrtype2);
  if (base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1)
    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}
1291
/* Return true, if the two memory references REF1 and REF2 may alias.
   If TBAA_P is false, type-based alias analysis is not applied.
   This is the central dispatcher: it classifies both bases as
   decl or pointer dereference and forwards to the matching
   disambiguator above.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
	*D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  Such bases never denote writable
     memory, so the references cannot alias.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before defering to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref2->ref, base2, offset2, max_size2);

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case so that below the decl is
     always REF1 and the dereference always REF2.  */
  if (ind1_p && var2_p)
    {
      HOST_WIDE_INT tmp1;
      tree tmp2;
      ao_ref *tmp3;
      tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
      tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
      tmp2 = base1; base1 = base2; base2 = tmp2;
      tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.
     The -1 alias-set arguments request on-demand computation there.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2), -1,
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1), -1,
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2), -1,
				      tbaa_p);

  /* We really do not want to end up here, but returning true is safe.  */
#ifdef ENABLE_CHECKING
  gcc_unreachable ();
#else
  return true;
#endif
}
1418
1419bool
1420refs_may_alias_p (tree ref1, tree ref2)
1421{
3918bd18 1422 ao_ref r1, r2;
1423 bool res;
1424 ao_ref_init (&r1, ref1);
1425 ao_ref_init (&r2, ref2);
1426 res = refs_may_alias_p_1 (&r1, &r2, true);
dd277d48 1427 if (res)
1428 ++alias_stats.refs_may_alias_p_may_alias;
1429 else
1430 ++alias_stats.refs_may_alias_p_no_alias;
1431 return res;
1432}
1433
2a3ebafa 1434/* Returns true if there is a anti-dependence for the STORE that
1435 executes after the LOAD. */
1436
1437bool
1438refs_anti_dependent_p (tree load, tree store)
1439{
3918bd18 1440 ao_ref r1, r2;
1441 ao_ref_init (&r1, load);
1442 ao_ref_init (&r2, store);
1443 return refs_may_alias_p_1 (&r1, &r2, false);
2a3ebafa 1444}
1445
1446/* Returns true if there is a output dependence for the stores
1447 STORE1 and STORE2. */
1448
1449bool
1450refs_output_dependent_p (tree store1, tree store2)
1451{
3918bd18 1452 ao_ref r1, r2;
1453 ao_ref_init (&r1, store1);
1454 ao_ref_init (&r2, store2);
1455 return refs_may_alias_p_1 (&r1, &r2, false);
2a3ebafa 1456}
dd277d48 1457
/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions read memory pointed to by
	   their second argument.  strcat/strncat additionally
	   reads memory pointed to by the first argument.  */
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* The size argument, when present, bounds the read.  */
	    if (gimple_call_num_args (call) == 3)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* The _CHK variants carry an extra object-size argument,
	       hence four arguments with the copy size at index 2.  */
	    if (gimple_call_num_args (call) == 4)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    /* bcopy has the source as its first argument.  */
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following functions read memory pointed to by their
	   first argument.  */
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* strndup has a bounding length as second argument.  */
	    if (gimple_call_num_args (call) == 2)
	      size = gimple_call_arg (call, 1);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_INDEX:
	case BUILT_IN_STRCHR:
	case BUILT_IN_STRRCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument with size
	   in the third argument.  */
	case BUILT_IN_MEMCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   gimple_call_arg (call, 2));
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first and second arguments.  */
	case BUILT_IN_STRSTR:
	case BUILT_IN_STRPBRK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following builtins do not read from memory.  */
	case BUILT_IN_FREE:
	case BUILT_IN_MALLOC:
	case BUILT_IN_POSIX_MEMALIGN:
	case BUILT_IN_CALLOC:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	case BUILT_IN_ASSUME_ALIGNED:
	case BUILT_IN_VA_END:
	  return false;
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;

	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_get_node (callee);
      bitmap not_read;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in the
	 IL and remove this check instead.  */
      if (node
	  && (not_read = ipa_reference_get_not_read_global (node))
	  && bitmap_bit_p (not_read, DECL_UID (base)))
	goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      /* Without points-to information we have to be conservative.  */
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      /* Arguments the callee provably never looks at can be skipped.  */
      if (flags & EAF_UNUSED)
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, true))
	    return true;
	}
    }

  return false;
}
1771
1772static bool
1773ref_maybe_used_by_call_p (gimple call, tree ref)
1774{
134899ac 1775 ao_ref r;
1776 bool res;
1777 ao_ref_init (&r, ref);
1778 res = ref_maybe_used_by_call_p_1 (call, &r);
dd277d48 1779 if (res)
1780 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1781 else
1782 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1783 return res;
4ee9c684 1784}
1785
1786
dd277d48 1787/* If the statement STMT may use the memory reference REF return
1788 true, otherwise return false. */
4ee9c684 1789
dd277d48 1790bool
1791ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
4ee9c684 1792{
dd277d48 1793 if (is_gimple_assign (stmt))
1794 {
1795 tree rhs;
1796
1797 /* All memory assign statements are single. */
1798 if (!gimple_assign_single_p (stmt))
1799 return false;
1800
1801 rhs = gimple_assign_rhs1 (stmt);
1802 if (is_gimple_reg (rhs)
1803 || is_gimple_min_invariant (rhs)
1804 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1805 return false;
1806
1807 return refs_may_alias_p (rhs, ref);
1808 }
1809 else if (is_gimple_call (stmt))
1810 return ref_maybe_used_by_call_p (stmt, ref);
91234829 1811 else if (gimple_code (stmt) == GIMPLE_RETURN)
1812 {
1813 tree retval = gimple_return_retval (stmt);
1814 tree base;
1815 if (retval
1816 && TREE_CODE (retval) != SSA_NAME
1817 && !is_gimple_min_invariant (retval)
1818 && refs_may_alias_p (retval, ref))
1819 return true;
1820 /* If ref escapes the function then the return acts as a use. */
1821 base = get_base_address (ref);
1822 if (!base)
1823 ;
1824 else if (DECL_P (base))
1825 return is_global_var (base);
1826 else if (TREE_CODE (base) == MEM_REF
1827 || TREE_CODE (base) == TARGET_MEM_REF)
1828 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1829 return false;
1830 }
dd277d48 1831
1832 return true;
4ee9c684 1833}
1834
/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

static bool
call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
{
  tree base;
  tree callee;

  /* If the call is pure or const it cannot clobber anything.  */
  if (gimple_call_flags (call)
      & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
    return false;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* SSA names and constants are not stored to by any call.  */
  if (TREE_CODE (base) == SSA_NAME
      || CONSTANT_CLASS_P (base))
    return false;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly clobber it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local non-readonly statics can be modified through recursion
         or the call may implement a threading barrier which we must
	 treat as may-def.  */
      && (TREE_READONLY (base)
	  || !is_global_var (base)))
    return false;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions clobber memory pointed to by
	   their first argument.  */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Don't pass in size for strncat, as the maximum size
	       is strlen (dest) + n + 1 instead of n, resp.
	       n + 1 at dest + strlen (dest), but strlen (dest) isn't
	       known.  */
	    if (gimple_call_num_args (call) == 3
		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* The __*_chk variants additionally take the object size as
	   a trailing argument, hence the argument-count check of 4.  */
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	case BUILT_IN_MEMSET_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Don't pass in size for __strncat_chk, as the maximum size
	       is strlen (dest) + n + 1 instead of n, resp.
	       n + 1 at dest + strlen (dest), but strlen (dest) isn't
	       known.  */
	    if (gimple_call_num_args (call) == 4
		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* bcopy has its destination as the second argument.  */
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* Allocating memory does not have any side-effects apart from
	   being the definition point for the pointer.  */
	case BUILT_IN_MALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	  /* Unix98 specifies that errno is set on allocation failure.  */
	  if (flag_errno_math
	      && targetm.ref_may_alias_errno (ref))
	    return true;
	  return false;
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_ASSUME_ALIGNED:
	  return false;
	/* But posix_memalign stores a pointer into the memory pointed to
	   by its first argument.  */
	case BUILT_IN_POSIX_MEMALIGN:
	  {
	    tree ptrptr = gimple_call_arg (call, 0);
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref, ptrptr,
					   TYPE_SIZE_UNIT (ptr_type_node));
	    return (refs_may_alias_p_1 (&dref, ref, false)
		    || (flag_errno_math
			&& targetm.ref_may_alias_errno (ref)));
	  }
	/* Freeing memory kills the pointed-to memory.  More importantly
	   the call has to serve as a barrier for moving loads and stores
	   across it.  */
	case BUILT_IN_FREE:
	case BUILT_IN_VA_END:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    return ptr_deref_may_alias_ref_p_1 (ptr, ref);
	  }
	/* The reentrant gamma variants store the sign through their
	   second argument; the math variants may additionally set
	   errno, in which case we fall through to the conservative
	   general handling below.  */
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	  {
	    tree out = gimple_call_arg (call, 1);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	/* frexp and modf store through their second argument only.  */
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	  {
	    tree out = gimple_call_arg (call, 1);
	    return ptr_deref_may_alias_ref_p_1 (out, ref);
	  }
	/* remquo stores the quotient through its third argument and
	   may set errno.  */
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	  {
	    tree out = gimple_call_arg (call, 2);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	/* sincos stores through both its output pointers.  */
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	  {
	    tree sin = gimple_call_arg (call, 1);
	    tree cos = gimple_call_arg (call, 2);
	    return (ptr_deref_may_alias_ref_p_1 (sin, ref)
		    || ptr_deref_may_alias_ref_p_1 (cos, ref));
	  }
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;
	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not written
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_get_node (callee);
      bitmap not_written;

      if (node
	  && (not_written = ipa_reference_get_not_written_global (node))
	  && bitmap_bit_p (not_written, DECL_UID (base)))
	return false;
    }

  /* Check if the base variable is call-clobbered.  */
  if (DECL_P (base))
    return pt_solution_includes (gimple_call_clobber_set (call), base);
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      /* Intersect the points-to set of the dereferenced pointer with
	 the call's clobber solution.  */
      return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    }

  /* Conservative default.  */
  return true;
}
4ee9c684 2097
4debfcfc 2098/* If the call in statement CALL may clobber the memory reference REF
2099 return true, otherwise return false. */
2100
2101bool
dd277d48 2102call_may_clobber_ref_p (gimple call, tree ref)
4ee9c684 2103{
3918bd18 2104 bool res;
2105 ao_ref r;
2106 ao_ref_init (&r, ref);
2107 res = call_may_clobber_ref_p_1 (call, &r);
dd277d48 2108 if (res)
2109 ++alias_stats.call_may_clobber_ref_p_may_alias;
2110 else
2111 ++alias_stats.call_may_clobber_ref_p_no_alias;
2112 return res;
4ee9c684 2113}
94e6573f 2114
dd277d48 2115
2116/* If the statement STMT may clobber the memory reference REF return true,
2117 otherwise return false. */
94e6573f 2118
2119bool
3918bd18 2120stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
94e6573f 2121{
dd277d48 2122 if (is_gimple_call (stmt))
2123 {
2124 tree lhs = gimple_call_lhs (stmt);
2125 if (lhs
66b86a74 2126 && TREE_CODE (lhs) != SSA_NAME)
3918bd18 2127 {
2128 ao_ref r;
2129 ao_ref_init (&r, lhs);
2130 if (refs_may_alias_p_1 (ref, &r, true))
2131 return true;
2132 }
94e6573f 2133
3918bd18 2134 return call_may_clobber_ref_p_1 (stmt, ref);
dd277d48 2135 }
8e7c2b9c 2136 else if (gimple_assign_single_p (stmt))
3918bd18 2137 {
8e7c2b9c 2138 tree lhs = gimple_assign_lhs (stmt);
66b86a74 2139 if (TREE_CODE (lhs) != SSA_NAME)
8e7c2b9c 2140 {
2141 ao_ref r;
66b86a74 2142 ao_ref_init (&r, lhs);
8e7c2b9c 2143 return refs_may_alias_p_1 (ref, &r, true);
2144 }
3918bd18 2145 }
dd277d48 2146 else if (gimple_code (stmt) == GIMPLE_ASM)
94e6573f 2147 return true;
2148
6329636b 2149 return false;
94e6573f 2150}
2be14d8b 2151
/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  Convenience wrapper around
   stmt_may_clobber_ref_p_1 that builds an ao_ref from REF.  */

bool
stmt_may_clobber_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r);
}
2159
/* If STMT kills the memory reference REF return true, otherwise
   return false.  A kill means STMT stores over every byte of REF,
   so the previous contents of REF are dead after STMT.  */

static bool
stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref)
{
  /* For a must-alias check we need to be able to constrain
     the access properly.
     FIXME: except for BUILTIN_FREE.  */
  if (!ao_ref_base (ref)
      || ref->max_size == -1)
    return false;

  /* First try the store of a memory LHS: it kills REF if its extent
     fully covers REF's extent.  */
  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ??? We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (stmt))
    {
      tree base, lhs = gimple_get_lhs (stmt);
      HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base != ref->base)
	{
	  /* If both base and ref->base are MEM_REFs, only compare the
	     first operand, and if the second operand isn't equal constant,
	     try to add the offsets into offset and ref_offset.  */
	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
	    {
	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
				       TREE_OPERAND (ref->base, 1)))
		{
		  /* Fold the constant MEM_REF offsets (bytes, hence the
		     shift by log2 (BITS_PER_UNIT)) into the bit offsets
		     so both sides are relative to the same pointer.  */
		  double_int off1 = mem_ref_offset (base);
		  off1 = off1.lshift (BITS_PER_UNIT == 8
				      ? 3 : exact_log2 (BITS_PER_UNIT));
		  off1 = off1 + double_int::from_shwi (offset);
		  double_int off2 = mem_ref_offset (ref->base);
		  off2 = off2.lshift (BITS_PER_UNIT == 8
				      ? 3 : exact_log2 (BITS_PER_UNIT));
		  off2 = off2 + double_int::from_shwi (ref_offset);
		  if (off1.fits_shwi () && off2.fits_shwi ())
		    {
		      offset = off1.to_shwi ();
		      ref_offset = off2.to_shwi ();
		    }
		  else
		    size = -1;	/* Combined offset overflows; give up.  */
		}
	    }
	  else
	    size = -1;		/* Unrelated bases; cannot constrain.  */
	}
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (size != -1 && size == max_size)
	{
	  /* The store kills REF if [offset, offset+size) covers
	     [ref_offset, ref_offset + ref->max_size).  */
	  if (offset <= ref_offset
	      && offset + size >= ref_offset + ref->max_size)
	    return true;
	}
    }

  /* Then handle the memory-killing builtins.  */
  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
	  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_FREE:
	    {
	      /* free kills everything reached through the freed
		 pointer.  */
	      tree ptr = gimple_call_arg (stmt, 0);
	      tree base = ao_ref_base (ref);
	      if (base && TREE_CODE (base) == MEM_REF
		  && TREE_OPERAND (base, 0) == ptr)
		return true;
	      break;
	    }

	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMSET_CHK:
	    {
	      tree dest = gimple_call_arg (stmt, 0);
	      tree len = gimple_call_arg (stmt, 2);
	      /* Only a compile-time constant length can prove a kill.  */
	      if (!tree_fits_shwi_p (len))
		return false;
	      tree rbase = ref->base;
	      double_int roffset = double_int::from_shwi (ref->offset);
	      ao_ref dref;
	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
	      tree base = ao_ref_base (&dref);
	      double_int offset = double_int::from_shwi (dref.offset);
	      double_int bpu = double_int::from_uhwi (BITS_PER_UNIT);
	      if (!base || dref.size == -1)
		return false;
	      if (TREE_CODE (base) == MEM_REF)
		{
		  if (TREE_CODE (rbase) != MEM_REF)
		    return false;
		  // Compare pointers.
		  offset += bpu * mem_ref_offset (base);
		  roffset += bpu * mem_ref_offset (rbase);
		  base = TREE_OPERAND (base, 0);
		  rbase = TREE_OPERAND (rbase, 0);
		}
	      if (base == rbase)
		{
		  /* The write kills REF if its bit range covers REF's
		     whole maximal extent.  */
		  double_int size = bpu * tree_to_double_int (len);
		  double_int rsize = double_int::from_uhwi (ref->max_size);
		  if (offset.sle (roffset)
		      && (roffset + rsize).sle (offset + size))
		    return true;
		}
	      break;
	    }

	  case BUILT_IN_VA_END:
	    {
	      /* va_end kills the va_list object itself.  */
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == ADDR_EXPR)
		{
		  tree base = ao_ref_base (ref);
		  if (TREE_OPERAND (ptr, 0) == base)
		    return true;
		}
	      break;
	    }

	  default:;
	  }
    }
  return false;
}
2306
/* If STMT kills the memory reference REF return true, otherwise
   return false.  Convenience wrapper around stmt_kills_ref_p_1 that
   builds an ao_ref from REF.  */

bool
stmt_kills_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p_1 (stmt, &r);
}
2314
3918bd18 2315
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET or a statement clobbering the memory reference REF in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.
   *CNT is incremented for each alias disambiguation performed.  *VISITED
   (lazily allocated) records PHI results and block-boundary vuses already
   seen; reaching an already-visited node ends the walk successfully
   unless ABORT_ON_VISITED is true, in which case it ends it failing.  */

static bool
maybe_skip_until (gimple phi, tree target, ao_ref *ref,
		  tree vuse, unsigned int *cnt, bitmap *visited,
		  bool abort_on_visited)
{
  basic_block bb = gimple_bb (phi);

  /* Allocate the visited bitmap on first use.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* An already visited PHI node ends the walk successfully.  */
	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
	    return !abort_on_visited;
	  vuse = get_continuation_for_phi (def_stmt, ref, cnt,
					   visited, abort_on_visited);
	  if (!vuse)
	    return false;
	  continue;
	}
      else if (gimple_nop_p (def_stmt))
	return false;
      else
	{
	  /* A clobbering statement or the end of the IL ends it failing.  */
	  ++*cnt;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    return false;
	}
      /* If we reach a new basic-block see if we already skipped it
         in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}
ac926412 2369
/* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
   until we hit the phi argument definition that dominates the other one.
   Return that, or NULL_TREE if there is no such definition.  */

static tree
get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
			    ao_ref *ref, unsigned int *cnt,
			    bitmap *visited, bool abort_on_visited)
{
  gimple def0 = SSA_NAME_DEF_STMT (arg0);
  gimple def1 = SSA_NAME_DEF_STMT (arg1);
  tree common_vuse;

  /* Identical arguments need no walking.  */
  if (arg0 == arg1)
    return arg0;
  /* If ARG0's definition dominates ARG1's (a default definition
     dominates everything), try to walk from ARG1 back to ARG0.  */
  else if (gimple_nop_p (def0)
	   || (!gimple_nop_p (def1)
	       && dominated_by_p (CDI_DOMINATORS,
				  gimple_bb (def1), gimple_bb (def0))))
    {
      if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
			    visited, abort_on_visited))
	return arg0;
    }
  /* Likewise with the roles of the two arguments swapped.  */
  else if (gimple_nop_p (def1)
	   || dominated_by_p (CDI_DOMINATORS,
			      gimple_bb (def0), gimple_bb (def1)))
    {
      if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
			    visited, abort_on_visited))
	return arg1;
    }
  /* Special case of a diamond:
       MEM_1 = ...
       goto (cond) ? L1 : L2
       L1: store1 = ... #MEM_2 = vuse(MEM_1)
	   goto L3
       L2: store2 = ... #MEM_3 = vuse(MEM_1)
       L3: MEM_4 = PHI<MEM_2, MEM_3>
     We were called with the PHI at L3, MEM_2 and MEM_3 don't
     dominate each other, but still we can easily skip this PHI node
     if we recognize that the vuse MEM operand is the same for both,
     and that we can skip both statements (they don't clobber us).
     This is still linear.  Don't use maybe_skip_until, that might
     potentially be slow.  */
  else if ((common_vuse = gimple_vuse (def0))
	   && common_vuse == gimple_vuse (def1))
    {
      *cnt += 2;
      if (!stmt_may_clobber_ref_p_1 (def0, ref)
	  && !stmt_may_clobber_ref_p_1 (def1, ref))
	return common_vuse;
    }

  return NULL_TREE;
}
2426
2427
/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows to continue walking
   statements dominating PHI skipping only statements that cannot possibly
   clobber REF.  Increments *CNT for each alias disambiguation done.
   Returns NULL_TREE if no suitable virtual operand can be found.  */

tree
get_continuation_for_phi (gimple phi, ao_ref *ref,
			  unsigned int *cnt, bitmap *visited,
			  bool abort_on_visited)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* Through a single-argument PHI we can simply look through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  else if (nargs >= 2)
    {
      tree arg0, arg1;
      unsigned i;

      /* Find a candidate for the virtual operand which definition
	 dominates those of all others.  */
      arg0 = PHI_ARG_DEF (phi, 0);
      if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
	for (i = 1; i < nargs; ++i)
	  {
	    arg1 = PHI_ARG_DEF (phi, i);
	    /* A default definition dominates every other definition,
	       so it is the best possible candidate.  */
	    if (SSA_NAME_IS_DEFAULT_DEF (arg1))
	      {
		arg0 = arg1;
		break;
	      }
	    if (dominated_by_p (CDI_DOMINATORS,
				gimple_bb (SSA_NAME_DEF_STMT (arg0)),
				gimple_bb (SSA_NAME_DEF_STMT (arg1))))
	      arg0 = arg1;
	  }

      /* Then pairwise reduce against the found candidate.  */
      for (i = 0; i < nargs; ++i)
	{
	  arg1 = PHI_ARG_DEF (phi, i);
	  arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
					     cnt, visited, abort_on_visited);
	  /* Any failing pair fails the whole PHI.  */
	  if (!arg0)
	    return NULL_TREE;
	}

      return arg0;
    }

  return NULL_TREE;
}
516849c7 2485
/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use for which its defining statement
   does not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE if non-NULL is called with a pointer to REF, the virtual
   use which definition is a statement that may clobber REF and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse,
			void *(*walker)(ao_ref *, tree, unsigned int, void *),
			void *(*translate)(ao_ref *, tree, void *), void *data)
{
  bitmap visited = NULL;
  void *res;
  unsigned int cnt = 0;		/* Number of disambiguations done.  */
  bool translated = false;	/* Whether TRANSLATE succeeded once.  */

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple def_stmt;

      /* ??? Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, cnt, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* Reaching the function entry ends the walk with no result.  */
      if (gimple_nop_p (def_stmt))
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
					 &visited, translated);
      else
	{
	  cnt++;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    {
	      if (!translate)
		break;
	      res = (*translate) (ref, vuse, data);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = true;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
2570
604eef2c 2571
/* Based on the memory reference REF call WALKER for each vdef which
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues on merge points), the
   return value is true if any of the walks was successful.

   The function returns the number of statements walked.  */

static unsigned int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
		      bool (*walker)(ao_ref *, tree, void *), void *data,
		      bitmap *visited, unsigned int cnt)
{
  do
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

      /* If a forked walk already handled this vdef, stop here so
	 merge points are only walked once.  */
      if (*visited
	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
	return cnt;

      /* Reaching the function entry ends this walk.  */
      if (gimple_nop_p (def_stmt))
	return cnt;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  unsigned i;
	  /* Allocate the visited bitmap lazily, only once forking
	     becomes necessary.  */
	  if (!*visited)
	    *visited = BITMAP_ALLOC (NULL);
	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
	    cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
					 walker, data, visited, 0);
	  return cnt;
	}

      /* ??? Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if ((!ref
	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
	  && (*walker) (ref, vdef, data))
	return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}
2622
dd277d48 2623unsigned int
1daead67 2624walk_aliased_vdefs (ao_ref *ref, tree vdef,
2625 bool (*walker)(ao_ref *, tree, void *), void *data,
dd277d48 2626 bitmap *visited)
df3f0669 2627{
dd277d48 2628 bitmap local_visited = NULL;
2629 unsigned int ret;
2630
02c74b9b 2631 timevar_push (TV_ALIAS_STMT_WALK);
2632
dd277d48 2633 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2634 visited ? visited : &local_visited, 0);
2635 if (local_visited)
2636 BITMAP_FREE (local_visited);
2637
02c74b9b 2638 timevar_pop (TV_ALIAS_STMT_WALK);
2639
dd277d48 2640 return ret;
2641}
2642