]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-ssa-alias.c
Merge from trunk.
[thirdparty/gcc.git] / gcc / tree-ssa-alias.c
1 /* Alias analysis for trees.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "target.h"
28 #include "basic-block.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 #include "langhooks.h"
31 #include "flags.h"
32 #include "function.h"
33 #include "tree-pretty-print.h"
34 #include "dumpfile.h"
35 #include "tree-ssa-alias.h"
36 #include "internal-fn.h"
37 #include "tree-eh.h"
38 #include "gimple-expr.h"
39 #include "is-a.h"
40 #include "gimple.h"
41 #include "gimple-ssa.h"
42 #include "stringpool.h"
43 #include "tree-ssanames.h"
44 #include "expr.h"
45 #include "tree-dfa.h"
46 #include "tree-inline.h"
47 #include "params.h"
48 #include "alloc-pool.h"
49 #include "tree-ssa-alias.h"
50 #include "ipa-reference.h"
51
52 /* Broad overview of how alias analysis on gimple works:
53
54 Statements clobbering or using memory are linked through the
55 virtual operand factored use-def chain. The virtual operand
56 is unique per function, its symbol is accessible via gimple_vop (cfun).
57 Virtual operands are used for efficiently walking memory statements
58 in the gimple IL and are useful for things like value-numbering as
59 a generation count for memory references.
60
61 SSA_NAME pointers may have associated points-to information
62 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
63 points-to information is (re-)computed by the TODO_rebuild_alias
64 pass manager todo. Points-to information is also used for more
65 precise tracking of call-clobbered and call-used variables and
66 related disambiguations.
67
68 This file contains functions for disambiguating memory references,
69 the so called alias-oracle and tools for walking of the gimple IL.
70
71 The main alias-oracle entry-points are
72
73 bool stmt_may_clobber_ref_p (gimple, tree)
74
75 This function queries if a statement may invalidate (parts of)
76 the memory designated by the reference tree argument.
77
78 bool ref_maybe_used_by_stmt_p (gimple, tree)
79
80 This function queries if a statement may need (parts of) the
81 memory designated by the reference tree argument.
82
83 There are variants of these functions that only handle the call
84 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
85 Note that these do not disambiguate against a possible call lhs.
86
87 bool refs_may_alias_p (tree, tree)
88
89 This function tries to disambiguate two reference trees.
90
91 bool ptr_deref_may_alias_global_p (tree)
92
93 This function queries if dereferencing a pointer variable may
94 alias global memory.
95
96 More low-level disambiguators are available and documented in
97 this file. Low-level disambiguators dealing with points-to
98 information are in tree-ssa-structalias.c. */
99
100
101 /* Query statistics for the different low-level disambiguators.
102 A high-level query may trigger multiple of them. */
103
static struct {
  /* refs_may_alias_p: queries answered "may alias" / "no alias".  */
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  /* ref_maybe_used_by_call_p: queries answered "may alias" / "no alias".  */
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  /* call_may_clobber_ref_p: queries answered "may alias" / "no alias".  */
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;
112
113 void
114 dump_alias_stats (FILE *s)
115 {
116 fprintf (s, "\nAlias oracle query stats:\n");
117 fprintf (s, " refs_may_alias_p: "
118 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
119 HOST_WIDE_INT_PRINT_DEC" queries\n",
120 alias_stats.refs_may_alias_p_no_alias,
121 alias_stats.refs_may_alias_p_no_alias
122 + alias_stats.refs_may_alias_p_may_alias);
123 fprintf (s, " ref_maybe_used_by_call_p: "
124 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
125 HOST_WIDE_INT_PRINT_DEC" queries\n",
126 alias_stats.ref_maybe_used_by_call_p_no_alias,
127 alias_stats.refs_may_alias_p_no_alias
128 + alias_stats.ref_maybe_used_by_call_p_may_alias);
129 fprintf (s, " call_may_clobber_ref_p: "
130 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
131 HOST_WIDE_INT_PRINT_DEC" queries\n",
132 alias_stats.call_may_clobber_ref_p_no_alias,
133 alias_stats.call_may_clobber_ref_p_no_alias
134 + alias_stats.call_may_clobber_ref_p_may_alias);
135 }
136
137
138 /* Return true, if dereferencing PTR may alias with a global variable. */
139
140 bool
141 ptr_deref_may_alias_global_p (tree ptr)
142 {
143 struct ptr_info_def *pi;
144
145 /* If we end up with a pointer constant here that may point
146 to global memory. */
147 if (TREE_CODE (ptr) != SSA_NAME)
148 return true;
149
150 pi = SSA_NAME_PTR_INFO (ptr);
151
152 /* If we do not have points-to information for this variable,
153 we have to punt. */
154 if (!pi)
155 return true;
156
157 /* ??? This does not use TBAA to prune globals ptr may not access. */
158 return pt_solution_includes_global (&pi->pt);
159 }
160
161 /* Return true if dereferencing PTR may alias DECL.
162 The caller is responsible for applying TBAA to see if PTR
163 may access DECL at all. */
164
static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  Only SSA names,
     ADDR_EXPRs and POINTER_PLUS_EXPRs of pointer type are analyzed,
     and only VAR_DECL, PARM_DECL and RESULT_DECL targets.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting: strip all POINTER_PLUS_EXPRs and
     retry on the underlying pointer.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	/* &*q just offsets pointer q; continue the analysis with q.  */
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	/* &otherdecl aliases DECL only if it is the very same decl.  */
	return base == decl;
      else if (base
	       && CONSTANT_CLASS_P (base))
	/* The address of a constant cannot point to a variable.  */
	return false;
      else
	return true;
    }

  /* Non-aliased variables can not be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
226
227 /* Return true if dereferenced PTR1 and PTR2 may alias.
228 The caller is responsible for applying TBAA to see if accesses
229 through PTR1 and PTR2 may conflict at all. */
230
bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting: strip POINTER_PLUS_EXPR chains off
     either operand and recurse.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  For &*q continue with pointer q; for
     &decl fall back to the pointer-vs-decl query.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two same pointers.
     In this case we still want to say both pointers alias, so shortcut
     that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
314
315 /* Return true if dereferencing PTR may alias *REF.
316 The caller is responsible for applying TBAA to see if PTR
317 may access *REF at all. */
318
319 static bool
320 ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
321 {
322 tree base = ao_ref_base (ref);
323
324 if (TREE_CODE (base) == MEM_REF
325 || TREE_CODE (base) == TARGET_MEM_REF)
326 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
327 else if (DECL_P (base))
328 return ptr_deref_may_alias_decl_p (ptr, base);
329
330 return true;
331 }
332
333 /* Return true whether REF may refer to global memory. */
334
335 bool
336 ref_may_alias_global_p (tree ref)
337 {
338 tree base = get_base_address (ref);
339 if (DECL_P (base))
340 return is_global_var (base);
341 else if (TREE_CODE (base) == MEM_REF
342 || TREE_CODE (base) == TARGET_MEM_REF)
343 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
344 return true;
345 }
346
347 /* Return true whether STMT may clobber global memory. */
348
349 bool
350 stmt_may_clobber_global_p (gimple stmt)
351 {
352 tree lhs;
353
354 if (!gimple_vdef (stmt))
355 return false;
356
357 /* ??? We can ask the oracle whether an artificial pointer
358 dereference with a pointer with points-to information covering
359 all global memory (what about non-address taken memory?) maybe
360 clobbered by this call. As there is at the moment no convenient
361 way of doing that without generating garbage do some manual
362 checking instead.
363 ??? We could make a NULL ao_ref argument to the various
364 predicates special, meaning any global memory. */
365
366 switch (gimple_code (stmt))
367 {
368 case GIMPLE_ASSIGN:
369 lhs = gimple_assign_lhs (stmt);
370 return (TREE_CODE (lhs) != SSA_NAME
371 && ref_may_alias_global_p (lhs));
372 case GIMPLE_CALL:
373 return true;
374 default:
375 return true;
376 }
377 }
378
379
380 /* Dump alias information on FILE. */
381
382 void
383 dump_alias_info (FILE *file)
384 {
385 unsigned i;
386 const char *funcname
387 = lang_hooks.decl_printable_name (current_function_decl, 2);
388 tree var;
389
390 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
391
392 fprintf (file, "Aliased symbols\n\n");
393
394 FOR_EACH_LOCAL_DECL (cfun, i, var)
395 {
396 if (may_be_aliased (var))
397 dump_variable (file, var);
398 }
399
400 fprintf (file, "\nCall clobber information\n");
401
402 fprintf (file, "\nESCAPED");
403 dump_points_to_solution (file, &cfun->gimple_df->escaped);
404
405 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
406
407 for (i = 1; i < num_ssa_names; i++)
408 {
409 tree ptr = ssa_name (i);
410 struct ptr_info_def *pi;
411
412 if (ptr == NULL_TREE
413 || !POINTER_TYPE_P (TREE_TYPE (ptr))
414 || SSA_NAME_IN_FREE_LIST (ptr))
415 continue;
416
417 pi = SSA_NAME_PTR_INFO (ptr);
418 if (pi)
419 dump_points_to_info_for (file, ptr);
420 }
421
422 fprintf (file, "\n");
423 }
424
425
426 /* Dump alias information on stderr. */
427
/* Dump alias information on stderr.  Convenience wrapper around
   dump_alias_info for use from the debugger.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
433
434
435 /* Dump the points-to set *PT into FILE. */
436
/* Dump the points-to set *PT into FILE.  Each set bit of the solution
   is printed as a ", points-to ..." clause; the vars bitmap, if any,
   is printed last with a parenthesized summary of its properties.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      /* Print the strongest applicable combination of properties;
	 the else-if chain orders them from most to least specific.  */
      if (pt->vars_contains_nonlocal
	  && pt->vars_contains_escaped_heap)
	fprintf (file, " (nonlocal, escaped heap)");
      else if (pt->vars_contains_nonlocal
	       && pt->vars_contains_escaped)
	fprintf (file, " (nonlocal, escaped)");
      else if (pt->vars_contains_nonlocal)
	fprintf (file, " (nonlocal)");
      else if (pt->vars_contains_escaped_heap)
	fprintf (file, " (escaped heap)");
      else if (pt->vars_contains_escaped)
	fprintf (file, " (escaped)");
    }
}
473
474
475 /* Unified dump function for pt_solution. */
476
/* Unified dump function for pt_solution.  Reference overload: dump
   REF to stderr.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}
482
/* Pointer overload: dump *PTR to stderr, or "<nil>" for a NULL
   pointer.  */

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
491
492
493 /* Dump points-to information for SSA_NAME PTR into FILE. */
494
495 void
496 dump_points_to_info_for (FILE *file, tree ptr)
497 {
498 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
499
500 print_generic_expr (file, ptr, dump_flags);
501
502 if (pi)
503 dump_points_to_solution (file, &pi->pt);
504 else
505 fprintf (file, ", points-to anything");
506
507 fprintf (file, "\n");
508 }
509
510
511 /* Dump points-to information for VAR into stderr. */
512
/* Dump points-to information for VAR into stderr.  Debugger
   convenience wrapper around dump_points_to_info_for.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}
518
519
520 /* Initializes the alias-oracle reference representation *R from REF. */
521
522 void
523 ao_ref_init (ao_ref *r, tree ref)
524 {
525 r->ref = ref;
526 r->base = NULL_TREE;
527 r->offset = 0;
528 r->size = -1;
529 r->max_size = -1;
530 r->ref_alias_set = -1;
531 r->base_alias_set = -1;
532 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
533 }
534
535 /* Returns the base object of the memory reference *REF. */
536
537 tree
538 ao_ref_base (ao_ref *ref)
539 {
540 if (ref->base)
541 return ref->base;
542 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
543 &ref->max_size);
544 return ref->base;
545 }
546
547 /* Returns the base object alias set of the memory reference *REF. */
548
549 static alias_set_type
550 ao_ref_base_alias_set (ao_ref *ref)
551 {
552 tree base_ref;
553 if (ref->base_alias_set != -1)
554 return ref->base_alias_set;
555 if (!ref->ref)
556 return 0;
557 base_ref = ref->ref;
558 while (handled_component_p (base_ref))
559 base_ref = TREE_OPERAND (base_ref, 0);
560 ref->base_alias_set = get_alias_set (base_ref);
561 return ref->base_alias_set;
562 }
563
564 /* Returns the reference alias set of the memory reference *REF. */
565
566 alias_set_type
567 ao_ref_alias_set (ao_ref *ref)
568 {
569 if (ref->ref_alias_set != -1)
570 return ref->ref_alias_set;
571 ref->ref_alias_set = get_alias_set (ref->ref);
572 return ref->ref_alias_set;
573 }
574
575 /* Init an alias-oracle reference representation from a gimple pointer
576 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
577 size is assumed to be unknown. The access is assumed to be only
578 to or after of the pointer target, not before it. */
579
void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  /* Look through a defining ADDR_EXPR or a pointer-plus-constant to
     get at a better base; the constant offset is accumulated in bits
     in EXTRA_OFFSET.  */
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset = BITS_PER_UNIT
			 * int_cst_value (gimple_assign_rhs2 (stmt));
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      /* &obj: try to derive base and a constant byte offset.  */
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  /* Variable offset: fall back to the plain base and give up
	     on the known size.  */
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      /* Represent a plain pointer dereference as an artificial
	 zero-offset MEM_REF.  */
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  /* Convert SIZE from bytes to bits, guarding against overflow of the
     multiplication; otherwise the size is unknown (-1).  */
  if (size
      && tree_fits_shwi_p (size)
      && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  /* Alias set zero: the artificial reference must conflict with
     everything TBAA-wise.  */
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
630
631 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
632 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
633 decide. */
634
static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  /* Qualified variants are TBAA-equivalent to their main variant.  */
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
671
672 /* Determine if the two component references REF1 and REF2 which are
673 based on access types TYPE1 and TYPE2 and of which at least one is based
674 on an indirect reference may alias. REF2 is the only one that can
675 be a decl in which case REF2_IS_DECL is true.
676 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
677 are the respective alias sets. */
678
static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component references through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for: strip all handled
     components off both references.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      /* Common base found: rebase both offsets relative to it and
	 check whether the accessed ranges overlap.  */
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}
764
765 /* Return true if we can determine that component references REF1 and REF2,
766 that are within a common DECL, cannot overlap. */
767
static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  /* Look through a trivial &decl MEM_REF; a nonzero offset would
     invalidate the parallel walk below, so punt on it.  */
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	goto may_overlap;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	goto may_overlap;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* We must have the same base DECL.  */
  gcc_assert (ref1 == ref2);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      /* Skip non-record accesses (e.g. ARRAY_REFs) until the next
	 COMPONENT_REF-on-record level on each side.  */
      do
	{
	  if (component_refs1.is_empty ())
	    goto may_overlap;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    goto may_overlap;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	goto may_overlap;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = TYPE_MAIN_VARIANT (DECL_CONTEXT (field1));
      tree type2 = TYPE_MAIN_VARIANT (DECL_CONTEXT (field2));

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	goto may_overlap;

      /* Different fields of the same record type cannot overlap.
	 ??? Bitfields can overlap at RTL level so punt on them.  */
      if (field1 != field2)
	{
	  component_refs1.release ();
	  component_refs2.release ();
	  return !(DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2));
	}
    }

may_overlap:
  component_refs1.release ();
  component_refs2.release ();
  return false;
}
860
861 /* qsort compare function to sort FIELD_DECLs after their
862 DECL_FIELD_CONTEXT TYPE_UID. */
863
864 static inline int
865 ncr_compar (const void *field1_, const void *field2_)
866 {
867 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
868 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
869 unsigned int uid1
870 = TYPE_UID (TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (field1)));
871 unsigned int uid2
872 = TYPE_UID (TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (field2)));
873 if (uid1 < uid2)
874 return -1;
875 else if (uid1 > uid2)
876 return 1;
877 return 0;
878 }
879
880 /* Return true if we can determine that the fields referenced cannot
881 overlap for any pair of objects. */
882
static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  /* The analysis relies on strict-aliasing field disambiguation and
     only applies to COMPONENT_REF access paths.  */
  if (!flag_strict_aliasing
      || !x || !y
      || TREE_CODE (x) != COMPONENT_REF
      || TREE_CODE (y) != COMPONENT_REF)
    return false;

  /* Collect all FIELD_DECLs with RECORD_TYPE context along the access
     path of X.  */
  auto_vec<const_tree, 16> fieldsx;
  while (TREE_CODE (x) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (x, 1);
      tree type = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (field));
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsx.safe_push (field);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  /* Likewise for Y.  */
  auto_vec<const_tree, 16> fieldsy;
  while (TREE_CODE (y) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (y, 1);
      tree type = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (field));
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsy.safe_push (TREE_OPERAND (y, 1));
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    return false;

  /* Most common case first: a single record field on each side.  They
     cannot overlap if they are distinct non-bitfield members of the
     same record type.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    return ((TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldsx[0]))
	     == TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldsy[0])))
	    && fieldsx[0] != fieldsy[0]
	    && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));

  /* Sort both field vectors by context TYPE_UID so a linear merge walk
     can find fields of a common record type.  A two-element vector is
     sorted with a single compare-and-swap instead of qsort.  */
  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	{
	  const_tree tem = fieldsx[0];
	  fieldsx[0] = fieldsx[1];
	  fieldsx[1] = tem;
	}
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	{
	  const_tree tem = fieldsy[0];
	  fieldsy[0] = fieldsy[1];
	  fieldsy[1] = tem;
	}
    }
  else
    fieldsy.qsort (ncr_compar);

  /* Merge walk: advance the side with the smaller TYPE_UID; on a match
     of context types, distinct fields prove non-overlap.  */
  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldx));
      tree typey = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldy));
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap, unless they are both bitfields.  */
	  if (fieldx != fieldy)
	    return !(DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy));
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  return false;
}
978
979
980 /* Return true if two memory references based on the variables BASE1
981 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
982 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
983 if non-NULL are the complete memory reference trees. */
984
985 static bool
986 decl_refs_may_alias_p (tree ref1, tree base1,
987 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
988 tree ref2, tree base2,
989 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
990 {
991 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
992
993 /* If both references are based on different variables, they cannot alias. */
994 if (base1 != base2)
995 return false;
996
997 /* If both references are based on the same variable, they cannot alias if
998 the accesses do not overlap. */
999 if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
1000 return false;
1001
1002 /* For components with variable position, the above test isn't sufficient,
1003 so we disambiguate component references manually. */
1004 if (ref1 && ref2
1005 && handled_component_p (ref1) && handled_component_p (ref2)
1006 && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
1007 return false;
1008
1009 return true;
1010 }
1011
/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  TBAA_P
   controls whether disambiguations based on strict-aliasing rules
   may be applied.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       HOST_WIDE_INT offset1,
			       HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  /* Copies of the incoming offsets, biased below by the constant
     offset embedded in BASE1.  */
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  The shift
     by LOG2_BITS_PER_UNIT converts the byte offset to bits.  */
  offset_int moff = mem_ref_offset (base1);
  moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
  if (wi::neg_p (moff))
    offset2p += (-moff).to_short_addr ();
  else
    offset1p += moff.to_short_addr ();

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  /* The access type of the pointer dereference, recovered from the
     type of the MEM_REF's constant offset operand.  */
  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_alias_set (base2);

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  /* Without the full reference tree no access-path disambiguation
     is possible; stay conservative.  */
  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      /* Un-bias the offsets by the embedded constant offset,
	 converted from bytes to bits as above.  */
      offset_int moff = mem_ref_offset (dbase2);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	doffset1 -= (-moff).to_short_addr ();
      else
	doffset2 -= moff.to_short_addr ();
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}
1160
1161 /* Return true if two indirect references based on *PTR1
1162 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1163 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
1164 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1165 in which case they are computed on-demand. REF1 and REF2
1166 if non-NULL are the complete memory reference trees. */
1167
1168 static bool
1169 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1170 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
1171 alias_set_type ref1_alias_set,
1172 alias_set_type base1_alias_set,
1173 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1174 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
1175 alias_set_type ref2_alias_set,
1176 alias_set_type base2_alias_set, bool tbaa_p)
1177 {
1178 tree ptr1;
1179 tree ptr2;
1180 tree ptrtype1, ptrtype2;
1181
1182 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1183 || TREE_CODE (base1) == TARGET_MEM_REF)
1184 && (TREE_CODE (base2) == MEM_REF
1185 || TREE_CODE (base2) == TARGET_MEM_REF));
1186
1187 ptr1 = TREE_OPERAND (base1, 0);
1188 ptr2 = TREE_OPERAND (base2, 0);
1189
1190 /* If both bases are based on pointers they cannot alias if they may not
1191 point to the same memory object or if they point to the same object
1192 and the accesses do not overlap. */
1193 if ((!cfun || gimple_in_ssa_p (cfun))
1194 && operand_equal_p (ptr1, ptr2, 0)
1195 && (((TREE_CODE (base1) != TARGET_MEM_REF
1196 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1197 && (TREE_CODE (base2) != TARGET_MEM_REF
1198 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
1199 || (TREE_CODE (base1) == TARGET_MEM_REF
1200 && TREE_CODE (base2) == TARGET_MEM_REF
1201 && (TMR_STEP (base1) == TMR_STEP (base2)
1202 || (TMR_STEP (base1) && TMR_STEP (base2)
1203 && operand_equal_p (TMR_STEP (base1),
1204 TMR_STEP (base2), 0)))
1205 && (TMR_INDEX (base1) == TMR_INDEX (base2)
1206 || (TMR_INDEX (base1) && TMR_INDEX (base2)
1207 && operand_equal_p (TMR_INDEX (base1),
1208 TMR_INDEX (base2), 0)))
1209 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
1210 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
1211 && operand_equal_p (TMR_INDEX2 (base1),
1212 TMR_INDEX2 (base2), 0))))))
1213 {
1214 offset_int moff;
1215 /* The offset embedded in MEM_REFs can be negative. Bias them
1216 so that the resulting offset adjustment is positive. */
1217 moff = mem_ref_offset (base1);
1218 moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
1219 if (wi::neg_p (moff))
1220 offset2 += (-moff).to_short_addr ();
1221 else
1222 offset1 += moff.to_shwi ();
1223 moff = mem_ref_offset (base2);
1224 moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
1225 if (wi::neg_p (moff))
1226 offset1 += (-moff).to_short_addr ();
1227 else
1228 offset2 += moff.to_short_addr ();
1229 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
1230 }
1231 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
1232 return false;
1233
1234 /* Disambiguations that rely on strict aliasing rules follow. */
1235 if (!flag_strict_aliasing || !tbaa_p)
1236 return true;
1237
1238 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1239 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
1240
1241 /* If the alias set for a pointer access is zero all bets are off. */
1242 if (base1_alias_set == -1)
1243 base1_alias_set = get_deref_alias_set (ptrtype1);
1244 if (base1_alias_set == 0)
1245 return true;
1246 if (base2_alias_set == -1)
1247 base2_alias_set = get_deref_alias_set (ptrtype2);
1248 if (base2_alias_set == 0)
1249 return true;
1250
1251 /* If both references are through the same type, they do not alias
1252 if the accesses do not overlap. This does extra disambiguation
1253 for mixed/pointer accesses but requires strict aliasing. */
1254 if ((TREE_CODE (base1) != TARGET_MEM_REF
1255 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1256 && (TREE_CODE (base2) != TARGET_MEM_REF
1257 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
1258 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
1259 && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
1260 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
1261 TREE_TYPE (ptrtype2)) == 1)
1262 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
1263
1264 /* Do type-based disambiguation. */
1265 if (base1_alias_set != base2_alias_set
1266 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1267 return false;
1268
1269 /* If either reference is view-converted, give up now. */
1270 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
1271 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
1272 return true;
1273
1274 if (ref1 && ref2
1275 && nonoverlapping_component_refs_p (ref1, ref2))
1276 return false;
1277
1278 /* Do access-path based disambiguation. */
1279 if (ref1 && ref2
1280 && (handled_component_p (ref1) || handled_component_p (ref2)))
1281 return aliasing_component_refs_p (ref1,
1282 ref1_alias_set, base1_alias_set,
1283 offset1, max_size1,
1284 ref2,
1285 ref2_alias_set, base2_alias_set,
1286 offset2, max_size2, false);
1287
1288 return true;
1289 }
1290
/* Return true, if the two memory references REF1 and REF2 may alias.
   TBAA_P controls whether type-based disambiguations may be used;
   dependence testing passes false here.  This is the central dispatch
   of the oracle: it classifies both bases as decl or pointer
   dereference and delegates to the matching disambiguator.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  /* Sanity-check the shape of the reference trees we accept.  */
  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
	*D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  Such bases never alias anything.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before defering to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref2->ref, base2, offset2, max_size2);

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case so the decl is always
     the first reference below.  */
  if (ind1_p && var2_p)
    {
      HOST_WIDE_INT tmp1;
      tree tmp2;
      ao_ref *tmp3;
      tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
      tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
      tmp2 = base1; base1 = base2; base2 = tmp2;
      tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2), -1,
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1), -1,
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2), -1,
				      tbaa_p);

  /* We really do not want to end up here, but returning true is safe.  */
#ifdef ENABLE_CHECKING
  gcc_unreachable ();
#else
  return true;
#endif
}
1417
1418 bool
1419 refs_may_alias_p (tree ref1, tree ref2)
1420 {
1421 ao_ref r1, r2;
1422 bool res;
1423 ao_ref_init (&r1, ref1);
1424 ao_ref_init (&r2, ref2);
1425 res = refs_may_alias_p_1 (&r1, &r2, true);
1426 if (res)
1427 ++alias_stats.refs_may_alias_p_may_alias;
1428 else
1429 ++alias_stats.refs_may_alias_p_no_alias;
1430 return res;
1431 }
1432
1433 /* Returns true if there is a anti-dependence for the STORE that
1434 executes after the LOAD. */
1435
1436 bool
1437 refs_anti_dependent_p (tree load, tree store)
1438 {
1439 ao_ref r1, r2;
1440 ao_ref_init (&r1, load);
1441 ao_ref_init (&r2, store);
1442 return refs_may_alias_p_1 (&r1, &r2, false);
1443 }
1444
1445 /* Returns true if there is a output dependence for the stores
1446 STORE1 and STORE2. */
1447
1448 bool
1449 refs_output_dependent_p (tree store1, tree store2)
1450 {
1451 ao_ref r1, r2;
1452 ao_ref_init (&r1, store1);
1453 ao_ref_init (&r2, store2);
1454 return refs_may_alias_p_1 (&r1, &r2, false);
1455 }
1456
1457 /* If the call CALL may use the memory reference REF return true,
1458 otherwise return false. */
1459
1460 static bool
1461 ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
1462 {
1463 tree base, callee;
1464 unsigned i;
1465 int flags = gimple_call_flags (call);
1466
1467 /* Const functions without a static chain do not implicitly use memory. */
1468 if (!gimple_call_chain (call)
1469 && (flags & (ECF_CONST|ECF_NOVOPS)))
1470 goto process_args;
1471
1472 base = ao_ref_base (ref);
1473 if (!base)
1474 return true;
1475
1476 /* A call that is not without side-effects might involve volatile
1477 accesses and thus conflicts with all other volatile accesses. */
1478 if (ref->volatile_p)
1479 return true;
1480
1481 /* If the reference is based on a decl that is not aliased the call
1482 cannot possibly use it. */
1483 if (DECL_P (base)
1484 && !may_be_aliased (base)
1485 /* But local statics can be used through recursion. */
1486 && !is_global_var (base))
1487 goto process_args;
1488
1489 callee = gimple_call_fndecl (call);
1490
1491 /* Handle those builtin functions explicitly that do not act as
1492 escape points. See tree-ssa-structalias.c:find_func_aliases
1493 for the list of builtins we might need to handle here. */
1494 if (callee != NULL_TREE
1495 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1496 switch (DECL_FUNCTION_CODE (callee))
1497 {
1498 /* All the following functions read memory pointed to by
1499 their second argument. strcat/strncat additionally
1500 reads memory pointed to by the first argument. */
1501 case BUILT_IN_STRCAT:
1502 case BUILT_IN_STRNCAT:
1503 {
1504 ao_ref dref;
1505 ao_ref_init_from_ptr_and_size (&dref,
1506 gimple_call_arg (call, 0),
1507 NULL_TREE);
1508 if (refs_may_alias_p_1 (&dref, ref, false))
1509 return true;
1510 }
1511 /* FALLTHRU */
1512 case BUILT_IN_STRCPY:
1513 case BUILT_IN_STRNCPY:
1514 case BUILT_IN_MEMCPY:
1515 case BUILT_IN_MEMMOVE:
1516 case BUILT_IN_MEMPCPY:
1517 case BUILT_IN_STPCPY:
1518 case BUILT_IN_STPNCPY:
1519 case BUILT_IN_TM_MEMCPY:
1520 case BUILT_IN_TM_MEMMOVE:
1521 {
1522 ao_ref dref;
1523 tree size = NULL_TREE;
1524 if (gimple_call_num_args (call) == 3)
1525 size = gimple_call_arg (call, 2);
1526 ao_ref_init_from_ptr_and_size (&dref,
1527 gimple_call_arg (call, 1),
1528 size);
1529 return refs_may_alias_p_1 (&dref, ref, false);
1530 }
1531 case BUILT_IN_STRCAT_CHK:
1532 case BUILT_IN_STRNCAT_CHK:
1533 {
1534 ao_ref dref;
1535 ao_ref_init_from_ptr_and_size (&dref,
1536 gimple_call_arg (call, 0),
1537 NULL_TREE);
1538 if (refs_may_alias_p_1 (&dref, ref, false))
1539 return true;
1540 }
1541 /* FALLTHRU */
1542 case BUILT_IN_STRCPY_CHK:
1543 case BUILT_IN_STRNCPY_CHK:
1544 case BUILT_IN_MEMCPY_CHK:
1545 case BUILT_IN_MEMMOVE_CHK:
1546 case BUILT_IN_MEMPCPY_CHK:
1547 case BUILT_IN_STPCPY_CHK:
1548 case BUILT_IN_STPNCPY_CHK:
1549 {
1550 ao_ref dref;
1551 tree size = NULL_TREE;
1552 if (gimple_call_num_args (call) == 4)
1553 size = gimple_call_arg (call, 2);
1554 ao_ref_init_from_ptr_and_size (&dref,
1555 gimple_call_arg (call, 1),
1556 size);
1557 return refs_may_alias_p_1 (&dref, ref, false);
1558 }
1559 case BUILT_IN_BCOPY:
1560 {
1561 ao_ref dref;
1562 tree size = gimple_call_arg (call, 2);
1563 ao_ref_init_from_ptr_and_size (&dref,
1564 gimple_call_arg (call, 0),
1565 size);
1566 return refs_may_alias_p_1 (&dref, ref, false);
1567 }
1568
1569 /* The following functions read memory pointed to by their
1570 first argument. */
1571 CASE_BUILT_IN_TM_LOAD (1):
1572 CASE_BUILT_IN_TM_LOAD (2):
1573 CASE_BUILT_IN_TM_LOAD (4):
1574 CASE_BUILT_IN_TM_LOAD (8):
1575 CASE_BUILT_IN_TM_LOAD (FLOAT):
1576 CASE_BUILT_IN_TM_LOAD (DOUBLE):
1577 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
1578 CASE_BUILT_IN_TM_LOAD (M64):
1579 CASE_BUILT_IN_TM_LOAD (M128):
1580 CASE_BUILT_IN_TM_LOAD (M256):
1581 case BUILT_IN_TM_LOG:
1582 case BUILT_IN_TM_LOG_1:
1583 case BUILT_IN_TM_LOG_2:
1584 case BUILT_IN_TM_LOG_4:
1585 case BUILT_IN_TM_LOG_8:
1586 case BUILT_IN_TM_LOG_FLOAT:
1587 case BUILT_IN_TM_LOG_DOUBLE:
1588 case BUILT_IN_TM_LOG_LDOUBLE:
1589 case BUILT_IN_TM_LOG_M64:
1590 case BUILT_IN_TM_LOG_M128:
1591 case BUILT_IN_TM_LOG_M256:
1592 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
1593
1594 /* These read memory pointed to by the first argument. */
1595 case BUILT_IN_STRDUP:
1596 case BUILT_IN_STRNDUP:
1597 {
1598 ao_ref dref;
1599 tree size = NULL_TREE;
1600 if (gimple_call_num_args (call) == 2)
1601 size = gimple_call_arg (call, 1);
1602 ao_ref_init_from_ptr_and_size (&dref,
1603 gimple_call_arg (call, 0),
1604 size);
1605 return refs_may_alias_p_1 (&dref, ref, false);
1606 }
1607 /* These read memory pointed to by the first argument. */
1608 case BUILT_IN_INDEX:
1609 case BUILT_IN_STRCHR:
1610 case BUILT_IN_STRRCHR:
1611 {
1612 ao_ref dref;
1613 ao_ref_init_from_ptr_and_size (&dref,
1614 gimple_call_arg (call, 0),
1615 NULL_TREE);
1616 return refs_may_alias_p_1 (&dref, ref, false);
1617 }
1618 /* These read memory pointed to by the first argument with size
1619 in the third argument. */
1620 case BUILT_IN_MEMCHR:
1621 {
1622 ao_ref dref;
1623 ao_ref_init_from_ptr_and_size (&dref,
1624 gimple_call_arg (call, 0),
1625 gimple_call_arg (call, 2));
1626 return refs_may_alias_p_1 (&dref, ref, false);
1627 }
1628 /* These read memory pointed to by the first and second arguments. */
1629 case BUILT_IN_STRSTR:
1630 case BUILT_IN_STRPBRK:
1631 {
1632 ao_ref dref;
1633 ao_ref_init_from_ptr_and_size (&dref,
1634 gimple_call_arg (call, 0),
1635 NULL_TREE);
1636 if (refs_may_alias_p_1 (&dref, ref, false))
1637 return true;
1638 ao_ref_init_from_ptr_and_size (&dref,
1639 gimple_call_arg (call, 1),
1640 NULL_TREE);
1641 return refs_may_alias_p_1 (&dref, ref, false);
1642 }
1643
1644 /* The following builtins do not read from memory. */
1645 case BUILT_IN_FREE:
1646 case BUILT_IN_MALLOC:
1647 case BUILT_IN_POSIX_MEMALIGN:
1648 case BUILT_IN_CALLOC:
1649 case BUILT_IN_ALLOCA:
1650 case BUILT_IN_ALLOCA_WITH_ALIGN:
1651 case BUILT_IN_STACK_SAVE:
1652 case BUILT_IN_STACK_RESTORE:
1653 case BUILT_IN_MEMSET:
1654 case BUILT_IN_TM_MEMSET:
1655 case BUILT_IN_MEMSET_CHK:
1656 case BUILT_IN_FREXP:
1657 case BUILT_IN_FREXPF:
1658 case BUILT_IN_FREXPL:
1659 case BUILT_IN_GAMMA_R:
1660 case BUILT_IN_GAMMAF_R:
1661 case BUILT_IN_GAMMAL_R:
1662 case BUILT_IN_LGAMMA_R:
1663 case BUILT_IN_LGAMMAF_R:
1664 case BUILT_IN_LGAMMAL_R:
1665 case BUILT_IN_MODF:
1666 case BUILT_IN_MODFF:
1667 case BUILT_IN_MODFL:
1668 case BUILT_IN_REMQUO:
1669 case BUILT_IN_REMQUOF:
1670 case BUILT_IN_REMQUOL:
1671 case BUILT_IN_SINCOS:
1672 case BUILT_IN_SINCOSF:
1673 case BUILT_IN_SINCOSL:
1674 case BUILT_IN_ASSUME_ALIGNED:
1675 case BUILT_IN_VA_END:
1676 return false;
1677 /* __sync_* builtins and some OpenMP builtins act as threading
1678 barriers. */
1679 #undef DEF_SYNC_BUILTIN
1680 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1681 #include "sync-builtins.def"
1682 #undef DEF_SYNC_BUILTIN
1683 case BUILT_IN_GOMP_ATOMIC_START:
1684 case BUILT_IN_GOMP_ATOMIC_END:
1685 case BUILT_IN_GOMP_BARRIER:
1686 case BUILT_IN_GOMP_BARRIER_CANCEL:
1687 case BUILT_IN_GOMP_TASKWAIT:
1688 case BUILT_IN_GOMP_TASKGROUP_END:
1689 case BUILT_IN_GOMP_CRITICAL_START:
1690 case BUILT_IN_GOMP_CRITICAL_END:
1691 case BUILT_IN_GOMP_CRITICAL_NAME_START:
1692 case BUILT_IN_GOMP_CRITICAL_NAME_END:
1693 case BUILT_IN_GOMP_LOOP_END:
1694 case BUILT_IN_GOMP_LOOP_END_CANCEL:
1695 case BUILT_IN_GOMP_ORDERED_START:
1696 case BUILT_IN_GOMP_ORDERED_END:
1697 case BUILT_IN_GOMP_SECTIONS_END:
1698 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
1699 case BUILT_IN_GOMP_SINGLE_COPY_START:
1700 case BUILT_IN_GOMP_SINGLE_COPY_END:
1701 return true;
1702
1703 default:
1704 /* Fallthru to general call handling. */;
1705 }
1706
1707 /* Check if base is a global static variable that is not read
1708 by the function. */
1709 if (callee != NULL_TREE
1710 && TREE_CODE (base) == VAR_DECL
1711 && TREE_STATIC (base))
1712 {
1713 struct cgraph_node *node = cgraph_get_node (callee);
1714 bitmap not_read;
1715
1716 /* FIXME: Callee can be an OMP builtin that does not have a call graph
1717 node yet. We should enforce that there are nodes for all decls in the
1718 IL and remove this check instead. */
1719 if (node
1720 && (not_read = ipa_reference_get_not_read_global (node))
1721 && bitmap_bit_p (not_read, DECL_UID (base)))
1722 goto process_args;
1723 }
1724
1725 /* Check if the base variable is call-used. */
1726 if (DECL_P (base))
1727 {
1728 if (pt_solution_includes (gimple_call_use_set (call), base))
1729 return true;
1730 }
1731 else if ((TREE_CODE (base) == MEM_REF
1732 || TREE_CODE (base) == TARGET_MEM_REF)
1733 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1734 {
1735 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1736 if (!pi)
1737 return true;
1738
1739 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
1740 return true;
1741 }
1742 else
1743 return true;
1744
1745 /* Inspect call arguments for passed-by-value aliases. */
1746 process_args:
1747 for (i = 0; i < gimple_call_num_args (call); ++i)
1748 {
1749 tree op = gimple_call_arg (call, i);
1750 int flags = gimple_call_arg_flags (call, i);
1751
1752 if (flags & EAF_UNUSED)
1753 continue;
1754
1755 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1756 op = TREE_OPERAND (op, 0);
1757
1758 if (TREE_CODE (op) != SSA_NAME
1759 && !is_gimple_min_invariant (op))
1760 {
1761 ao_ref r;
1762 ao_ref_init (&r, op);
1763 if (refs_may_alias_p_1 (&r, ref, true))
1764 return true;
1765 }
1766 }
1767
1768 return false;
1769 }
1770
1771 static bool
1772 ref_maybe_used_by_call_p (gimple call, tree ref)
1773 {
1774 ao_ref r;
1775 bool res;
1776 ao_ref_init (&r, ref);
1777 res = ref_maybe_used_by_call_p_1 (call, &r);
1778 if (res)
1779 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1780 else
1781 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1782 return res;
1783 }
1784
1785
1786 /* If the statement STMT may use the memory reference REF return
1787 true, otherwise return false. */
1788
1789 bool
1790 ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
1791 {
1792 if (is_gimple_assign (stmt))
1793 {
1794 tree rhs;
1795
1796 /* All memory assign statements are single. */
1797 if (!gimple_assign_single_p (stmt))
1798 return false;
1799
1800 rhs = gimple_assign_rhs1 (stmt);
1801 if (is_gimple_reg (rhs)
1802 || is_gimple_min_invariant (rhs)
1803 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1804 return false;
1805
1806 return refs_may_alias_p (rhs, ref);
1807 }
1808 else if (is_gimple_call (stmt))
1809 return ref_maybe_used_by_call_p (stmt, ref);
1810 else if (gimple_code (stmt) == GIMPLE_RETURN)
1811 {
1812 tree retval = gimple_return_retval (stmt);
1813 tree base;
1814 if (retval
1815 && TREE_CODE (retval) != SSA_NAME
1816 && !is_gimple_min_invariant (retval)
1817 && refs_may_alias_p (retval, ref))
1818 return true;
1819 /* If ref escapes the function then the return acts as a use. */
1820 base = get_base_address (ref);
1821 if (!base)
1822 ;
1823 else if (DECL_P (base))
1824 return is_global_var (base);
1825 else if (TREE_CODE (base) == MEM_REF
1826 || TREE_CODE (base) == TARGET_MEM_REF)
1827 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1828 return false;
1829 }
1830
1831 return true;
1832 }
1833
1834 /* If the call in statement CALL may clobber the memory reference REF
1835 return true, otherwise return false. */
1836
1837 static bool
1838 call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
1839 {
1840 tree base;
1841 tree callee;
1842
1843 /* If the call is pure or const it cannot clobber anything. */
1844 if (gimple_call_flags (call)
1845 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1846 return false;
1847
1848 base = ao_ref_base (ref);
1849 if (!base)
1850 return true;
1851
1852 if (TREE_CODE (base) == SSA_NAME
1853 || CONSTANT_CLASS_P (base))
1854 return false;
1855
1856 /* A call that is not without side-effects might involve volatile
1857 accesses and thus conflicts with all other volatile accesses. */
1858 if (ref->volatile_p)
1859 return true;
1860
1861 /* If the reference is based on a decl that is not aliased the call
1862 cannot possibly clobber it. */
1863 if (DECL_P (base)
1864 && !may_be_aliased (base)
1865 /* But local non-readonly statics can be modified through recursion
1866 or the call may implement a threading barrier which we must
1867 treat as may-def. */
1868 && (TREE_READONLY (base)
1869 || !is_global_var (base)))
1870 return false;
1871
1872 callee = gimple_call_fndecl (call);
1873
1874 /* Handle those builtin functions explicitly that do not act as
1875 escape points. See tree-ssa-structalias.c:find_func_aliases
1876 for the list of builtins we might need to handle here. */
1877 if (callee != NULL_TREE
1878 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1879 switch (DECL_FUNCTION_CODE (callee))
1880 {
1881 /* All the following functions clobber memory pointed to by
1882 their first argument. */
1883 case BUILT_IN_STRCPY:
1884 case BUILT_IN_STRNCPY:
1885 case BUILT_IN_MEMCPY:
1886 case BUILT_IN_MEMMOVE:
1887 case BUILT_IN_MEMPCPY:
1888 case BUILT_IN_STPCPY:
1889 case BUILT_IN_STPNCPY:
1890 case BUILT_IN_STRCAT:
1891 case BUILT_IN_STRNCAT:
1892 case BUILT_IN_MEMSET:
1893 case BUILT_IN_TM_MEMSET:
1894 CASE_BUILT_IN_TM_STORE (1):
1895 CASE_BUILT_IN_TM_STORE (2):
1896 CASE_BUILT_IN_TM_STORE (4):
1897 CASE_BUILT_IN_TM_STORE (8):
1898 CASE_BUILT_IN_TM_STORE (FLOAT):
1899 CASE_BUILT_IN_TM_STORE (DOUBLE):
1900 CASE_BUILT_IN_TM_STORE (LDOUBLE):
1901 CASE_BUILT_IN_TM_STORE (M64):
1902 CASE_BUILT_IN_TM_STORE (M128):
1903 CASE_BUILT_IN_TM_STORE (M256):
1904 case BUILT_IN_TM_MEMCPY:
1905 case BUILT_IN_TM_MEMMOVE:
1906 {
1907 ao_ref dref;
1908 tree size = NULL_TREE;
1909 /* Don't pass in size for strncat, as the maximum size
1910 is strlen (dest) + n + 1 instead of n, resp.
1911 n + 1 at dest + strlen (dest), but strlen (dest) isn't
1912 known. */
1913 if (gimple_call_num_args (call) == 3
1914 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
1915 size = gimple_call_arg (call, 2);
1916 ao_ref_init_from_ptr_and_size (&dref,
1917 gimple_call_arg (call, 0),
1918 size);
1919 return refs_may_alias_p_1 (&dref, ref, false);
1920 }
1921 case BUILT_IN_STRCPY_CHK:
1922 case BUILT_IN_STRNCPY_CHK:
1923 case BUILT_IN_MEMCPY_CHK:
1924 case BUILT_IN_MEMMOVE_CHK:
1925 case BUILT_IN_MEMPCPY_CHK:
1926 case BUILT_IN_STPCPY_CHK:
1927 case BUILT_IN_STPNCPY_CHK:
1928 case BUILT_IN_STRCAT_CHK:
1929 case BUILT_IN_STRNCAT_CHK:
1930 case BUILT_IN_MEMSET_CHK:
1931 {
1932 ao_ref dref;
1933 tree size = NULL_TREE;
1934 /* Don't pass in size for __strncat_chk, as the maximum size
1935 is strlen (dest) + n + 1 instead of n, resp.
1936 n + 1 at dest + strlen (dest), but strlen (dest) isn't
1937 known. */
1938 if (gimple_call_num_args (call) == 4
1939 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
1940 size = gimple_call_arg (call, 2);
1941 ao_ref_init_from_ptr_and_size (&dref,
1942 gimple_call_arg (call, 0),
1943 size);
1944 return refs_may_alias_p_1 (&dref, ref, false);
1945 }
1946 case BUILT_IN_BCOPY:
1947 {
1948 ao_ref dref;
1949 tree size = gimple_call_arg (call, 2);
1950 ao_ref_init_from_ptr_and_size (&dref,
1951 gimple_call_arg (call, 1),
1952 size);
1953 return refs_may_alias_p_1 (&dref, ref, false);
1954 }
1955 /* Allocating memory does not have any side-effects apart from
1956 being the definition point for the pointer. */
1957 case BUILT_IN_MALLOC:
1958 case BUILT_IN_CALLOC:
1959 case BUILT_IN_STRDUP:
1960 case BUILT_IN_STRNDUP:
1961 /* Unix98 specifies that errno is set on allocation failure. */
1962 if (flag_errno_math
1963 && targetm.ref_may_alias_errno (ref))
1964 return true;
1965 return false;
1966 case BUILT_IN_STACK_SAVE:
1967 case BUILT_IN_ALLOCA:
1968 case BUILT_IN_ALLOCA_WITH_ALIGN:
1969 case BUILT_IN_ASSUME_ALIGNED:
1970 return false;
1971 /* But posix_memalign stores a pointer into the memory pointed to
1972 by its first argument. */
1973 case BUILT_IN_POSIX_MEMALIGN:
1974 {
1975 tree ptrptr = gimple_call_arg (call, 0);
1976 ao_ref dref;
1977 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
1978 TYPE_SIZE_UNIT (ptr_type_node));
1979 return (refs_may_alias_p_1 (&dref, ref, false)
1980 || (flag_errno_math
1981 && targetm.ref_may_alias_errno (ref)));
1982 }
1983 /* Freeing memory kills the pointed-to memory. More importantly
1984 the call has to serve as a barrier for moving loads and stores
1985 across it. */
1986 case BUILT_IN_FREE:
1987 case BUILT_IN_VA_END:
1988 {
1989 tree ptr = gimple_call_arg (call, 0);
1990 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
1991 }
1992 case BUILT_IN_GAMMA_R:
1993 case BUILT_IN_GAMMAF_R:
1994 case BUILT_IN_GAMMAL_R:
1995 case BUILT_IN_LGAMMA_R:
1996 case BUILT_IN_LGAMMAF_R:
1997 case BUILT_IN_LGAMMAL_R:
1998 {
1999 tree out = gimple_call_arg (call, 1);
2000 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2001 return true;
2002 if (flag_errno_math)
2003 break;
2004 return false;
2005 }
2006 case BUILT_IN_FREXP:
2007 case BUILT_IN_FREXPF:
2008 case BUILT_IN_FREXPL:
2009 case BUILT_IN_MODF:
2010 case BUILT_IN_MODFF:
2011 case BUILT_IN_MODFL:
2012 {
2013 tree out = gimple_call_arg (call, 1);
2014 return ptr_deref_may_alias_ref_p_1 (out, ref);
2015 }
2016 case BUILT_IN_REMQUO:
2017 case BUILT_IN_REMQUOF:
2018 case BUILT_IN_REMQUOL:
2019 {
2020 tree out = gimple_call_arg (call, 2);
2021 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2022 return true;
2023 if (flag_errno_math)
2024 break;
2025 return false;
2026 }
2027 case BUILT_IN_SINCOS:
2028 case BUILT_IN_SINCOSF:
2029 case BUILT_IN_SINCOSL:
2030 {
2031 tree sin = gimple_call_arg (call, 1);
2032 tree cos = gimple_call_arg (call, 2);
2033 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2034 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2035 }
2036 /* __sync_* builtins and some OpenMP builtins act as threading
2037 barriers. */
2038 #undef DEF_SYNC_BUILTIN
2039 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2040 #include "sync-builtins.def"
2041 #undef DEF_SYNC_BUILTIN
2042 case BUILT_IN_GOMP_ATOMIC_START:
2043 case BUILT_IN_GOMP_ATOMIC_END:
2044 case BUILT_IN_GOMP_BARRIER:
2045 case BUILT_IN_GOMP_BARRIER_CANCEL:
2046 case BUILT_IN_GOMP_TASKWAIT:
2047 case BUILT_IN_GOMP_TASKGROUP_END:
2048 case BUILT_IN_GOMP_CRITICAL_START:
2049 case BUILT_IN_GOMP_CRITICAL_END:
2050 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2051 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2052 case BUILT_IN_GOMP_LOOP_END:
2053 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2054 case BUILT_IN_GOMP_ORDERED_START:
2055 case BUILT_IN_GOMP_ORDERED_END:
2056 case BUILT_IN_GOMP_SECTIONS_END:
2057 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2058 case BUILT_IN_GOMP_SINGLE_COPY_START:
2059 case BUILT_IN_GOMP_SINGLE_COPY_END:
2060 return true;
2061 default:
2062 /* Fallthru to general call handling. */;
2063 }
2064
2065 /* Check if base is a global static variable that is not written
2066 by the function. */
2067 if (callee != NULL_TREE
2068 && TREE_CODE (base) == VAR_DECL
2069 && TREE_STATIC (base))
2070 {
2071 struct cgraph_node *node = cgraph_get_node (callee);
2072 bitmap not_written;
2073
2074 if (node
2075 && (not_written = ipa_reference_get_not_written_global (node))
2076 && bitmap_bit_p (not_written, DECL_UID (base)))
2077 return false;
2078 }
2079
2080 /* Check if the base variable is call-clobbered. */
2081 if (DECL_P (base))
2082 return pt_solution_includes (gimple_call_clobber_set (call), base);
2083 else if ((TREE_CODE (base) == MEM_REF
2084 || TREE_CODE (base) == TARGET_MEM_REF)
2085 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2086 {
2087 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2088 if (!pi)
2089 return true;
2090
2091 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2092 }
2093
2094 return true;
2095 }
2096
2097 /* If the call in statement CALL may clobber the memory reference REF
2098 return true, otherwise return false. */
2099
2100 bool
2101 call_may_clobber_ref_p (gimple call, tree ref)
2102 {
2103 bool res;
2104 ao_ref r;
2105 ao_ref_init (&r, ref);
2106 res = call_may_clobber_ref_p_1 (call, &r);
2107 if (res)
2108 ++alias_stats.call_may_clobber_ref_p_may_alias;
2109 else
2110 ++alias_stats.call_may_clobber_ref_p_no_alias;
2111 return res;
2112 }
2113
2114
2115 /* If the statement STMT may clobber the memory reference REF return true,
2116 otherwise return false. */
2117
2118 bool
2119 stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
2120 {
2121 if (is_gimple_call (stmt))
2122 {
2123 tree lhs = gimple_call_lhs (stmt);
2124 if (lhs
2125 && TREE_CODE (lhs) != SSA_NAME)
2126 {
2127 ao_ref r;
2128 ao_ref_init (&r, lhs);
2129 if (refs_may_alias_p_1 (ref, &r, true))
2130 return true;
2131 }
2132
2133 return call_may_clobber_ref_p_1 (stmt, ref);
2134 }
2135 else if (gimple_assign_single_p (stmt))
2136 {
2137 tree lhs = gimple_assign_lhs (stmt);
2138 if (TREE_CODE (lhs) != SSA_NAME)
2139 {
2140 ao_ref r;
2141 ao_ref_init (&r, lhs);
2142 return refs_may_alias_p_1 (ref, &r, true);
2143 }
2144 }
2145 else if (gimple_code (stmt) == GIMPLE_ASM)
2146 return true;
2147
2148 return false;
2149 }
2150
2151 bool
2152 stmt_may_clobber_ref_p (gimple stmt, tree ref)
2153 {
2154 ao_ref r;
2155 ao_ref_init (&r, ref);
2156 return stmt_may_clobber_ref_p_1 (stmt, &r);
2157 }
2158
/* If STMT kills the memory reference REF return true, otherwise
   return false.  "Kills" means STMT must overwrite every byte of
   REF, so an earlier store into REF is dead.  */

static bool
stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref)
{
  /* For a must-alias check we need to be able to constrain
     the access properly.
     FIXME: except for BUILTIN_FREE.  */
  if (!ao_ref_base (ref)
      || ref->max_size == -1)
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ??? We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (stmt))
    {
      tree base, lhs = gimple_get_lhs (stmt);
      HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base != ref->base)
	{
	  /* If both base and ref->base are MEM_REFs, only compare the
	     first operand, and if the second operand isn't equal constant,
	     try to add the offsets into offset and ref_offset.  */
	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
	    {
	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
				       TREE_OPERAND (ref->base, 1)))
		{
		  /* Fold each MEM_REF's constant byte offset, converted
		     to bits, into the respective access offset so the
		     two ranges become directly comparable.  */
		  offset_int off1 = mem_ref_offset (base);
		  off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT);
		  off1 += offset;
		  offset_int off2 = mem_ref_offset (ref->base);
		  off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT);
		  off2 += ref_offset;
		  if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
		    {
		      offset = off1.to_shwi ();
		      ref_offset = off2.to_shwi ();
		    }
		  else
		    /* Combined offsets overflow HOST_WIDE_INT; give up
		       on the must-alias check below.  */
		    size = -1;
		}
	    }
	  else
	    /* Incomparable bases; cannot prove a kill.  */
	    size = -1;
	}
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (size != -1 && size == max_size)
	{
	  /* The store kills REF only if its extent fully covers
	     [ref_offset, ref_offset + ref->max_size).  */
	  if (offset <= ref_offset
	      && offset + size >= ref_offset + ref->max_size)
	    return true;
	}
    }

  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
	  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_FREE:
	    {
	      /* free (PTR) kills a reference that dereferences exactly
		 PTR (REF's base is MEM_REF[PTR]).  */
	      tree ptr = gimple_call_arg (stmt, 0);
	      tree base = ao_ref_base (ref);
	      if (base && TREE_CODE (base) == MEM_REF
		  && TREE_OPERAND (base, 0) == ptr)
		return true;
	      break;
	    }

	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMSET_CHK:
	    {
	      /* These write [dest, dest + len); they kill REF when that
		 range provably covers all of REF.  Requires a constant
		 length.  */
	      tree dest = gimple_call_arg (stmt, 0);
	      tree len = gimple_call_arg (stmt, 2);
	      if (!tree_fits_shwi_p (len))
		return false;
	      tree rbase = ref->base;
	      offset_int roffset = ref->offset;
	      ao_ref dref;
	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
	      tree base = ao_ref_base (&dref);
	      offset_int offset = dref.offset;
	      if (!base || dref.size == -1)
		return false;
	      if (TREE_CODE (base) == MEM_REF)
		{
		  if (TREE_CODE (rbase) != MEM_REF)
		    return false;
		  // Compare pointers.
		  offset += wi::lshift (mem_ref_offset (base),
					LOG2_BITS_PER_UNIT);
		  roffset += wi::lshift (mem_ref_offset (rbase),
					 LOG2_BITS_PER_UNIT);
		  base = TREE_OPERAND (base, 0);
		  rbase = TREE_OPERAND (rbase, 0);
		}
	      /* Kill iff same base and the written bit-range encloses
		 REF's maximal extent.  */
	      if (base == rbase
		  && wi::les_p (offset, roffset)
		  && wi::les_p (roffset + ref->max_size,
				offset + wi::lshift (wi::to_offset (len),
						     LOG2_BITS_PER_UNIT)))
		return true;
	      break;
	    }

	  case BUILT_IN_VA_END:
	    {
	      /* va_end (ap) kills the va_list object itself when REF is
		 based on that very object.  */
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == ADDR_EXPR)
		{
		  tree base = ao_ref_base (ref);
		  if (TREE_OPERAND (ptr, 0) == base)
		    return true;
		}
	      break;
	    }

	  default:;
	  }
    }
  return false;
}
2302
2303 bool
2304 stmt_kills_ref_p (gimple stmt, tree ref)
2305 {
2306 ao_ref r;
2307 ao_ref_init (&r, ref);
2308 return stmt_kills_ref_p_1 (stmt, &r);
2309 }
2310
2311
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET or a statement clobbering the memory reference REF in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.
   *CNT is incremented for every alias query done on the way; *VISITED
   records already-processed PHI results.  If ABORT_ON_VISITED, reaching
   an already-visited name ends the walk unsuccessfully instead of
   successfully.  */

static bool
maybe_skip_until (gimple phi, tree target, ao_ref *ref,
		  tree vuse, unsigned int *cnt, bitmap *visited,
		  bool abort_on_visited)
{
  basic_block bb = gimple_bb (phi);

  /* Lazily allocate the visited bitmap.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  /* Mark PHI itself as visited so recursive walks do not loop back
     into it.  */
  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* An already visited PHI node ends the walk successfully.  */
	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
	    return !abort_on_visited;
	  vuse = get_continuation_for_phi (def_stmt, ref, cnt,
					   visited, abort_on_visited);
	  if (!vuse)
	    return false;
	  continue;
	}
      else if (gimple_nop_p (def_stmt))
	/* Reached the default definition (function entry) without
	   finding TARGET.  */
	return false;
      else
	{
	  /* A clobbering statement or the end of the IL ends it failing.  */
	  ++*cnt;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    return false;
	}
      /* If we reach a new basic-block see if we already skipped it
	 in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}
2365
/* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
   until we hit the phi argument definition that dominates the other one.
   Return that, or NULL_TREE if there is no such definition.
   PHI, REF, CNT, VISITED and ABORT_ON_VISITED are passed through to
   maybe_skip_until.  */

static tree
get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
			    ao_ref *ref, unsigned int *cnt,
			    bitmap *visited, bool abort_on_visited)
{
  gimple def0 = SSA_NAME_DEF_STMT (arg0);
  gimple def1 = SSA_NAME_DEF_STMT (arg1);
  tree common_vuse;

  /* Trivially equal arguments need no walk.  */
  if (arg0 == arg1)
    return arg0;
  /* If ARG0's definition dominates ARG1's (a default definition
     dominates everything), try to walk from ARG1 back to ARG0.  */
  else if (gimple_nop_p (def0)
	   || (!gimple_nop_p (def1)
	       && dominated_by_p (CDI_DOMINATORS,
				  gimple_bb (def1), gimple_bb (def0))))
    {
      if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
			    visited, abort_on_visited))
	return arg0;
    }
  /* Symmetrically for ARG1 dominating ARG0.  */
  else if (gimple_nop_p (def1)
	   || dominated_by_p (CDI_DOMINATORS,
			      gimple_bb (def0), gimple_bb (def1)))
    {
      if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
			    visited, abort_on_visited))
	return arg1;
    }
  /* Special case of a diamond:
       MEM_1 = ...
       goto (cond) ? L1 : L2
       L1: store1 = ...  #MEM_2 = vuse(MEM_1)
	   goto L3
       L2: store2 = ...  #MEM_3 = vuse(MEM_1)
       L3: MEM_4 = PHI<MEM_2, MEM_3>
     We were called with the PHI at L3, MEM_2 and MEM_3 don't
     dominate each other, but still we can easily skip this PHI node
     if we recognize that the vuse MEM operand is the same for both,
     and that we can skip both statements (they don't clobber us).
     This is still linear.  Don't use maybe_skip_until, that might
     potentially be slow.  */
  else if ((common_vuse = gimple_vuse (def0))
	   && common_vuse == gimple_vuse (def1))
    {
      *cnt += 2;
      if (!stmt_may_clobber_ref_p_1 (def0, ref)
	  && !stmt_may_clobber_ref_p_1 (def1, ref))
	return common_vuse;
    }

  return NULL_TREE;
}
2422
2423
2424 /* Starting from a PHI node for the virtual operand of the memory reference
2425 REF find a continuation virtual operand that allows to continue walking
2426 statements dominating PHI skipping only statements that cannot possibly
2427 clobber REF. Increments *CNT for each alias disambiguation done.
2428 Returns NULL_TREE if no suitable virtual operand can be found. */
2429
2430 tree
2431 get_continuation_for_phi (gimple phi, ao_ref *ref,
2432 unsigned int *cnt, bitmap *visited,
2433 bool abort_on_visited)
2434 {
2435 unsigned nargs = gimple_phi_num_args (phi);
2436
2437 /* Through a single-argument PHI we can simply look through. */
2438 if (nargs == 1)
2439 return PHI_ARG_DEF (phi, 0);
2440
2441 /* For two or more arguments try to pairwise skip non-aliasing code
2442 until we hit the phi argument definition that dominates the other one. */
2443 else if (nargs >= 2)
2444 {
2445 tree arg0, arg1;
2446 unsigned i;
2447
2448 /* Find a candidate for the virtual operand which definition
2449 dominates those of all others. */
2450 arg0 = PHI_ARG_DEF (phi, 0);
2451 if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
2452 for (i = 1; i < nargs; ++i)
2453 {
2454 arg1 = PHI_ARG_DEF (phi, i);
2455 if (SSA_NAME_IS_DEFAULT_DEF (arg1))
2456 {
2457 arg0 = arg1;
2458 break;
2459 }
2460 if (dominated_by_p (CDI_DOMINATORS,
2461 gimple_bb (SSA_NAME_DEF_STMT (arg0)),
2462 gimple_bb (SSA_NAME_DEF_STMT (arg1))))
2463 arg0 = arg1;
2464 }
2465
2466 /* Then pairwise reduce against the found candidate. */
2467 for (i = 0; i < nargs; ++i)
2468 {
2469 arg1 = PHI_ARG_DEF (phi, i);
2470 arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
2471 cnt, visited, abort_on_visited);
2472 if (!arg0)
2473 return NULL_TREE;
2474 }
2475
2476 return arg0;
2477 }
2478
2479 return NULL_TREE;
2480 }
2481
/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use for which its defining statement
   does not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE if non-NULL is called with a pointer to REF, the virtual
   use which definition is a statement that may clobber REF and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse,
			void *(*walker)(ao_ref *, tree, unsigned int, void *),
			void *(*translate)(ao_ref *, tree, void *), void *data)
{
  bitmap visited = NULL;
  void *res;
  /* Number of alias queries performed so far; passed to WALKER so it
     can bound the walk.  */
  unsigned int cnt = 0;
  /* Whether TRANSLATE succeeded at least once; makes PHI walks abort
     on revisits since REF may have changed.  */
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple def_stmt;

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, cnt, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
	/* Reached function entry; walk failed.  */
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	/* Try to find a continuation vuse above the PHI.  */
	vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
					 &visited, translated);
      else
	{
	  cnt++;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    {
	      if (!translate)
		break;
	      res = (*translate) (ref, vuse, data);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = true;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
2566
2567
2568 /* Based on the memory reference REF call WALKER for each vdef which
2569 defining statement may clobber REF, starting with VDEF. If REF
2570 is NULL_TREE, each defining statement is visited.
2571
2572 WALKER is called with REF, the current vdef and DATA. If WALKER
2573 returns true the walk is stopped, otherwise it continues.
2574
2575 At PHI nodes walk_aliased_vdefs forks into one walk for reach
2576 PHI argument (but only one walk continues on merge points), the
2577 return value is true if any of the walks was successful.
2578
2579 The function returns the number of statements walked. */
2580
2581 static unsigned int
2582 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2583 bool (*walker)(ao_ref *, tree, void *), void *data,
2584 bitmap *visited, unsigned int cnt)
2585 {
2586 do
2587 {
2588 gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
2589
2590 if (*visited
2591 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2592 return cnt;
2593
2594 if (gimple_nop_p (def_stmt))
2595 return cnt;
2596 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2597 {
2598 unsigned i;
2599 if (!*visited)
2600 *visited = BITMAP_ALLOC (NULL);
2601 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2602 cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
2603 walker, data, visited, 0);
2604 return cnt;
2605 }
2606
2607 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2608 cnt++;
2609 if ((!ref
2610 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
2611 && (*walker) (ref, vdef, data))
2612 return cnt;
2613
2614 vdef = gimple_vuse (def_stmt);
2615 }
2616 while (1);
2617 }
2618
2619 unsigned int
2620 walk_aliased_vdefs (ao_ref *ref, tree vdef,
2621 bool (*walker)(ao_ref *, tree, void *), void *data,
2622 bitmap *visited)
2623 {
2624 bitmap local_visited = NULL;
2625 unsigned int ret;
2626
2627 timevar_push (TV_ALIAS_STMT_WALK);
2628
2629 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2630 visited ? visited : &local_visited, 0);
2631 if (local_visited)
2632 BITMAP_FREE (local_visited);
2633
2634 timevar_pop (TV_ALIAS_STMT_WALK);
2635
2636 return ret;
2637 }
2638