1 /* Alias analysis for trees.
2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "alias.h"
34 #include "fold-const.h"
35 #include "langhooks.h"
36 #include "dumpfile.h"
37 #include "tree-eh.h"
38 #include "tree-dfa.h"
39 #include "ipa-reference.h"
40 #include "varasm.h"
41
42 /* Broad overview of how alias analysis on gimple works:
43
44 Statements clobbering or using memory are linked through the
45 virtual operand factored use-def chain. The virtual operand
46 is unique per function; its symbol is accessible via gimple_vop (cfun).
47 Virtual operands are used for efficiently walking memory statements
48 in the gimple IL and are useful for things like value-numbering as
49 a generation count for memory references.
50
51 SSA_NAME pointers may have associated points-to information
52 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
53 points-to information is (re-)computed by the TODO_rebuild_alias
54 pass manager todo. Points-to information is also used for more
55 precise tracking of call-clobbered and call-used variables and
56 related disambiguations.
57
58 This file contains functions for disambiguating memory references,
59 the so-called alias oracle, and tools for walking the gimple IL.
60
61 The main alias-oracle entry-points are
62
63 bool stmt_may_clobber_ref_p (gimple *, tree)
64
65 This function queries if a statement may invalidate (parts of)
66 the memory designated by the reference tree argument.
67
68 bool ref_maybe_used_by_stmt_p (gimple *, tree)
69
70 This function queries if a statement may need (parts of) the
71 memory designated by the reference tree argument.
72
73 There are variants of these functions that only handle the call
74 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
75 Note that these do not disambiguate against a possible call lhs.
76
77 bool refs_may_alias_p (tree, tree)
78
79 This function tries to disambiguate two reference trees.
80
81 bool ptr_deref_may_alias_global_p (tree)
82
83 This function queries if dereferencing a pointer variable may
84 alias global memory.
85
86 More low-level disambiguators are available and documented in
87 this file. Low-level disambiguators dealing with points-to
88 information are in tree-ssa-structalias.c. */
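
/* Example (illustrative only): given a reference tree REF such as a
   COMPONENT_REF or MEM_REF and a statement STMT, a pass would typically
   combine the entry-points like

     if (ref_maybe_used_by_stmt_p (stmt, ref)
         || stmt_may_clobber_ref_p (stmt, ref))
       ... STMT may read from or write to (parts of) REF ...

   and use refs_may_alias_p (ref1, ref2) when comparing two reference
   trees directly.  */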
89
90
91 /* Query statistics for the different low-level disambiguators.
92 A high-level query may trigger multiple of them. */
93
94 static struct {
95 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
96 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
97 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
98 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
99 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
100 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
101 unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
102 unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
103 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
104 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
105 unsigned HOST_WIDE_INT nonoverlapping_component_refs_of_decl_p_may_alias;
106 unsigned HOST_WIDE_INT nonoverlapping_component_refs_of_decl_p_no_alias;
107 } alias_stats;
108
109 void
110 dump_alias_stats (FILE *s)
111 {
112 fprintf (s, "\nAlias oracle query stats:\n");
113 fprintf (s, " refs_may_alias_p: "
114 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
115 HOST_WIDE_INT_PRINT_DEC" queries\n",
116 alias_stats.refs_may_alias_p_no_alias,
117 alias_stats.refs_may_alias_p_no_alias
118 + alias_stats.refs_may_alias_p_may_alias);
119 fprintf (s, " ref_maybe_used_by_call_p: "
120 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
121 HOST_WIDE_INT_PRINT_DEC" queries\n",
122 alias_stats.ref_maybe_used_by_call_p_no_alias,
123 alias_stats.ref_maybe_used_by_call_p_no_alias
124 + alias_stats.ref_maybe_used_by_call_p_may_alias);
125 fprintf (s, " call_may_clobber_ref_p: "
126 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
127 HOST_WIDE_INT_PRINT_DEC" queries\n",
128 alias_stats.call_may_clobber_ref_p_no_alias,
129 alias_stats.call_may_clobber_ref_p_no_alias
130 + alias_stats.call_may_clobber_ref_p_may_alias);
131 fprintf (s, " nonoverlapping_component_refs_p: "
132 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
133 HOST_WIDE_INT_PRINT_DEC" queries\n",
134 alias_stats.nonoverlapping_component_refs_p_no_alias,
135 alias_stats.nonoverlapping_component_refs_p_no_alias
136 + alias_stats.nonoverlapping_component_refs_p_may_alias);
137 fprintf (s, " nonoverlapping_component_refs_of_decl_p: "
138 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
139 HOST_WIDE_INT_PRINT_DEC" queries\n",
140 alias_stats.nonoverlapping_component_refs_of_decl_p_no_alias,
141 alias_stats.nonoverlapping_component_refs_of_decl_p_no_alias
142 + alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias);
143 fprintf (s, " aliasing_component_refs_p: "
144 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
145 HOST_WIDE_INT_PRINT_DEC" queries\n",
146 alias_stats.aliasing_component_refs_p_no_alias,
147 alias_stats.aliasing_component_refs_p_no_alias
148 + alias_stats.aliasing_component_refs_p_may_alias);
149 dump_alias_stats_in_alias_c (s);
150 }
151
152
153 /* Return true if dereferencing PTR may alias a global variable. */
154
155 bool
156 ptr_deref_may_alias_global_p (tree ptr)
157 {
158 struct ptr_info_def *pi;
159
160 /* If we end up with a pointer constant here, it may point
161 to global memory. */
162 if (TREE_CODE (ptr) != SSA_NAME)
163 return true;
164
165 pi = SSA_NAME_PTR_INFO (ptr);
166
167 /* If we do not have points-to information for this variable,
168 we have to punt. */
169 if (!pi)
170 return true;
171
172 /* ??? This does not use TBAA to prune globals ptr may not access. */
173 return pt_solution_includes_global (&pi->pt);
174 }
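
/* Example (source-level sketch, illustrative only): for

     int g;
     void foo (int *p, int flag)
     {
       int l, *q = flag ? &g : p, *r = &l;
       ...
     }

   dereferencing Q may alias global memory (its points-to set contains
   the global G, and P may point anywhere), while dereferencing R does
   not if points-to analysis determined that R only points to the
   local L.  */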
175
176 /* Return true if dereferencing PTR may alias DECL.
177 The caller is responsible for applying TBAA to see if PTR
178 may access DECL at all. */
179
180 static bool
181 ptr_deref_may_alias_decl_p (tree ptr, tree decl)
182 {
183 struct ptr_info_def *pi;
184
185 /* Conversions are irrelevant for points-to information and
186 data-dependence analysis can feed us those. */
187 STRIP_NOPS (ptr);
188
189 /* Anything we do not explicitly handle aliases. */
190 if ((TREE_CODE (ptr) != SSA_NAME
191 && TREE_CODE (ptr) != ADDR_EXPR
192 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
193 || !POINTER_TYPE_P (TREE_TYPE (ptr))
194 || (!VAR_P (decl)
195 && TREE_CODE (decl) != PARM_DECL
196 && TREE_CODE (decl) != RESULT_DECL))
197 return true;
198
199 /* Disregard pointer offsetting. */
200 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
201 {
202 do
203 {
204 ptr = TREE_OPERAND (ptr, 0);
205 }
206 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
207 return ptr_deref_may_alias_decl_p (ptr, decl);
208 }
209
210 /* ADDR_EXPR pointers either just offset another pointer or directly
211 specify the pointed-to set. */
212 if (TREE_CODE (ptr) == ADDR_EXPR)
213 {
214 tree base = get_base_address (TREE_OPERAND (ptr, 0));
215 if (base
216 && (TREE_CODE (base) == MEM_REF
217 || TREE_CODE (base) == TARGET_MEM_REF))
218 ptr = TREE_OPERAND (base, 0);
219 else if (base
220 && DECL_P (base))
221 return compare_base_decls (base, decl) != 0;
222 else if (base
223 && CONSTANT_CLASS_P (base))
224 return false;
225 else
226 return true;
227 }
228
229 /* Non-aliased variables cannot be pointed to. */
230 if (!may_be_aliased (decl))
231 return false;
232
233 /* If we do not have useful points-to information for this pointer
234 we cannot disambiguate anything else. */
235 pi = SSA_NAME_PTR_INFO (ptr);
236 if (!pi)
237 return true;
238
239 return pt_solution_includes (&pi->pt, decl);
240 }
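
/* Example (source-level sketch, illustrative only): in

     int a, b;
     void foo (void)
     {
       int *p = &a;
       *p = 1;
       b = 2;
     }

   the store through P does not alias the decl B because the points-to
   set of P contains only A; a store through a pointer without useful
   points-to information would conservatively be assumed to alias
   both decls.  */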
241
242 /* Return true if dereferenced PTR1 and PTR2 may alias.
243 The caller is responsible for applying TBAA to see if accesses
244 through PTR1 and PTR2 may conflict at all. */
245
246 bool
247 ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
248 {
249 struct ptr_info_def *pi1, *pi2;
250
251 /* Conversions are irrelevant for points-to information and
252 data-dependence analysis can feed us those. */
253 STRIP_NOPS (ptr1);
254 STRIP_NOPS (ptr2);
255
256 /* Disregard pointer offsetting. */
257 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
258 {
259 do
260 {
261 ptr1 = TREE_OPERAND (ptr1, 0);
262 }
263 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
264 return ptr_derefs_may_alias_p (ptr1, ptr2);
265 }
266 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
267 {
268 do
269 {
270 ptr2 = TREE_OPERAND (ptr2, 0);
271 }
272 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
273 return ptr_derefs_may_alias_p (ptr1, ptr2);
274 }
275
276 /* ADDR_EXPR pointers either just offset another pointer or directly
277 specify the pointed-to set. */
278 if (TREE_CODE (ptr1) == ADDR_EXPR)
279 {
280 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
281 if (base
282 && (TREE_CODE (base) == MEM_REF
283 || TREE_CODE (base) == TARGET_MEM_REF))
284 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
285 else if (base
286 && DECL_P (base))
287 return ptr_deref_may_alias_decl_p (ptr2, base);
288 else
289 return true;
290 }
291 if (TREE_CODE (ptr2) == ADDR_EXPR)
292 {
293 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
294 if (base
295 && (TREE_CODE (base) == MEM_REF
296 || TREE_CODE (base) == TARGET_MEM_REF))
297 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
298 else if (base
299 && DECL_P (base))
300 return ptr_deref_may_alias_decl_p (ptr1, base);
301 else
302 return true;
303 }
304
305 /* From here we require SSA name pointers. Anything else aliases. */
306 if (TREE_CODE (ptr1) != SSA_NAME
307 || TREE_CODE (ptr2) != SSA_NAME
308 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
309 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
310 return true;
311
312 /* We may end up with two empty points-to solutions for the same pointer.
313 In this case we still want to say the pointers alias, so shortcut
314 that here. */
315 if (ptr1 == ptr2)
316 return true;
317
318 /* If we do not have useful points-to information for either pointer
319 we cannot disambiguate anything else. */
320 pi1 = SSA_NAME_PTR_INFO (ptr1);
321 pi2 = SSA_NAME_PTR_INFO (ptr2);
322 if (!pi1 || !pi2)
323 return true;
324
325 /* ??? This does not use TBAA to prune decls from the intersection
326 that not both pointers may access. */
327 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
328 }
329
330 /* Return true if dereferencing PTR may alias *REF.
331 The caller is responsible for applying TBAA to see if PTR
332 may access *REF at all. */
333
334 static bool
335 ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
336 {
337 tree base = ao_ref_base (ref);
338
339 if (TREE_CODE (base) == MEM_REF
340 || TREE_CODE (base) == TARGET_MEM_REF)
341 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
342 else if (DECL_P (base))
343 return ptr_deref_may_alias_decl_p (ptr, base);
344
345 return true;
346 }
347
348 /* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
349
350 bool
351 ptrs_compare_unequal (tree ptr1, tree ptr2)
352 {
353 /* First resolve the pointers down to an SSA name pointer base or
354 a VAR_DECL, PARM_DECL or RESULT_DECL. This explicitly does
355 not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
356 or STRING_CSTs which need points-to adjustments to track them
357 in the points-to sets. */
358 tree obj1 = NULL_TREE;
359 tree obj2 = NULL_TREE;
360 if (TREE_CODE (ptr1) == ADDR_EXPR)
361 {
362 tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
363 if (! tem)
364 return false;
365 if (VAR_P (tem)
366 || TREE_CODE (tem) == PARM_DECL
367 || TREE_CODE (tem) == RESULT_DECL)
368 obj1 = tem;
369 else if (TREE_CODE (tem) == MEM_REF)
370 ptr1 = TREE_OPERAND (tem, 0);
371 }
372 if (TREE_CODE (ptr2) == ADDR_EXPR)
373 {
374 tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
375 if (! tem)
376 return false;
377 if (VAR_P (tem)
378 || TREE_CODE (tem) == PARM_DECL
379 || TREE_CODE (tem) == RESULT_DECL)
380 obj2 = tem;
381 else if (TREE_CODE (tem) == MEM_REF)
382 ptr2 = TREE_OPERAND (tem, 0);
383 }
384
385 /* Canonicalize ptr vs. object. */
386 if (TREE_CODE (ptr1) == SSA_NAME && obj2)
387 {
388 std::swap (ptr1, ptr2);
389 std::swap (obj1, obj2);
390 }
391
392 if (obj1 && obj2)
393 /* Other code handles this correctly, no need to duplicate it here. */;
394 else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
395 {
396 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
397 /* We may not use restrict to optimize pointer comparisons.
398 See PR71062. So we have to assume that the restrict-pointed-to
399 object may in fact be obj1. */
400 if (!pi
401 || pi->pt.vars_contains_restrict
402 || pi->pt.vars_contains_interposable)
403 return false;
404 if (VAR_P (obj1)
405 && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
406 {
407 varpool_node *node = varpool_node::get (obj1);
408 /* If obj1 may bind to NULL give up (see below). */
409 if (! node
410 || ! node->nonzero_address ()
411 || ! decl_binds_to_current_def_p (obj1))
412 return false;
413 }
414 return !pt_solution_includes (&pi->pt, obj1);
415 }
416
417 /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
418 but those require pt.null to be conservatively correct. */
419
420 return false;
421 }
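
/* Example (illustrative only): in

     int a;
     int *foo (int *p)
     {
       if (&a != p)
         return p;
       ...
     }

   the comparison folds to true when the points-to set of P provably
   does not contain A (and A binds to the current definition and has a
   nonzero address); otherwise the test has to be kept.  */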
422
423 /* Returns whether a reference with base BASE may refer to global memory. */
424
425 static bool
426 ref_may_alias_global_p_1 (tree base)
427 {
428 if (DECL_P (base))
429 return is_global_var (base);
430 else if (TREE_CODE (base) == MEM_REF
431 || TREE_CODE (base) == TARGET_MEM_REF)
432 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
433 return true;
434 }
435
436 bool
437 ref_may_alias_global_p (ao_ref *ref)
438 {
439 tree base = ao_ref_base (ref);
440 return ref_may_alias_global_p_1 (base);
441 }
442
443 bool
444 ref_may_alias_global_p (tree ref)
445 {
446 tree base = get_base_address (ref);
447 return ref_may_alias_global_p_1 (base);
448 }
449
450 /* Return true if STMT may clobber global memory. */
451
452 bool
453 stmt_may_clobber_global_p (gimple *stmt)
454 {
455 tree lhs;
456
457 if (!gimple_vdef (stmt))
458 return false;
459
460 /* ??? We can ask the oracle whether an artificial pointer
461 dereference with a pointer with points-to information covering
462 all global memory (what about non-address taken memory?) may be
463 clobbered by this call. As there is at the moment no convenient
464 way of doing that without generating garbage, do some manual
465 checking instead.
466 ??? We could make a NULL ao_ref argument to the various
467 predicates special, meaning any global memory. */
468
469 switch (gimple_code (stmt))
470 {
471 case GIMPLE_ASSIGN:
472 lhs = gimple_assign_lhs (stmt);
473 return (TREE_CODE (lhs) != SSA_NAME
474 && ref_may_alias_global_p (lhs));
475 case GIMPLE_CALL:
476 return true;
477 default:
478 return true;
479 }
480 }
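
/* Example (illustrative only): an assignment  g = x;  to a global G, or
   a store  *p = x;  through a pointer that may point to global memory,
   is reported as possibly clobbering global memory, while  l = x;  to a
   non-aliased local L is not.  Calls are conservatively assumed to
   clobber global memory.  */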
481
482
483 /* Dump alias information on FILE. */
484
485 void
486 dump_alias_info (FILE *file)
487 {
488 unsigned i;
489 tree ptr;
490 const char *funcname
491 = lang_hooks.decl_printable_name (current_function_decl, 2);
492 tree var;
493
494 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
495
496 fprintf (file, "Aliased symbols\n\n");
497
498 FOR_EACH_LOCAL_DECL (cfun, i, var)
499 {
500 if (may_be_aliased (var))
501 dump_variable (file, var);
502 }
503
504 fprintf (file, "\nCall clobber information\n");
505
506 fprintf (file, "\nESCAPED");
507 dump_points_to_solution (file, &cfun->gimple_df->escaped);
508
509 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
510
511 FOR_EACH_SSA_NAME (i, ptr, cfun)
512 {
513 struct ptr_info_def *pi;
514
515 if (!POINTER_TYPE_P (TREE_TYPE (ptr))
516 || SSA_NAME_IN_FREE_LIST (ptr))
517 continue;
518
519 pi = SSA_NAME_PTR_INFO (ptr);
520 if (pi)
521 dump_points_to_info_for (file, ptr);
522 }
523
524 fprintf (file, "\n");
525 }
526
527
528 /* Dump alias information on stderr. */
529
530 DEBUG_FUNCTION void
531 debug_alias_info (void)
532 {
533 dump_alias_info (stderr);
534 }
535
536
537 /* Dump the points-to set *PT into FILE. */
538
539 void
540 dump_points_to_solution (FILE *file, struct pt_solution *pt)
541 {
542 if (pt->anything)
543 fprintf (file, ", points-to anything");
544
545 if (pt->nonlocal)
546 fprintf (file, ", points-to non-local");
547
548 if (pt->escaped)
549 fprintf (file, ", points-to escaped");
550
551 if (pt->ipa_escaped)
552 fprintf (file, ", points-to unit escaped");
553
554 if (pt->null)
555 fprintf (file, ", points-to NULL");
556
557 if (pt->vars)
558 {
559 fprintf (file, ", points-to vars: ");
560 dump_decl_set (file, pt->vars);
561 if (pt->vars_contains_nonlocal
562 || pt->vars_contains_escaped
563 || pt->vars_contains_escaped_heap
564 || pt->vars_contains_restrict)
565 {
566 const char *comma = "";
567 fprintf (file, " (");
568 if (pt->vars_contains_nonlocal)
569 {
570 fprintf (file, "nonlocal");
571 comma = ", ";
572 }
573 if (pt->vars_contains_escaped)
574 {
575 fprintf (file, "%sescaped", comma);
576 comma = ", ";
577 }
578 if (pt->vars_contains_escaped_heap)
579 {
580 fprintf (file, "%sescaped heap", comma);
581 comma = ", ";
582 }
583 if (pt->vars_contains_restrict)
584 {
585 fprintf (file, "%srestrict", comma);
586 comma = ", ";
587 }
588 if (pt->vars_contains_interposable)
589 fprintf (file, "%sinterposable", comma);
590 fprintf (file, ")");
591 }
592 }
593 }
594
595
596 /* Unified dump function for pt_solution. */
597
598 DEBUG_FUNCTION void
599 debug (pt_solution &ref)
600 {
601 dump_points_to_solution (stderr, &ref);
602 }
603
604 DEBUG_FUNCTION void
605 debug (pt_solution *ptr)
606 {
607 if (ptr)
608 debug (*ptr);
609 else
610 fprintf (stderr, "<nil>\n");
611 }
612
613
614 /* Dump points-to information for SSA_NAME PTR into FILE. */
615
616 void
617 dump_points_to_info_for (FILE *file, tree ptr)
618 {
619 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
620
621 print_generic_expr (file, ptr, dump_flags);
622
623 if (pi)
624 dump_points_to_solution (file, &pi->pt);
625 else
626 fprintf (file, ", points-to anything");
627
628 fprintf (file, "\n");
629 }
630
631
632 /* Dump points-to information for VAR into stderr. */
633
634 DEBUG_FUNCTION void
635 debug_points_to_info_for (tree var)
636 {
637 dump_points_to_info_for (stderr, var);
638 }
639
640
641 /* Initializes the alias-oracle reference representation *R from REF. */
642
643 void
644 ao_ref_init (ao_ref *r, tree ref)
645 {
646 r->ref = ref;
647 r->base = NULL_TREE;
648 r->offset = 0;
649 r->size = -1;
650 r->max_size = -1;
651 r->ref_alias_set = -1;
652 r->base_alias_set = -1;
653 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
654 }
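
/* Example (illustrative only): a typical way of using the ao_ref
   machinery is

     ao_ref r;
     ao_ref_init (&r, ref_tree);
     tree base = ao_ref_base (&r);

   where ao_ref_base lazily fills in the base, offset, size and
   max_size fields on first use.  */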
655
656 /* Returns the base object of the memory reference *REF. */
657
658 tree
659 ao_ref_base (ao_ref *ref)
660 {
661 bool reverse;
662
663 if (ref->base)
664 return ref->base;
665 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
666 &ref->max_size, &reverse);
667 return ref->base;
668 }
669
670 /* Returns the base object alias set of the memory reference *REF. */
671
672 alias_set_type
673 ao_ref_base_alias_set (ao_ref *ref)
674 {
675 tree base_ref;
676 if (ref->base_alias_set != -1)
677 return ref->base_alias_set;
678 if (!ref->ref)
679 return 0;
680 base_ref = ref->ref;
681 while (handled_component_p (base_ref))
682 base_ref = TREE_OPERAND (base_ref, 0);
683 ref->base_alias_set = get_alias_set (base_ref);
684 return ref->base_alias_set;
685 }
686
687 /* Returns the reference alias set of the memory reference *REF. */
688
689 alias_set_type
690 ao_ref_alias_set (ao_ref *ref)
691 {
692 if (ref->ref_alias_set != -1)
693 return ref->ref_alias_set;
694 ref->ref_alias_set = get_alias_set (ref->ref);
695 return ref->ref_alias_set;
696 }
697
698 /* Init an alias-oracle reference representation from a gimple pointer
699 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
700 size is assumed to be unknown. The access is assumed to be only
701 to or after the pointer target, not before it. */
702
703 void
704 ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
705 {
706 poly_int64 t, size_hwi, extra_offset = 0;
707 ref->ref = NULL_TREE;
708 if (TREE_CODE (ptr) == SSA_NAME)
709 {
710 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
711 if (gimple_assign_single_p (stmt)
712 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
713 ptr = gimple_assign_rhs1 (stmt);
714 else if (is_gimple_assign (stmt)
715 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
716 && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
717 {
718 ptr = gimple_assign_rhs1 (stmt);
719 extra_offset *= BITS_PER_UNIT;
720 }
721 }
722
723 if (TREE_CODE (ptr) == ADDR_EXPR)
724 {
725 ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
726 if (ref->base)
727 ref->offset = BITS_PER_UNIT * t;
728 else
729 {
730 size = NULL_TREE;
731 ref->offset = 0;
732 ref->base = get_base_address (TREE_OPERAND (ptr, 0));
733 }
734 }
735 else
736 {
737 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
738 ref->base = build2 (MEM_REF, char_type_node,
739 ptr, null_pointer_node);
740 ref->offset = 0;
741 }
742 ref->offset += extra_offset;
743 if (size
744 && poly_int_tree_p (size, &size_hwi)
745 && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
746 ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
747 else
748 ref->max_size = ref->size = -1;
749 ref->ref_alias_set = 0;
750 ref->base_alias_set = 0;
751 ref->volatile_p = false;
752 }
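
/* Example (illustrative only): for a call like  memset (p, 0, n)  one
   can describe the destination access with

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, ptr_arg, size_arg);

   where passing NULL_TREE for the size leaves the access extent
   unknown.  */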
753
754 /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
755 Return -1 if S1 < S2
756 Return 1 if S1 > S2
757 Return 0 if equal or incomparable. */
758
759 static int
760 compare_sizes (tree s1, tree s2)
761 {
762 if (!s1 || !s2)
763 return 0;
764
765 poly_uint64 size1;
766 poly_uint64 size2;
767
768 if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
769 return 0;
770 if (known_lt (size1, size2))
771 return -1;
772 if (known_lt (size2, size1))
773 return 1;
774 return 0;
775 }
776
777 /* Compare TYPE1 and TYPE2 by their sizes.
778 Return -1 if size of TYPE1 < size of TYPE2
779 Return 1 if size of TYPE1 > size of TYPE2
780 Return 0 if types are of equal sizes or we cannot compare them. */
781
782 static int
783 compare_type_sizes (tree type1, tree type2)
784 {
785 /* Be conservative for arrays and vectors. We want to support partial
786 overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
787 while (TREE_CODE (type1) == ARRAY_TYPE
788 || TREE_CODE (type1) == VECTOR_TYPE)
789 type1 = TREE_TYPE (type1);
790 while (TREE_CODE (type2) == ARRAY_TYPE
791 || TREE_CODE (type2) == VECTOR_TYPE)
792 type2 = TREE_TYPE (type2);
793 return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
794 }
795
796 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
797 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
798 decide. */
799
800 static inline int
801 same_type_for_tbaa (tree type1, tree type2)
802 {
803 type1 = TYPE_MAIN_VARIANT (type1);
804 type2 = TYPE_MAIN_VARIANT (type2);
805
806 /* Handle the most common case first. */
807 if (type1 == type2)
808 return 1;
809
810 /* If we would have to do structural comparison bail out. */
811 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
812 || TYPE_STRUCTURAL_EQUALITY_P (type2))
813 return -1;
814
815 /* Compare the canonical types. */
816 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
817 return 1;
818
819 /* ??? Array types are not properly unified in all cases as we have
820 spurious changes in the index types for example. Removing this
821 causes all sorts of problems with the Fortran frontend. */
822 if (TREE_CODE (type1) == ARRAY_TYPE
823 && TREE_CODE (type2) == ARRAY_TYPE)
824 return -1;
825
826 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
827 object of one of its constrained subtypes, e.g. when a function with an
828 unconstrained parameter passed by reference is called on an object and
829 inlined. But, even in the case of a fixed size, type and subtypes are
830 not equivalent enough as to share the same TYPE_CANONICAL, since this
831 would mean that conversions between them are useless, whereas they are
832 not (e.g. type and subtypes can have different modes). So, in the end,
833 they are only guaranteed to have the same alias set. */
834 if (get_alias_set (type1) == get_alias_set (type2))
835 return -1;
836
837 /* The types are known to be not equal. */
838 return 0;
839 }
840
841 /* Return true if TYPE is a composite type (i.e. we may apply one of the
842 handled component references to it). */
843
844 static bool
845 type_has_components_p (tree type)
846 {
847 return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
848 || TREE_CODE (type) == COMPLEX_TYPE;
849 }
850
851 /* Determine if the two component references REF1 and REF2, which are
852 based on access types TYPE1 and TYPE2 and of which at least one is based
853 on an indirect reference, may alias. REF2 is the only one that can
854 be a decl, in which case REF2_IS_DECL is true.
855 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
856 are the respective alias sets. */
857
858 static bool
859 aliasing_component_refs_p (tree ref1,
860 alias_set_type ref1_alias_set,
861 alias_set_type base1_alias_set,
862 poly_int64 offset1, poly_int64 max_size1,
863 tree ref2,
864 alias_set_type ref2_alias_set,
865 alias_set_type base2_alias_set,
866 poly_int64 offset2, poly_int64 max_size2,
867 bool ref2_is_decl)
868 {
869 /* If one reference is a component references through pointers try to find a
870 common base and apply offset based disambiguation. This handles
871 for example
872 struct A { int i; int j; } *q;
873 struct B { struct A a; int k; } *p;
874 disambiguating q->i and p->a.j. */
875 tree base1, base2;
876 tree type1, type2;
877 tree *refp;
878 int same_p1 = 0, same_p2 = 0;
879 bool maybe_match = false;
880 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
881
882 /* Choose bases and base types to search for. */
883 base1 = ref1;
884 while (handled_component_p (base1))
885 {
886 /* Generally access paths are monotonic in the size of the accessed
887 object. The exceptions are trailing arrays of structures, i.e.
888 struct a {int array[0];};
889 or
890 struct a {int array1[0]; int array[];};
891 Such a struct has size 0 but accesses to a.array may have non-zero size.
892 In this case the size of TREE_TYPE (base1) is smaller than
893 size of TREE_TYPE (TREE_OPERAND (base1, 0)).
894
895 Because we compare sizes of arrays just by the sizes of their elements,
896 we only need to care about zero-sized array fields here. */
897 if (TREE_CODE (base1) == COMPONENT_REF
898 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base1, 1))) == ARRAY_TYPE
899 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base1, 1)))
900 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base1, 1)))))
901 && array_at_struct_end_p (base1))
902 {
903 gcc_checking_assert (!end_struct_ref1);
904 end_struct_ref1 = base1;
905 }
906 base1 = TREE_OPERAND (base1, 0);
907 }
908 type1 = TREE_TYPE (base1);
909 base2 = ref2;
910 while (handled_component_p (base2))
911 {
912 if (TREE_CODE (base2) == COMPONENT_REF
913 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base2, 1))) == ARRAY_TYPE
914 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base2, 1)))
915 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (base2, 1)))))
916 && array_at_struct_end_p (base2))
917 {
918 gcc_checking_assert (!end_struct_ref2);
919 end_struct_ref2 = base2;
920 }
921 base2 = TREE_OPERAND (base2, 0);
922 }
923 type2 = TREE_TYPE (base2);
924
925 /* Now search for type1 in the access path of ref2. This
926 would be a common base for doing offset based disambiguation on.
927 This however only makes sense if type2 is big enough to hold type1. */
928 int cmp_outer = compare_type_sizes (type2, type1);
929
930 /* If type2 is big enough to contain type1 walk its access path.
931 We also need to take care of arrays at the end of structs that may
932 extend beyond the end of the structure. */
933 if (cmp_outer >= 0
934 || (end_struct_ref2
935 && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
936 {
937 refp = &ref2;
938 while (true)
939 {
940 /* We walk from the inner type to the outer types. If the type we see is
941 already too large to be part of type1, terminate the search. */
942 int cmp = compare_type_sizes (type1, TREE_TYPE (*refp));
943
944 if (cmp < 0
945 && (!end_struct_ref1
946 || compare_type_sizes (TREE_TYPE (end_struct_ref1),
947 TREE_TYPE (*refp)) < 0))
948 break;
949 /* If types may be of same size, see if we can decide about their
950 equality. */
951 if (cmp == 0)
952 {
953 same_p2 = same_type_for_tbaa (TREE_TYPE (*refp), type1);
954 if (same_p2 == 1)
955 break;
956 /* In case we cannot decide whether the types are the same, keep
957 looking for an exact match.
958 Remember however that we possibly saw a match, so that we can
959 bypass the access path continuation tests we do later. */
960 if (same_p2 == -1)
961 maybe_match = true;
962 }
963 if (!handled_component_p (*refp))
964 break;
965 refp = &TREE_OPERAND (*refp, 0);
966 }
967 if (same_p2 == 1)
968 {
969 poly_int64 offadj, sztmp, msztmp;
970 bool reverse;
971
972 /* We assume that arrays can overlap by a multiple of their element
973 size as tested in gcc.dg/torture/alias-2.c.
974 This partial overlap happens only when both arrays are bases of
975 the access and not contained within another component ref.
976 To be safe we also assume partial overlap for VLAs. */
977 if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
978 && (!TYPE_SIZE (TREE_TYPE (base1))
979 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
980 || (*refp == base2 && !ref2_is_decl)))
981 {
982 ++alias_stats.aliasing_component_refs_p_may_alias;
983 return true;
984 }
985
986 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
987 offset2 -= offadj;
988 get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
989 offset1 -= offadj;
990 if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
991 {
992 ++alias_stats.aliasing_component_refs_p_may_alias;
993 return true;
994 }
995 else
996 {
997 ++alias_stats.aliasing_component_refs_p_no_alias;
998 return false;
999 }
1000 }
1001 }
1002
1003 /* If we didn't find a common base, try the other way around. */
1004 if (cmp_outer <= 0
1005 || (end_struct_ref1
1006 && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
1007 {
1008 refp = &ref1;
1009 while (true)
1010 {
1011 int cmp = compare_type_sizes (type2, TREE_TYPE (*refp));
1012 if (cmp < 0
1013 && (!end_struct_ref2
1014 || compare_type_sizes (TREE_TYPE (end_struct_ref2),
1015 TREE_TYPE (*refp)) < 0))
1016 break;
1017 /* If types may be of same size, see if we can decide about their
1018 equality. */
1019 if (cmp == 0)
1020 {
1021 same_p1 = same_type_for_tbaa (TREE_TYPE (*refp), type2);
1022 if (same_p1 == 1)
1023 break;
1024 if (same_p1 == -1)
1025 maybe_match = true;
1026 }
1027 if (!handled_component_p (*refp))
1028 break;
1029 refp = &TREE_OPERAND (*refp, 0);
1030 }
1031 if (same_p1 == 1)
1032 {
1033 poly_int64 offadj, sztmp, msztmp;
1034 bool reverse;
1035
1036 if (TREE_CODE (TREE_TYPE (base2)) == ARRAY_TYPE
1037 && (!TYPE_SIZE (TREE_TYPE (base2))
1038 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base2))) != INTEGER_CST
1039 || (*refp == base1 && !ref2_is_decl)))
1040 {
1041 ++alias_stats.aliasing_component_refs_p_may_alias;
1042 return true;
1043 }
1044
1045 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
1046 offset1 -= offadj;
1047 get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
1048 offset2 -= offadj;
1049 if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1050 {
1051 ++alias_stats.aliasing_component_refs_p_may_alias;
1052 return true;
1053 }
1054 else
1055 {
1056 ++alias_stats.aliasing_component_refs_p_no_alias;
1057 return false;
1058 }
1059 }
1060 }
1061
1062 /* In the following code we make the assumption that the types in the
1063 access paths do not overlap and thus accesses alias only if one path
1064 can be a continuation of another. If we were not able to decide about
1065 equivalence, we need to give up. */
1066 if (maybe_match)
1067 return true;
1068
1069 /* If we have two type access paths B1.path1 and B2.path2 they may
1070 only alias if either B1 is in B2.path2 or B2 is in B1.path1.
1071 But we can still have a path that goes B1.path1...B2.path2 with
1072 a part that we do not see. So we can only disambiguate now
1073 if there is no B2 in the tail of path1 and no B1 on the
1074 tail of path2. */
1075 if (compare_type_sizes (TREE_TYPE (ref2), type1) >= 0
1076 && (!end_struct_ref1
1077 || compare_type_sizes (TREE_TYPE (ref2),
1078 TREE_TYPE (end_struct_ref1)) >= 0)
1079 && type_has_components_p (TREE_TYPE (ref2))
1080 && (base1_alias_set == ref2_alias_set
1081 || alias_set_subset_of (base1_alias_set, ref2_alias_set)))
1082 {
1083 ++alias_stats.aliasing_component_refs_p_may_alias;
1084 return true;
1085 }
1086 /* If this is ptr vs. decl then we know there is no ptr ... decl path. */
1087 if (!ref2_is_decl
1088 && compare_type_sizes (TREE_TYPE (ref1), type2) >= 0
1089 && (!end_struct_ref2
1090 || compare_type_sizes (TREE_TYPE (ref1),
1091 TREE_TYPE (end_struct_ref2)) >= 0)
1092 && type_has_components_p (TREE_TYPE (ref1))
1093 && (base2_alias_set == ref1_alias_set
1094 || alias_set_subset_of (base2_alias_set, ref1_alias_set)))
1095 {
1096 ++alias_stats.aliasing_component_refs_p_may_alias;
1097 return true;
1098 }
1099 ++alias_stats.aliasing_component_refs_p_no_alias;
1100 return false;
1101 }
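
/* Example (illustrative only): with the types from the comment above,

     struct A { int i; int j; } *q;
     struct B { struct A a; int k; } *p;

   q->i and p->a.j are disambiguated by locating struct A inside the
   access path of p->a.j, re-basing both offsets relative to that common
   type and observing that the ranges covered by fields i and j do not
   overlap, while q->i and p->a.i keep conflicting since they access the
   same offset within the common type.  */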
1102
1103 /* Return true if we can determine that component references REF1 and REF2,
1104 that are within a common DECL, cannot overlap. */
1105
1106 static bool
1107 nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
1108 {
1109 auto_vec<tree, 16> component_refs1;
1110 auto_vec<tree, 16> component_refs2;
1111
1112 /* Create the stack of handled components for REF1. */
1113 while (handled_component_p (ref1))
1114 {
1115 component_refs1.safe_push (ref1);
1116 ref1 = TREE_OPERAND (ref1, 0);
1117 }
1118 if (TREE_CODE (ref1) == MEM_REF)
1119 {
1120 if (!integer_zerop (TREE_OPERAND (ref1, 1)))
1121 {
1122 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1123 return false;
1124 }
1125 ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
1126 }
1127
1128 /* Create the stack of handled components for REF2. */
1129 while (handled_component_p (ref2))
1130 {
1131 component_refs2.safe_push (ref2);
1132 ref2 = TREE_OPERAND (ref2, 0);
1133 }
1134 if (TREE_CODE (ref2) == MEM_REF)
1135 {
1136 if (!integer_zerop (TREE_OPERAND (ref2, 1)))
1137 {
1138 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1139 return false;
1140 }
1141 ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
1142 }
1143
1144 /* Bases must be either same or uncomparable. */
1145 gcc_checking_assert (ref1 == ref2
1146 || (DECL_P (ref1) && DECL_P (ref2)
1147 && compare_base_decls (ref1, ref2) != 0));
1148
1149 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1150 rank. This is sufficient because we start from the same DECL and you
1151 cannot reference several fields at a time with COMPONENT_REFs (unlike
1152 with ARRAY_RANGE_REFs for arrays) so you always need the same number
1153 of them to access a sub-component, unless you're in a union, in which
1154 case the return value will precisely be false. */
1155 while (true)
1156 {
1157 do
1158 {
1159 if (component_refs1.is_empty ())
1160 {
1161 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1162 return false;
1163 }
1164 ref1 = component_refs1.pop ();
1165 }
1166 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1167
1168 do
1169 {
1170 if (component_refs2.is_empty ())
1171 {
1172 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1173 return false;
1174 }
1175 ref2 = component_refs2.pop ();
1176 }
1177 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1178
1179 /* Beware of BIT_FIELD_REF. */
1180 if (TREE_CODE (ref1) != COMPONENT_REF
1181 || TREE_CODE (ref2) != COMPONENT_REF)
1182 {
1183 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1184 return false;
1185 }
1186
1187 tree field1 = TREE_OPERAND (ref1, 1);
1188 tree field2 = TREE_OPERAND (ref2, 1);
1189
1190 /* ??? We cannot simply use the type of operand #0 of the refs here
1191 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1192 for common blocks instead of using unions like everyone else. */
1193 tree type1 = DECL_CONTEXT (field1);
1194 tree type2 = DECL_CONTEXT (field2);
1195
1196 /* We cannot disambiguate fields in a union or qualified union. */
1197 if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
1198 {
1199 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1200 return false;
1201 }
1202
1203 if (field1 != field2)
1204 {
1205 /* A field and its representative need to be considered the
1206 same. */
1207 if (DECL_BIT_FIELD_REPRESENTATIVE (field1) == field2
1208 || DECL_BIT_FIELD_REPRESENTATIVE (field2) == field1)
1209 {
1210 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1211 return false;
1212 }
1213 /* Different fields of the same record type cannot overlap.
1214 ??? Bitfields can overlap at RTL level so punt on them. */
1215 if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
1216 {
1217 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1218 return false;
1219 }
1220 ++alias_stats.nonoverlapping_component_refs_of_decl_p_no_alias;
1221 return true;
1222 }
1223 }
1224
1225 ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
1226 return false;
1227 }
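
/* Example (illustrative only): for

     struct S { int f1; int f2; } s;

   the references s.f1 and s.f2 are recognized as non-overlapping
   because both component stacks resolve to distinct FIELD_DECLs of the
   same RECORD_TYPE, whereas accesses to different members of a union
   based on the same decl are not disambiguated.  */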
1228
1229 /* qsort compare function to sort FIELD_DECLs by their
1230 DECL_FIELD_CONTEXT TYPE_UID. */
1231
1232 static inline int
1233 ncr_compar (const void *field1_, const void *field2_)
1234 {
1235 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1236 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1237 unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
1238 unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
1239 if (uid1 < uid2)
1240 return -1;
1241 else if (uid1 > uid2)
1242 return 1;
1243 return 0;
1244 }
1245
1246 /* Return true if we can determine that the fields referenced cannot
1247 overlap for any pair of objects. */
1248
1249 static bool
1250 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1251 {
1252 if (!flag_strict_aliasing
1253 || !x || !y
1254 || !handled_component_p (x)
1255 || !handled_component_p (y))
1256 {
1257 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1258 return false;
1259 }
1260
1261 auto_vec<const_tree, 16> fieldsx;
1262 while (handled_component_p (x))
1263 {
1264 if (TREE_CODE (x) == COMPONENT_REF)
1265 {
1266 tree field = TREE_OPERAND (x, 1);
1267 tree type = DECL_FIELD_CONTEXT (field);
1268 if (TREE_CODE (type) == RECORD_TYPE)
1269 fieldsx.safe_push (field);
1270 }
1271 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR)
1272 fieldsx.truncate (0);
1273 x = TREE_OPERAND (x, 0);
1274 }
1275 if (fieldsx.length () == 0)
1276 return false;
1277 auto_vec<const_tree, 16> fieldsy;
1278 while (handled_component_p (y))
1279 {
1280 if (TREE_CODE (y) == COMPONENT_REF)
1281 {
1282 tree field = TREE_OPERAND (y, 1);
1283 tree type = DECL_FIELD_CONTEXT (field);
1284 if (TREE_CODE (type) == RECORD_TYPE)
1285 fieldsy.safe_push (TREE_OPERAND (y, 1));
1286 }
1287 else if (TREE_CODE (y) == VIEW_CONVERT_EXPR)
1288 fieldsy.truncate (0);
1289 y = TREE_OPERAND (y, 0);
1290 }
1291 if (fieldsy.length () == 0)
1292 {
1293 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1294 return false;
1295 }
1296
1297 /* Most common case first. */
1298 if (fieldsx.length () == 1
1299 && fieldsy.length () == 1)
1300 {
1301 if ((DECL_FIELD_CONTEXT (fieldsx[0])
1302 == DECL_FIELD_CONTEXT (fieldsy[0]))
1303 && fieldsx[0] != fieldsy[0]
1304 && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])))
1305 {
1306 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1307 return true;
1308 }
1309 else
1310 {
1311 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1312 return false;
1313 }
1314 }
1315
1316 if (fieldsx.length () == 2)
1317 {
1318 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1319 std::swap (fieldsx[0], fieldsx[1]);
1320 }
1321 else
1322 fieldsx.qsort (ncr_compar);
1323
1324 if (fieldsy.length () == 2)
1325 {
1326 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1327 std::swap (fieldsy[0], fieldsy[1]);
1328 }
1329 else
1330 fieldsy.qsort (ncr_compar);
1331
1332 unsigned i = 0, j = 0;
1333 do
1334 {
1335 const_tree fieldx = fieldsx[i];
1336 const_tree fieldy = fieldsy[j];
1337 tree typex = DECL_FIELD_CONTEXT (fieldx);
1338 tree typey = DECL_FIELD_CONTEXT (fieldy);
1339 if (typex == typey)
1340 {
1341 /* We're left with accessing different fields of a structure,
1342 no possible overlap. */
1343 if (fieldx != fieldy)
1344 {
1345 /* A field and its representative need to be considered the
1346 same. */
1347 if (DECL_BIT_FIELD_REPRESENTATIVE (fieldx) == fieldy
1348 || DECL_BIT_FIELD_REPRESENTATIVE (fieldy) == fieldx)
1349 {
1350 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1351 return false;
1352 }
1353 /* Different fields of the same record type cannot overlap.
1354 ??? Bitfields can overlap at RTL level so punt on them. */
1355 if (DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy))
1356 {
1357 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1358 return false;
1359 }
1360 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1361 return true;
1362 }
1363 }
1364 if (TYPE_UID (typex) < TYPE_UID (typey))
1365 {
1366 i++;
1367 if (i == fieldsx.length ())
1368 break;
1369 }
1370 else
1371 {
1372 j++;
1373 if (j == fieldsy.length ())
1374 break;
1375 }
1376 }
1377 while (1);
1378
1379 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1380 return false;
1381 }
1382
1383
1384 /* Return true if two memory references based on the variables BASE1
1385 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1386 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1387 if non-NULL are the complete memory reference trees. */
1388
1389 static bool
1390 decl_refs_may_alias_p (tree ref1, tree base1,
1391 poly_int64 offset1, poly_int64 max_size1,
1392 tree ref2, tree base2,
1393 poly_int64 offset2, poly_int64 max_size2)
1394 {
1395 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
1396
1397 /* If both references are based on different variables, they cannot alias. */
1398 if (compare_base_decls (base1, base2) == 0)
1399 return false;
1400
1401 /* If both references are based on the same variable, they cannot alias if
1402 the accesses do not overlap. */
1403 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1404 return false;
1405
1406 /* For components with variable position, the above test isn't sufficient,
1407 so we disambiguate component references manually. */
1408 if (ref1 && ref2
1409 && handled_component_p (ref1) && handled_component_p (ref2)
1410 && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
1411 return false;
1412
1413 return true;
1414 }
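
/* Example (illustrative only, assuming 32-bit int): for

     int a[8];

   the accesses a[0] and a[2] share the same base decl but occupy the
   bit ranges [0, 32) and [64, 96) respectively, so the range check
   above disambiguates them, while a[i] with unknown i keeps a maximum
   size covering the whole array and is not disambiguated this way.  */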
1415
1416 /* Return true if an indirect reference based on *PTR1 constrained
1417 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1418 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
1419 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1420 in which case they are computed on-demand. REF1 and REF2
1421 if non-NULL are the complete memory reference trees. */
1422
1423 static bool
1424 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1425 poly_int64 offset1, poly_int64 max_size1,
1426 alias_set_type ref1_alias_set,
1427 alias_set_type base1_alias_set,
1428 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1429 poly_int64 offset2, poly_int64 max_size2,
1430 alias_set_type ref2_alias_set,
1431 alias_set_type base2_alias_set, bool tbaa_p)
1432 {
1433 tree ptr1;
1434 tree ptrtype1, dbase2;
1435
1436 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1437 || TREE_CODE (base1) == TARGET_MEM_REF)
1438 && DECL_P (base2));
1439
1440 ptr1 = TREE_OPERAND (base1, 0);
1441 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1442
1443 /* If only one reference is based on a variable, they cannot alias if
1444 the pointer access is beyond the extent of the variable access.
1445 (the pointer base cannot validly point to an offset less than zero
1446 of the variable).
1447 ??? IVOPTs creates bases that do not honor this restriction,
1448 so do not apply this optimization for TARGET_MEM_REFs. */
1449 if (TREE_CODE (base1) != TARGET_MEM_REF
1450 && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
1451 return false;
1452 /* They also cannot alias if the pointer may not point to the decl. */
1453 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
1454 return false;
1455
1456 /* Disambiguations that rely on strict aliasing rules follow. */
1457 if (!flag_strict_aliasing || !tbaa_p)
1458 return true;
1459
1460 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1461
1462 /* If the alias set for a pointer access is zero all bets are off. */
1463 if (base1_alias_set == 0)
1464 return true;
1465
1466 /* When we are trying to disambiguate an access with a pointer dereference
1467 as base versus one with a decl as base we can use both the size
1468 of the decl and its dynamic type for extra disambiguation.
1469 ??? We do not know anything about the dynamic type of the decl
1470 other than that its alias-set contains base2_alias_set as a subset
1471 which does not help us here. */
1472 /* As we know nothing useful about the dynamic type of the decl just
1473 use the usual conflict check rather than a subset test.
1474 ??? We could introduce -fvery-strict-aliasing when the language
1475 does not allow decls to have a dynamic type that differs from their
1476 static type. Then we can check
1477 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
1478 if (base1_alias_set != base2_alias_set
1479 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1480 return false;
1481 /* If the size of the access relevant for TBAA through the pointer
1482 is bigger than the size of the decl we can't possibly access the
1483 decl via that pointer. */
1484 if (/* ??? This in turn may run afoul of the case where a decl of type T
1485 which is a member of union type U is accessed through a pointer
1486 to type U and sizeof T is smaller than sizeof U. */
1487 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
1488 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
1489 && compare_sizes (DECL_SIZE (base2),
1490 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
1491 return false;
1492
1493 if (!ref2)
1494 return true;
1495
1496 /* If the decl is accessed via a MEM_REF, reconstruct the base
1497 we can use for TBAA and an appropriately adjusted offset. */
1498 dbase2 = ref2;
1499 while (handled_component_p (dbase2))
1500 dbase2 = TREE_OPERAND (dbase2, 0);
1501 poly_int64 doffset1 = offset1;
1502 poly_offset_int doffset2 = offset2;
1503 if (TREE_CODE (dbase2) == MEM_REF
1504 || TREE_CODE (dbase2) == TARGET_MEM_REF)
1505 {
1506 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
1507 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
1508 /* If second reference is view-converted, give up now. */
1509 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
1510 return true;
1511 }
1512
1513 /* If first reference is view-converted, give up now. */
1514 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
1515 return true;
1516
1517 /* If both references are through the same type, they do not alias
1518 if the accesses do not overlap. This does extra disambiguation
1519 for mixed/pointer accesses but requires strict aliasing.
1520 For MEM_REFs we require that the component-ref offset we computed
1521 is relative to the start of the type which we ensure by
1522 comparing rvalue and access type and disregarding the constant
1523 pointer offset.
1524
1525 But avoid treating variable length arrays as "objects", instead assume they
1526 can overlap by an exact multiple of their element size.
1527 See gcc.dg/torture/alias-2.c. */
1528 if (((TREE_CODE (base1) != TARGET_MEM_REF
1529 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1530 && (TREE_CODE (dbase2) != TARGET_MEM_REF
1531 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
1532 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1
1533 && (TREE_CODE (TREE_TYPE (base1)) != ARRAY_TYPE
1534 || (TYPE_SIZE (TREE_TYPE (base1))
1535 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) == INTEGER_CST))
1536 && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
1537 return false;
1538
1539 if (ref1 && ref2
1540 && nonoverlapping_component_refs_p (ref1, ref2))
1541 return false;
1542
1543 /* Do access-path based disambiguation. */
1544 if (ref1 && ref2
1545 && (handled_component_p (ref1) || handled_component_p (ref2)))
1546 return aliasing_component_refs_p (ref1,
1547 ref1_alias_set, base1_alias_set,
1548 offset1, max_size1,
1549 ref2,
1550 ref2_alias_set, base2_alias_set,
1551 offset2, max_size2,
1552 /* Only if the other reference is actual
1553 decl we can safely check only toplevel
1554 part of access path 1. */
1555 same_type_for_tbaa (TREE_TYPE (dbase2),
1556 TREE_TYPE (base2))
1557 == 1);
1558
1559 return true;
1560 }
1561
1562 /* Return true if two indirect references based on *PTR1
1563 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1564 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
1565 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1566 in which case they are computed on-demand. REF1 and REF2
1567 if non-NULL are the complete memory reference trees. */
1568
1569 static bool
1570 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1571 poly_int64 offset1, poly_int64 max_size1,
1572 alias_set_type ref1_alias_set,
1573 alias_set_type base1_alias_set,
1574 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1575 poly_int64 offset2, poly_int64 max_size2,
1576 alias_set_type ref2_alias_set,
1577 alias_set_type base2_alias_set, bool tbaa_p)
1578 {
1579 tree ptr1;
1580 tree ptr2;
1581 tree ptrtype1, ptrtype2;
1582
1583 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1584 || TREE_CODE (base1) == TARGET_MEM_REF)
1585 && (TREE_CODE (base2) == MEM_REF
1586 || TREE_CODE (base2) == TARGET_MEM_REF));
1587
1588 ptr1 = TREE_OPERAND (base1, 0);
1589 ptr2 = TREE_OPERAND (base2, 0);
1590
1591 /* If both bases are based on pointers they cannot alias if they may not
1592 point to the same memory object or if they point to the same object
1593 and the accesses do not overlap. */
1594 if ((!cfun || gimple_in_ssa_p (cfun))
1595 && operand_equal_p (ptr1, ptr2, 0)
1596 && (((TREE_CODE (base1) != TARGET_MEM_REF
1597 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1598 && (TREE_CODE (base2) != TARGET_MEM_REF
1599 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
1600 || (TREE_CODE (base1) == TARGET_MEM_REF
1601 && TREE_CODE (base2) == TARGET_MEM_REF
1602 && (TMR_STEP (base1) == TMR_STEP (base2)
1603 || (TMR_STEP (base1) && TMR_STEP (base2)
1604 && operand_equal_p (TMR_STEP (base1),
1605 TMR_STEP (base2), 0)))
1606 && (TMR_INDEX (base1) == TMR_INDEX (base2)
1607 || (TMR_INDEX (base1) && TMR_INDEX (base2)
1608 && operand_equal_p (TMR_INDEX (base1),
1609 TMR_INDEX (base2), 0)))
1610 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
1611 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
1612 && operand_equal_p (TMR_INDEX2 (base1),
1613 TMR_INDEX2 (base2), 0))))))
1614 {
1615 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1616 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
1617 if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
1618 offset2 + moff2, max_size2))
1619 return false;
1620 }
1621 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
1622 return false;
1623
1624 /* Disambiguations that rely on strict aliasing rules follow. */
1625 if (!flag_strict_aliasing || !tbaa_p)
1626 return true;
1627
1628 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1629 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
1630
1631 /* If the alias set for a pointer access is zero all bets are off. */
1632 if (base1_alias_set == 0
1633 || base2_alias_set == 0)
1634 return true;
1635
1636 /* Do type-based disambiguation. */
1637 if (base1_alias_set != base2_alias_set
1638 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1639 return false;
1640
1641 /* If either reference is view-converted, give up now. */
1642 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
1643 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
1644 return true;
1645
1646 /* If both references are through the same type, they do not alias
1647 if the accesses do not overlap. This does extra disambiguation
1648 for mixed/pointer accesses but requires strict aliasing. */
1649 if ((TREE_CODE (base1) != TARGET_MEM_REF
1650 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1651 && (TREE_CODE (base2) != TARGET_MEM_REF
1652 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
1653 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
1654 TREE_TYPE (ptrtype2)) == 1
1655 /* But avoid treating arrays as "objects", instead assume they
1656 can overlap by an exact multiple of their element size.
1657 See gcc.dg/torture/alias-2.c. */
1658 && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE
1659 && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1660 return false;
1661
1662 if (ref1 && ref2
1663 && nonoverlapping_component_refs_p (ref1, ref2))
1664 return false;
1665
1666 /* Do access-path based disambiguation. */
1667 if (ref1 && ref2
1668 && (handled_component_p (ref1) || handled_component_p (ref2)))
1669 return aliasing_component_refs_p (ref1,
1670 ref1_alias_set, base1_alias_set,
1671 offset1, max_size1,
1672 ref2,
1673 ref2_alias_set, base2_alias_set,
1674 offset2, max_size2, false);
1675
1676 return true;
1677 }
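
/* Example (illustrative only): for

     void foo (int *p, float *q) { *p = 1; *q = 2.0f; }

   the two indirect stores are disambiguated by TBAA because the alias
   sets of int and float do not conflict; with -fno-strict-aliasing (or
   when TBAA_P is false) both stores must be assumed to possibly
   overlap.  */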
1678
1679 /* Return true if the two memory references REF1 and REF2 may alias. */
1680
1681 static bool
1682 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1683 {
1684 tree base1, base2;
1685 poly_int64 offset1 = 0, offset2 = 0;
1686 poly_int64 max_size1 = -1, max_size2 = -1;
1687 bool var1_p, var2_p, ind1_p, ind2_p;
1688
1689 gcc_checking_assert ((!ref1->ref
1690 || TREE_CODE (ref1->ref) == SSA_NAME
1691 || DECL_P (ref1->ref)
1692 || TREE_CODE (ref1->ref) == STRING_CST
1693 || handled_component_p (ref1->ref)
1694 || TREE_CODE (ref1->ref) == MEM_REF
1695 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
1696 && (!ref2->ref
1697 || TREE_CODE (ref2->ref) == SSA_NAME
1698 || DECL_P (ref2->ref)
1699 || TREE_CODE (ref2->ref) == STRING_CST
1700 || handled_component_p (ref2->ref)
1701 || TREE_CODE (ref2->ref) == MEM_REF
1702 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
1703
1704 /* Decompose the references into their base objects and the access. */
1705 base1 = ao_ref_base (ref1);
1706 offset1 = ref1->offset;
1707 max_size1 = ref1->max_size;
1708 base2 = ao_ref_base (ref2);
1709 offset2 = ref2->offset;
1710 max_size2 = ref2->max_size;
1711
1712 /* We can end up with registers or constants as bases for example from
1713 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1714 which is seen as a struct copy. */
1715 if (TREE_CODE (base1) == SSA_NAME
1716 || TREE_CODE (base1) == CONST_DECL
1717 || TREE_CODE (base1) == CONSTRUCTOR
1718 || TREE_CODE (base1) == ADDR_EXPR
1719 || CONSTANT_CLASS_P (base1)
1720 || TREE_CODE (base2) == SSA_NAME
1721 || TREE_CODE (base2) == CONST_DECL
1722 || TREE_CODE (base2) == CONSTRUCTOR
1723 || TREE_CODE (base2) == ADDR_EXPR
1724 || CONSTANT_CLASS_P (base2))
1725 return false;
1726
1727 /* We can end up referring to code via function and label decls.
1728 As we likely do not properly track code aliases, conservatively
1729 bail out. */
1730 if (TREE_CODE (base1) == FUNCTION_DECL
1731 || TREE_CODE (base1) == LABEL_DECL
1732 || TREE_CODE (base2) == FUNCTION_DECL
1733 || TREE_CODE (base2) == LABEL_DECL)
1734 return true;
1735
1736 /* Two volatile accesses always conflict. */
1737 if (ref1->volatile_p
1738 && ref2->volatile_p)
1739 return true;
1740
1741 /* Defer to simple offset-based disambiguation if we have
1742 references based on two decls. Do this before deferring to
1743 TBAA to handle must-alias cases in conformance with the
1744 GCC extension of allowing type-punning through unions. */
1745 var1_p = DECL_P (base1);
1746 var2_p = DECL_P (base2);
1747 if (var1_p && var2_p)
1748 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
1749 ref2->ref, base2, offset2, max_size2);
1750
1751 /* Handle restrict based accesses.
1752 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
1753 here. */
1754 tree rbase1 = base1;
1755 tree rbase2 = base2;
1756 if (var1_p)
1757 {
1758 rbase1 = ref1->ref;
1759 if (rbase1)
1760 while (handled_component_p (rbase1))
1761 rbase1 = TREE_OPERAND (rbase1, 0);
1762 }
1763 if (var2_p)
1764 {
1765 rbase2 = ref2->ref;
1766 if (rbase2)
1767 while (handled_component_p (rbase2))
1768 rbase2 = TREE_OPERAND (rbase2, 0);
1769 }
1770 if (rbase1 && rbase2
1771 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
1772 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
1773 /* If the accesses are in the same restrict clique... */
1774 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
1775 /* But based on different pointers they do not alias. */
1776 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
1777 return false;
1778
1779 ind1_p = (TREE_CODE (base1) == MEM_REF
1780 || TREE_CODE (base1) == TARGET_MEM_REF);
1781 ind2_p = (TREE_CODE (base2) == MEM_REF
1782 || TREE_CODE (base2) == TARGET_MEM_REF);
1783
1784 /* Canonicalize the pointer-vs-decl case. */
1785 if (ind1_p && var2_p)
1786 {
1787 std::swap (offset1, offset2);
1788 std::swap (max_size1, max_size2);
1789 std::swap (base1, base2);
1790 std::swap (ref1, ref2);
1791 var1_p = true;
1792 ind1_p = false;
1793 var2_p = false;
1794 ind2_p = true;
1795 }
1796
1797 /* First defer to TBAA if possible. */
1798 if (tbaa_p
1799 && flag_strict_aliasing
1800 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
1801 ao_ref_alias_set (ref2)))
1802 return false;
1803
1804 /* If the reference is based on a pointer that points to memory
1805 that may not be written to then the other reference cannot possibly
1806 clobber it. */
1807 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
1808 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
1809 || (ind1_p
1810 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
1811 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
1812 return false;
1813
1814 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
1815 if (var1_p && ind2_p)
1816 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
1817 offset2, max_size2,
1818 ao_ref_alias_set (ref2),
1819 ao_ref_base_alias_set (ref2),
1820 ref1->ref, base1,
1821 offset1, max_size1,
1822 ao_ref_alias_set (ref1),
1823 ao_ref_base_alias_set (ref1),
1824 tbaa_p);
1825 else if (ind1_p && ind2_p)
1826 return indirect_refs_may_alias_p (ref1->ref, base1,
1827 offset1, max_size1,
1828 ao_ref_alias_set (ref1),
1829 ao_ref_base_alias_set (ref1),
1830 ref2->ref, base2,
1831 offset2, max_size2,
1832 ao_ref_alias_set (ref2),
1833 ao_ref_base_alias_set (ref2),
1834 tbaa_p);
1835
1836 gcc_unreachable ();
1837 }
1838
1839 /* Return true, if the two memory references REF1 and REF2 may alias
1840 and update statistics. */
1841
1842 bool
1843 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1844 {
1845 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
1846 if (res)
1847 ++alias_stats.refs_may_alias_p_may_alias;
1848 else
1849 ++alias_stats.refs_may_alias_p_no_alias;
1850 return res;
1851 }
1852
1853 static bool
1854 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
1855 {
1856 ao_ref r1;
1857 ao_ref_init (&r1, ref1);
1858 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
1859 }
1860
1861 bool
1862 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
1863 {
1864 ao_ref r1, r2;
1865 ao_ref_init (&r1, ref1);
1866 ao_ref_init (&r2, ref2);
1867 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
1868 }
1869
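/* A minimal usage sketch for the refs_may_alias_p overloads above,
   assuming a hypothetical pass that wants to know whether the two
   operands of an aggregate copy may overlap (STMT is a hypothetical
   gimple assignment):

       tree lhs = gimple_assign_lhs (stmt);
       tree rhs = gimple_assign_rhs1 (stmt);
       if (!refs_may_alias_p (lhs, rhs, true))
         ...  the copy may be expanded as if by memcpy  ...

   Passing false for the TBAA_P argument disables type-based
   disambiguation, as the dependence helpers below do.  */
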
1870 /* Returns true if there is an anti-dependence for the STORE that
1871 executes after the LOAD. */
1872
1873 bool
1874 refs_anti_dependent_p (tree load, tree store)
1875 {
1876 ao_ref r1, r2;
1877 ao_ref_init (&r1, load);
1878 ao_ref_init (&r2, store);
1879 return refs_may_alias_p_1 (&r1, &r2, false);
1880 }
1881
1882 /* Returns true if there is an output dependence for the stores
1883 STORE1 and STORE2. */
1884
1885 bool
1886 refs_output_dependent_p (tree store1, tree store2)
1887 {
1888 ao_ref r1, r2;
1889 ao_ref_init (&r1, store1);
1890 ao_ref_init (&r2, store2);
1891 return refs_may_alias_p_1 (&r1, &r2, false);
1892 }
1893
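/* For example (a hedged sketch; LOAD_REF and STORE_REF are
   hypothetical reference trees from two statements where the load
   executes first), a code motion pass could test

       if (!refs_anti_dependent_p (load_ref, store_ref))
         ...  the store may be hoisted above the load  ...

   Both dependence helpers pass tbaa_p == false to refs_may_alias_p_1,
   i.e. they deliberately do not use type-based disambiguation.  */
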
1894 /* If the call CALL may use the memory reference REF return true,
1895 otherwise return false. */
1896
1897 static bool
1898 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
1899 {
1900 tree base, callee;
1901 unsigned i;
1902 int flags = gimple_call_flags (call);
1903
1904 /* Const functions without a static chain do not implicitly use memory. */
1905 if (!gimple_call_chain (call)
1906 && (flags & (ECF_CONST|ECF_NOVOPS)))
1907 goto process_args;
1908
1909 base = ao_ref_base (ref);
1910 if (!base)
1911 return true;
1912
1913 /* A call that is not without side-effects might involve volatile
1914 accesses and thus conflicts with all other volatile accesses. */
1915 if (ref->volatile_p)
1916 return true;
1917
1918 /* If the reference is based on a decl that is not aliased the call
1919 cannot possibly use it. */
1920 if (DECL_P (base)
1921 && !may_be_aliased (base)
1922 /* But local statics can be used through recursion. */
1923 && !is_global_var (base))
1924 goto process_args;
1925
1926 callee = gimple_call_fndecl (call);
1927
1928 /* Handle those builtin functions explicitly that do not act as
1929 escape points. See tree-ssa-structalias.c:find_func_aliases
1930 for the list of builtins we might need to handle here. */
1931 if (callee != NULL_TREE
1932 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1933 switch (DECL_FUNCTION_CODE (callee))
1934 {
1935 /* All the following functions read memory pointed to by
1936 their second argument. strcat/strncat additionally
1937 read memory pointed to by the first argument. */
1938 case BUILT_IN_STRCAT:
1939 case BUILT_IN_STRNCAT:
1940 {
1941 ao_ref dref;
1942 ao_ref_init_from_ptr_and_size (&dref,
1943 gimple_call_arg (call, 0),
1944 NULL_TREE);
1945 if (refs_may_alias_p_1 (&dref, ref, false))
1946 return true;
1947 }
1948 /* FALLTHRU */
1949 case BUILT_IN_STRCPY:
1950 case BUILT_IN_STRNCPY:
1951 case BUILT_IN_MEMCPY:
1952 case BUILT_IN_MEMMOVE:
1953 case BUILT_IN_MEMPCPY:
1954 case BUILT_IN_STPCPY:
1955 case BUILT_IN_STPNCPY:
1956 case BUILT_IN_TM_MEMCPY:
1957 case BUILT_IN_TM_MEMMOVE:
1958 {
1959 ao_ref dref;
1960 tree size = NULL_TREE;
1961 if (gimple_call_num_args (call) == 3)
1962 size = gimple_call_arg (call, 2);
1963 ao_ref_init_from_ptr_and_size (&dref,
1964 gimple_call_arg (call, 1),
1965 size);
1966 return refs_may_alias_p_1 (&dref, ref, false);
1967 }
1968 case BUILT_IN_STRCAT_CHK:
1969 case BUILT_IN_STRNCAT_CHK:
1970 {
1971 ao_ref dref;
1972 ao_ref_init_from_ptr_and_size (&dref,
1973 gimple_call_arg (call, 0),
1974 NULL_TREE);
1975 if (refs_may_alias_p_1 (&dref, ref, false))
1976 return true;
1977 }
1978 /* FALLTHRU */
1979 case BUILT_IN_STRCPY_CHK:
1980 case BUILT_IN_STRNCPY_CHK:
1981 case BUILT_IN_MEMCPY_CHK:
1982 case BUILT_IN_MEMMOVE_CHK:
1983 case BUILT_IN_MEMPCPY_CHK:
1984 case BUILT_IN_STPCPY_CHK:
1985 case BUILT_IN_STPNCPY_CHK:
1986 {
1987 ao_ref dref;
1988 tree size = NULL_TREE;
1989 if (gimple_call_num_args (call) == 4)
1990 size = gimple_call_arg (call, 2);
1991 ao_ref_init_from_ptr_and_size (&dref,
1992 gimple_call_arg (call, 1),
1993 size);
1994 return refs_may_alias_p_1 (&dref, ref, false);
1995 }
1996 case BUILT_IN_BCOPY:
1997 {
1998 ao_ref dref;
1999 tree size = gimple_call_arg (call, 2);
2000 ao_ref_init_from_ptr_and_size (&dref,
2001 gimple_call_arg (call, 0),
2002 size);
2003 return refs_may_alias_p_1 (&dref, ref, false);
2004 }
2005
2006 /* The following functions read memory pointed to by their
2007 first argument. */
2008 CASE_BUILT_IN_TM_LOAD (1):
2009 CASE_BUILT_IN_TM_LOAD (2):
2010 CASE_BUILT_IN_TM_LOAD (4):
2011 CASE_BUILT_IN_TM_LOAD (8):
2012 CASE_BUILT_IN_TM_LOAD (FLOAT):
2013 CASE_BUILT_IN_TM_LOAD (DOUBLE):
2014 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
2015 CASE_BUILT_IN_TM_LOAD (M64):
2016 CASE_BUILT_IN_TM_LOAD (M128):
2017 CASE_BUILT_IN_TM_LOAD (M256):
2018 case BUILT_IN_TM_LOG:
2019 case BUILT_IN_TM_LOG_1:
2020 case BUILT_IN_TM_LOG_2:
2021 case BUILT_IN_TM_LOG_4:
2022 case BUILT_IN_TM_LOG_8:
2023 case BUILT_IN_TM_LOG_FLOAT:
2024 case BUILT_IN_TM_LOG_DOUBLE:
2025 case BUILT_IN_TM_LOG_LDOUBLE:
2026 case BUILT_IN_TM_LOG_M64:
2027 case BUILT_IN_TM_LOG_M128:
2028 case BUILT_IN_TM_LOG_M256:
2029 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
2030
2031 /* These read memory pointed to by the first argument. */
2032 case BUILT_IN_STRDUP:
2033 case BUILT_IN_STRNDUP:
2034 case BUILT_IN_REALLOC:
2035 {
2036 ao_ref dref;
2037 tree size = NULL_TREE;
2038 if (gimple_call_num_args (call) == 2)
2039 size = gimple_call_arg (call, 1);
2040 ao_ref_init_from_ptr_and_size (&dref,
2041 gimple_call_arg (call, 0),
2042 size);
2043 return refs_may_alias_p_1 (&dref, ref, false);
2044 }
2045 /* These read memory pointed to by the first argument. */
2046 case BUILT_IN_INDEX:
2047 case BUILT_IN_STRCHR:
2048 case BUILT_IN_STRRCHR:
2049 {
2050 ao_ref dref;
2051 ao_ref_init_from_ptr_and_size (&dref,
2052 gimple_call_arg (call, 0),
2053 NULL_TREE);
2054 return refs_may_alias_p_1 (&dref, ref, false);
2055 }
2056 /* These read memory pointed to by the first argument with size
2057 in the third argument. */
2058 case BUILT_IN_MEMCHR:
2059 {
2060 ao_ref dref;
2061 ao_ref_init_from_ptr_and_size (&dref,
2062 gimple_call_arg (call, 0),
2063 gimple_call_arg (call, 2));
2064 return refs_may_alias_p_1 (&dref, ref, false);
2065 }
2066 /* These read memory pointed to by the first and second arguments. */
2067 case BUILT_IN_STRSTR:
2068 case BUILT_IN_STRPBRK:
2069 {
2070 ao_ref dref;
2071 ao_ref_init_from_ptr_and_size (&dref,
2072 gimple_call_arg (call, 0),
2073 NULL_TREE);
2074 if (refs_may_alias_p_1 (&dref, ref, false))
2075 return true;
2076 ao_ref_init_from_ptr_and_size (&dref,
2077 gimple_call_arg (call, 1),
2078 NULL_TREE);
2079 return refs_may_alias_p_1 (&dref, ref, false);
2080 }
2081
2082 /* The following builtins do not read from memory. */
2083 case BUILT_IN_FREE:
2084 case BUILT_IN_MALLOC:
2085 case BUILT_IN_POSIX_MEMALIGN:
2086 case BUILT_IN_ALIGNED_ALLOC:
2087 case BUILT_IN_CALLOC:
2088 CASE_BUILT_IN_ALLOCA:
2089 case BUILT_IN_STACK_SAVE:
2090 case BUILT_IN_STACK_RESTORE:
2091 case BUILT_IN_MEMSET:
2092 case BUILT_IN_TM_MEMSET:
2093 case BUILT_IN_MEMSET_CHK:
2094 case BUILT_IN_FREXP:
2095 case BUILT_IN_FREXPF:
2096 case BUILT_IN_FREXPL:
2097 case BUILT_IN_GAMMA_R:
2098 case BUILT_IN_GAMMAF_R:
2099 case BUILT_IN_GAMMAL_R:
2100 case BUILT_IN_LGAMMA_R:
2101 case BUILT_IN_LGAMMAF_R:
2102 case BUILT_IN_LGAMMAL_R:
2103 case BUILT_IN_MODF:
2104 case BUILT_IN_MODFF:
2105 case BUILT_IN_MODFL:
2106 case BUILT_IN_REMQUO:
2107 case BUILT_IN_REMQUOF:
2108 case BUILT_IN_REMQUOL:
2109 case BUILT_IN_SINCOS:
2110 case BUILT_IN_SINCOSF:
2111 case BUILT_IN_SINCOSL:
2112 case BUILT_IN_ASSUME_ALIGNED:
2113 case BUILT_IN_VA_END:
2114 return false;
2115 /* __sync_* builtins and some OpenMP builtins act as threading
2116 barriers. */
2117 #undef DEF_SYNC_BUILTIN
2118 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2119 #include "sync-builtins.def"
2120 #undef DEF_SYNC_BUILTIN
2121 case BUILT_IN_GOMP_ATOMIC_START:
2122 case BUILT_IN_GOMP_ATOMIC_END:
2123 case BUILT_IN_GOMP_BARRIER:
2124 case BUILT_IN_GOMP_BARRIER_CANCEL:
2125 case BUILT_IN_GOMP_TASKWAIT:
2126 case BUILT_IN_GOMP_TASKGROUP_END:
2127 case BUILT_IN_GOMP_CRITICAL_START:
2128 case BUILT_IN_GOMP_CRITICAL_END:
2129 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2130 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2131 case BUILT_IN_GOMP_LOOP_END:
2132 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2133 case BUILT_IN_GOMP_ORDERED_START:
2134 case BUILT_IN_GOMP_ORDERED_END:
2135 case BUILT_IN_GOMP_SECTIONS_END:
2136 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2137 case BUILT_IN_GOMP_SINGLE_COPY_START:
2138 case BUILT_IN_GOMP_SINGLE_COPY_END:
2139 return true;
2140
2141 default:
2142 /* Fallthru to general call handling. */;
2143 }
2144
2145 /* Check if base is a global static variable that is not read
2146 by the function. */
2147 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2148 {
2149 struct cgraph_node *node = cgraph_node::get (callee);
2150 bitmap not_read;
2151
2152 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2153 node yet. We should enforce that there are nodes for all decls in the
2154 IL and remove this check instead. */
2155 if (node
2156 && (not_read = ipa_reference_get_not_read_global (node))
2157 && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
2158 goto process_args;
2159 }
2160
2161 /* Check if the base variable is call-used. */
2162 if (DECL_P (base))
2163 {
2164 if (pt_solution_includes (gimple_call_use_set (call), base))
2165 return true;
2166 }
2167 else if ((TREE_CODE (base) == MEM_REF
2168 || TREE_CODE (base) == TARGET_MEM_REF)
2169 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2170 {
2171 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2172 if (!pi)
2173 return true;
2174
2175 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2176 return true;
2177 }
2178 else
2179 return true;
2180
2181 /* Inspect call arguments for passed-by-value aliases. */
2182 process_args:
2183 for (i = 0; i < gimple_call_num_args (call); ++i)
2184 {
2185 tree op = gimple_call_arg (call, i);
2186 int flags = gimple_call_arg_flags (call, i);
2187
2188 if (flags & EAF_UNUSED)
2189 continue;
2190
2191 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2192 op = TREE_OPERAND (op, 0);
2193
2194 if (TREE_CODE (op) != SSA_NAME
2195 && !is_gimple_min_invariant (op))
2196 {
2197 ao_ref r;
2198 ao_ref_init (&r, op);
2199 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2200 return true;
2201 }
2202 }
2203
2204 return false;
2205 }
2206
2207 static bool
2208 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2209 {
2210 bool res;
2211 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2212 if (res)
2213 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2214 else
2215 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2216 return res;
2217 }
2218
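/* As a hedged illustration of the builtin handling above (CALL and
   V_DECL are hypothetical): for a call memcpy (dst, src, n) the read
   side is modeled as the N bytes at SRC, so

       ao_ref r;
       ao_ref_init (&r, v_decl);
       bool used = ref_maybe_used_by_call_p (call, &r, true);

   yields false whenever the offset and points-to information proves
   that V_DECL cannot overlap that source region.  */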
2219
2220 /* If the statement STMT may use the memory reference REF return
2221 true, otherwise return false. */
2222
2223 bool
2224 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2225 {
2226 if (is_gimple_assign (stmt))
2227 {
2228 tree rhs;
2229
2230 /* All assignments that reference memory are single (gimple_assign_single_p). */
2231 if (!gimple_assign_single_p (stmt))
2232 return false;
2233
2234 rhs = gimple_assign_rhs1 (stmt);
2235 if (is_gimple_reg (rhs)
2236 || is_gimple_min_invariant (rhs)
2237 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2238 return false;
2239
2240 return refs_may_alias_p (rhs, ref, tbaa_p);
2241 }
2242 else if (is_gimple_call (stmt))
2243 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2244 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2245 {
2246 tree retval = gimple_return_retval (return_stmt);
2247 if (retval
2248 && TREE_CODE (retval) != SSA_NAME
2249 && !is_gimple_min_invariant (retval)
2250 && refs_may_alias_p (retval, ref, tbaa_p))
2251 return true;
2252 /* If ref escapes the function then the return acts as a use. */
2253 tree base = ao_ref_base (ref);
2254 if (!base)
2255 ;
2256 else if (DECL_P (base))
2257 return is_global_var (base);
2258 else if (TREE_CODE (base) == MEM_REF
2259 || TREE_CODE (base) == TARGET_MEM_REF)
2260 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2261 return false;
2262 }
2263
2264 return true;
2265 }
2266
2267 bool
2268 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2269 {
2270 ao_ref r;
2271 ao_ref_init (&r, ref);
2272 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2273 }
2274
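/* A minimal usage sketch (STMT and DECL are hypothetical; DECL is a
   variable a pass would like to keep in a register):

       ao_ref r;
       ao_ref_init (&r, decl);
       if (!ref_maybe_used_by_stmt_p (stmt, &r, true)
           && !stmt_may_clobber_ref_p_1 (stmt, &r, true))
         ...  STMT neither reads nor writes DECL  ...

   stmt_may_clobber_ref_p_1 is defined below; together the two
   queries cover both the use and the def side of a statement's
   memory effects.  */
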
2275 /* If the call in statement CALL may clobber the memory reference REF
2276 return true, otherwise return false. */
2277
2278 bool
2279 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
2280 {
2281 tree base;
2282 tree callee;
2283
2284 /* If the call is pure or const it cannot clobber anything. */
2285 if (gimple_call_flags (call)
2286 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2287 return false;
2288 if (gimple_call_internal_p (call))
2289 switch (gimple_call_internal_fn (call))
2290 {
2291 /* Treat these internal calls like ECF_PURE for aliasing:
2292 they don't write to any memory the program should care about.
2293 They have important other side-effects, and read memory,
2294 so can't be ECF_NOVOPS. */
2295 case IFN_UBSAN_NULL:
2296 case IFN_UBSAN_BOUNDS:
2297 case IFN_UBSAN_VPTR:
2298 case IFN_UBSAN_OBJECT_SIZE:
2299 case IFN_UBSAN_PTR:
2300 case IFN_ASAN_CHECK:
2301 return false;
2302 default:
2303 break;
2304 }
2305
2306 base = ao_ref_base (ref);
2307 if (!base)
2308 return true;
2309
2310 if (TREE_CODE (base) == SSA_NAME
2311 || CONSTANT_CLASS_P (base))
2312 return false;
2313
2314 /* A call that is not without side-effects might involve volatile
2315 accesses and thus conflicts with all other volatile accesses. */
2316 if (ref->volatile_p)
2317 return true;
2318
2319 /* If the reference is based on a decl that is not aliased the call
2320 cannot possibly clobber it. */
2321 if (DECL_P (base)
2322 && !may_be_aliased (base)
2323 /* But local non-readonly statics can be modified through recursion
2324 or the call may implement a threading barrier which we must
2325 treat as may-def. */
2326 && (TREE_READONLY (base)
2327 || !is_global_var (base)))
2328 return false;
2329
2330 /* If the reference is based on a pointer that points to memory
2331 that may not be written to then the call cannot possibly clobber it. */
2332 if ((TREE_CODE (base) == MEM_REF
2333 || TREE_CODE (base) == TARGET_MEM_REF)
2334 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2335 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
2336 return false;
2337
2338 callee = gimple_call_fndecl (call);
2339
2340 /* Handle those builtin functions explicitly that do not act as
2341 escape points. See tree-ssa-structalias.c:find_func_aliases
2342 for the list of builtins we might need to handle here. */
2343 if (callee != NULL_TREE
2344 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2345 switch (DECL_FUNCTION_CODE (callee))
2346 {
2347 /* All the following functions clobber memory pointed to by
2348 their first argument. */
2349 case BUILT_IN_STRCPY:
2350 case BUILT_IN_STRNCPY:
2351 case BUILT_IN_MEMCPY:
2352 case BUILT_IN_MEMMOVE:
2353 case BUILT_IN_MEMPCPY:
2354 case BUILT_IN_STPCPY:
2355 case BUILT_IN_STPNCPY:
2356 case BUILT_IN_STRCAT:
2357 case BUILT_IN_STRNCAT:
2358 case BUILT_IN_MEMSET:
2359 case BUILT_IN_TM_MEMSET:
2360 CASE_BUILT_IN_TM_STORE (1):
2361 CASE_BUILT_IN_TM_STORE (2):
2362 CASE_BUILT_IN_TM_STORE (4):
2363 CASE_BUILT_IN_TM_STORE (8):
2364 CASE_BUILT_IN_TM_STORE (FLOAT):
2365 CASE_BUILT_IN_TM_STORE (DOUBLE):
2366 CASE_BUILT_IN_TM_STORE (LDOUBLE):
2367 CASE_BUILT_IN_TM_STORE (M64):
2368 CASE_BUILT_IN_TM_STORE (M128):
2369 CASE_BUILT_IN_TM_STORE (M256):
2370 case BUILT_IN_TM_MEMCPY:
2371 case BUILT_IN_TM_MEMMOVE:
2372 {
2373 ao_ref dref;
2374 tree size = NULL_TREE;
2375 /* Don't pass in size for strncat, as the maximum size
2376 is strlen (dest) + n + 1 instead of n, resp.
2377 n + 1 at dest + strlen (dest), but strlen (dest) isn't
2378 known. */
2379 if (gimple_call_num_args (call) == 3
2380 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2381 size = gimple_call_arg (call, 2);
2382 ao_ref_init_from_ptr_and_size (&dref,
2383 gimple_call_arg (call, 0),
2384 size);
2385 return refs_may_alias_p_1 (&dref, ref, false);
2386 }
2387 case BUILT_IN_STRCPY_CHK:
2388 case BUILT_IN_STRNCPY_CHK:
2389 case BUILT_IN_MEMCPY_CHK:
2390 case BUILT_IN_MEMMOVE_CHK:
2391 case BUILT_IN_MEMPCPY_CHK:
2392 case BUILT_IN_STPCPY_CHK:
2393 case BUILT_IN_STPNCPY_CHK:
2394 case BUILT_IN_STRCAT_CHK:
2395 case BUILT_IN_STRNCAT_CHK:
2396 case BUILT_IN_MEMSET_CHK:
2397 {
2398 ao_ref dref;
2399 tree size = NULL_TREE;
2400 /* Don't pass in size for __strncat_chk, as the maximum size
2401 is strlen (dest) + n + 1 instead of n, resp.
2402 n + 1 at dest + strlen (dest), but strlen (dest) isn't
2403 known. */
2404 if (gimple_call_num_args (call) == 4
2405 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2406 size = gimple_call_arg (call, 2);
2407 ao_ref_init_from_ptr_and_size (&dref,
2408 gimple_call_arg (call, 0),
2409 size);
2410 return refs_may_alias_p_1 (&dref, ref, false);
2411 }
2412 case BUILT_IN_BCOPY:
2413 {
2414 ao_ref dref;
2415 tree size = gimple_call_arg (call, 2);
2416 ao_ref_init_from_ptr_and_size (&dref,
2417 gimple_call_arg (call, 1),
2418 size);
2419 return refs_may_alias_p_1 (&dref, ref, false);
2420 }
2421 /* Allocating memory does not have any side-effects apart from
2422 being the definition point for the pointer. */
2423 case BUILT_IN_MALLOC:
2424 case BUILT_IN_ALIGNED_ALLOC:
2425 case BUILT_IN_CALLOC:
2426 case BUILT_IN_STRDUP:
2427 case BUILT_IN_STRNDUP:
2428 /* Unix98 specifies that errno is set on allocation failure. */
2429 if (flag_errno_math
2430 && targetm.ref_may_alias_errno (ref))
2431 return true;
2432 return false;
2433 case BUILT_IN_STACK_SAVE:
2434 CASE_BUILT_IN_ALLOCA:
2435 case BUILT_IN_ASSUME_ALIGNED:
2436 return false;
2437 /* But posix_memalign stores a pointer into the memory pointed to
2438 by its first argument. */
2439 case BUILT_IN_POSIX_MEMALIGN:
2440 {
2441 tree ptrptr = gimple_call_arg (call, 0);
2442 ao_ref dref;
2443 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2444 TYPE_SIZE_UNIT (ptr_type_node));
2445 return (refs_may_alias_p_1 (&dref, ref, false)
2446 || (flag_errno_math
2447 && targetm.ref_may_alias_errno (ref)));
2448 }
2449 /* Freeing memory kills the pointed-to memory. More importantly
2450 the call has to serve as a barrier for moving loads and stores
2451 across it. */
2452 case BUILT_IN_FREE:
2453 case BUILT_IN_VA_END:
2454 {
2455 tree ptr = gimple_call_arg (call, 0);
2456 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2457 }
2458 /* Realloc serves both as allocation point and deallocation point. */
2459 case BUILT_IN_REALLOC:
2460 {
2461 tree ptr = gimple_call_arg (call, 0);
2462 /* Unix98 specifies that errno is set on allocation failure. */
2463 return ((flag_errno_math
2464 && targetm.ref_may_alias_errno (ref))
2465 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2466 }
2467 case BUILT_IN_GAMMA_R:
2468 case BUILT_IN_GAMMAF_R:
2469 case BUILT_IN_GAMMAL_R:
2470 case BUILT_IN_LGAMMA_R:
2471 case BUILT_IN_LGAMMAF_R:
2472 case BUILT_IN_LGAMMAL_R:
2473 {
2474 tree out = gimple_call_arg (call, 1);
2475 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2476 return true;
2477 if (flag_errno_math)
2478 break;
2479 return false;
2480 }
2481 case BUILT_IN_FREXP:
2482 case BUILT_IN_FREXPF:
2483 case BUILT_IN_FREXPL:
2484 case BUILT_IN_MODF:
2485 case BUILT_IN_MODFF:
2486 case BUILT_IN_MODFL:
2487 {
2488 tree out = gimple_call_arg (call, 1);
2489 return ptr_deref_may_alias_ref_p_1 (out, ref);
2490 }
2491 case BUILT_IN_REMQUO:
2492 case BUILT_IN_REMQUOF:
2493 case BUILT_IN_REMQUOL:
2494 {
2495 tree out = gimple_call_arg (call, 2);
2496 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2497 return true;
2498 if (flag_errno_math)
2499 break;
2500 return false;
2501 }
2502 case BUILT_IN_SINCOS:
2503 case BUILT_IN_SINCOSF:
2504 case BUILT_IN_SINCOSL:
2505 {
2506 tree sin = gimple_call_arg (call, 1);
2507 tree cos = gimple_call_arg (call, 2);
2508 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2509 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2510 }
2511 /* __sync_* builtins and some OpenMP builtins act as threading
2512 barriers. */
2513 #undef DEF_SYNC_BUILTIN
2514 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2515 #include "sync-builtins.def"
2516 #undef DEF_SYNC_BUILTIN
2517 case BUILT_IN_GOMP_ATOMIC_START:
2518 case BUILT_IN_GOMP_ATOMIC_END:
2519 case BUILT_IN_GOMP_BARRIER:
2520 case BUILT_IN_GOMP_BARRIER_CANCEL:
2521 case BUILT_IN_GOMP_TASKWAIT:
2522 case BUILT_IN_GOMP_TASKGROUP_END:
2523 case BUILT_IN_GOMP_CRITICAL_START:
2524 case BUILT_IN_GOMP_CRITICAL_END:
2525 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2526 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2527 case BUILT_IN_GOMP_LOOP_END:
2528 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2529 case BUILT_IN_GOMP_ORDERED_START:
2530 case BUILT_IN_GOMP_ORDERED_END:
2531 case BUILT_IN_GOMP_SECTIONS_END:
2532 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2533 case BUILT_IN_GOMP_SINGLE_COPY_START:
2534 case BUILT_IN_GOMP_SINGLE_COPY_END:
2535 return true;
2536 default:
2537 /* Fallthru to general call handling. */;
2538 }
2539
2540 /* Check if base is a global static variable that is not written
2541 by the function. */
2542 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2543 {
2544 struct cgraph_node *node = cgraph_node::get (callee);
2545 bitmap not_written;
2546
2547 if (node
2548 && (not_written = ipa_reference_get_not_written_global (node))
2549 && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2550 return false;
2551 }
2552
2553 /* Check if the base variable is call-clobbered. */
2554 if (DECL_P (base))
2555 return pt_solution_includes (gimple_call_clobber_set (call), base);
2556 else if ((TREE_CODE (base) == MEM_REF
2557 || TREE_CODE (base) == TARGET_MEM_REF)
2558 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2559 {
2560 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2561 if (!pi)
2562 return true;
2563
2564 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2565 }
2566
2567 return true;
2568 }
2569
2570 /* If the call in statement CALL may clobber the memory reference REF
2571 return true, otherwise return false. */
2572
2573 bool
2574 call_may_clobber_ref_p (gcall *call, tree ref)
2575 {
2576 bool res;
2577 ao_ref r;
2578 ao_ref_init (&r, ref);
2579 res = call_may_clobber_ref_p_1 (call, &r);
2580 if (res)
2581 ++alias_stats.call_may_clobber_ref_p_may_alias;
2582 else
2583 ++alias_stats.call_may_clobber_ref_p_no_alias;
2584 return res;
2585 }
2586
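/* For instance (a sketch with hypothetical CALL and GLOBAL_VAR
   trees), a pass that wants to keep a loaded value of GLOBAL_VAR
   live across a call could ask

       if (!call_may_clobber_ref_p (call, global_var))
         ...  the cached value is still valid after the call  ...

   which benefits from the builtin knowledge and IPA reference
   information handled above.  */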
2587
2588 /* If the statement STMT may clobber the memory reference REF return true,
2589 otherwise return false. */
2590
2591 bool
2592 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
2593 {
2594 if (is_gimple_call (stmt))
2595 {
2596 tree lhs = gimple_call_lhs (stmt);
2597 if (lhs
2598 && TREE_CODE (lhs) != SSA_NAME)
2599 {
2600 ao_ref r;
2601 ao_ref_init (&r, lhs);
2602 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
2603 return true;
2604 }
2605
2606 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2607 }
2608 else if (gimple_assign_single_p (stmt))
2609 {
2610 tree lhs = gimple_assign_lhs (stmt);
2611 if (TREE_CODE (lhs) != SSA_NAME)
2612 {
2613 ao_ref r;
2614 ao_ref_init (&r, lhs);
2615 return refs_may_alias_p_1 (ref, &r, tbaa_p);
2616 }
2617 }
2618 else if (gimple_code (stmt) == GIMPLE_ASM)
2619 return true;
2620
2621 return false;
2622 }
2623
2624 bool
2625 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
2626 {
2627 ao_ref r;
2628 ao_ref_init (&r, ref);
2629 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
2630 }
2631
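/* A hedged sketch of a typical query loop (BB, REF_TREE and the
   local variables are hypothetical), checking whether any statement
   in a basic block may overwrite REF_TREE:

       ao_ref r;
       ao_ref_init (&r, ref_tree);
       bool found = false;
       for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
            !gsi_end_p (gsi); gsi_next (&gsi))
         if (stmt_may_clobber_ref_p_1 (gsi_stmt (gsi), &r, true))
           {
             found = true;
             break;
           }

   For longer ranges the virtual operand walkers at the end of this
   file are the preferred interface.  */
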
2632 /* Return true if store1 and store2 described by corresponding tuples
2633 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
2634 address. */
2635
2636 static bool
2637 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
2638 poly_int64 max_size1,
2639 tree base2, poly_int64 offset2, poly_int64 size2,
2640 poly_int64 max_size2)
2641 {
2642 /* Offsets need to be 0. */
2643 if (maybe_ne (offset1, 0)
2644 || maybe_ne (offset2, 0))
2645 return false;
2646
2647 bool base1_obj_p = SSA_VAR_P (base1);
2648 bool base2_obj_p = SSA_VAR_P (base2);
2649
2650 /* Exactly one of the bases needs to be an object. */
2651 if (base1_obj_p == base2_obj_p)
2652 return false;
2653 tree obj = base1_obj_p ? base1 : base2;
2654
2655 /* And exactly one needs to be a MEM_REF. */
2656 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
2657 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
2658 if (base1_memref_p == base2_memref_p)
2659 return false;
2660 tree memref = base1_memref_p ? base1 : base2;
2661
2662 /* Sizes need to be valid. */
2663 if (!known_size_p (max_size1)
2664 || !known_size_p (max_size2)
2665 || !known_size_p (size1)
2666 || !known_size_p (size2))
2667 return false;
2668
2669 /* Max_size needs to match size. */
2670 if (maybe_ne (max_size1, size1)
2671 || maybe_ne (max_size2, size2))
2672 return false;
2673
2674 /* Sizes need to match. */
2675 if (maybe_ne (size1, size2))
2676 return false;
2677
2678
2679 /* Check that memref is a store through a pointer with singleton points-to info. */
2680 if (!integer_zerop (TREE_OPERAND (memref, 1)))
2681 return false;
2682 tree ptr = TREE_OPERAND (memref, 0);
2683 if (TREE_CODE (ptr) != SSA_NAME)
2684 return false;
2685 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2686 unsigned int pt_uid;
2687 if (pi == NULL
2688 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
2689 return false;
2690
2691 /* Be conservative with non-call exceptions when the address might
2692 be NULL. */
2693 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
2694 return false;
2695
2696 /* Check that ptr points relative to obj. */
2697 unsigned int obj_uid = DECL_PT_UID (obj);
2698 if (obj_uid != pt_uid)
2699 return false;
2700
2701 /* Check that the object size is the same as the store size. That ensures
2702 that ptr points to the start of obj. */
2703 return (DECL_SIZE (obj)
2704 && poly_int_tree_p (DECL_SIZE (obj))
2705 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
2706 }
2707
2708 /* If STMT kills the memory reference REF return true, otherwise
2709 return false. */
2710
2711 bool
2712 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2713 {
2714 if (!ao_ref_base (ref))
2715 return false;
2716
2717 if (gimple_has_lhs (stmt)
2718 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2719 /* The assignment is not necessarily carried out if it can throw
2720 and we can catch it in the current function where we could inspect
2721 the previous value.
2722 ??? We only need to care about the RHS throwing. For aggregate
2723 assignments or similar calls and non-call exceptions the LHS
2724 might throw as well. */
2725 && !stmt_can_throw_internal (cfun, stmt))
2726 {
2727 tree lhs = gimple_get_lhs (stmt);
2728 /* If LHS is literally a base of the access we are done. */
2729 if (ref->ref)
2730 {
2731 tree base = ref->ref;
2732 tree innermost_dropped_array_ref = NULL_TREE;
2733 if (handled_component_p (base))
2734 {
2735 tree saved_lhs0 = NULL_TREE;
2736 if (handled_component_p (lhs))
2737 {
2738 saved_lhs0 = TREE_OPERAND (lhs, 0);
2739 TREE_OPERAND (lhs, 0) = integer_zero_node;
2740 }
2741 do
2742 {
2743 /* Just compare the outermost handled component; if
2744 they are equal, we have found a possible common
2745 base. */
2746 tree saved_base0 = TREE_OPERAND (base, 0);
2747 TREE_OPERAND (base, 0) = integer_zero_node;
2748 bool res = operand_equal_p (lhs, base, 0);
2749 TREE_OPERAND (base, 0) = saved_base0;
2750 if (res)
2751 break;
2752 /* Remember if we drop an array-ref; we need to double-check
2753 later that it was not at struct end. */
2754 if (TREE_CODE (base) == ARRAY_REF
2755 || TREE_CODE (base) == ARRAY_RANGE_REF)
2756 innermost_dropped_array_ref = base;
2757 /* Otherwise drop handled components of the access. */
2758 base = saved_base0;
2759 }
2760 while (handled_component_p (base));
2761 if (saved_lhs0)
2762 TREE_OPERAND (lhs, 0) = saved_lhs0;
2763 }
2764 /* Finally check if the lhs has the same address and size as the
2765 base candidate of the access. Watch out if we have dropped
2766 an array-ref that was at struct end, this means ref->ref may
2767 be outside of the TYPE_SIZE of its base. */
2768 if ((! innermost_dropped_array_ref
2769 || ! array_at_struct_end_p (innermost_dropped_array_ref))
2770 && (lhs == base
2771 || (((TYPE_SIZE (TREE_TYPE (lhs))
2772 == TYPE_SIZE (TREE_TYPE (base)))
2773 || (TYPE_SIZE (TREE_TYPE (lhs))
2774 && TYPE_SIZE (TREE_TYPE (base))
2775 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2776 TYPE_SIZE (TREE_TYPE (base)),
2777 0)))
2778 && operand_equal_p (lhs, base,
2779 OEP_ADDRESS_OF
2780 | OEP_MATCH_SIDE_EFFECTS))))
2781 return true;
2782 }
2783
2784 /* Now look for non-literal equal bases with the restriction of
2785 handling constant offset and size. */
2786 /* For a must-alias check we need to be able to constrain
2787 the access properly. */
2788 if (!ref->max_size_known_p ())
2789 return false;
2790 poly_int64 size, offset, max_size, ref_offset = ref->offset;
2791 bool reverse;
2792 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
2793 &reverse);
2794 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2795 so base == ref->base does not always hold. */
2796 if (base != ref->base)
2797 {
2798 /* Try using points-to info. */
2799 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
2800 ref->offset, ref->size, ref->max_size))
2801 return true;
2802
2803 /* If both base and ref->base are MEM_REFs, only compare the
2804 first operand, and if the second operand isn't equal constant,
2805 try to add the offsets into offset and ref_offset. */
2806 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2807 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2808 {
2809 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2810 TREE_OPERAND (ref->base, 1)))
2811 {
2812 poly_offset_int off1 = mem_ref_offset (base);
2813 off1 <<= LOG2_BITS_PER_UNIT;
2814 off1 += offset;
2815 poly_offset_int off2 = mem_ref_offset (ref->base);
2816 off2 <<= LOG2_BITS_PER_UNIT;
2817 off2 += ref_offset;
2818 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
2819 size = -1;
2820 }
2821 }
2822 else
2823 size = -1;
2824 }
2825 /* For a must-alias check we need to be able to constrain
2826 the access properly. */
2827 if (known_eq (size, max_size)
2828 && known_subrange_p (ref_offset, ref->max_size, offset, size))
2829 return true;
2830 }
2831
2832 if (is_gimple_call (stmt))
2833 {
2834 tree callee = gimple_call_fndecl (stmt);
2835 if (callee != NULL_TREE
2836 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2837 switch (DECL_FUNCTION_CODE (callee))
2838 {
2839 case BUILT_IN_FREE:
2840 {
2841 tree ptr = gimple_call_arg (stmt, 0);
2842 tree base = ao_ref_base (ref);
2843 if (base && TREE_CODE (base) == MEM_REF
2844 && TREE_OPERAND (base, 0) == ptr)
2845 return true;
2846 break;
2847 }
2848
2849 case BUILT_IN_MEMCPY:
2850 case BUILT_IN_MEMPCPY:
2851 case BUILT_IN_MEMMOVE:
2852 case BUILT_IN_MEMSET:
2853 case BUILT_IN_MEMCPY_CHK:
2854 case BUILT_IN_MEMPCPY_CHK:
2855 case BUILT_IN_MEMMOVE_CHK:
2856 case BUILT_IN_MEMSET_CHK:
2857 case BUILT_IN_STRNCPY:
2858 case BUILT_IN_STPNCPY:
2859 {
2860 /* For a must-alias check we need to be able to constrain
2861 the access properly. */
2862 if (!ref->max_size_known_p ())
2863 return false;
2864 tree dest = gimple_call_arg (stmt, 0);
2865 tree len = gimple_call_arg (stmt, 2);
2866 if (!poly_int_tree_p (len))
2867 return false;
2868 tree rbase = ref->base;
2869 poly_offset_int roffset = ref->offset;
2870 ao_ref dref;
2871 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2872 tree base = ao_ref_base (&dref);
2873 poly_offset_int offset = dref.offset;
2874 if (!base || !known_size_p (dref.size))
2875 return false;
2876 if (TREE_CODE (base) == MEM_REF)
2877 {
2878 if (TREE_CODE (rbase) != MEM_REF)
2879 return false;
2880 // Compare pointers.
2881 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
2882 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
2883 base = TREE_OPERAND (base, 0);
2884 rbase = TREE_OPERAND (rbase, 0);
2885 }
2886 if (base == rbase
2887 && known_subrange_p (roffset, ref->max_size, offset,
2888 wi::to_poly_offset (len)
2889 << LOG2_BITS_PER_UNIT))
2890 return true;
2891 break;
2892 }
2893
2894 case BUILT_IN_VA_END:
2895 {
2896 tree ptr = gimple_call_arg (stmt, 0);
2897 if (TREE_CODE (ptr) == ADDR_EXPR)
2898 {
2899 tree base = ao_ref_base (ref);
2900 if (TREE_OPERAND (ptr, 0) == base)
2901 return true;
2902 }
2903 break;
2904 }
2905
2906 default:;
2907 }
2908 }
2909 return false;
2910 }
2911
2912 bool
2913 stmt_kills_ref_p (gimple *stmt, tree ref)
2914 {
2915 ao_ref r;
2916 ao_ref_init (&r, ref);
2917 return stmt_kills_ref_p (stmt, &r);
2918 }
2919
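/* The kill oracle is a must-alias query; a hedged sketch (STORE1 and
   STORE2 are hypothetical gimple assignments):

       ao_ref r;
       ao_ref_init (&r, gimple_assign_lhs (store1));
       if (stmt_kills_ref_p (store2, &r))
         ...  every byte written by STORE1 is also written by STORE2,
              so STORE1 is dead if nothing in between reads it  ...

   Note the asymmetry with stmt_may_clobber_ref_p: "may clobber" is a
   may-alias query, while "kills" requires the whole of REF to be
   covered.  */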
2920
2921 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2922 TARGET or a statement clobbering the memory reference REF, in which
2923 case false is returned. The walk starts with VUSE, one argument of PHI. */
2924
2925 static bool
2926 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
2927 ao_ref *ref, tree vuse, unsigned int &limit, bitmap *visited,
2928 bool abort_on_visited,
2929 void *(*translate)(ao_ref *, tree, void *, bool *),
2930 void *data)
2931 {
2932 basic_block bb = gimple_bb (phi);
2933
2934 if (!*visited)
2935 *visited = BITMAP_ALLOC (NULL);
2936
2937 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2938
2939 /* Walk until we hit the target. */
2940 while (vuse != target)
2941 {
2942 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2943 /* If we are searching for the target VUSE by walking up to
2944 TARGET_BB dominating the original PHI we are finished once
2945 we reach a default def or a definition in a block dominating
2946 that block. Update TARGET and return. */
2947 if (!target
2948 && (gimple_nop_p (def_stmt)
2949 || dominated_by_p (CDI_DOMINATORS,
2950 target_bb, gimple_bb (def_stmt))))
2951 {
2952 target = vuse;
2953 return true;
2954 }
2955
2956 /* Recurse for PHI nodes. */
2957 if (gimple_code (def_stmt) == GIMPLE_PHI)
2958 {
2959 /* An already visited PHI node ends the walk successfully. */
2960 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2961 return !abort_on_visited;
2962 vuse = get_continuation_for_phi (def_stmt, ref, limit,
2963 visited, abort_on_visited,
2964 translate, data);
2965 if (!vuse)
2966 return false;
2967 continue;
2968 }
2969 else if (gimple_nop_p (def_stmt))
2970 return false;
2971 else
2972 {
2973 /* A clobbering statement or the end of the IL ends the walk unsuccessfully. */
2974 if ((int)limit <= 0)
2975 return false;
2976 --limit;
2977 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2978 {
2979 bool disambiguate_only = true;
2980 if (translate
2981 && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2982 ;
2983 else
2984 return false;
2985 }
2986 }
2987 /* If we reach a new basic-block, see if we already skipped it
2988 in a previous walk that ended successfully. */
2989 if (gimple_bb (def_stmt) != bb)
2990 {
2991 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2992 return !abort_on_visited;
2993 bb = gimple_bb (def_stmt);
2994 }
2995 vuse = gimple_vuse (def_stmt);
2996 }
2997 return true;
2998 }
2999
3000
3001 /* Starting from a PHI node for the virtual operand of the memory reference
3002 REF, find a continuation virtual operand that allows the walk to continue
3003 over statements dominating PHI, skipping only statements that cannot possibly
3004 clobber REF. Decrements LIMIT for each alias disambiguation done
3005 and aborts the walk, returning NULL_TREE if it reaches zero.
3006 Returns NULL_TREE if no suitable virtual operand can be found. */
3007
3008 tree
3009 get_continuation_for_phi (gimple *phi, ao_ref *ref,
3010 unsigned int &limit, bitmap *visited,
3011 bool abort_on_visited,
3012 void *(*translate)(ao_ref *, tree, void *, bool *),
3013 void *data)
3014 {
3015 unsigned nargs = gimple_phi_num_args (phi);
3016
3017 /* A single-argument PHI we can simply look through. */
3018 if (nargs == 1)
3019 return PHI_ARG_DEF (phi, 0);
3020
3021 /* For two or more arguments try to pairwise skip non-aliasing code
3022 until we hit the phi argument definition that dominates the other one. */
3023 basic_block phi_bb = gimple_bb (phi);
3024 tree arg0, arg1;
3025 unsigned i;
3026
3027 /* Find a candidate for the virtual operand whose definition
3028 dominates those of all others. */
3029 /* First look if any of the args themselves satisfy this. */
3030 for (i = 0; i < nargs; ++i)
3031 {
3032 arg0 = PHI_ARG_DEF (phi, i);
3033 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3034 break;
3035 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3036 if (def_bb != phi_bb
3037 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3038 break;
3039 arg0 = NULL_TREE;
3040 }
3041 /* If not, see if we can reach such a candidate by walking defs
3042 until we hit the immediate dominator. maybe_skip_until will
3043 do that for us. */
3044 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3045
3046 /* Then check against the (to be) found candidate. */
3047 for (i = 0; i < nargs; ++i)
3048 {
3049 arg1 = PHI_ARG_DEF (phi, i);
3050 if (arg1 == arg0)
3051 ;
3052 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, limit, visited,
3053 abort_on_visited,
3054 /* Do not translate when walking over
3055 backedges. */
3056 dominated_by_p
3057 (CDI_DOMINATORS,
3058 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3059 phi_bb)
3060 ? NULL : translate, data))
3061 return NULL_TREE;
3062 }
3063
3064 return arg0;
3065 }
3066
3067 /* Based on the memory reference REF and its virtual use VUSE call
3068 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3069 itself. That is, for each virtual use whose defining statement
3070 does not clobber REF.
3071
3072 WALKER is called with REF, the current virtual use and DATA. If
3073 WALKER returns non-NULL the walk stops and its result is returned.
3074 At the end of a non-successful walk NULL is returned.
3075
3076 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3077 use whose definition is a statement that may clobber REF, and DATA.
3078 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3079 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3080 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3081 to adjust REF and *DATA to make that valid.
3082
3083 VALUEIZE if non-NULL is called with the next VUSE that is considered
3084 and its return value is substituted for that. This can be used to
3085 implement optimistic value-numbering for example. Note that the
3086 VUSE argument is assumed to be valueized already.
3087
3088 LIMIT specifies the number of alias queries we are allowed to do;
3089 the walk stops when it reaches zero and NULL is returned. LIMIT
3090 is decremented by the number of alias queries (plus adjustments
3091 done by the callbacks) upon return.
3092
3093 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3094
3095 void *
3096 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
3097 void *(*walker)(ao_ref *, tree, void *),
3098 void *(*translate)(ao_ref *, tree, void *, bool *),
3099 tree (*valueize)(tree),
3100 unsigned &limit, void *data)
3101 {
3102 bitmap visited = NULL;
3103 void *res;
3104 bool translated = false;
3105
3106 timevar_push (TV_ALIAS_STMT_WALK);
3107
3108 do
3109 {
3110 gimple *def_stmt;
3111
3112 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3113 res = (*walker) (ref, vuse, data);
3114 /* Abort walk. */
3115 if (res == (void *)-1)
3116 {
3117 res = NULL;
3118 break;
3119 }
3120 /* Lookup succeeded. */
3121 else if (res != NULL)
3122 break;
3123
3124 if (valueize)
3125 {
3126 vuse = valueize (vuse);
3127 if (!vuse)
3128 {
3129 res = NULL;
3130 break;
3131 }
3132 }
3133 def_stmt = SSA_NAME_DEF_STMT (vuse);
3134 if (gimple_nop_p (def_stmt))
3135 break;
3136 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3137 vuse = get_continuation_for_phi (def_stmt, ref, limit,
3138 &visited, translated, translate, data);
3139 else
3140 {
3141 if ((int)limit <= 0)
3142 {
3143 res = NULL;
3144 break;
3145 }
3146 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
3147 {
3148 if (!translate)
3149 break;
3150 bool disambiguate_only = false;
3151 res = (*translate) (ref, vuse, data, &disambiguate_only);
3152 /* Failed lookup and translation. */
3153 if (res == (void *)-1)
3154 {
3155 res = NULL;
3156 break;
3157 }
3158 /* Lookup succeeded. */
3159 else if (res != NULL)
3160 break;
3161 /* Translation succeeded, continue walking. */
3162 translated = translated || !disambiguate_only;
3163 }
3164 vuse = gimple_vuse (def_stmt);
3165 }
3166 }
3167 while (vuse);
3168
3169 if (visited)
3170 BITMAP_FREE (visited);
3171
3172 timevar_pop (TV_ALIAS_STMT_WALK);
3173
3174 return res;
3175 }
3176
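/* A hedged example of the walker protocol (the callback and the
   variable names are hypothetical): prove that REF_TREE is not
   clobbered between USE_STMT and function entry.

       static void *
       vuse_reaches_entry (ao_ref *, tree vuse, void *)
       {
         if (SSA_NAME_IS_DEFAULT_DEF (vuse))
           return vuse;
         return NULL;
       }

       ao_ref r;
       ao_ref_init (&r, ref_tree);
       unsigned limit = 100;
       bool unclobbered
         = (walk_non_aliased_vuses (&r, gimple_vuse (use_stmt),
                                    vuse_reaches_entry, NULL, NULL,
                                    limit, NULL) != NULL);

   Here LIMIT is an arbitrary alias-query budget.  Because no
   TRANSLATE callback is given, the walk stops and returns NULL as
   soon as a possibly clobbering definition is met.  */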
3177
3178 /* Based on the memory reference REF call WALKER for each vdef whose
3179 defining statement may clobber REF, starting with VDEF. If REF
3180 is NULL_TREE, each defining statement is visited.
3181
3182 WALKER is called with REF, the current vdef and DATA. If WALKER
3183 returns true the walk is stopped, otherwise it continues.
3184
3185 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3186 The pointer may be NULL, in which case we do not track this information.
3187
3188 At PHI nodes walk_aliased_vdefs forks into one walk for each
3189 PHI argument (but only one walk continues on merge points); the
3190 return value is true if any of the walks was successful.
3191
3192 The function returns the number of statements walked or -1 if
3193 LIMIT stmts were walked and the walk was aborted at this point.
3194 If LIMIT is zero the walk is not aborted. */
3195
3196 static int
3197 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3198 bool (*walker)(ao_ref *, tree, void *), void *data,
3199 bitmap *visited, unsigned int cnt,
3200 bool *function_entry_reached, unsigned limit)
3201 {
3202 do
3203 {
3204 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3205
3206 if (*visited
3207 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3208 return cnt;
3209
3210 if (gimple_nop_p (def_stmt))
3211 {
3212 if (function_entry_reached)
3213 *function_entry_reached = true;
3214 return cnt;
3215 }
3216 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3217 {
3218 unsigned i;
3219 if (!*visited)
3220 *visited = BITMAP_ALLOC (NULL);
3221 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3222 {
3223 int res = walk_aliased_vdefs_1 (ref,
3224 gimple_phi_arg_def (def_stmt, i),
3225 walker, data, visited, cnt,
3226 function_entry_reached, limit);
3227 if (res == -1)
3228 return -1;
3229 cnt = res;
3230 }
3231 return cnt;
3232 }
3233
3234 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3235 cnt++;
3236 if (cnt == limit)
3237 return -1;
3238 if ((!ref
3239 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3240 && (*walker) (ref, vdef, data))
3241 return cnt;
3242
3243 vdef = gimple_vuse (def_stmt);
3244 }
3245 while (1);
3246 }
3247
3248 int
3249 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3250 bool (*walker)(ao_ref *, tree, void *), void *data,
3251 bitmap *visited,
3252 bool *function_entry_reached, unsigned int limit)
3253 {
3254 bitmap local_visited = NULL;
3255 int ret;
3256
3257 timevar_push (TV_ALIAS_STMT_WALK);
3258
3259 if (function_entry_reached)
3260 *function_entry_reached = false;
3261
3262 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3263 visited ? visited : &local_visited, 0,
3264 function_entry_reached, limit);
3265 if (local_visited)
3266 BITMAP_FREE (local_visited);
3267
3268 timevar_pop (TV_ALIAS_STMT_WALK);
3269
3270 return ret;
3271 }
3272
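/* A hedged sketch of the vdef walker (the callback and the variable
   names are hypothetical): find the closest definition that may
   clobber REF_TREE when walking backwards from STMT.

       static bool
       record_first_clobber (ao_ref *, tree vdef, void *data)
       {
         *(tree *) data = vdef;
         return true;
       }

       ao_ref r;
       ao_ref_init (&r, ref_tree);
       tree clobber = NULL_TREE;
       walk_aliased_vdefs (&r, gimple_vuse (stmt), record_first_clobber,
                           &clobber, NULL, NULL, 100);

   Returning true from the callback stops the walk, so CLOBBER ends up
   being the vdef of a closest may-clobbering statement on the walked
   path, or NULL_TREE if none is found within the budget of 100
   statements.  */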