/* Alias analysis for trees.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "dumpfile.h"
#include "tree-eh.h"
#include "tree-dfa.h"
#include "ipa-reference.h"
#include "varasm.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias-oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */


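/* As a minimal usage sketch (illustrative only; REF and STMT stand for
   locals of some pass, not names defined in this file), a pass asking
   whether a statement can interfere with a memory reference uses the
   entry-points documented above:

     tree ref = ...;      // some memory reference tree
     gimple *stmt = ...;  // some statement

     if (!stmt_may_clobber_ref_p (stmt, ref)
	 && !ref_maybe_used_by_stmt_p (stmt, ref))
       {
	 // STMT neither writes nor reads the memory designated by REF,
	 // so the access can be moved across STMT.
       }

   Passes issuing many queries against the same reference typically
   build an ao_ref via ao_ref_init (see below) and use the ao_ref
   variants of these predicates so the reference is decomposed only
   once.  */
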
/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_of_decl_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_of_decl_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  fprintf (s, "  nonoverlapping_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
  fprintf (s, "  nonoverlapping_component_refs_of_decl_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_of_decl_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_of_decl_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias);
  fprintf (s, "  aliasing_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.aliasing_component_refs_p_no_alias,
	   alias_stats.aliasing_component_refs_p_no_alias
	   + alias_stats.aliasing_component_refs_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}


/* Return true if dereferencing PTR may alias with a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for the same pointer.
     In this case we still want to say both pointers alias, so shortcut
     that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}

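/* A small illustrative example: when data-dependence analysis feeds us
   the expressions

     p_1 + 4
     p_1

   the POINTER_PLUS_EXPR is stripped and the points-to solutions of the
   underlying SSA name are intersected (here even via the PTR1 == PTR2
   shortcut), so the two dereferences are still reported as possibly
   aliasing.  Offsets are deliberately ignored here; offset-based
   disambiguation is done by the callers.  */
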
/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that the restrict pointed-to
	 object may in fact be obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}

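/* Illustrative example: given a local

     int a;
     int *p_1;   // points-to solution of p_1 does not include a

   ptrs_compare_unequal (&a, p_1) returns true, allowing &a != p_1 to
   be folded to true.  This is only done when p_1 has points-to
   information that contains no restrict or interposable variables,
   and, for a static or external object, when the decl is known to
   bind locally and to have a nonzero address.  */
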
/* Returns whether reference REF to BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage, do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}

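/* Illustrative only: the fields of an ao_ref are computed lazily, so a
   caller issuing several queries against the same reference should
   build the ao_ref once and reuse it:

     ao_ref r;
     ao_ref_init (&r, ref);
     tree base = ao_ref_base (&r);		  // caches base/offset/extent
     alias_set_type set = ao_ref_alias_set (&r);  // caches the alias set

   where REF is some memory reference tree.  */
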
/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}

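/* Illustrative only: this is the natural way to describe the memory
   accessed through a pointer/length pair, e.g. for a hypothetical
   call to memset (ptr, val, len):

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref,
				    gimple_call_arg (call, 0),
				    gimple_call_arg (call, 2));

   after which DREF describes the bytes [ptr, ptr + len).  */
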
/* S1 and S2 are TYPE_SIZE or DECL_SIZE.  Compare them:
   Return -1 if S1 < S2
   Return 1 if S1 > S2
   Return 0 if equal or incomparable.  */

static int
compare_sizes (tree s1, tree s2)
{
  if (!s1 || !s2)
    return 0;

  poly_uint64 size1;
  poly_uint64 size2;

  if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
    return 0;
  if (known_lt (size1, size2))
    return -1;
  if (known_lt (size2, size1))
    return 1;
  return 0;
}

/* Compare TYPE1 and TYPE2 by their size.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if types are of equal sizes or we cannot compare them.  */

static int
compare_type_sizes (tree type1, tree type2)
{
  /* Be conservative for arrays and vectors.  We want to support partial
     overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c.  */
  while (TREE_CODE (type1) == ARRAY_TYPE
	 || TREE_CODE (type1) == VECTOR_TYPE)
    type1 = TREE_TYPE (type1);
  while (TREE_CODE (type2) == ARRAY_TYPE
	 || TREE_CODE (type2) == VECTOR_TYPE)
    type2 = TREE_TYPE (type2);
  return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
}

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* Handle the most common case first.  */
  if (type1 == type2)
    return 1;

  /* If we would have to do structural comparison, bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}

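/* For illustration: same_type_for_tbaa (int, const int) returns 1 since
   both share the main variant int; same_type_for_tbaa (int, float)
   returns 0 as the alias sets differ; and two distinct array types
   such as int[3] and int[n] yield -1, deferring the decision to the
   callers.  */
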
/* Return true if TYPE is a composite type (i.e. we may apply one of the
   handled component references on it).  */

static bool
type_has_components_p (tree type)
{
  return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
	 || TREE_CODE (type) == COMPLEX_TYPE;
}

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p1 = 0, same_p2 = 0;
  bool maybe_match = false;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.
     This however only makes sense if type2 is big enough to hold type1.  */
  int cmp_outer = compare_type_sizes (type2, type1);
  if (cmp_outer >= 0)
    {
      refp = &ref2;
      while (true)
	{
	  /* We walk from inner type to the outer types.  If the type we
	     see is already too large to be part of type1, terminate the
	     search.  */
	  int cmp = compare_type_sizes (type1, TREE_TYPE (*refp));
	  if (cmp < 0)
	    break;
	  /* If types may be of the same size, see if we can decide about
	     their equality.  */
	  if (cmp == 0)
	    {
	      same_p2 = same_type_for_tbaa (TREE_TYPE (*refp), type1);
	      if (same_p2 == 1)
		break;
	      /* In case we can't decide whether the types are the same,
		 continue looking for an exact match.
		 Remember however that we possibly saw a match, to bypass
		 the access path continuation tests we do later.  */
	      if (same_p2 == -1)
		maybe_match = true;
	    }
	  if (!handled_component_p (*refp))
	    break;
	  refp = &TREE_OPERAND (*refp, 0);
	}
      if (same_p2 == 1)
	{
	  poly_int64 offadj, sztmp, msztmp;
	  bool reverse;

	  /* We assume that arrays can overlap by a multiple of their
	     element size as tested in gcc.dg/torture/alias-2.c.
	     This partial overlap happens only when both arrays are bases
	     of the access and not contained within another component ref.
	     To be safe we also assume partial overlap for VLAs.  */
	  if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
	      && (!TYPE_SIZE (TREE_TYPE (base1))
		  || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
		  || (*refp == base2 && !ref2_is_decl)))
	    {
	      ++alias_stats.aliasing_component_refs_p_may_alias;
	      return true;
	    }

	  get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
	  offset2 -= offadj;
	  get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
	  offset1 -= offadj;
	  if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	    {
	      ++alias_stats.aliasing_component_refs_p_may_alias;
	      return true;
	    }
	  else
	    {
	      ++alias_stats.aliasing_component_refs_p_no_alias;
	      return false;
	    }
	}
    }

  /* If we didn't find a common base, try the other way around.  */
  if (cmp_outer <= 0)
    {
      refp = &ref1;
      while (true)
	{
	  int cmp = compare_type_sizes (type2, TREE_TYPE (*refp));
	  if (cmp < 0)
	    break;
	  /* If types may be of the same size, see if we can decide about
	     their equality.  */
	  if (cmp == 0)
	    {
	      same_p1 = same_type_for_tbaa (TREE_TYPE (*refp), type2);
	      if (same_p1 == 1)
		break;
	      if (same_p1 == -1)
		maybe_match = true;
	    }
	  if (!handled_component_p (*refp))
	    break;
	  refp = &TREE_OPERAND (*refp, 0);
	}
      if (same_p1 == 1)
	{
	  poly_int64 offadj, sztmp, msztmp;
	  bool reverse;

	  if (TREE_CODE (TREE_TYPE (base2)) == ARRAY_TYPE
	      && (!TYPE_SIZE (TREE_TYPE (base2))
		  || TREE_CODE (TYPE_SIZE (TREE_TYPE (base2))) != INTEGER_CST
		  || (*refp == base1 && !ref2_is_decl)))
	    {
	      ++alias_stats.aliasing_component_refs_p_may_alias;
	      return true;
	    }

	  get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
	  offset1 -= offadj;
	  get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
	  offset2 -= offadj;
	  if (ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	    {
	      ++alias_stats.aliasing_component_refs_p_may_alias;
	      return true;
	    }
	  else
	    {
	      ++alias_stats.aliasing_component_refs_p_no_alias;
	      return false;
	    }
	}
    }

  /* In the following code we make an assumption that the types in access
     paths do not overlap and thus accesses alias only if one path can be
     a continuation of the other.  If we were not able to decide about type
     equivalence, we need to give up.  */
  if (maybe_match)
    return true;

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 in the
     tail of path2.  */
  if (compare_type_sizes (TREE_TYPE (ref2), type1) >= 0
      && type_has_components_p (TREE_TYPE (ref2))
      && (base1_alias_set == ref2_alias_set
	  || alias_set_subset_of (base1_alias_set, ref2_alias_set)))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl
      && compare_type_sizes (TREE_TYPE (ref1), type2) >= 0
      && type_has_components_p (TREE_TYPE (ref1))
      && (base2_alias_set == ref1_alias_set
	  || alias_set_subset_of (base2_alias_set, ref1_alias_set)))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  ++alias_stats.aliasing_component_refs_p_no_alias;
  return false;
}

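/* Worked example for aliasing_component_refs_p (illustrative only,
   assuming a target with 32-bit int): with

     struct A { int i; int j; } *q;
     struct B { struct A a; int k; } *p;

   the walk finds struct A inside the access path of p->a.j and rebases
   both offsets to the start of a common struct A object: q->i then
   covers bits [0, 32) and p->a.j bits [32, 64), the ranges cannot
   overlap, and the accesses are disambiguated.  */
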
/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	{
	  ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
	  return false;
	}
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	{
	  ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
	  return false;
	}
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* Bases must be either the same or uncomparable.  */
  gcc_checking_assert (ref1 == ref2
		       || (DECL_P (ref1) && DECL_P (ref2)
			   && compare_base_decls (ref1, ref2) != 0));

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      do
	{
	  if (component_refs1.is_empty ())
	    {
	      ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
	      return false;
	    }
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    {
	      ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
	      return false;
	    }
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	{
	  ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
	  return false;
	}

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	{
	  ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
	  return false;
	}

      if (field1 != field2)
	{
	  /* A field and its representative need to be considered the
	     same.  */
	  if (DECL_BIT_FIELD_REPRESENTATIVE (field1) == field2
	      || DECL_BIT_FIELD_REPRESENTATIVE (field2) == field1)
	    {
	      ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
	      return false;
	    }
	  /* Different fields of the same record type cannot overlap.
	     ??? Bitfields can overlap at RTL level so punt on them.  */
	  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
	    {
	      ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
	      return false;
	    }
	  ++alias_stats.nonoverlapping_component_refs_of_decl_p_no_alias;
	  return true;
	}
    }

  ++alias_stats.nonoverlapping_component_refs_of_decl_p_may_alias;
  return false;
}

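/* Illustrative example for nonoverlapping_component_refs_of_decl_p:
   given

     struct { struct { int x; int y; } a, b; } s;

   the accesses s.a.x and s.b.y pop COMPONENT_REFs of equal rank until
   the distinct fields a and b of the same RECORD_TYPE are found, so
   the references cannot overlap.  Had the outer aggregate been a
   union, the walk would punt on the UNION_TYPE context and return
   false instead.  */
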
/* qsort compare function to sort FIELD_DECLs by the TYPE_UID of their
   DECL_FIELD_CONTEXT.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
  unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  if (!flag_strict_aliasing
      || !x || !y
      || !handled_component_p (x)
      || !handled_component_p (y))
    {
      ++alias_stats.nonoverlapping_component_refs_p_may_alias;
      return false;
    }

  auto_vec<const_tree, 16> fieldsx;
  while (handled_component_p (x))
    {
      if (TREE_CODE (x) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (x, 1);
	  tree type = DECL_FIELD_CONTEXT (field);
	  if (TREE_CODE (type) == RECORD_TYPE)
	    fieldsx.safe_push (field);
	}
      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR)
	fieldsx.truncate (0);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    {
      ++alias_stats.nonoverlapping_component_refs_p_may_alias;
      return false;
    }
  auto_vec<const_tree, 16> fieldsy;
  while (handled_component_p (y))
    {
      if (TREE_CODE (y) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (y, 1);
	  tree type = DECL_FIELD_CONTEXT (field);
	  if (TREE_CODE (type) == RECORD_TYPE)
	    fieldsy.safe_push (field);
	}
      else if (TREE_CODE (y) == VIEW_CONVERT_EXPR)
	fieldsy.truncate (0);
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    {
      ++alias_stats.nonoverlapping_component_refs_p_may_alias;
      return false;
    }

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    {
      if ((DECL_FIELD_CONTEXT (fieldsx[0])
	   == DECL_FIELD_CONTEXT (fieldsy[0]))
	  && fieldsx[0] != fieldsy[0]
	  && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])))
	{
	  ++alias_stats.nonoverlapping_component_refs_p_no_alias;
	  return true;
	}
      else
	{
	  ++alias_stats.nonoverlapping_component_refs_p_may_alias;
	  return false;
	}
    }

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	std::swap (fieldsx[0], fieldsx[1]);
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	std::swap (fieldsy[0], fieldsy[1]);
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = DECL_FIELD_CONTEXT (fieldx);
      tree typey = DECL_FIELD_CONTEXT (fieldy);
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap.  */
	  if (fieldx != fieldy)
	    {
	      /* A field and its representative need to be considered the
		 same.  */
	      if (DECL_BIT_FIELD_REPRESENTATIVE (fieldx) == fieldy
		  || DECL_BIT_FIELD_REPRESENTATIVE (fieldy) == fieldx)
		{
		  ++alias_stats.nonoverlapping_component_refs_p_may_alias;
		  return false;
		}
	      /* Different fields of the same record type cannot overlap.
		 ??? Bitfields can overlap at RTL level so punt on them.  */
	      if (DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy))
		{
		  ++alias_stats.nonoverlapping_component_refs_p_may_alias;
		  return false;
		}
	      ++alias_stats.nonoverlapping_component_refs_p_no_alias;
	      return true;
	    }
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  ++alias_stats.nonoverlapping_component_refs_p_may_alias;
  return false;
}


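/* Illustrative example for nonoverlapping_component_refs_p: for two
   pointer-based accesses p->s.f1 and q->t.f2 where f1 and f2 are
   distinct non-bitfield members of the same RECORD_TYPE, the sorted
   walk above pairs up that common field context and returns true,
   so the accesses cannot overlap no matter how p and q are related.  */
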
/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       poly_int64 offset1, poly_int64 max_size1,
		       tree ref2, tree base2,
		       poly_int64 offset2, poly_int64 max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot
     alias.  */
  if (compare_base_decls (base1, base2) == 0)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}

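/* Illustrative example for decl_refs_may_alias_p (assuming 32-bit
   int): for

     int a[8], b[8];

   accesses a[0] and a[1] share the base decl a but occupy the disjoint
   bit ranges [0, 32) and [32, 64), so they do not alias; accesses
   based on a and b are disambiguated directly by compare_base_decls.  */
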
/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       poly_int64 offset1, poly_int64 max_size1,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       poly_int64 offset2, poly_int64 max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);
  poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0)
    return true;

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (/* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && compare_sizes (DECL_SIZE (base2),
			TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  poly_int64 doffset1 = offset1;
  poly_offset_int doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
      tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
      /* If second reference is view-converted, give up now.  */
      if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
	return true;
    }

  /* If first reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.

     But avoid treating variable length arrays as "objects", instead
     assume they can overlap by an exact multiple of their element size.
     See gcc.dg/torture/alias-2.c.  */
  if (((TREE_CODE (base1) != TARGET_MEM_REF
	|| (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
       && (TREE_CODE (dbase2) != TARGET_MEM_REF
	   || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1
      && (TREE_CODE (TREE_TYPE (base1)) != ARRAY_TYPE
	  || (TYPE_SIZE (TREE_TYPE (base1))
	      && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) == INTEGER_CST)))
    return ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2,
				      /* Only if the other reference is an
					 actual decl can we safely check only
					 the toplevel part of access path 1.  */
				      same_type_for_tbaa (TREE_TYPE (dbase2),
							  TREE_TYPE (base2))
				      == 1);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   poly_int64 offset1, poly_int64 max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   poly_int64 offset2, poly_int64 max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
      poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
      return ranges_maybe_overlap_p (offset1 + moff1, max_size1,
				     offset2 + moff2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0
      || base2_alias_set == 0)
    return true;

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1
      /* But avoid treating arrays as "objects", instead assume they
	 can overlap by an exact multiple of their element size.
	 See gcc.dg/torture/alias-2.c.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
    return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}

1621 /* Return true, if the two memory references REF1 and REF2 may alias. */
1622
1623 static bool
1624 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1625 {
1626 tree base1, base2;
1627 poly_int64 offset1 = 0, offset2 = 0;
1628 poly_int64 max_size1 = -1, max_size2 = -1;
1629 bool var1_p, var2_p, ind1_p, ind2_p;
1630
1631 gcc_checking_assert ((!ref1->ref
1632 || TREE_CODE (ref1->ref) == SSA_NAME
1633 || DECL_P (ref1->ref)
1634 || TREE_CODE (ref1->ref) == STRING_CST
1635 || handled_component_p (ref1->ref)
1636 || TREE_CODE (ref1->ref) == MEM_REF
1637 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
1638 && (!ref2->ref
1639 || TREE_CODE (ref2->ref) == SSA_NAME
1640 || DECL_P (ref2->ref)
1641 || TREE_CODE (ref2->ref) == STRING_CST
1642 || handled_component_p (ref2->ref)
1643 || TREE_CODE (ref2->ref) == MEM_REF
1644 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
1645
1646 /* Decompose the references into their base objects and the access. */
1647 base1 = ao_ref_base (ref1);
1648 offset1 = ref1->offset;
1649 max_size1 = ref1->max_size;
1650 base2 = ao_ref_base (ref2);
1651 offset2 = ref2->offset;
1652 max_size2 = ref2->max_size;
1653
1654 /* We can end up with registers or constants as bases for example from
1655 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1656 which is seen as a struct copy. */
1657 if (TREE_CODE (base1) == SSA_NAME
1658 || TREE_CODE (base1) == CONST_DECL
1659 || TREE_CODE (base1) == CONSTRUCTOR
1660 || TREE_CODE (base1) == ADDR_EXPR
1661 || CONSTANT_CLASS_P (base1)
1662 || TREE_CODE (base2) == SSA_NAME
1663 || TREE_CODE (base2) == CONST_DECL
1664 || TREE_CODE (base2) == CONSTRUCTOR
1665 || TREE_CODE (base2) == ADDR_EXPR
1666 || CONSTANT_CLASS_P (base2))
1667 return false;
1668
1669 /* We can end up referring to code via function and label decls.
1670 As we likely do not properly track code aliases conservatively
1671 bail out. */
1672 if (TREE_CODE (base1) == FUNCTION_DECL
1673 || TREE_CODE (base1) == LABEL_DECL
1674 || TREE_CODE (base2) == FUNCTION_DECL
1675 || TREE_CODE (base2) == LABEL_DECL)
1676 return true;
1677
1678 /* Two volatile accesses always conflict. */
1679 if (ref1->volatile_p
1680 && ref2->volatile_p)
1681 return true;
1682
1683 /* Defer to simple offset based disambiguation if we have
1684 references based on two decls. Do this before defering to
1685 TBAA to handle must-alias cases in conformance with the
1686 GCC extension of allowing type-punning through unions. */
1687 var1_p = DECL_P (base1);
1688 var2_p = DECL_P (base2);
1689 if (var1_p && var2_p)
1690 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
1691 ref2->ref, base2, offset2, max_size2);
1692
1693 /* Handle restrict based accesses.
1694 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
1695 here. */
1696 tree rbase1 = base1;
1697 tree rbase2 = base2;
1698 if (var1_p)
1699 {
1700 rbase1 = ref1->ref;
1701 if (rbase1)
1702 while (handled_component_p (rbase1))
1703 rbase1 = TREE_OPERAND (rbase1, 0);
1704 }
1705 if (var2_p)
1706 {
1707 rbase2 = ref2->ref;
1708 if (rbase2)
1709 while (handled_component_p (rbase2))
1710 rbase2 = TREE_OPERAND (rbase2, 0);
1711 }
1712 if (rbase1 && rbase2
1713 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
1714 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
1715 /* If the accesses are in the same restrict clique... */
1716 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
1717 /* But based on different pointers they do not alias. */
1718 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
1719 return false;
1720
1721 ind1_p = (TREE_CODE (base1) == MEM_REF
1722 || TREE_CODE (base1) == TARGET_MEM_REF);
1723 ind2_p = (TREE_CODE (base2) == MEM_REF
1724 || TREE_CODE (base2) == TARGET_MEM_REF);
1725
1726 /* Canonicalize the pointer-vs-decl case. */
1727 if (ind1_p && var2_p)
1728 {
1729 std::swap (offset1, offset2);
1730 std::swap (max_size1, max_size2);
1731 std::swap (base1, base2);
1732 std::swap (ref1, ref2);
1733 var1_p = true;
1734 ind1_p = false;
1735 var2_p = false;
1736 ind2_p = true;
1737 }
1738
1739 /* First defer to TBAA if possible. */
1740 if (tbaa_p
1741 && flag_strict_aliasing
1742 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
1743 ao_ref_alias_set (ref2)))
1744 return false;
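  /* For example, with -fstrict-aliasing the accesses below are
     disambiguated at this point because the alias sets of int and
     float do not conflict (a sketch, assuming no union is involved):

       void f (int *i, float *f)
       {
         *i = 1;
         ... = *f;
       }  */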
1745
1746 /* If the reference is based on a pointer that points to memory
1747 that may not be written to, then the other reference cannot possibly
1748 clobber it. */
1749 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
1750 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
1751 || (ind1_p
1752 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
1753 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
1754 return false;
1755
1756 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
1757 if (var1_p && ind2_p)
1758 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
1759 offset2, max_size2,
1760 ao_ref_alias_set (ref2),
1761 ao_ref_base_alias_set (ref2),
1762 ref1->ref, base1,
1763 offset1, max_size1,
1764 ao_ref_alias_set (ref1),
1765 ao_ref_base_alias_set (ref1),
1766 tbaa_p);
1767 else if (ind1_p && ind2_p)
1768 return indirect_refs_may_alias_p (ref1->ref, base1,
1769 offset1, max_size1,
1770 ao_ref_alias_set (ref1),
1771 ao_ref_base_alias_set (ref1),
1772 ref2->ref, base2,
1773 offset2, max_size2,
1774 ao_ref_alias_set (ref2),
1775 ao_ref_base_alias_set (ref2),
1776 tbaa_p);
1777
1778 gcc_unreachable ();
1779 }
1780
1781 /* Return true if the two memory references REF1 and REF2 may alias
1782 and update the query statistics. */
1783
1784 bool
1785 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1786 {
1787 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
1788 if (res)
1789 ++alias_stats.refs_may_alias_p_may_alias;
1790 else
1791 ++alias_stats.refs_may_alias_p_no_alias;
1792 return res;
1793 }
1794
1795 static bool
1796 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
1797 {
1798 ao_ref r1;
1799 ao_ref_init (&r1, ref1);
1800 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
1801 }
1802
1803 bool
1804 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
1805 {
1806 ao_ref r1, r2;
1807 ao_ref_init (&r1, ref1);
1808 ao_ref_init (&r2, ref2);
1809 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
1810 }
1811
1812 /* Returns true if there is an anti-dependence for the STORE that
1813 executes after the LOAD. */
1814
1815 bool
1816 refs_anti_dependent_p (tree load, tree store)
1817 {
1818 ao_ref r1, r2;
1819 ao_ref_init (&r1, load);
1820 ao_ref_init (&r2, store);
1821 return refs_may_alias_p_1 (&r1, &r2, false);
1822 }
1823
1824 /* Returns true if there is an output dependence for the stores
1825 STORE1 and STORE2. */
1826
1827 bool
1828 refs_output_dependent_p (tree store1, tree store2)
1829 {
1830 ao_ref r1, r2;
1831 ao_ref_init (&r1, store1);
1832 ao_ref_init (&r2, store2);
1833 return refs_may_alias_p_1 (&r1, &r2, false);
1834 }
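/* As an illustration of the two predicates above, given

     tmp = a[i];        <-- LOAD
     a[i] = tmp + 1;    <-- STORE executing afterwards

   refs_anti_dependent_p (load, store) returns true, so the store may
   not be moved before the load; likewise two stores to a[i] are
   output dependent and may not be reordered.  Both predicates
   deliberately pass false for tbaa_p.  */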
1835
1836 /* If the call CALL may use the memory reference REF return true,
1837 otherwise return false. */
1838
1839 static bool
1840 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
1841 {
1842 tree base, callee;
1843 unsigned i;
1844 int flags = gimple_call_flags (call);
1845
1846 /* Const functions without a static chain do not implicitly use memory. */
1847 if (!gimple_call_chain (call)
1848 && (flags & (ECF_CONST|ECF_NOVOPS)))
1849 goto process_args;
1850
1851 base = ao_ref_base (ref);
1852 if (!base)
1853 return true;
1854
1855 /* A call that is not free of side-effects might involve volatile
1856 accesses and thus conflicts with all other volatile accesses. */
1857 if (ref->volatile_p)
1858 return true;
1859
1860 /* If the reference is based on a decl that is not aliased, the call
1861 cannot possibly use it. */
1862 if (DECL_P (base)
1863 && !may_be_aliased (base)
1864 /* But local statics can be used through recursion. */
1865 && !is_global_var (base))
1866 goto process_args;
1867
1868 callee = gimple_call_fndecl (call);
1869
1870 /* Explicitly handle those builtin functions that do not act as
1871 escape points. See tree-ssa-structalias.c:find_func_aliases
1872 for the list of builtins we might need to handle here. */
1873 if (callee != NULL_TREE
1874 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1875 switch (DECL_FUNCTION_CODE (callee))
1876 {
1877 /* All the following functions read memory pointed to by
1878 their second argument. strcat/strncat additionally
1879 read memory pointed to by the first argument. */
1880 case BUILT_IN_STRCAT:
1881 case BUILT_IN_STRNCAT:
1882 {
1883 ao_ref dref;
1884 ao_ref_init_from_ptr_and_size (&dref,
1885 gimple_call_arg (call, 0),
1886 NULL_TREE);
1887 if (refs_may_alias_p_1 (&dref, ref, false))
1888 return true;
1889 }
1890 /* FALLTHRU */
1891 case BUILT_IN_STRCPY:
1892 case BUILT_IN_STRNCPY:
1893 case BUILT_IN_MEMCPY:
1894 case BUILT_IN_MEMMOVE:
1895 case BUILT_IN_MEMPCPY:
1896 case BUILT_IN_STPCPY:
1897 case BUILT_IN_STPNCPY:
1898 case BUILT_IN_TM_MEMCPY:
1899 case BUILT_IN_TM_MEMMOVE:
1900 {
1901 ao_ref dref;
1902 tree size = NULL_TREE;
1903 if (gimple_call_num_args (call) == 3)
1904 size = gimple_call_arg (call, 2);
1905 ao_ref_init_from_ptr_and_size (&dref,
1906 gimple_call_arg (call, 1),
1907 size);
1908 return refs_may_alias_p_1 (&dref, ref, false);
1909 }
1910 case BUILT_IN_STRCAT_CHK:
1911 case BUILT_IN_STRNCAT_CHK:
1912 {
1913 ao_ref dref;
1914 ao_ref_init_from_ptr_and_size (&dref,
1915 gimple_call_arg (call, 0),
1916 NULL_TREE);
1917 if (refs_may_alias_p_1 (&dref, ref, false))
1918 return true;
1919 }
1920 /* FALLTHRU */
1921 case BUILT_IN_STRCPY_CHK:
1922 case BUILT_IN_STRNCPY_CHK:
1923 case BUILT_IN_MEMCPY_CHK:
1924 case BUILT_IN_MEMMOVE_CHK:
1925 case BUILT_IN_MEMPCPY_CHK:
1926 case BUILT_IN_STPCPY_CHK:
1927 case BUILT_IN_STPNCPY_CHK:
1928 {
1929 ao_ref dref;
1930 tree size = NULL_TREE;
1931 if (gimple_call_num_args (call) == 4)
1932 size = gimple_call_arg (call, 2);
1933 ao_ref_init_from_ptr_and_size (&dref,
1934 gimple_call_arg (call, 1),
1935 size);
1936 return refs_may_alias_p_1 (&dref, ref, false);
1937 }
1938 case BUILT_IN_BCOPY:
1939 {
1940 ao_ref dref;
1941 tree size = gimple_call_arg (call, 2);
1942 ao_ref_init_from_ptr_and_size (&dref,
1943 gimple_call_arg (call, 0),
1944 size);
1945 return refs_may_alias_p_1 (&dref, ref, false);
1946 }
1947
1948 /* The following functions read memory pointed to by their
1949 first argument. */
1950 CASE_BUILT_IN_TM_LOAD (1):
1951 CASE_BUILT_IN_TM_LOAD (2):
1952 CASE_BUILT_IN_TM_LOAD (4):
1953 CASE_BUILT_IN_TM_LOAD (8):
1954 CASE_BUILT_IN_TM_LOAD (FLOAT):
1955 CASE_BUILT_IN_TM_LOAD (DOUBLE):
1956 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
1957 CASE_BUILT_IN_TM_LOAD (M64):
1958 CASE_BUILT_IN_TM_LOAD (M128):
1959 CASE_BUILT_IN_TM_LOAD (M256):
1960 case BUILT_IN_TM_LOG:
1961 case BUILT_IN_TM_LOG_1:
1962 case BUILT_IN_TM_LOG_2:
1963 case BUILT_IN_TM_LOG_4:
1964 case BUILT_IN_TM_LOG_8:
1965 case BUILT_IN_TM_LOG_FLOAT:
1966 case BUILT_IN_TM_LOG_DOUBLE:
1967 case BUILT_IN_TM_LOG_LDOUBLE:
1968 case BUILT_IN_TM_LOG_M64:
1969 case BUILT_IN_TM_LOG_M128:
1970 case BUILT_IN_TM_LOG_M256:
1971 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
1972
1973 /* These read memory pointed to by the first argument. */
1974 case BUILT_IN_STRDUP:
1975 case BUILT_IN_STRNDUP:
1976 case BUILT_IN_REALLOC:
1977 {
1978 ao_ref dref;
1979 tree size = NULL_TREE;
1980 if (gimple_call_num_args (call) == 2)
1981 size = gimple_call_arg (call, 1);
1982 ao_ref_init_from_ptr_and_size (&dref,
1983 gimple_call_arg (call, 0),
1984 size);
1985 return refs_may_alias_p_1 (&dref, ref, false);
1986 }
1987 /* These read memory pointed to by the first argument. */
1988 case BUILT_IN_INDEX:
1989 case BUILT_IN_STRCHR:
1990 case BUILT_IN_STRRCHR:
1991 {
1992 ao_ref dref;
1993 ao_ref_init_from_ptr_and_size (&dref,
1994 gimple_call_arg (call, 0),
1995 NULL_TREE);
1996 return refs_may_alias_p_1 (&dref, ref, false);
1997 }
1998 /* These read memory pointed to by the first argument with size
1999 in the third argument. */
2000 case BUILT_IN_MEMCHR:
2001 {
2002 ao_ref dref;
2003 ao_ref_init_from_ptr_and_size (&dref,
2004 gimple_call_arg (call, 0),
2005 gimple_call_arg (call, 2));
2006 return refs_may_alias_p_1 (&dref, ref, false);
2007 }
2008 /* These read memory pointed to by the first and second arguments. */
2009 case BUILT_IN_STRSTR:
2010 case BUILT_IN_STRPBRK:
2011 {
2012 ao_ref dref;
2013 ao_ref_init_from_ptr_and_size (&dref,
2014 gimple_call_arg (call, 0),
2015 NULL_TREE);
2016 if (refs_may_alias_p_1 (&dref, ref, false))
2017 return true;
2018 ao_ref_init_from_ptr_and_size (&dref,
2019 gimple_call_arg (call, 1),
2020 NULL_TREE);
2021 return refs_may_alias_p_1 (&dref, ref, false);
2022 }
2023
2024 /* The following builtins do not read from memory. */
2025 case BUILT_IN_FREE:
2026 case BUILT_IN_MALLOC:
2027 case BUILT_IN_POSIX_MEMALIGN:
2028 case BUILT_IN_ALIGNED_ALLOC:
2029 case BUILT_IN_CALLOC:
2030 CASE_BUILT_IN_ALLOCA:
2031 case BUILT_IN_STACK_SAVE:
2032 case BUILT_IN_STACK_RESTORE:
2033 case BUILT_IN_MEMSET:
2034 case BUILT_IN_TM_MEMSET:
2035 case BUILT_IN_MEMSET_CHK:
2036 case BUILT_IN_FREXP:
2037 case BUILT_IN_FREXPF:
2038 case BUILT_IN_FREXPL:
2039 case BUILT_IN_GAMMA_R:
2040 case BUILT_IN_GAMMAF_R:
2041 case BUILT_IN_GAMMAL_R:
2042 case BUILT_IN_LGAMMA_R:
2043 case BUILT_IN_LGAMMAF_R:
2044 case BUILT_IN_LGAMMAL_R:
2045 case BUILT_IN_MODF:
2046 case BUILT_IN_MODFF:
2047 case BUILT_IN_MODFL:
2048 case BUILT_IN_REMQUO:
2049 case BUILT_IN_REMQUOF:
2050 case BUILT_IN_REMQUOL:
2051 case BUILT_IN_SINCOS:
2052 case BUILT_IN_SINCOSF:
2053 case BUILT_IN_SINCOSL:
2054 case BUILT_IN_ASSUME_ALIGNED:
2055 case BUILT_IN_VA_END:
2056 return false;
2057 /* __sync_* builtins and some OpenMP builtins act as threading
2058 barriers. */
2059 #undef DEF_SYNC_BUILTIN
2060 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2061 #include "sync-builtins.def"
2062 #undef DEF_SYNC_BUILTIN
2063 case BUILT_IN_GOMP_ATOMIC_START:
2064 case BUILT_IN_GOMP_ATOMIC_END:
2065 case BUILT_IN_GOMP_BARRIER:
2066 case BUILT_IN_GOMP_BARRIER_CANCEL:
2067 case BUILT_IN_GOMP_TASKWAIT:
2068 case BUILT_IN_GOMP_TASKGROUP_END:
2069 case BUILT_IN_GOMP_CRITICAL_START:
2070 case BUILT_IN_GOMP_CRITICAL_END:
2071 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2072 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2073 case BUILT_IN_GOMP_LOOP_END:
2074 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2075 case BUILT_IN_GOMP_ORDERED_START:
2076 case BUILT_IN_GOMP_ORDERED_END:
2077 case BUILT_IN_GOMP_SECTIONS_END:
2078 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2079 case BUILT_IN_GOMP_SINGLE_COPY_START:
2080 case BUILT_IN_GOMP_SINGLE_COPY_END:
2081 return true;
2082
2083 default:
2084 /* Fallthru to general call handling. */;
2085 }
2086
2087 /* Check if base is a global static variable that is not read
2088 by the function. */
2089 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2090 {
2091 struct cgraph_node *node = cgraph_node::get (callee);
2092 bitmap not_read;
2093
2094 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2095 node yet. We should enforce that there are nodes for all decls in the
2096 IL and remove this check instead. */
2097 if (node
2098 && (not_read = ipa_reference_get_not_read_global (node))
2099 && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
2100 goto process_args;
2101 }
2102
2103 /* Check if the base variable is call-used. */
2104 if (DECL_P (base))
2105 {
2106 if (pt_solution_includes (gimple_call_use_set (call), base))
2107 return true;
2108 }
2109 else if ((TREE_CODE (base) == MEM_REF
2110 || TREE_CODE (base) == TARGET_MEM_REF)
2111 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2112 {
2113 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2114 if (!pi)
2115 return true;
2116
2117 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2118 return true;
2119 }
2120 else
2121 return true;
2122
2123 /* Inspect call arguments for passed-by-value aliases. */
2124 process_args:
2125 for (i = 0; i < gimple_call_num_args (call); ++i)
2126 {
2127 tree op = gimple_call_arg (call, i);
2128 int flags = gimple_call_arg_flags (call, i);
2129
2130 if (flags & EAF_UNUSED)
2131 continue;
2132
2133 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2134 op = TREE_OPERAND (op, 0);
2135
2136 if (TREE_CODE (op) != SSA_NAME
2137 && !is_gimple_min_invariant (op))
2138 {
2139 ao_ref r;
2140 ao_ref_init (&r, op);
2141 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2142 return true;
2143 }
2144 }
2145
2146 return false;
2147 }
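/* As an illustration of the process_args handling above: passing an
   aggregate by value makes the call read it, so for (a sketch)

     struct S s;
     s.i = 0;
     foo (s);

   ref_maybe_used_by_call_p_1 reports that the call may use the
   memory of s.  */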
2148
2149 static bool
2150 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2151 {
2152 bool res;
2153 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2154 if (res)
2155 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2156 else
2157 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2158 return res;
2159 }
2160
2161
2162 /* If the statement STMT may use the memory reference REF return
2163 true, otherwise return false. */
2164
2165 bool
2166 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2167 {
2168 if (is_gimple_assign (stmt))
2169 {
2170 tree rhs;
2171
2172 /* Assignments that access memory are always single assignments. */
2173 if (!gimple_assign_single_p (stmt))
2174 return false;
2175
2176 rhs = gimple_assign_rhs1 (stmt);
2177 if (is_gimple_reg (rhs)
2178 || is_gimple_min_invariant (rhs)
2179 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2180 return false;
2181
2182 return refs_may_alias_p (rhs, ref, tbaa_p);
2183 }
2184 else if (is_gimple_call (stmt))
2185 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2186 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2187 {
2188 tree retval = gimple_return_retval (return_stmt);
2189 if (retval
2190 && TREE_CODE (retval) != SSA_NAME
2191 && !is_gimple_min_invariant (retval)
2192 && refs_may_alias_p (retval, ref, tbaa_p))
2193 return true;
2194 /* If ref escapes the function, then the return acts as a use. */
2195 tree base = ao_ref_base (ref);
2196 if (!base)
2197 ;
2198 else if (DECL_P (base))
2199 return is_global_var (base);
2200 else if (TREE_CODE (base) == MEM_REF
2201 || TREE_CODE (base) == TARGET_MEM_REF)
2202 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2203 return false;
2204 }
2205
2206 return true;
2207 }
2208
2209 bool
2210 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2211 {
2212 ao_ref r;
2213 ao_ref_init (&r, ref);
2214 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2215 }
2216
2217 /* If the call in statement CALL may clobber the memory reference REF
2218 return true, otherwise return false. */
2219
2220 bool
2221 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
2222 {
2223 tree base;
2224 tree callee;
2225
2226 /* If the call is pure or const it cannot clobber anything. */
2227 if (gimple_call_flags (call)
2228 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2229 return false;
2230 if (gimple_call_internal_p (call))
2231 switch (gimple_call_internal_fn (call))
2232 {
2233 /* Treat these internal calls like ECF_PURE for aliasing:
2234 they don't write to any memory the program should care about.
2235 They have other important side-effects, and read memory,
2236 so can't be ECF_NOVOPS. */
2237 case IFN_UBSAN_NULL:
2238 case IFN_UBSAN_BOUNDS:
2239 case IFN_UBSAN_VPTR:
2240 case IFN_UBSAN_OBJECT_SIZE:
2241 case IFN_UBSAN_PTR:
2242 case IFN_ASAN_CHECK:
2243 return false;
2244 default:
2245 break;
2246 }
2247
2248 base = ao_ref_base (ref);
2249 if (!base)
2250 return true;
2251
2252 if (TREE_CODE (base) == SSA_NAME
2253 || CONSTANT_CLASS_P (base))
2254 return false;
2255
2256 /* A call that is not free of side-effects might involve volatile
2257 accesses and thus conflicts with all other volatile accesses. */
2258 if (ref->volatile_p)
2259 return true;
2260
2261 /* If the reference is based on a decl that is not aliased, the call
2262 cannot possibly clobber it. */
2263 if (DECL_P (base)
2264 && !may_be_aliased (base)
2265 /* But local non-readonly statics can be modified through recursion
2266 or the call may implement a threading barrier, which we must
2267 treat as may-def. */
2268 && (TREE_READONLY (base)
2269 || !is_global_var (base)))
2270 return false;
2271
2272 /* If the reference is based on a pointer that points to memory
2273 that may not be written to, then the call cannot possibly clobber it. */
2274 if ((TREE_CODE (base) == MEM_REF
2275 || TREE_CODE (base) == TARGET_MEM_REF)
2276 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
2277 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
2278 return false;
2279
2280 callee = gimple_call_fndecl (call);
2281
2282 /* Explicitly handle those builtin functions that do not act as
2283 escape points. See tree-ssa-structalias.c:find_func_aliases
2284 for the list of builtins we might need to handle here. */
2285 if (callee != NULL_TREE
2286 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2287 switch (DECL_FUNCTION_CODE (callee))
2288 {
2289 /* All the following functions clobber memory pointed to by
2290 their first argument. */
2291 case BUILT_IN_STRCPY:
2292 case BUILT_IN_STRNCPY:
2293 case BUILT_IN_MEMCPY:
2294 case BUILT_IN_MEMMOVE:
2295 case BUILT_IN_MEMPCPY:
2296 case BUILT_IN_STPCPY:
2297 case BUILT_IN_STPNCPY:
2298 case BUILT_IN_STRCAT:
2299 case BUILT_IN_STRNCAT:
2300 case BUILT_IN_MEMSET:
2301 case BUILT_IN_TM_MEMSET:
2302 CASE_BUILT_IN_TM_STORE (1):
2303 CASE_BUILT_IN_TM_STORE (2):
2304 CASE_BUILT_IN_TM_STORE (4):
2305 CASE_BUILT_IN_TM_STORE (8):
2306 CASE_BUILT_IN_TM_STORE (FLOAT):
2307 CASE_BUILT_IN_TM_STORE (DOUBLE):
2308 CASE_BUILT_IN_TM_STORE (LDOUBLE):
2309 CASE_BUILT_IN_TM_STORE (M64):
2310 CASE_BUILT_IN_TM_STORE (M128):
2311 CASE_BUILT_IN_TM_STORE (M256):
2312 case BUILT_IN_TM_MEMCPY:
2313 case BUILT_IN_TM_MEMMOVE:
2314 {
2315 ao_ref dref;
2316 tree size = NULL_TREE;
2317 /* Don't pass in a size for strncat, as the maximum size
2318 written is strlen (dest) + n + 1 bytes rather than n,
2319 i.e. n + 1 bytes at dest + strlen (dest), and
2320 strlen (dest) isn't known. */
2321 if (gimple_call_num_args (call) == 3
2322 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2323 size = gimple_call_arg (call, 2);
2324 ao_ref_init_from_ptr_and_size (&dref,
2325 gimple_call_arg (call, 0),
2326 size);
2327 return refs_may_alias_p_1 (&dref, ref, false);
2328 }
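      /* A small worked example of the strncat sizing issue above: for
         strncat (dest, src, 3) with strlen (dest) == 2, up to
         4 == n + 1 bytes are stored starting at dest + 2, so the
         clobbered region cannot be described as n bytes at dest.  */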
2329 case BUILT_IN_STRCPY_CHK:
2330 case BUILT_IN_STRNCPY_CHK:
2331 case BUILT_IN_MEMCPY_CHK:
2332 case BUILT_IN_MEMMOVE_CHK:
2333 case BUILT_IN_MEMPCPY_CHK:
2334 case BUILT_IN_STPCPY_CHK:
2335 case BUILT_IN_STPNCPY_CHK:
2336 case BUILT_IN_STRCAT_CHK:
2337 case BUILT_IN_STRNCAT_CHK:
2338 case BUILT_IN_MEMSET_CHK:
2339 {
2340 ao_ref dref;
2341 tree size = NULL_TREE;
2342 /* Don't pass in a size for __strncat_chk, as the maximum size
2343 written is strlen (dest) + n + 1 bytes rather than n,
2344 i.e. n + 1 bytes at dest + strlen (dest), and
2345 strlen (dest) isn't known. */
2346 if (gimple_call_num_args (call) == 4
2347 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2348 size = gimple_call_arg (call, 2);
2349 ao_ref_init_from_ptr_and_size (&dref,
2350 gimple_call_arg (call, 0),
2351 size);
2352 return refs_may_alias_p_1 (&dref, ref, false);
2353 }
2354 case BUILT_IN_BCOPY:
2355 {
2356 ao_ref dref;
2357 tree size = gimple_call_arg (call, 2);
2358 ao_ref_init_from_ptr_and_size (&dref,
2359 gimple_call_arg (call, 1),
2360 size);
2361 return refs_may_alias_p_1 (&dref, ref, false);
2362 }
2363 /* Allocating memory does not have any side-effects apart from
2364 being the definition point for the pointer. */
2365 case BUILT_IN_MALLOC:
2366 case BUILT_IN_ALIGNED_ALLOC:
2367 case BUILT_IN_CALLOC:
2368 case BUILT_IN_STRDUP:
2369 case BUILT_IN_STRNDUP:
2370 /* Unix98 specifies that errno is set on allocation failure. */
2371 if (flag_errno_math
2372 && targetm.ref_may_alias_errno (ref))
2373 return true;
2374 return false;
2375 case BUILT_IN_STACK_SAVE:
2376 CASE_BUILT_IN_ALLOCA:
2377 case BUILT_IN_ASSUME_ALIGNED:
2378 return false;
2379 /* But posix_memalign stores a pointer into the memory pointed to
2380 by its first argument. */
2381 case BUILT_IN_POSIX_MEMALIGN:
2382 {
2383 tree ptrptr = gimple_call_arg (call, 0);
2384 ao_ref dref;
2385 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2386 TYPE_SIZE_UNIT (ptr_type_node));
2387 return (refs_may_alias_p_1 (&dref, ref, false)
2388 || (flag_errno_math
2389 && targetm.ref_may_alias_errno (ref)));
2390 }
2391 /* Freeing memory kills the pointed-to memory. More importantly,
2392 the call has to serve as a barrier for moving loads and stores
2393 across it. */
2394 case BUILT_IN_FREE:
2395 case BUILT_IN_VA_END:
2396 {
2397 tree ptr = gimple_call_arg (call, 0);
2398 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2399 }
2400 /* Realloc serves both as allocation point and deallocation point. */
2401 case BUILT_IN_REALLOC:
2402 {
2403 tree ptr = gimple_call_arg (call, 0);
2404 /* Unix98 specifies that errno is set on allocation failure. */
2405 return ((flag_errno_math
2406 && targetm.ref_may_alias_errno (ref))
2407 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2408 }
2409 case BUILT_IN_GAMMA_R:
2410 case BUILT_IN_GAMMAF_R:
2411 case BUILT_IN_GAMMAL_R:
2412 case BUILT_IN_LGAMMA_R:
2413 case BUILT_IN_LGAMMAF_R:
2414 case BUILT_IN_LGAMMAL_R:
2415 {
2416 tree out = gimple_call_arg (call, 1);
2417 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2418 return true;
2419 if (flag_errno_math)
2420 break;
2421 return false;
2422 }
2423 case BUILT_IN_FREXP:
2424 case BUILT_IN_FREXPF:
2425 case BUILT_IN_FREXPL:
2426 case BUILT_IN_MODF:
2427 case BUILT_IN_MODFF:
2428 case BUILT_IN_MODFL:
2429 {
2430 tree out = gimple_call_arg (call, 1);
2431 return ptr_deref_may_alias_ref_p_1 (out, ref);
2432 }
2433 case BUILT_IN_REMQUO:
2434 case BUILT_IN_REMQUOF:
2435 case BUILT_IN_REMQUOL:
2436 {
2437 tree out = gimple_call_arg (call, 2);
2438 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2439 return true;
2440 if (flag_errno_math)
2441 break;
2442 return false;
2443 }
2444 case BUILT_IN_SINCOS:
2445 case BUILT_IN_SINCOSF:
2446 case BUILT_IN_SINCOSL:
2447 {
2448 tree sin = gimple_call_arg (call, 1);
2449 tree cos = gimple_call_arg (call, 2);
2450 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2451 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2452 }
2453 /* __sync_* builtins and some OpenMP builtins act as threading
2454 barriers. */
2455 #undef DEF_SYNC_BUILTIN
2456 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2457 #include "sync-builtins.def"
2458 #undef DEF_SYNC_BUILTIN
2459 case BUILT_IN_GOMP_ATOMIC_START:
2460 case BUILT_IN_GOMP_ATOMIC_END:
2461 case BUILT_IN_GOMP_BARRIER:
2462 case BUILT_IN_GOMP_BARRIER_CANCEL:
2463 case BUILT_IN_GOMP_TASKWAIT:
2464 case BUILT_IN_GOMP_TASKGROUP_END:
2465 case BUILT_IN_GOMP_CRITICAL_START:
2466 case BUILT_IN_GOMP_CRITICAL_END:
2467 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2468 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2469 case BUILT_IN_GOMP_LOOP_END:
2470 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2471 case BUILT_IN_GOMP_ORDERED_START:
2472 case BUILT_IN_GOMP_ORDERED_END:
2473 case BUILT_IN_GOMP_SECTIONS_END:
2474 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2475 case BUILT_IN_GOMP_SINGLE_COPY_START:
2476 case BUILT_IN_GOMP_SINGLE_COPY_END:
2477 return true;
2478 default:
2479 /* Fallthru to general call handling. */;
2480 }
2481
2482 /* Check if base is a global static variable that is not written
2483 by the function. */
2484 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2485 {
2486 struct cgraph_node *node = cgraph_node::get (callee);
2487 bitmap not_written;
2488
2489 if (node
2490 && (not_written = ipa_reference_get_not_written_global (node))
2491 && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2492 return false;
2493 }
2494
2495 /* Check if the base variable is call-clobbered. */
2496 if (DECL_P (base))
2497 return pt_solution_includes (gimple_call_clobber_set (call), base);
2498 else if ((TREE_CODE (base) == MEM_REF
2499 || TREE_CODE (base) == TARGET_MEM_REF)
2500 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2501 {
2502 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2503 if (!pi)
2504 return true;
2505
2506 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2507 }
2508
2509 return true;
2510 }
2511
2512 /* If the call in statement CALL may clobber the memory reference REF
2513 return true, otherwise return false. */
2514
2515 bool
2516 call_may_clobber_ref_p (gcall *call, tree ref)
2517 {
2518 bool res;
2519 ao_ref r;
2520 ao_ref_init (&r, ref);
2521 res = call_may_clobber_ref_p_1 (call, &r);
2522 if (res)
2523 ++alias_stats.call_may_clobber_ref_p_may_alias;
2524 else
2525 ++alias_stats.call_may_clobber_ref_p_no_alias;
2526 return res;
2527 }
2528
2529
2530 /* If the statement STMT may clobber the memory reference REF return true,
2531 otherwise return false. */
2532
2533 bool
2534 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
2535 {
2536 if (is_gimple_call (stmt))
2537 {
2538 tree lhs = gimple_call_lhs (stmt);
2539 if (lhs
2540 && TREE_CODE (lhs) != SSA_NAME)
2541 {
2542 ao_ref r;
2543 ao_ref_init (&r, lhs);
2544 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
2545 return true;
2546 }
2547
2548 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2549 }
2550 else if (gimple_assign_single_p (stmt))
2551 {
2552 tree lhs = gimple_assign_lhs (stmt);
2553 if (TREE_CODE (lhs) != SSA_NAME)
2554 {
2555 ao_ref r;
2556 ao_ref_init (&r, lhs);
2557 return refs_may_alias_p_1 (ref, &r, tbaa_p);
2558 }
2559 }
2560 else if (gimple_code (stmt) == GIMPLE_ASM)
2561 return true;
2562
2563 return false;
2564 }
2565
2566 bool
2567 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
2568 {
2569 ao_ref r;
2570 ao_ref_init (&r, ref);
2571 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
2572 }
2573
2574 /* Return true if the stores STORE1 and STORE2, described by the
2575 corresponding tuples <BASE, OFFSET, SIZE, MAX_SIZE>, have the same
2576 size and store to the same address. */
2577
2578 static bool
2579 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
2580 poly_int64 max_size1,
2581 tree base2, poly_int64 offset2, poly_int64 size2,
2582 poly_int64 max_size2)
2583 {
2584 /* Offsets need to be 0. */
2585 if (maybe_ne (offset1, 0)
2586 || maybe_ne (offset2, 0))
2587 return false;
2588
2589 bool base1_obj_p = SSA_VAR_P (base1);
2590 bool base2_obj_p = SSA_VAR_P (base2);
2591
2592 /* We need one object. */
2593 if (base1_obj_p == base2_obj_p)
2594 return false;
2595 tree obj = base1_obj_p ? base1 : base2;
2596
2597 /* And we need one MEM_REF. */
2598 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
2599 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
2600 if (base1_memref_p == base2_memref_p)
2601 return false;
2602 tree memref = base1_memref_p ? base1 : base2;
2603
2604 /* Sizes need to be valid. */
2605 if (!known_size_p (max_size1)
2606 || !known_size_p (max_size2)
2607 || !known_size_p (size1)
2608 || !known_size_p (size2))
2609 return false;
2610
2611 /* Max_size needs to match size. */
2612 if (maybe_ne (max_size1, size1)
2613 || maybe_ne (max_size2, size2))
2614 return false;
2615
2616 /* Sizes need to match. */
2617 if (maybe_ne (size1, size2))
2618 return false;
2619
2620
2621 /* Check that memref is a store via a pointer with singleton points-to info. */
2622 if (!integer_zerop (TREE_OPERAND (memref, 1)))
2623 return false;
2624 tree ptr = TREE_OPERAND (memref, 0);
2625 if (TREE_CODE (ptr) != SSA_NAME)
2626 return false;
2627 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2628 unsigned int pt_uid;
2629 if (pi == NULL
2630 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
2631 return false;
2632
2633 /* Be conservative with non-call exceptions when the address might
2634 be NULL. */
2635 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
2636 return false;
2637
2638 /* Check that ptr points relative to obj. */
2639 unsigned int obj_uid = DECL_PT_UID (obj);
2640 if (obj_uid != pt_uid)
2641 return false;
2642
2643 /* Check that the object size is the same as the store size. That
2644 ensures that ptr points to the start of obj. */
2645 return (DECL_SIZE (obj)
2646 && poly_int_tree_p (DECL_SIZE (obj))
2647 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
2648 }
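/* A sketch of the situation recognized above (the points-to solution
   shown is illustrative):

     int a;
     int *p = &a;   <-- points-to of p is the singleton { a }
     a = 1;         <-- base a, offset 0, size == DECL_SIZE (a)
     *p = 2;        <-- base MEM[(int *)p], offset 0, same size

   Both stores write all of a starting at its first byte and thus
   have the same address and size.  */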
2649
2650 /* If STMT kills the memory reference REF return true, otherwise
2651 return false. */
2652
2653 bool
2654 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2655 {
2656 if (!ao_ref_base (ref))
2657 return false;
2658
2659 if (gimple_has_lhs (stmt)
2660 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2661 /* The assignment is not necessarily carried out if it can throw
2662 and we can catch it in the current function, where we could inspect
2663 the previous value.
2664 ??? We only need to care about the RHS throwing. For aggregate
2665 assignments or similar calls and non-call exceptions the LHS
2666 might throw as well. */
2667 && !stmt_can_throw_internal (cfun, stmt))
2668 {
2669 tree lhs = gimple_get_lhs (stmt);
2670 /* If LHS is literally a base of the access, we are done. */
2671 if (ref->ref)
2672 {
2673 tree base = ref->ref;
2674 tree innermost_dropped_array_ref = NULL_TREE;
2675 if (handled_component_p (base))
2676 {
2677 tree saved_lhs0 = NULL_TREE;
2678 if (handled_component_p (lhs))
2679 {
2680 saved_lhs0 = TREE_OPERAND (lhs, 0);
2681 TREE_OPERAND (lhs, 0) = integer_zero_node;
2682 }
2683 do
2684 {
2685 /* Just compare the outermost handled component; if
2686 they are equal we have found a possible common
2687 base. */
2688 tree saved_base0 = TREE_OPERAND (base, 0);
2689 TREE_OPERAND (base, 0) = integer_zero_node;
2690 bool res = operand_equal_p (lhs, base, 0);
2691 TREE_OPERAND (base, 0) = saved_base0;
2692 if (res)
2693 break;
2694 /* Remember if we drop an array-ref, as we then need to
2695 double-check that it is not at struct end. */
2696 if (TREE_CODE (base) == ARRAY_REF
2697 || TREE_CODE (base) == ARRAY_RANGE_REF)
2698 innermost_dropped_array_ref = base;
2699 /* Otherwise drop handled components of the access. */
2700 base = saved_base0;
2701 }
2702 while (handled_component_p (base));
2703 if (saved_lhs0)
2704 TREE_OPERAND (lhs, 0) = saved_lhs0;
2705 }
2706 /* Finally check if the lhs has the same address and size as the
2707 base candidate of the access. Watch out if we have dropped
2708 an array-ref that was at struct end; this means ref->ref may
2709 be outside of the TYPE_SIZE of its base. */
2710 if ((! innermost_dropped_array_ref
2711 || ! array_at_struct_end_p (innermost_dropped_array_ref))
2712 && (lhs == base
2713 || (((TYPE_SIZE (TREE_TYPE (lhs))
2714 == TYPE_SIZE (TREE_TYPE (base)))
2715 || (TYPE_SIZE (TREE_TYPE (lhs))
2716 && TYPE_SIZE (TREE_TYPE (base))
2717 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2718 TYPE_SIZE (TREE_TYPE (base)),
2719 0)))
2720 && operand_equal_p (lhs, base,
2721 OEP_ADDRESS_OF
2722 | OEP_MATCH_SIDE_EFFECTS))))
2723 return true;
2724 }
2725
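      /* For example a store to a.b kills a read of a.b.c: stripping the
         innermost component .c from the reference yields a.b, which
         compares equal to the LHS and has its address and size, so all
         of the read memory is overwritten.  */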
2726 /* Now look for non-literal equal bases with the restriction of
2727 handling constant offset and size. */
2728 /* For a must-alias check we need to be able to constrain
2729 the access properly. */
2730 if (!ref->max_size_known_p ())
2731 return false;
2732 poly_int64 size, offset, max_size, ref_offset = ref->offset;
2733 bool reverse;
2734 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
2735 &reverse);
2736 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2737 so base == ref->base does not always hold. */
2738 if (base != ref->base)
2739 {
2740 /* Try using points-to info. */
2741 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
2742 ref->offset, ref->size, ref->max_size))
2743 return true;
2744
2745 /* If both base and ref->base are MEM_REFs, only compare the
2746 first operand, and if the second operand isn't an equal constant,
2747 try to add the offsets into offset and ref_offset. */
2748 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2749 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2750 {
2751 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2752 TREE_OPERAND (ref->base, 1)))
2753 {
2754 poly_offset_int off1 = mem_ref_offset (base);
2755 off1 <<= LOG2_BITS_PER_UNIT;
2756 off1 += offset;
2757 poly_offset_int off2 = mem_ref_offset (ref->base);
2758 off2 <<= LOG2_BITS_PER_UNIT;
2759 off2 += ref_offset;
2760 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
2761 size = -1;
2762 }
2763 }
2764 else
2765 size = -1;
2766 }
2767 /* For a must-alias check we need to be able to constrain
2768 the access properly. */
2769 if (known_eq (size, max_size)
2770 && known_subrange_p (ref_offset, ref->max_size, offset, size))
2771 return true;
2772 }
2773
2774 if (is_gimple_call (stmt))
2775 {
2776 tree callee = gimple_call_fndecl (stmt);
2777 if (callee != NULL_TREE
2778 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2779 switch (DECL_FUNCTION_CODE (callee))
2780 {
2781 case BUILT_IN_FREE:
2782 {
2783 tree ptr = gimple_call_arg (stmt, 0);
2784 tree base = ao_ref_base (ref);
2785 if (base && TREE_CODE (base) == MEM_REF
2786 && TREE_OPERAND (base, 0) == ptr)
2787 return true;
2788 break;
2789 }
2790
2791 case BUILT_IN_MEMCPY:
2792 case BUILT_IN_MEMPCPY:
2793 case BUILT_IN_MEMMOVE:
2794 case BUILT_IN_MEMSET:
2795 case BUILT_IN_MEMCPY_CHK:
2796 case BUILT_IN_MEMPCPY_CHK:
2797 case BUILT_IN_MEMMOVE_CHK:
2798 case BUILT_IN_MEMSET_CHK:
2799 case BUILT_IN_STRNCPY:
2800 case BUILT_IN_STPNCPY:
2801 {
2802 /* For a must-alias check we need to be able to constrain
2803 the access properly. */
2804 if (!ref->max_size_known_p ())
2805 return false;
2806 tree dest = gimple_call_arg (stmt, 0);
2807 tree len = gimple_call_arg (stmt, 2);
2808 if (!poly_int_tree_p (len))
2809 return false;
2810 tree rbase = ref->base;
2811 poly_offset_int roffset = ref->offset;
2812 ao_ref dref;
2813 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2814 tree base = ao_ref_base (&dref);
2815 poly_offset_int offset = dref.offset;
2816 if (!base || !known_size_p (dref.size))
2817 return false;
2818 if (TREE_CODE (base) == MEM_REF)
2819 {
2820 if (TREE_CODE (rbase) != MEM_REF)
2821 return false;
2822 /* Compare pointers. */
2823 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
2824 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
2825 base = TREE_OPERAND (base, 0);
2826 rbase = TREE_OPERAND (rbase, 0);
2827 }
2828 if (base == rbase
2829 && known_subrange_p (roffset, ref->max_size, offset,
2830 wi::to_poly_offset (len)
2831 << LOG2_BITS_PER_UNIT))
2832 return true;
2833 break;
2834 }
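      /* For example memset (p, 0, 16) kills MEM[p + 4B] of size 8:
         both accesses are based on the same pointer p and the byte
         range [4, 12) is a subrange of [0, 16).  */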
2835
2836 case BUILT_IN_VA_END:
2837 {
2838 tree ptr = gimple_call_arg (stmt, 0);
2839 if (TREE_CODE (ptr) == ADDR_EXPR)
2840 {
2841 tree base = ao_ref_base (ref);
2842 if (TREE_OPERAND (ptr, 0) == base)
2843 return true;
2844 }
2845 break;
2846 }
2847
2848 default:;
2849 }
2850 }
2851 return false;
2852 }
2853
2854 bool
2855 stmt_kills_ref_p (gimple *stmt, tree ref)
2856 {
2857 ao_ref r;
2858 ao_ref_init (&r, ref);
2859 return stmt_kills_ref_p (stmt, &r);
2860 }
2861
2862
2863 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2864 TARGET, or a statement clobbering the memory reference REF, in which
2865 case false is returned. The walk starts with VUSE, one argument of PHI. */
2866
2867 static bool
2868 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
2869 ao_ref *ref, tree vuse, unsigned int &limit, bitmap *visited,
2870 bool abort_on_visited,
2871 void *(*translate)(ao_ref *, tree, void *, bool *),
2872 void *data)
2873 {
2874 basic_block bb = gimple_bb (phi);
2875
2876 if (!*visited)
2877 *visited = BITMAP_ALLOC (NULL);
2878
2879 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2880
2881 /* Walk until we hit the target. */
2882 while (vuse != target)
2883 {
2884 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2885 /* If we are searching for the target VUSE by walking up to
2886 TARGET_BB dominating the original PHI, we are finished once
2887 we reach a default def or a definition in a block dominating
2888 that block. Update TARGET and return. */
2889 if (!target
2890 && (gimple_nop_p (def_stmt)
2891 || dominated_by_p (CDI_DOMINATORS,
2892 target_bb, gimple_bb (def_stmt))))
2893 {
2894 target = vuse;
2895 return true;
2896 }
2897
2898 /* Recurse for PHI nodes. */
2899 if (gimple_code (def_stmt) == GIMPLE_PHI)
2900 {
2901 /* An already visited PHI node ends the walk successfully. */
2902 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2903 return !abort_on_visited;
2904 vuse = get_continuation_for_phi (def_stmt, ref, limit,
2905 visited, abort_on_visited,
2906 translate, data);
2907 if (!vuse)
2908 return false;
2909 continue;
2910 }
2911 else if (gimple_nop_p (def_stmt))
2912 return false;
2913 else
2914 {
2915 /* A clobbering statement or the end of the IL ends the walk with failure. */
2916 if ((int)limit <= 0)
2917 return false;
2918 --limit;
2919 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2920 {
2921 bool disambiguate_only = true;
2922 if (translate
2923 && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2924 ;
2925 else
2926 return false;
2927 }
2928 }
2929 /* If we reach a new basic-block, see if we already skipped it
2930 in a previous walk that ended successfully. */
2931 if (gimple_bb (def_stmt) != bb)
2932 {
2933 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2934 return !abort_on_visited;
2935 bb = gimple_bb (def_stmt);
2936 }
2937 vuse = gimple_vuse (def_stmt);
2938 }
2939 return true;
2940 }
2941
2942
2943 /* Starting from a PHI node for the virtual operand of the memory reference
2944 REF, find a continuation virtual operand that allows us to continue
2945 walking statements dominating PHI, skipping only statements that cannot
2946 clobber REF. Decrements LIMIT for each alias disambiguation done
2947 and aborts the walk, returning NULL_TREE if it reaches zero.
2948 Returns NULL_TREE if no suitable virtual operand can be found. */
2949
2950 tree
2951 get_continuation_for_phi (gimple *phi, ao_ref *ref,
2952 unsigned int &limit, bitmap *visited,
2953 bool abort_on_visited,
2954 void *(*translate)(ao_ref *, tree, void *, bool *),
2955 void *data)
2956 {
2957 unsigned nargs = gimple_phi_num_args (phi);
2958
2959 /* We can simply look through a single-argument PHI. */
2960 if (nargs == 1)
2961 return PHI_ARG_DEF (phi, 0);
2962
2963 /* For two or more arguments try to pairwise skip non-aliasing code
2964 until we hit the phi argument definition that dominates the others. */
2965 basic_block phi_bb = gimple_bb (phi);
2966 tree arg0, arg1;
2967 unsigned i;
2968
2969 /* Find a candidate for the virtual operand whose definition
2970 dominates those of all others. */
2971 /* First look if any of the args themselves satisfy this. */
2972 for (i = 0; i < nargs; ++i)
2973 {
2974 arg0 = PHI_ARG_DEF (phi, i);
2975 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
2976 break;
2977 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
2978 if (def_bb != phi_bb
2979 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
2980 break;
2981 arg0 = NULL_TREE;
2982 }
2983 /* If not, see whether we can reach such a candidate by walking defs
2984 until we hit the immediate dominator. maybe_skip_until will
2985 do that for us. */
2986 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
2987
2988 /* Then check against the (to be) found candidate. */
2989 for (i = 0; i < nargs; ++i)
2990 {
2991 arg1 = PHI_ARG_DEF (phi, i);
2992 if (arg1 == arg0)
2993 ;
2994 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, limit, visited,
2995 abort_on_visited,
2996 /* Do not translate when walking over
2997 backedges. */
2998 dominated_by_p
2999 (CDI_DOMINATORS,
3000 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3001 phi_bb)
3002 ? NULL : translate, data))
3003 return NULL_TREE;
3004 }
3005
3006 return arg0;
3007 }
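/* As an illustration, for a simple diamond with

     # .MEM_1 = PHI <.MEM_2(then), .MEM_3(else)>

   where .MEM_2 and .MEM_3 are defined by stores in the two arms that
   do not clobber REF, the walk above skips both arms and returns the
   virtual operand that is live in the block dominating the
   condition.  */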
3008
3009 /* Based on the memory reference REF and its virtual use VUSE call
3010 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3011 itself. That is, for each virtual use whose defining statement
3012 does not clobber REF.
3013
3014 WALKER is called with REF, the current virtual use and DATA. If
3015 WALKER returns non-NULL the walk stops and its result is returned.
3016 At the end of a non-successful walk NULL is returned.
3017
3018 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3019 use whose definition is a statement that may clobber REF, and DATA.
3020 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3021 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3022 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3023 to adjust REF and *DATA to make that valid.
3024
3025 VALUEIZE if non-NULL is called with the next VUSE that is considered
3026 and its return value is substituted for that VUSE. This can be used to
3027 implement optimistic value-numbering for example. Note that the
3028 VUSE argument is assumed to be valueized already.
3029
3030 LIMIT specifies the number of alias queries we are allowed to do,
3031 the walk stops when it reaches zero and NULL is returned. LIMIT
3032 is decremented by the number of alias queries (plus adjustments
3033 done by the callbacks) upon return.
3034
3035 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3036
3037 void *
3038 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
3039 void *(*walker)(ao_ref *, tree, void *),
3040 void *(*translate)(ao_ref *, tree, void *, bool *),
3041 tree (*valueize)(tree),
3042 unsigned &limit, void *data)
3043 {
3044 bitmap visited = NULL;
3045 void *res;
3046 bool translated = false;
3047
3048 timevar_push (TV_ALIAS_STMT_WALK);
3049
3050 do
3051 {
3052 gimple *def_stmt;
3053
3054 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3055 res = (*walker) (ref, vuse, data);
3056 /* Abort walk. */
3057 if (res == (void *)-1)
3058 {
3059 res = NULL;
3060 break;
3061 }
3062 /* Lookup succeeded. */
3063 else if (res != NULL)
3064 break;
3065
3066 if (valueize)
3067 {
3068 vuse = valueize (vuse);
3069 if (!vuse)
3070 {
3071 res = NULL;
3072 break;
3073 }
3074 }
3075 def_stmt = SSA_NAME_DEF_STMT (vuse);
3076 if (gimple_nop_p (def_stmt))
3077 break;
3078 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3079 vuse = get_continuation_for_phi (def_stmt, ref, limit,
3080 &visited, translated, translate, data);
3081 else
3082 {
3083 if ((int)limit <= 0)
3084 {
3085 res = NULL;
3086 break;
3087 }
3088 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
3089 {
3090 if (!translate)
3091 break;
3092 bool disambiguate_only = false;
3093 res = (*translate) (ref, vuse, data, &disambiguate_only);
3094 /* Failed lookup and translation. */
3095 if (res == (void *)-1)
3096 {
3097 res = NULL;
3098 break;
3099 }
3100 /* Lookup succeeded. */
3101 else if (res != NULL)
3102 break;
3103 /* Translation succeeded, continue walking. */
3104 translated = translated || !disambiguate_only;
3105 }
3106 vuse = gimple_vuse (def_stmt);
3107 }
3108 }
3109 while (vuse);
3110
3111 if (visited)
3112 BITMAP_FREE (visited);
3113
3114 timevar_pop (TV_ALIAS_STMT_WALK);
3115
3116 return res;
3117 }
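/* A minimal usage sketch for walk_non_aliased_vuses; the walker below
   and the names r, stmt and wanted_vuse are hypothetical
   illustrations, not definitions from this file.  The walker returns
   non-NULL to stop the walk and NULL to continue it:

     static void *
     stop_at_wanted_vuse (ao_ref *ref, tree vuse, void *data)
     {
       return vuse == (tree) data ? data : NULL;
     }

     unsigned limit = 100;
     void *res = walk_non_aliased_vuses (&r, gimple_vuse (stmt),
                                         stop_at_wanted_vuse,
                                         NULL, NULL, limit,
                                         (void *) wanted_vuse);

   Passing NULL for TRANSLATE and VALUEIZE disables translation and
   valueization; res is non-NULL iff wanted_vuse is reached without
   first hitting a statement that clobbers r.  */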
3118
3119
3120 /* Based on the memory reference REF call WALKER for each vdef whose
3121 defining statement may clobber REF, starting with VDEF. If REF
3122 is NULL_TREE, each defining statement is visited.
3123
3124 WALKER is called with REF, the current vdef and DATA. If WALKER
3125 returns true the walk is stopped, otherwise it continues.
3126
3127 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3128 The pointer may be NULL, in which case we do not track this information.
3129
3130 At PHI nodes walk_aliased_vdefs forks into one walk for each
3131 PHI argument (but only one walk continues at merge points); the
3132 return value is true if any of the walks was successful.
3133
3134 The function returns the number of statements walked or -1 if
3135 LIMIT stmts were walked and the walk was aborted at this point.
3136 If LIMIT is zero the walk is not aborted. */
3137
3138 static int
3139 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3140 bool (*walker)(ao_ref *, tree, void *), void *data,
3141 bitmap *visited, unsigned int cnt,
3142 bool *function_entry_reached, unsigned limit)
3143 {
3144 do
3145 {
3146 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3147
3148 if (*visited
3149 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3150 return cnt;
3151
3152 if (gimple_nop_p (def_stmt))
3153 {
3154 if (function_entry_reached)
3155 *function_entry_reached = true;
3156 return cnt;
3157 }
3158 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3159 {
3160 unsigned i;
3161 if (!*visited)
3162 *visited = BITMAP_ALLOC (NULL);
3163 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3164 {
3165 int res = walk_aliased_vdefs_1 (ref,
3166 gimple_phi_arg_def (def_stmt, i),
3167 walker, data, visited, cnt,
3168 function_entry_reached, limit);
3169 if (res == -1)
3170 return -1;
3171 cnt = res;
3172 }
3173 return cnt;
3174 }
3175
3176 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3177 cnt++;
3178 if (cnt == limit)
3179 return -1;
3180 if ((!ref
3181 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3182 && (*walker) (ref, vdef, data))
3183 return cnt;
3184
3185 vdef = gimple_vuse (def_stmt);
3186 }
3187 while (1);
3188 }
3189
3190 int
3191 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3192 bool (*walker)(ao_ref *, tree, void *), void *data,
3193 bitmap *visited,
3194 bool *function_entry_reached, unsigned int limit)
3195 {
3196 bitmap local_visited = NULL;
3197 int ret;
3198
3199 timevar_push (TV_ALIAS_STMT_WALK);
3200
3201 if (function_entry_reached)
3202 *function_entry_reached = false;
3203
3204 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3205 visited ? visited : &local_visited, 0,
3206 function_entry_reached, limit);
3207 if (local_visited)
3208 BITMAP_FREE (local_visited);
3209
3210 timevar_pop (TV_ALIAS_STMT_WALK);
3211
3212 return ret;
3213 }
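/* A minimal usage sketch for walk_aliased_vdefs; the walker below and
   the names r and stmt are hypothetical illustrations.  The walker
   returns true to stop the walk:

     static bool
     record_clobber (ao_ref *ref, tree vdef, void *data)
     {
       bitmap_set_bit ((bitmap) data, SSA_NAME_VERSION (vdef));
       return false;
     }

     bitmap clobbers = BITMAP_ALLOC (NULL);
     int n = walk_aliased_vdefs (&r, gimple_vuse (stmt),
                                 record_clobber, clobbers,
                                 NULL, NULL, 0);

   With LIMIT zero the walk is never aborted, so n is the number of
   statements walked and clobbers collects the vdefs whose defining
   statements may clobber r.  */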
3214