/* Alias analysis for trees.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "ssa.h"
#include "alias.h"
#include "fold-const.h"
#include "tm_p.h"
#include "target.h"

#include "dominance.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "langhooks.h"
#include "flags.h"
#include "tree-pretty-print.h"
#include "dumpfile.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "params.h"
#include "alloc-pool.h"
#include "cgraph.h"
#include "ipa-reference.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
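
/* An illustrative usage sketch, not part of GCC itself: a pass that
   walks statements and needs to know whether STMT touches the memory
   named by a reference tree REF could combine the two main entry
   points like so:

     ao_ref r;
     ao_ref_init (&r, ref);
     if (ref_maybe_used_by_stmt_p (stmt, &r)
	 || stmt_may_clobber_ref_p (stmt, ref))
       ;  // STMT may read or write (parts of) REF; treat as a dependence

   Using the ao_ref variants where available avoids recomputing base
   and extent information across repeated queries.  */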


/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}


/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return base == decl;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two identical
     pointers.  In this case we still want to say both pointers alias, so
     shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
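
/* A minimal caller-side sketch (illustrative only, with hypothetical
   SSA names p_1 and q_2):

     if (!ptr_derefs_may_alias_p (p_1, q_2))
       ;  // *p_1 and *q_2 provably never overlap

   A negative answer requires points-to solutions on both names that
   do not intersect; as the comment above says, TBAA is deliberately
   not consulted here and is the caller's responsibility.  */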

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Return true if a memory reference with base BASE may refer to
   global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage, do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE
	  || !POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  && pt->vars_contains_escaped_heap)
	fprintf (file, " (nonlocal, escaped heap)");
      else if (pt->vars_contains_nonlocal
	       && pt->vars_contains_escaped)
	fprintf (file, " (nonlocal, escaped)");
      else if (pt->vars_contains_nonlocal)
	fprintf (file, " (nonlocal)");
      else if (pt->vars_contains_escaped_heap)
	fprintf (file, " (escaped heap)");
      else if (pt->vars_contains_escaped)
	fprintf (file, " (escaped)");
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}
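
/* Usage sketch (illustrative only): the fields of an ao_ref are
   computed lazily, so a caller typically does

     ao_ref r;
     ao_ref_init (&r, ref);
     tree base = ao_ref_base (&r);	      // also fills offset/size
     alias_set_type set = ao_ref_alias_set (&r);

   after which repeated disambiguations against R reuse the cached
   base, extent and alias-set values.  */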

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   at or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset = BITS_PER_UNIT
			 * int_cst_value (gimple_assign_rhs2 (stmt));
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && tree_fits_shwi_p (size)
      && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
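
/* Sketch of how the builtin-call handling further below uses this
   (cf. ref_maybe_used_by_call_p_1): for a memcpy-like call, the
   source pointer argument plus the size argument are turned into an
   ao_ref and tested against the reference in question:

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref,
				    gimple_call_arg (call, 1),
				    gimple_call_arg (call, 2));
     ... refs_may_alias_p_1 (&dref, ref, false) ...  */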

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison, bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
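
/* Caller-side sketch of the tri-state contract above (illustrative;
   the disambiguators below test against 0, 1 and -1 in exactly this
   way):

     int s = same_type_for_tbaa (type1, type2);
     if (s == 1)
       ;  // same type; offset-based disambiguation is valid
     else if (s == 0)
       ;  // provably distinct types for TBAA purposes
     else
       ;  // -1, undecidable; be conservative  */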

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers, try to find
     a common base and apply offset-based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}

/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */
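
/* For example (illustrative only), given

     struct S { struct { int i; } a; struct { int j; } b; } s;

   the references s.a.i and s.b.j pop COMPONENT_REFs of equal rank,
   reach the distinct fields a and b of the same RECORD_TYPE, and are
   therefore guaranteed disjoint.  */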

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	goto may_overlap;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	goto may_overlap;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* We must have the same base DECL.  */
  gcc_assert (ref1 == ref2);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      do
	{
	  if (component_refs1.is_empty ())
	    goto may_overlap;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    goto may_overlap;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	goto may_overlap;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	goto may_overlap;

      /* Different fields of the same record type cannot overlap.
	 ??? Bitfields can overlap at RTL level so punt on them.  */
      if (field1 != field2)
	{
	  component_refs1.release ();
	  component_refs2.release ();
	  return !(DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2));
	}
    }

may_overlap:
  component_refs1.release ();
  component_refs2.release ();
  return false;
}

/* qsort compare function to sort FIELD_DECLs after their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
  unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  if (!flag_strict_aliasing
      || !x || !y
      || TREE_CODE (x) != COMPONENT_REF
      || TREE_CODE (y) != COMPONENT_REF)
    return false;

  auto_vec<const_tree, 16> fieldsx;
  while (TREE_CODE (x) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (x, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsx.safe_push (field);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (TREE_CODE (y) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (y, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsy.safe_push (field);
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    return false;

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    return ((DECL_FIELD_CONTEXT (fieldsx[0])
	     == DECL_FIELD_CONTEXT (fieldsy[0]))
	    && fieldsx[0] != fieldsy[0]
	    && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	std::swap (fieldsx[0], fieldsx[1]);
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	std::swap (fieldsy[0], fieldsy[1]);
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = DECL_FIELD_CONTEXT (fieldx);
      tree typey = DECL_FIELD_CONTEXT (fieldy);
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap, unless they are both bitfields.  */
	  if (fieldx != fieldy)
	    return !(DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy));
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  return false;
}


/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
		       tree ref2, tree base2,
		       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (base1 != base2)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}

/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       HOST_WIDE_INT offset1,
			       HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  */
  offset_int moff = mem_ref_offset (base1);
  moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
  if (wi::neg_p (moff))
    offset2p += (-moff).to_short_addr ();
  else
    offset1p += moff.to_short_addr ();

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_alias_set (base2);

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      offset_int moff = mem_ref_offset (dbase2);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	doffset1 -= (-moff).to_short_addr ();
      else
	doffset2 -= moff.to_short_addr ();
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      offset_int moff;
      /* The offset embedded in MEM_REFs can be negative.  Bias them
	 so that the resulting offset adjustment is positive.  */
      moff = mem_ref_offset (base1);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	offset2 += (-moff).to_short_addr ();
      else
	offset1 += moff.to_short_addr ();
      moff = mem_ref_offset (base2);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	offset1 += (-moff).to_short_addr ();
      else
	offset2 += moff.to_short_addr ();
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_deref_alias_set (ptrtype2);
  if (base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1)
    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}

/* Return true if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref2->ref, base2, offset2, max_size2);

  /* Handle restrict based accesses.
     ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1->ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2->ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
      && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique... */
      && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      std::swap (offset1, offset2);
      std::swap (max_size1, max_size2);
      std::swap (base1, base2);
      std::swap (ref1, ref2);
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2), -1,
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1), -1,
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2), -1,
				      tbaa_p);

  /* We really do not want to end up here, but returning true is safe.  */
#ifdef ENABLE_CHECKING
  gcc_unreachable ();
#else
  return true;
#endif
}

static bool
refs_may_alias_p (tree ref1, ao_ref *ref2)
{
  ao_ref r1;
  ao_ref_init (&r1, ref1);
  return refs_may_alias_p_1 (&r1, ref2, true);
}

bool
refs_may_alias_p (tree ref1, tree ref2)
{
  ao_ref r1, r2;
  bool res;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  res = refs_may_alias_p_1 (&r1, &r2, true);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}

/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}
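
/* Illustrative usage sketch (hypothetical trees load_ref/store_ref):
   a transformation moving a store across an earlier load could ask

     if (!refs_anti_dependent_p (load_ref, store_ref))
       ;  // the store may be reordered above the load

   Note both predicates pass tbaa_p = false to refs_may_alias_p_1: a
   store may change the dynamic type of the accessed memory, so
   type-based rules are not used to break write-after-read or
   write-after-write ordering.  */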

/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions read memory pointed to by
	   their second argument.  strcat/strncat additionally
	   reads memory pointed to by the first argument.  */
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 3)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 4)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following functions read memory pointed to by their
	   first argument.  */
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	case BUILT_IN_REALLOC:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 2)
	      size = gimple_call_arg (call, 1);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_INDEX:
	case BUILT_IN_STRCHR:
	case BUILT_IN_STRRCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument with size
	   in the third argument.  */
	case BUILT_IN_MEMCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   gimple_call_arg (call, 2));
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first and second arguments.  */
	case BUILT_IN_STRSTR:
	case BUILT_IN_STRPBRK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following builtins do not read from memory.  */
	case BUILT_IN_FREE:
	case BUILT_IN_MALLOC:
	case BUILT_IN_POSIX_MEMALIGN:
	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	case BUILT_IN_ASSUME_ALIGNED:
	case BUILT_IN_VA_END:
	  return false;
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;

	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap not_read;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in the
	 IL and remove this check instead.  */
      if (node
	  && (not_read = ipa_reference_get_not_read_global (node))
	  && bitmap_bit_p (not_read, DECL_UID (base)))
	goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      if (flags & EAF_UNUSED)
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, true))
	    return true;
	}
    }

  return false;
}
1815
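/* Wrapper of ref_maybe_used_by_call_p_1 that records alias statistics.  */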
1816 static bool
1817 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
1818 {
1819 bool res;
1820 res = ref_maybe_used_by_call_p_1 (call, ref);
1821 if (res)
1822 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1823 else
1824 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1825 return res;
1826 }
1827
1828
1829 /* If the statement STMT may use the memory reference REF return
1830 true, otherwise return false. */
1831
1832 bool
1833 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref)
1834 {
1835 if (is_gimple_assign (stmt))
1836 {
1837 tree rhs;
1838
1839 /* Assignments that reference memory always have a single RHS. */
1840 if (!gimple_assign_single_p (stmt))
1841 return false;
1842
1843 rhs = gimple_assign_rhs1 (stmt);
1844 if (is_gimple_reg (rhs)
1845 || is_gimple_min_invariant (rhs)
1846 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1847 return false;
1848
1849 return refs_may_alias_p (rhs, ref);
1850 }
1851 else if (is_gimple_call (stmt))
1852 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
1853 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
1854 {
1855 tree retval = gimple_return_retval (return_stmt);
1856 if (retval
1857 && TREE_CODE (retval) != SSA_NAME
1858 && !is_gimple_min_invariant (retval)
1859 && refs_may_alias_p (retval, ref))
1860 return true;
1861 /* If ref escapes the function then the return acts as a use. */
1862 tree base = ao_ref_base (ref);
1863 if (!base)
1864 ;
1865 else if (DECL_P (base))
1866 return is_global_var (base);
1867 else if (TREE_CODE (base) == MEM_REF
1868 || TREE_CODE (base) == TARGET_MEM_REF)
1869 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1870 return false;
1871 }
1872
1873 return true;
1874 }
1875
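/* Overload of the above that builds an ao_ref from the reference tree REF.  */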
1876 bool
1877 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref)
1878 {
1879 ao_ref r;
1880 ao_ref_init (&r, ref);
1881 return ref_maybe_used_by_stmt_p (stmt, &r);
1882 }
1883
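/* A minimal usage sketch, illustrative only and not part of this file:
   ask the oracle whether statement STMT may read the memory designated
   by the reference tree REF_TREE.  The function name is hypothetical;
   everything it calls is defined above.  */

static bool
example_stmt_uses_ref_p (gimple *stmt, tree ref_tree)
{
  ao_ref r;
  ao_ref_init (&r, ref_tree);
  return ref_maybe_used_by_stmt_p (stmt, &r);
}
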
1884 /* If the call in statement CALL may clobber the memory reference REF
1885 return true, otherwise return false. */
1886
1887 bool
1888 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
1889 {
1890 tree base;
1891 tree callee;
1892
1893 /* If the call is pure or const it cannot clobber anything. */
1894 if (gimple_call_flags (call)
1895 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1896 return false;
1897 if (gimple_call_internal_p (call))
1898 switch (gimple_call_internal_fn (call))
1899 {
1900 /* Treat these internal calls like ECF_PURE for aliasing;
1901 they don't write to any memory the program should care about.
1902 They have other important side-effects, and they read memory,
1903 so they can't be ECF_NOVOPS. */
1904 case IFN_UBSAN_NULL:
1905 case IFN_UBSAN_BOUNDS:
1906 case IFN_UBSAN_VPTR:
1907 case IFN_UBSAN_OBJECT_SIZE:
1908 case IFN_ASAN_CHECK:
1909 return false;
1910 default:
1911 break;
1912 }
1913
1914 base = ao_ref_base (ref);
1915 if (!base)
1916 return true;
1917
1918 if (TREE_CODE (base) == SSA_NAME
1919 || CONSTANT_CLASS_P (base))
1920 return false;
1921
1922 /* A call that has side-effects might involve volatile
1923 accesses and thus conflicts with all other volatile accesses. */
1924 if (ref->volatile_p)
1925 return true;
1926
1927 /* If the reference is based on a decl that is not aliased the call
1928 cannot possibly clobber it. */
1929 if (DECL_P (base)
1930 && !may_be_aliased (base)
1931 /* But local non-readonly statics can be modified through recursion
1932 or the call may implement a threading barrier which we must
1933 treat as may-def. */
1934 && (TREE_READONLY (base)
1935 || !is_global_var (base)))
1936 return false;
1937
1938 callee = gimple_call_fndecl (call);
1939
1940 /* Handle those builtin functions explicitly that do not act as
1941 escape points. See tree-ssa-structalias.c:find_func_aliases
1942 for the list of builtins we might need to handle here. */
1943 if (callee != NULL_TREE
1944 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1945 switch (DECL_FUNCTION_CODE (callee))
1946 {
1947 /* All the following functions clobber memory pointed to by
1948 their first argument. */
1949 case BUILT_IN_STRCPY:
1950 case BUILT_IN_STRNCPY:
1951 case BUILT_IN_MEMCPY:
1952 case BUILT_IN_MEMMOVE:
1953 case BUILT_IN_MEMPCPY:
1954 case BUILT_IN_STPCPY:
1955 case BUILT_IN_STPNCPY:
1956 case BUILT_IN_STRCAT:
1957 case BUILT_IN_STRNCAT:
1958 case BUILT_IN_MEMSET:
1959 case BUILT_IN_TM_MEMSET:
1960 CASE_BUILT_IN_TM_STORE (1):
1961 CASE_BUILT_IN_TM_STORE (2):
1962 CASE_BUILT_IN_TM_STORE (4):
1963 CASE_BUILT_IN_TM_STORE (8):
1964 CASE_BUILT_IN_TM_STORE (FLOAT):
1965 CASE_BUILT_IN_TM_STORE (DOUBLE):
1966 CASE_BUILT_IN_TM_STORE (LDOUBLE):
1967 CASE_BUILT_IN_TM_STORE (M64):
1968 CASE_BUILT_IN_TM_STORE (M128):
1969 CASE_BUILT_IN_TM_STORE (M256):
1970 case BUILT_IN_TM_MEMCPY:
1971 case BUILT_IN_TM_MEMMOVE:
1972 {
1973 ao_ref dref;
1974 tree size = NULL_TREE;
1975 /* Don't pass in a size for strncat: the maximum size written
1976 is strlen (dest) + n + 1 rather than n, i.e. n + 1 bytes
1977 starting at dest + strlen (dest), and strlen (dest) isn't
1978 known at compile time. */
1979 if (gimple_call_num_args (call) == 3
1980 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
1981 size = gimple_call_arg (call, 2);
1982 ao_ref_init_from_ptr_and_size (&dref,
1983 gimple_call_arg (call, 0),
1984 size);
1985 return refs_may_alias_p_1 (&dref, ref, false);
1986 }
1987 case BUILT_IN_STRCPY_CHK:
1988 case BUILT_IN_STRNCPY_CHK:
1989 case BUILT_IN_MEMCPY_CHK:
1990 case BUILT_IN_MEMMOVE_CHK:
1991 case BUILT_IN_MEMPCPY_CHK:
1992 case BUILT_IN_STPCPY_CHK:
1993 case BUILT_IN_STPNCPY_CHK:
1994 case BUILT_IN_STRCAT_CHK:
1995 case BUILT_IN_STRNCAT_CHK:
1996 case BUILT_IN_MEMSET_CHK:
1997 {
1998 ao_ref dref;
1999 tree size = NULL_TREE;
2000 /* Don't pass in a size for __strncat_chk: the maximum size
2001 written is strlen (dest) + n + 1 rather than n, i.e. n + 1
2002 bytes starting at dest + strlen (dest), and strlen (dest)
2003 isn't known at compile time. */
2004 if (gimple_call_num_args (call) == 4
2005 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2006 size = gimple_call_arg (call, 2);
2007 ao_ref_init_from_ptr_and_size (&dref,
2008 gimple_call_arg (call, 0),
2009 size);
2010 return refs_may_alias_p_1 (&dref, ref, false);
2011 }
2012 case BUILT_IN_BCOPY:
2013 {
2014 ao_ref dref;
2015 tree size = gimple_call_arg (call, 2);
2016 ao_ref_init_from_ptr_and_size (&dref,
2017 gimple_call_arg (call, 1),
2018 size);
2019 return refs_may_alias_p_1 (&dref, ref, false);
2020 }
2021 /* Allocating memory does not have any side-effects apart from
2022 being the definition point for the pointer. */
2023 case BUILT_IN_MALLOC:
2024 case BUILT_IN_ALIGNED_ALLOC:
2025 case BUILT_IN_CALLOC:
2026 case BUILT_IN_STRDUP:
2027 case BUILT_IN_STRNDUP:
2028 /* Unix98 specifies that errno is set on allocation failure. */
2029 if (flag_errno_math
2030 && targetm.ref_may_alias_errno (ref))
2031 return true;
2032 return false;
2033 case BUILT_IN_STACK_SAVE:
2034 case BUILT_IN_ALLOCA:
2035 case BUILT_IN_ALLOCA_WITH_ALIGN:
2036 case BUILT_IN_ASSUME_ALIGNED:
2037 return false;
2038 /* But posix_memalign stores a pointer into the memory pointed to
2039 by its first argument. */
2040 case BUILT_IN_POSIX_MEMALIGN:
2041 {
2042 tree ptrptr = gimple_call_arg (call, 0);
2043 ao_ref dref;
2044 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2045 TYPE_SIZE_UNIT (ptr_type_node));
2046 return (refs_may_alias_p_1 (&dref, ref, false)
2047 || (flag_errno_math
2048 && targetm.ref_may_alias_errno (ref)));
2049 }
2050 /* Freeing memory kills the pointed-to memory. More importantly,
2051 the call has to serve as a barrier for moving loads and stores
2052 across it. */
2053 case BUILT_IN_FREE:
2054 case BUILT_IN_VA_END:
2055 {
2056 tree ptr = gimple_call_arg (call, 0);
2057 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2058 }
2059 /* Realloc serves both as allocation point and deallocation point. */
2060 case BUILT_IN_REALLOC:
2061 {
2062 tree ptr = gimple_call_arg (call, 0);
2063 /* Unix98 specifies that errno is set on allocation failure. */
2064 return ((flag_errno_math
2065 && targetm.ref_may_alias_errno (ref))
2066 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2067 }
2068 case BUILT_IN_GAMMA_R:
2069 case BUILT_IN_GAMMAF_R:
2070 case BUILT_IN_GAMMAL_R:
2071 case BUILT_IN_LGAMMA_R:
2072 case BUILT_IN_LGAMMAF_R:
2073 case BUILT_IN_LGAMMAL_R:
2074 {
2075 tree out = gimple_call_arg (call, 1);
2076 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2077 return true;
2078 if (flag_errno_math)
2079 break;
2080 return false;
2081 }
2082 case BUILT_IN_FREXP:
2083 case BUILT_IN_FREXPF:
2084 case BUILT_IN_FREXPL:
2085 case BUILT_IN_MODF:
2086 case BUILT_IN_MODFF:
2087 case BUILT_IN_MODFL:
2088 {
2089 tree out = gimple_call_arg (call, 1);
2090 return ptr_deref_may_alias_ref_p_1 (out, ref);
2091 }
2092 case BUILT_IN_REMQUO:
2093 case BUILT_IN_REMQUOF:
2094 case BUILT_IN_REMQUOL:
2095 {
2096 tree out = gimple_call_arg (call, 2);
2097 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2098 return true;
2099 if (flag_errno_math)
2100 break;
2101 return false;
2102 }
2103 case BUILT_IN_SINCOS:
2104 case BUILT_IN_SINCOSF:
2105 case BUILT_IN_SINCOSL:
2106 {
2107 tree sin = gimple_call_arg (call, 1);
2108 tree cos = gimple_call_arg (call, 2);
2109 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2110 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2111 }
2112 /* __sync_* builtins and some OpenMP builtins act as threading
2113 barriers. */
2114 #undef DEF_SYNC_BUILTIN
2115 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2116 #include "sync-builtins.def"
2117 #undef DEF_SYNC_BUILTIN
2118 case BUILT_IN_GOMP_ATOMIC_START:
2119 case BUILT_IN_GOMP_ATOMIC_END:
2120 case BUILT_IN_GOMP_BARRIER:
2121 case BUILT_IN_GOMP_BARRIER_CANCEL:
2122 case BUILT_IN_GOMP_TASKWAIT:
2123 case BUILT_IN_GOMP_TASKGROUP_END:
2124 case BUILT_IN_GOMP_CRITICAL_START:
2125 case BUILT_IN_GOMP_CRITICAL_END:
2126 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2127 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2128 case BUILT_IN_GOMP_LOOP_END:
2129 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2130 case BUILT_IN_GOMP_ORDERED_START:
2131 case BUILT_IN_GOMP_ORDERED_END:
2132 case BUILT_IN_GOMP_SECTIONS_END:
2133 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2134 case BUILT_IN_GOMP_SINGLE_COPY_START:
2135 case BUILT_IN_GOMP_SINGLE_COPY_END:
2136 return true;
2137 default:
2138 /* Fallthru to general call handling. */;
2139 }
2140
2141 /* Check if base is a global static variable that is not written
2142 by the function. */
2143 if (callee != NULL_TREE
2144 && TREE_CODE (base) == VAR_DECL
2145 && TREE_STATIC (base))
2146 {
2147 struct cgraph_node *node = cgraph_node::get (callee);
2148 bitmap not_written;
2149
2150 if (node
2151 && (not_written = ipa_reference_get_not_written_global (node))
2152 && bitmap_bit_p (not_written, DECL_UID (base)))
2153 return false;
2154 }
2155
2156 /* Check if the base variable is call-clobbered. */
2157 if (DECL_P (base))
2158 return pt_solution_includes (gimple_call_clobber_set (call), base);
2159 else if ((TREE_CODE (base) == MEM_REF
2160 || TREE_CODE (base) == TARGET_MEM_REF)
2161 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2162 {
2163 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2164 if (!pi)
2165 return true;
2166
2167 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2168 }
2169
2170 return true;
2171 }
2172
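/* Worked example, illustrative only (SRC, BUF and OTHER are
   hypothetical): for

     char buf[16], other[16];
     __builtin_memcpy (buf, src, 16);

   the BUILT_IN_MEMCPY case above builds an ao_ref covering 16 bytes at
   BUF and disambiguates it against the queried reference, so a
   reference based on OTHER is reported as not clobbered.  */
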
2173 /* If the call in statement CALL may clobber the memory reference REF
2174 return true, otherwise return false. */
2175
2176 bool
2177 call_may_clobber_ref_p (gcall *call, tree ref)
2178 {
2179 bool res;
2180 ao_ref r;
2181 ao_ref_init (&r, ref);
2182 res = call_may_clobber_ref_p_1 (call, &r);
2183 if (res)
2184 ++alias_stats.call_may_clobber_ref_p_may_alias;
2185 else
2186 ++alias_stats.call_may_clobber_ref_p_no_alias;
2187 return res;
2188 }
2189
2190
2191 /* If the statement STMT may clobber the memory reference REF return true,
2192 otherwise return false. */
2193
2194 bool
2195 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref)
2196 {
2197 if (is_gimple_call (stmt))
2198 {
2199 tree lhs = gimple_call_lhs (stmt);
2200 if (lhs
2201 && TREE_CODE (lhs) != SSA_NAME)
2202 {
2203 ao_ref r;
2204 ao_ref_init (&r, lhs);
2205 if (refs_may_alias_p_1 (ref, &r, true))
2206 return true;
2207 }
2208
2209 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2210 }
2211 else if (gimple_assign_single_p (stmt))
2212 {
2213 tree lhs = gimple_assign_lhs (stmt);
2214 if (TREE_CODE (lhs) != SSA_NAME)
2215 {
2216 ao_ref r;
2217 ao_ref_init (&r, lhs);
2218 return refs_may_alias_p_1 (ref, &r, true);
2219 }
2220 }
2221 else if (gimple_code (stmt) == GIMPLE_ASM)
2222 return true;
2223
2224 return false;
2225 }
2226
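/* Wrapper of the above that builds an ao_ref from the reference tree REF.  */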
2227 bool
2228 stmt_may_clobber_ref_p (gimple *stmt, tree ref)
2229 {
2230 ao_ref r;
2231 ao_ref_init (&r, ref);
2232 return stmt_may_clobber_ref_p_1 (stmt, &r);
2233 }
2234
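/* Hedged sketch, not part of this file: scan forward from iterator GSI
   and return the first statement that may clobber REF, or NULL if none
   is found.  The function name is hypothetical; the gsi_* routines are
   the iterator API from gimple-iterator.h.  */

static gimple *
example_find_first_clobber (gimple_stmt_iterator gsi, ao_ref *ref)
{
  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    if (stmt_may_clobber_ref_p_1 (gsi_stmt (gsi), ref))
      return gsi_stmt (gsi);
  return NULL;
}
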
2235 /* If STMT kills the memory reference REF return true, otherwise
2236 return false. */
2237
2238 bool
2239 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2240 {
2241 if (!ao_ref_base (ref))
2242 return false;
2243
2244 if (gimple_has_lhs (stmt)
2245 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2246 /* The assignment is not necessarily carried out if it can throw
2247 and we can catch it in the current function, where we could then
2248 inspect the previous value.
2249 ??? We only need to care about the RHS throwing. For aggregate
2250 assignments or similar calls and non-call exceptions the LHS
2251 might throw as well. */
2252 && !stmt_can_throw_internal (stmt))
2253 {
2254 tree lhs = gimple_get_lhs (stmt);
2255 /* If LHS is literally a base of the access we are done. */
2256 if (ref->ref)
2257 {
2258 tree base = ref->ref;
2259 if (handled_component_p (base))
2260 {
2261 tree saved_lhs0 = NULL_TREE;
2262 if (handled_component_p (lhs))
2263 {
2264 saved_lhs0 = TREE_OPERAND (lhs, 0);
2265 TREE_OPERAND (lhs, 0) = integer_zero_node;
2266 }
2267 do
2268 {
2269 /* Just compare the outermost handled component; if
2270 they are equal we have found a possible common
2271 base. */
2272 tree saved_base0 = TREE_OPERAND (base, 0);
2273 TREE_OPERAND (base, 0) = integer_zero_node;
2274 bool res = operand_equal_p (lhs, base, 0);
2275 TREE_OPERAND (base, 0) = saved_base0;
2276 if (res)
2277 break;
2278 /* Otherwise drop handled components of the access. */
2279 base = saved_base0;
2280 }
2281 while (handled_component_p (base));
2282 if (saved_lhs0)
2283 TREE_OPERAND (lhs, 0) = saved_lhs0;
2284 }
2285 /* Finally check if the lhs has the same address and size as the
2286 base candidate of the access. */
2287 if (lhs == base
2288 || (((TYPE_SIZE (TREE_TYPE (lhs))
2289 == TYPE_SIZE (TREE_TYPE (base)))
2290 || (TYPE_SIZE (TREE_TYPE (lhs))
2291 && TYPE_SIZE (TREE_TYPE (base))
2292 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2293 TYPE_SIZE (TREE_TYPE (base)), 0)))
2294 && operand_equal_p (lhs, base, OEP_ADDRESS_OF)))
2295 return true;
2296 }
2297
2298 /* Now look for non-literal equal bases, restricting ourselves
2299 to accesses with constant offset and size. */
2300 /* For a must-alias check we need to be able to constrain
2301 the access properly. */
2302 if (ref->max_size == -1)
2303 return false;
2304 HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
2305 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
2306 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2307 so base == ref->base does not always hold. */
2308 if (base != ref->base)
2309 {
2310 /* If both base and ref->base are MEM_REFs, only compare the
2311 first operand, and if the second operand isn't equal constant,
2312 try to add the offsets into offset and ref_offset. */
2313 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2314 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2315 {
2316 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2317 TREE_OPERAND (ref->base, 1)))
2318 {
2319 offset_int off1 = mem_ref_offset (base);
2320 off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT);
2321 off1 += offset;
2322 offset_int off2 = mem_ref_offset (ref->base);
2323 off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT);
2324 off2 += ref_offset;
2325 if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
2326 {
2327 offset = off1.to_shwi ();
2328 ref_offset = off2.to_shwi ();
2329 }
2330 else
2331 size = -1;
2332 }
2333 }
2334 else
2335 size = -1;
2336 }
2337 /* For a must-alias check we need to be able to constrain
2338 the access properly. */
2339 if (size != -1 && size == max_size)
2340 {
2341 if (offset <= ref_offset
2342 && offset + size >= ref_offset + ref->max_size)
2343 return true;
2344 }
2345 }
2346
2347 if (is_gimple_call (stmt))
2348 {
2349 tree callee = gimple_call_fndecl (stmt);
2350 if (callee != NULL_TREE
2351 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2352 switch (DECL_FUNCTION_CODE (callee))
2353 {
2354 case BUILT_IN_FREE:
2355 {
2356 tree ptr = gimple_call_arg (stmt, 0);
2357 tree base = ao_ref_base (ref);
2358 if (base && TREE_CODE (base) == MEM_REF
2359 && TREE_OPERAND (base, 0) == ptr)
2360 return true;
2361 break;
2362 }
2363
2364 case BUILT_IN_MEMCPY:
2365 case BUILT_IN_MEMPCPY:
2366 case BUILT_IN_MEMMOVE:
2367 case BUILT_IN_MEMSET:
2368 case BUILT_IN_MEMCPY_CHK:
2369 case BUILT_IN_MEMPCPY_CHK:
2370 case BUILT_IN_MEMMOVE_CHK:
2371 case BUILT_IN_MEMSET_CHK:
2372 {
2373 /* For a must-alias check we need to be able to constrain
2374 the access properly. */
2375 if (ref->max_size == -1)
2376 return false;
2377 tree dest = gimple_call_arg (stmt, 0);
2378 tree len = gimple_call_arg (stmt, 2);
2379 if (!tree_fits_shwi_p (len))
2380 return false;
2381 tree rbase = ref->base;
2382 offset_int roffset = ref->offset;
2383 ao_ref dref;
2384 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2385 tree base = ao_ref_base (&dref);
2386 offset_int offset = dref.offset;
2387 if (!base || dref.size == -1)
2388 return false;
2389 if (TREE_CODE (base) == MEM_REF)
2390 {
2391 if (TREE_CODE (rbase) != MEM_REF)
2392 return false;
2393 /* Compare pointers. */
2394 offset += wi::lshift (mem_ref_offset (base),
2395 LOG2_BITS_PER_UNIT);
2396 roffset += wi::lshift (mem_ref_offset (rbase),
2397 LOG2_BITS_PER_UNIT);
2398 base = TREE_OPERAND (base, 0);
2399 rbase = TREE_OPERAND (rbase, 0);
2400 }
2401 if (base == rbase
2402 && wi::les_p (offset, roffset)
2403 && wi::les_p (roffset + ref->max_size,
2404 offset + wi::lshift (wi::to_offset (len),
2405 LOG2_BITS_PER_UNIT)))
2406 return true;
2407 break;
2408 }
2409
2410 case BUILT_IN_VA_END:
2411 {
2412 tree ptr = gimple_call_arg (stmt, 0);
2413 if (TREE_CODE (ptr) == ADDR_EXPR)
2414 {
2415 tree base = ao_ref_base (ref);
2416 if (TREE_OPERAND (ptr, 0) == base)
2417 return true;
2418 }
2419 break;
2420 }
2421
2422 default:;
2423 }
2424 }
2425 return false;
2426 }
2427
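/* Overload of the above that builds an ao_ref from the reference tree REF.  */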
2428 bool
2429 stmt_kills_ref_p (gimple *stmt, tree ref)
2430 {
2431 ao_ref r;
2432 ao_ref_init (&r, ref);
2433 return stmt_kills_ref_p (stmt, &r);
2434 }
2435
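/* Worked example, illustrative only: given

     struct S { int i; int j; } s;
     __builtin_memset (&s, 0, sizeof (s));

   the BUILT_IN_MEMSET case above proves that the call overwrites
   [&s, &s + sizeof (s)), so stmt_kills_ref_p returns true for a
   reference to s.j: every byte the reference could touch is
   rewritten.  */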
2436
2437 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2438 TARGET, or a statement clobbering the memory reference REF, in which
2439 case false is returned. The walk starts with VUSE, one argument of PHI. */
2440
2441 static bool
2442 maybe_skip_until (gimple *phi, tree target, ao_ref *ref,
2443 tree vuse, unsigned int *cnt, bitmap *visited,
2444 bool abort_on_visited,
2445 void *(*translate)(ao_ref *, tree, void *, bool *),
2446 void *data)
2447 {
2448 basic_block bb = gimple_bb (phi);
2449
2450 if (!*visited)
2451 *visited = BITMAP_ALLOC (NULL);
2452
2453 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2454
2455 /* Walk until we hit the target. */
2456 while (vuse != target)
2457 {
2458 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2459 /* Recurse for PHI nodes. */
2460 if (gimple_code (def_stmt) == GIMPLE_PHI)
2461 {
2462 /* An already visited PHI node ends the walk successfully. */
2463 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2464 return !abort_on_visited;
2465 vuse = get_continuation_for_phi (def_stmt, ref, cnt,
2466 visited, abort_on_visited,
2467 translate, data);
2468 if (!vuse)
2469 return false;
2470 continue;
2471 }
2472 else if (gimple_nop_p (def_stmt))
2473 return false;
2474 else
2475 {
2476 /* A clobbering statement or the end of the IL ends the walk with failure. */
2477 ++*cnt;
2478 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2479 {
2480 bool disambiguate_only = true;
2481 if (translate
2482 && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2483 ;
2484 else
2485 return false;
2486 }
2487 }
2488 /* If we reach a new basic-block, see if we already skipped it
2489 in a previous walk that ended successfully. */
2490 if (gimple_bb (def_stmt) != bb)
2491 {
2492 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2493 return !abort_on_visited;
2494 bb = gimple_bb (def_stmt);
2495 }
2496 vuse = gimple_vuse (def_stmt);
2497 }
2498 return true;
2499 }
2500
2501 /* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
2502 until we hit the phi argument definition that dominates the other one.
2503 Return that, or NULL_TREE if there is no such definition. */
2504
2505 static tree
2506 get_continuation_for_phi_1 (gimple *phi, tree arg0, tree arg1,
2507 ao_ref *ref, unsigned int *cnt,
2508 bitmap *visited, bool abort_on_visited,
2509 void *(*translate)(ao_ref *, tree, void *, bool *),
2510 void *data)
2511 {
2512 gimple *def0 = SSA_NAME_DEF_STMT (arg0);
2513 gimple *def1 = SSA_NAME_DEF_STMT (arg1);
2514 tree common_vuse;
2515
2516 if (arg0 == arg1)
2517 return arg0;
2518 else if (gimple_nop_p (def0)
2519 || (!gimple_nop_p (def1)
2520 && dominated_by_p (CDI_DOMINATORS,
2521 gimple_bb (def1), gimple_bb (def0))))
2522 {
2523 if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
2524 visited, abort_on_visited, translate, data))
2525 return arg0;
2526 }
2527 else if (gimple_nop_p (def1)
2528 || dominated_by_p (CDI_DOMINATORS,
2529 gimple_bb (def0), gimple_bb (def1)))
2530 {
2531 if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
2532 visited, abort_on_visited, translate, data))
2533 return arg1;
2534 }
2535 /* Special case of a diamond:
2536 MEM_1 = ...
2537 goto (cond) ? L1 : L2
2538 L1: store1 = ... #MEM_2 = vuse(MEM_1)
2539 goto L3
2540 L2: store2 = ... #MEM_3 = vuse(MEM_1)
2541 L3: MEM_4 = PHI<MEM_2, MEM_3>
2542 We were called with the PHI at L3, MEM_2 and MEM_3 don't
2543 dominate each other, but still we can easily skip this PHI node
2544 if we recognize that the vuse MEM operand is the same for both,
2545 and that we can skip both statements (they don't clobber us).
2546 This is still linear. Don't use maybe_skip_until, as that
2547 might be slow. */
2548 else if ((common_vuse = gimple_vuse (def0))
2549 && common_vuse == gimple_vuse (def1))
2550 {
2551 bool disambiguate_only = true;
2552 *cnt += 2;
2553 if ((!stmt_may_clobber_ref_p_1 (def0, ref)
2554 || (translate
2555 && (*translate) (ref, arg0, data, &disambiguate_only) == NULL))
2556 && (!stmt_may_clobber_ref_p_1 (def1, ref)
2557 || (translate
2558 && (*translate) (ref, arg1, data, &disambiguate_only) == NULL)))
2559 return common_vuse;
2560 }
2561
2562 return NULL_TREE;
2563 }
2564
2565
2566 /* Starting from a PHI node for the virtual operand of the memory reference
2567 REF, find a continuation virtual operand that lets us continue walking
2568 statements dominating PHI, skipping only statements that cannot possibly
2569 clobber REF. Increments *CNT for each alias disambiguation done.
2570 Returns NULL_TREE if no suitable virtual operand can be found. */
2571
2572 tree
2573 get_continuation_for_phi (gimple *phi, ao_ref *ref,
2574 unsigned int *cnt, bitmap *visited,
2575 bool abort_on_visited,
2576 void *(*translate)(ao_ref *, tree, void *, bool *),
2577 void *data)
2578 {
2579 unsigned nargs = gimple_phi_num_args (phi);
2580
2581 /* We can simply look through a single-argument PHI. */
2582 if (nargs == 1)
2583 return PHI_ARG_DEF (phi, 0);
2584
2585 /* For two or more arguments try to pairwise skip non-aliasing code
2586 until we hit the phi argument definition that dominates the other one. */
2587 else if (nargs >= 2)
2588 {
2589 tree arg0, arg1;
2590 unsigned i;
2591
2592 /* Find a candidate for the virtual operand whose definition
2593 dominates those of all the others. */
2594 arg0 = PHI_ARG_DEF (phi, 0);
2595 if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
2596 for (i = 1; i < nargs; ++i)
2597 {
2598 arg1 = PHI_ARG_DEF (phi, i);
2599 if (SSA_NAME_IS_DEFAULT_DEF (arg1))
2600 {
2601 arg0 = arg1;
2602 break;
2603 }
2604 if (dominated_by_p (CDI_DOMINATORS,
2605 gimple_bb (SSA_NAME_DEF_STMT (arg0)),
2606 gimple_bb (SSA_NAME_DEF_STMT (arg1))))
2607 arg0 = arg1;
2608 }
2609
2610 /* Then pairwise reduce against the found candidate. */
2611 for (i = 0; i < nargs; ++i)
2612 {
2613 arg1 = PHI_ARG_DEF (phi, i);
2614 arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
2615 cnt, visited, abort_on_visited,
2616 translate, data);
2617 if (!arg0)
2618 return NULL_TREE;
2619 }
2620
2621 return arg0;
2622 }
2623
2624 return NULL_TREE;
2625 }
2626
2627 /* Based on the memory reference REF and its virtual use VUSE call
2628 WALKER for each virtual use that is equivalent to VUSE, including VUSE
2629 itself. That is, for each virtual use whose defining statement
2630 does not clobber REF.
2631
2632 WALKER is called with REF, the current virtual use and DATA. If
2633 WALKER returns non-NULL the walk stops and its result is returned.
2634 At the end of a non-successful walk NULL is returned.
2635
2636 TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
2637 use whose defining statement may clobber REF, and DATA.
2638 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2639 If TRANSLATE returns non-NULL the walk stops and its result is returned.
2640 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2641 to adjust REF and *DATA to make that valid.
2642
2643 VALUEIZE, if non-NULL, is called with the next VUSE that is considered,
2644 and its return value is substituted for it. This can be used to
2645 implement optimistic value-numbering for example. Note that the
2646 VUSE argument is assumed to be valueized already.
2647
2648 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
2649
2650 void *
2651 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
2652 void *(*walker)(ao_ref *, tree, unsigned int, void *),
2653 void *(*translate)(ao_ref *, tree, void *, bool *),
2654 tree (*valueize)(tree),
2655 void *data)
2656 {
2657 bitmap visited = NULL;
2658 void *res;
2659 unsigned int cnt = 0;
2660 bool translated = false;
2661
2662 timevar_push (TV_ALIAS_STMT_WALK);
2663
2664 do
2665 {
2666 gimple *def_stmt;
2667
2668 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2669 res = (*walker) (ref, vuse, cnt, data);
2670 /* Abort walk. */
2671 if (res == (void *)-1)
2672 {
2673 res = NULL;
2674 break;
2675 }
2676 /* Lookup succeeded. */
2677 else if (res != NULL)
2678 break;
2679
2680 if (valueize)
2681 vuse = valueize (vuse);
2682 def_stmt = SSA_NAME_DEF_STMT (vuse);
2683 if (gimple_nop_p (def_stmt))
2684 break;
2685 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2686 vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
2687 &visited, translated, translate, data);
2688 else
2689 {
2690 cnt++;
2691 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2692 {
2693 if (!translate)
2694 break;
2695 bool disambiguate_only = false;
2696 res = (*translate) (ref, vuse, data, &disambiguate_only);
2697 /* Failed lookup and translation. */
2698 if (res == (void *)-1)
2699 {
2700 res = NULL;
2701 break;
2702 }
2703 /* Lookup succeeded. */
2704 else if (res != NULL)
2705 break;
2706 /* Translation succeeded, continue walking. */
2707 translated = translated || !disambiguate_only;
2708 }
2709 vuse = gimple_vuse (def_stmt);
2710 }
2711 }
2712 while (vuse);
2713
2714 if (visited)
2715 BITMAP_FREE (visited);
2716
2717 timevar_pop (TV_ALIAS_STMT_WALK);
2718
2719 return res;
2720 }
2721
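/* Hedged sketch of the WALKER contract, not part of this file: a
   callback that aborts after 100 disambiguations and otherwise keeps
   walking.  Per the protocol above, (void *)-1 aborts the walk, any
   other non-NULL result stops it successfully, and NULL continues.
   The function name is hypothetical.  */

static void *
example_vuse_walker (ao_ref *ref ATTRIBUTE_UNUSED, tree vuse ATTRIBUTE_UNUSED,
                     unsigned int cnt, void *data ATTRIBUTE_UNUSED)
{
  if (cnt > 100)
    return (void *)-1;
  /* Inspect the definition of VUSE here; return non-NULL to stop.  */
  return NULL;
}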
2722
2723 /* Based on the memory reference REF, call WALKER for each vdef whose
2724 defining statement may clobber REF, starting with VDEF. If REF
2725 is NULL_TREE, each defining statement is visited.
2726
2727 WALKER is called with REF, the current vdef and DATA. If WALKER
2728 returns true the walk is stopped, otherwise it continues.
2729
2730 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
2731 The pointer may be NULL, in which case this information is not tracked.
2732
2733 At PHI nodes walk_aliased_vdefs forks into one walk for each
2734 PHI argument (but only one walk continues on merge points); the
2735 walk is successful if any of the forked walks was successful.
2736
2737 The function returns the number of statements walked. */
2738
2739 static unsigned int
2740 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2741 bool (*walker)(ao_ref *, tree, void *), void *data,
2742 bitmap *visited, unsigned int cnt,
2743 bool *function_entry_reached)
2744 {
2745 do
2746 {
2747 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
2748
2749 if (*visited
2750 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2751 return cnt;
2752
2753 if (gimple_nop_p (def_stmt))
2754 {
2755 if (function_entry_reached)
2756 *function_entry_reached = true;
2757 return cnt;
2758 }
2759 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2760 {
2761 unsigned i;
2762 if (!*visited)
2763 *visited = BITMAP_ALLOC (NULL);
2764 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2765 cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
2766 walker, data, visited, 0,
2767 function_entry_reached);
2768 return cnt;
2769 }
2770
2771 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2772 cnt++;
2773 if ((!ref
2774 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
2775 && (*walker) (ref, vdef, data))
2776 return cnt;
2777
2778 vdef = gimple_vuse (def_stmt);
2779 }
2780 while (1);
2781 }
2782
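/* Wrapper of walk_aliased_vdefs_1 that handles the VISITED bitmap and
   the TV_ALIAS_STMT_WALK timevar.  */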
2783 unsigned int
2784 walk_aliased_vdefs (ao_ref *ref, tree vdef,
2785 bool (*walker)(ao_ref *, tree, void *), void *data,
2786 bitmap *visited,
2787 bool *function_entry_reached)
2788 {
2789 bitmap local_visited = NULL;
2790 unsigned int ret;
2791
2792 timevar_push (TV_ALIAS_STMT_WALK);
2793
2794 if (function_entry_reached)
2795 *function_entry_reached = false;
2796
2797 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2798 visited ? visited : &local_visited, 0,
2799 function_entry_reached);
2800 if (local_visited)
2801 BITMAP_FREE (local_visited);
2802
2803 timevar_pop (TV_ALIAS_STMT_WALK);
2804
2805 return ret;
2806 }
2807
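/* Hedged sketch, not part of this file: a WALKER callback for
   walk_aliased_vdefs that counts the may-clobbering definitions of REF
   and never stops (returning false continues the walk, as documented
   above).  The function name is hypothetical; DATA is assumed to point
   to an unsigned int counter supplied by the caller.  */

static bool
example_count_clobbers (ao_ref *ref ATTRIBUTE_UNUSED,
                        tree vdef ATTRIBUTE_UNUSED, void *data)
{
  ++*(unsigned int *) data;
  return false;
}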