/* Alias analysis for trees.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "vec.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "tm_p.h"
#include "target.h"
#include "predict.h"

#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "basic-block.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "langhooks.h"
#include "flags.h"
#include "tree-pretty-print.h"
#include "dumpfile.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "hashtab.h"
#include "rtl.h"
#include "statistics.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "params.h"
#include "alloc-pool.h"
#include "bitmap.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "ipa-reference.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.
   This file contains functions for disambiguating memory references,
   the so-called alias-oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
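
/* A minimal usage sketch (illustrative only; STMT and LOAD_EXPR are
   hypothetical names, not part of this file):

     ao_ref r;
     ao_ref_init (&r, load_expr);
     if (!stmt_may_clobber_ref_p_1 (stmt, &r))
       ... STMT provably does not invalidate the memory read by
	   LOAD_EXPR, so e.g. the load may be CSEd across STMT ...

   The tree-only variant stmt_may_clobber_ref_p (stmt, load_expr)
   builds the ao_ref internally.  */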


/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}


/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
          && TREE_CODE (decl) != PARM_DECL
          && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
        {
          ptr = TREE_OPERAND (ptr, 0);
        }
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
          && (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF))
        ptr = TREE_OPERAND (base, 0);
      else if (base
               && DECL_P (base))
        return base == decl;
      else if (base
               && CONSTANT_CLASS_P (base))
        return false;
      else
        return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
        {
          ptr1 = TREE_OPERAND (ptr1, 0);
        }
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
        {
          ptr2 = TREE_OPERAND (ptr2, 0);
        }
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
          && (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF))
        return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
               && DECL_P (base))
        return ptr_deref_may_alias_decl_p (ptr2, base);
      else
        return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
          && (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF))
        return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
               && DECL_P (base))
        return ptr_deref_may_alias_decl_p (ptr1, base);
      else
        return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two same pointers.
     In this case we still want to say both pointers alias, so shortcut
     that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Return true if a memory reference with base BASE may refer to
   global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
           || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address-taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage, do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
              && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
        dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE
          || !POINTER_TYPE_P (TREE_TYPE (ptr))
          || SSA_NAME_IN_FREE_LIST (ptr))
        continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
        dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
          && pt->vars_contains_escaped_heap)
        fprintf (file, " (nonlocal, escaped heap)");
      else if (pt->vars_contains_nonlocal
               && pt->vars_contains_escaped)
        fprintf (file, " (nonlocal, escaped)");
      else if (pt->vars_contains_nonlocal)
        fprintf (file, " (nonlocal)");
      else if (pt->vars_contains_escaped_heap)
        fprintf (file, " (escaped heap)");
      else if (pt->vars_contains_escaped)
        fprintf (file, " (escaped)");
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
                                       &ref->max_size);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   at or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
          && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
        ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
               && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
               && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
        {
          ptr = gimple_assign_rhs1 (stmt);
          extra_offset = BITS_PER_UNIT
                         * int_cst_value (gimple_assign_rhs2 (stmt));
        }
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
        ref->offset = BITS_PER_UNIT * t;
      else
        {
          size = NULL_TREE;
          ref->offset = 0;
          ref->base = get_base_address (TREE_OPERAND (ptr, 0));
        }
    }
  else
    {
      ref->base = build2 (MEM_REF, char_type_node,
                          ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && tree_fits_shwi_p (size)
      && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
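
/* For example (a sketch of existing usage, see ref_maybe_used_by_call_p_1
   below): for a gcall CALL to memcpy (dst, src, n), the memory read
   through the second argument can be modelled as

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref,
                                    gimple_call_arg (call, 1),
                                    gimple_call_arg (call, 2));

   and then disambiguated against another reference REF with
   refs_may_alias_p_1 (&dref, ref, false).  */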

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison, bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
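
/* For example, "int" and "const int" share a TYPE_MAIN_VARIANT and so
   compare equal here, while types that require structural comparison
   (TYPE_STRUCTURAL_EQUALITY_P) make the answer -1, forcing callers to
   stay conservative.  */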

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
                           alias_set_type ref1_alias_set,
                           alias_set_type base1_alias_set,
                           HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
                           tree ref2,
                           alias_set_type ref2_alias_set,
                           alias_set_type base2_alias_set,
                           HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
                           bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers, try to find
     a common base and apply offset-based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for type1 in the access path of ref2.  This
     would be a common base for doing offset-based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
         && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
         && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
            || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}

/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */
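
/* An illustrative example: given

     struct S { struct T { int a; int b; } t; int c; } s;

   comparing s.t.b and s.c pops one COMPONENT_REF of equal rank from
   each stack; both select fields (t resp. c) of the same RECORD_TYPE S,
   the FIELD_DECLs differ and are not bit-fields, so the accesses
   cannot overlap.  */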

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
        goto may_overlap;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
        goto may_overlap;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* We must have the same base DECL.  */
  gcc_assert (ref1 == ref2);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      do
        {
          if (component_refs1.is_empty ())
            goto may_overlap;
          ref1 = component_refs1.pop ();
        }
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
        {
          if (component_refs2.is_empty ())
            goto may_overlap;
          ref2 = component_refs2.pop ();
        }
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
          || TREE_CODE (ref2) != COMPONENT_REF)
        goto may_overlap;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
         as the Fortran compiler smuggles type punning into COMPONENT_REFs
         for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
        goto may_overlap;

      /* Different fields of the same record type cannot overlap.
         ??? Bitfields can overlap at RTL level so punt on them.  */
      if (field1 != field2)
        {
          component_refs1.release ();
          component_refs2.release ();
          return !(DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2));
        }
    }

may_overlap:
  component_refs1.release ();
  component_refs2.release ();
  return false;
}

/* qsort compare function to sort FIELD_DECLs by the TYPE_UID of their
   DECL_FIELD_CONTEXT.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
  unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  if (!flag_strict_aliasing
      || !x || !y
      || TREE_CODE (x) != COMPONENT_REF
      || TREE_CODE (y) != COMPONENT_REF)
    return false;

  auto_vec<const_tree, 16> fieldsx;
  while (TREE_CODE (x) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (x, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
        fieldsx.safe_push (field);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (TREE_CODE (y) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (y, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
        fieldsy.safe_push (field);
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    return false;

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    return ((DECL_FIELD_CONTEXT (fieldsx[0])
             == DECL_FIELD_CONTEXT (fieldsy[0]))
            && fieldsx[0] != fieldsy[0]
            && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
        {
          const_tree tem = fieldsx[0];
          fieldsx[0] = fieldsx[1];
          fieldsx[1] = tem;
        }
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
        {
          const_tree tem = fieldsy[0];
          fieldsy[0] = fieldsy[1];
          fieldsy[1] = tem;
        }
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = DECL_FIELD_CONTEXT (fieldx);
      tree typey = DECL_FIELD_CONTEXT (fieldy);
      if (typex == typey)
        {
          /* We're left with accessing different fields of a structure,
             no possible overlap, unless they are both bitfields.  */
          if (fieldx != fieldy)
            return !(DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy));
        }
      if (TYPE_UID (typex) < TYPE_UID (typey))
        {
          i++;
          if (i == fieldsx.length ())
            break;
        }
      else
        {
          j++;
          if (j == fieldsy.length ())
            break;
        }
    }
  while (1);

  return false;
}


/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
                       HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
                       tree ref2, tree base2,
                       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (base1 != base2)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}

/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
                               HOST_WIDE_INT offset1,
                               HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
                               alias_set_type ref1_alias_set,
                               alias_set_type base1_alias_set,
                               tree ref2 ATTRIBUTE_UNUSED, tree base2,
                               HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
                               alias_set_type ref2_alias_set,
                               alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
                        || TREE_CODE (base1) == TARGET_MEM_REF)
                       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  */
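  /* (For example, with BITS_PER_UNIT == 8 a MEM_REF offset of -4 bytes
     yields moff == -32; we then add 32 to OFFSET2P instead of
     subtracting 32 from OFFSET1P.  Shifting both ranges by the same
     amount preserves their overlap while keeping the offsets
     non-negative.)  */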
  offset_int moff = mem_ref_offset (base1);
  moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
  if (wi::neg_p (moff))
    offset2p += (-moff).to_short_addr ();
  else
    offset1p += moff.to_short_addr ();

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_alias_set (base2);

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ??? This in turn may run afoul when a decl of type T which is
         a member of union type U is accessed through a pointer to
         type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      offset_int moff = mem_ref_offset (dbase2);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
        doffset1 -= (-moff).to_short_addr ();
      else
        doffset2 -= moff.to_short_addr ();
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
                                      ref1_alias_set, base1_alias_set,
                                      offset1, max_size1,
                                      ref2,
                                      ref2_alias_set, base2_alias_set,
                                      offset2, max_size2, true);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
                           HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
                           alias_set_type ref1_alias_set,
                           alias_set_type base1_alias_set,
                           tree ref2 ATTRIBUTE_UNUSED, tree base2,
                           HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
                           alias_set_type ref2_alias_set,
                           alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
                        || TREE_CODE (base1) == TARGET_MEM_REF)
                       && (TREE_CODE (base2) == MEM_REF
                           || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
            || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
           && (TREE_CODE (base2) != TARGET_MEM_REF
               || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
          || (TREE_CODE (base1) == TARGET_MEM_REF
              && TREE_CODE (base2) == TARGET_MEM_REF
              && (TMR_STEP (base1) == TMR_STEP (base2)
                  || (TMR_STEP (base1) && TMR_STEP (base2)
                      && operand_equal_p (TMR_STEP (base1),
                                          TMR_STEP (base2), 0)))
              && (TMR_INDEX (base1) == TMR_INDEX (base2)
                  || (TMR_INDEX (base1) && TMR_INDEX (base2)
                      && operand_equal_p (TMR_INDEX (base1),
                                          TMR_INDEX (base2), 0)))
              && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
                  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
                      && operand_equal_p (TMR_INDEX2 (base1),
                                          TMR_INDEX2 (base2), 0))))))
    {
      offset_int moff;
      /* The offset embedded in MEM_REFs can be negative.  Bias them
         so that the resulting offset adjustment is positive.  */
      moff = mem_ref_offset (base1);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
        offset2 += (-moff).to_short_addr ();
      else
        offset1 += moff.to_short_addr ();
      moff = mem_ref_offset (base2);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
        offset1 += (-moff).to_short_addr ();
      else
        offset2 += moff.to_short_addr ();
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_deref_alias_set (ptrtype2);
  if (base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
          || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
                             TREE_TYPE (ptrtype2)) == 1)
    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
                                      ref1_alias_set, base1_alias_set,
                                      offset1, max_size1,
                                      ref2,
                                      ref2_alias_set, base2_alias_set,
                                      offset2, max_size2, false);

  return true;
}

/* Return true if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
                        || TREE_CODE (ref1->ref) == SSA_NAME
                        || DECL_P (ref1->ref)
                        || TREE_CODE (ref1->ref) == STRING_CST
                        || handled_component_p (ref1->ref)
                        || TREE_CODE (ref1->ref) == MEM_REF
                        || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
                       && (!ref2->ref
                           || TREE_CODE (ref2->ref) == SSA_NAME
                           || DECL_P (ref2->ref)
                           || TREE_CODE (ref2->ref) == STRING_CST
                           || handled_component_p (ref2->ref)
                           || TREE_CODE (ref2->ref) == MEM_REF
                           || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset-based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
                                  ref2->ref, base2, offset2, max_size2);

  /* Handle restrict based accesses.
     ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1->ref;
      if (rbase1)
        while (handled_component_p (rbase1))
          rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2->ref;
      if (rbase2)
        while (handled_component_p (rbase2))
          rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
      && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique... */
      && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
    return false;
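  /* (For example, two parameters declared int *restrict p and
     int *restrict q end up in the same dependence clique with distinct
     bases, so *p and *q are disambiguated here; this is a sketch of the
     intent, not a guarantee about how cliques are assigned.)  */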

  ind1_p = (TREE_CODE (base1) == MEM_REF
            || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
            || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      HOST_WIDE_INT tmp1;
      tree tmp2;
      ao_ref *tmp3;
      tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
      tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
      tmp2 = base1; base1 = base2; base2 = tmp2;
      tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
                                 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
                                          offset2, max_size2,
                                          ao_ref_alias_set (ref2), -1,
                                          ref1->ref, base1,
                                          offset1, max_size1,
                                          ao_ref_alias_set (ref1),
                                          ao_ref_base_alias_set (ref1),
                                          tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
                                      offset1, max_size1,
                                      ao_ref_alias_set (ref1), -1,
                                      ref2->ref, base2,
                                      offset2, max_size2,
                                      ao_ref_alias_set (ref2), -1,
                                      tbaa_p);

  /* We really do not want to end up here, but returning true is safe.  */
#ifdef ENABLE_CHECKING
  gcc_unreachable ();
#else
  return true;
#endif
}

static bool
refs_may_alias_p (tree ref1, ao_ref *ref2)
{
  ao_ref r1;
  ao_ref_init (&r1, ref1);
  return refs_may_alias_p_1 (&r1, ref2, true);
}

bool
refs_may_alias_p (tree ref1, tree ref2)
{
  ao_ref r1, r2;
  bool res;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  res = refs_may_alias_p_1 (&r1, &r2, true);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}

/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      /* All the following functions read memory pointed to by
         their second argument.  strcat/strncat additionally
         read memory pointed to by the first argument.  */
1575 case BUILT_IN_STRCAT:
1576 case BUILT_IN_STRNCAT:
1577 {
1578 ao_ref dref;
1579 ao_ref_init_from_ptr_and_size (&dref,
1580 gimple_call_arg (call, 0),
1581 NULL_TREE);
1582 if (refs_may_alias_p_1 (&dref, ref, false))
1583 return true;
1584 }
1585 /* FALLTHRU */
1586 case BUILT_IN_STRCPY:
1587 case BUILT_IN_STRNCPY:
1588 case BUILT_IN_MEMCPY:
1589 case BUILT_IN_MEMMOVE:
1590 case BUILT_IN_MEMPCPY:
1591 case BUILT_IN_STPCPY:
1592 case BUILT_IN_STPNCPY:
1593 case BUILT_IN_TM_MEMCPY:
1594 case BUILT_IN_TM_MEMMOVE:
1595 {
1596 ao_ref dref;
1597 tree size = NULL_TREE;
1598 if (gimple_call_num_args (call) == 3)
1599 size = gimple_call_arg (call, 2);
1600 ao_ref_init_from_ptr_and_size (&dref,
1601 gimple_call_arg (call, 1),
1602 size);
1603 return refs_may_alias_p_1 (&dref, ref, false);
1604 }
1605 case BUILT_IN_STRCAT_CHK:
1606 case BUILT_IN_STRNCAT_CHK:
1607 {
1608 ao_ref dref;
1609 ao_ref_init_from_ptr_and_size (&dref,
1610 gimple_call_arg (call, 0),
1611 NULL_TREE);
1612 if (refs_may_alias_p_1 (&dref, ref, false))
1613 return true;
1614 }
1615 /* FALLTHRU */
1616 case BUILT_IN_STRCPY_CHK:
1617 case BUILT_IN_STRNCPY_CHK:
1618 case BUILT_IN_MEMCPY_CHK:
1619 case BUILT_IN_MEMMOVE_CHK:
1620 case BUILT_IN_MEMPCPY_CHK:
1621 case BUILT_IN_STPCPY_CHK:
1622 case BUILT_IN_STPNCPY_CHK:
1623 {
1624 ao_ref dref;
1625 tree size = NULL_TREE;
1626 if (gimple_call_num_args (call) == 4)
1627 size = gimple_call_arg (call, 2);
1628 ao_ref_init_from_ptr_and_size (&dref,
1629 gimple_call_arg (call, 1),
1630 size);
1631 return refs_may_alias_p_1 (&dref, ref, false);
1632 }
1633 case BUILT_IN_BCOPY:
1634 {
1635 ao_ref dref;
1636 tree size = gimple_call_arg (call, 2);
1637 ao_ref_init_from_ptr_and_size (&dref,
1638 gimple_call_arg (call, 0),
1639 size);
1640 return refs_may_alias_p_1 (&dref, ref, false);
1641 }
1642
1643 /* The following functions read memory pointed to by their
1644 first argument. */
1645 CASE_BUILT_IN_TM_LOAD (1):
1646 CASE_BUILT_IN_TM_LOAD (2):
1647 CASE_BUILT_IN_TM_LOAD (4):
1648 CASE_BUILT_IN_TM_LOAD (8):
1649 CASE_BUILT_IN_TM_LOAD (FLOAT):
1650 CASE_BUILT_IN_TM_LOAD (DOUBLE):
1651 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
1652 CASE_BUILT_IN_TM_LOAD (M64):
1653 CASE_BUILT_IN_TM_LOAD (M128):
1654 CASE_BUILT_IN_TM_LOAD (M256):
1655 case BUILT_IN_TM_LOG:
1656 case BUILT_IN_TM_LOG_1:
1657 case BUILT_IN_TM_LOG_2:
1658 case BUILT_IN_TM_LOG_4:
1659 case BUILT_IN_TM_LOG_8:
1660 case BUILT_IN_TM_LOG_FLOAT:
1661 case BUILT_IN_TM_LOG_DOUBLE:
1662 case BUILT_IN_TM_LOG_LDOUBLE:
1663 case BUILT_IN_TM_LOG_M64:
1664 case BUILT_IN_TM_LOG_M128:
1665 case BUILT_IN_TM_LOG_M256:
1666 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
1667
1668 /* These read memory pointed to by the first argument. */
1669 case BUILT_IN_STRDUP:
1670 case BUILT_IN_STRNDUP:
1671 case BUILT_IN_REALLOC:
1672 {
1673 ao_ref dref;
1674 tree size = NULL_TREE;
1675 if (gimple_call_num_args (call) == 2)
1676 size = gimple_call_arg (call, 1);
1677 ao_ref_init_from_ptr_and_size (&dref,
1678 gimple_call_arg (call, 0),
1679 size);
1680 return refs_may_alias_p_1 (&dref, ref, false);
1681 }
1682 /* These read memory pointed to by the first argument. */
1683 case BUILT_IN_INDEX:
1684 case BUILT_IN_STRCHR:
1685 case BUILT_IN_STRRCHR:
1686 {
1687 ao_ref dref;
1688 ao_ref_init_from_ptr_and_size (&dref,
1689 gimple_call_arg (call, 0),
1690 NULL_TREE);
1691 return refs_may_alias_p_1 (&dref, ref, false);
1692 }
1693 /* These read memory pointed to by the first argument with size
1694 in the third argument. */
1695 case BUILT_IN_MEMCHR:
1696 {
1697 ao_ref dref;
1698 ao_ref_init_from_ptr_and_size (&dref,
1699 gimple_call_arg (call, 0),
1700 gimple_call_arg (call, 2));
1701 return refs_may_alias_p_1 (&dref, ref, false);
1702 }
1703 /* These read memory pointed to by the first and second arguments. */
1704 case BUILT_IN_STRSTR:
1705 case BUILT_IN_STRPBRK:
1706 {
1707 ao_ref dref;
1708 ao_ref_init_from_ptr_and_size (&dref,
1709 gimple_call_arg (call, 0),
1710 NULL_TREE);
1711 if (refs_may_alias_p_1 (&dref, ref, false))
1712 return true;
1713 ao_ref_init_from_ptr_and_size (&dref,
1714 gimple_call_arg (call, 1),
1715 NULL_TREE);
1716 return refs_may_alias_p_1 (&dref, ref, false);
1717 }
1718
1719 /* The following builtins do not read from memory. */
1720 case BUILT_IN_FREE:
1721 case BUILT_IN_MALLOC:
1722 case BUILT_IN_POSIX_MEMALIGN:
1723 case BUILT_IN_ALIGNED_ALLOC:
1724 case BUILT_IN_CALLOC:
1725 case BUILT_IN_ALLOCA:
1726 case BUILT_IN_ALLOCA_WITH_ALIGN:
1727 case BUILT_IN_STACK_SAVE:
1728 case BUILT_IN_STACK_RESTORE:
1729 case BUILT_IN_MEMSET:
1730 case BUILT_IN_TM_MEMSET:
1731 case BUILT_IN_MEMSET_CHK:
1732 case BUILT_IN_FREXP:
1733 case BUILT_IN_FREXPF:
1734 case BUILT_IN_FREXPL:
1735 case BUILT_IN_GAMMA_R:
1736 case BUILT_IN_GAMMAF_R:
1737 case BUILT_IN_GAMMAL_R:
1738 case BUILT_IN_LGAMMA_R:
1739 case BUILT_IN_LGAMMAF_R:
1740 case BUILT_IN_LGAMMAL_R:
1741 case BUILT_IN_MODF:
1742 case BUILT_IN_MODFF:
1743 case BUILT_IN_MODFL:
1744 case BUILT_IN_REMQUO:
1745 case BUILT_IN_REMQUOF:
1746 case BUILT_IN_REMQUOL:
1747 case BUILT_IN_SINCOS:
1748 case BUILT_IN_SINCOSF:
1749 case BUILT_IN_SINCOSL:
1750 case BUILT_IN_ASSUME_ALIGNED:
1751 case BUILT_IN_VA_END:
1752 return false;
1753 /* __sync_* builtins and some OpenMP builtins act as threading
1754 barriers. */
1755 #undef DEF_SYNC_BUILTIN
1756 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1757 #include "sync-builtins.def"
1758 #undef DEF_SYNC_BUILTIN
1759 case BUILT_IN_GOMP_ATOMIC_START:
1760 case BUILT_IN_GOMP_ATOMIC_END:
1761 case BUILT_IN_GOMP_BARRIER:
1762 case BUILT_IN_GOMP_BARRIER_CANCEL:
1763 case BUILT_IN_GOMP_TASKWAIT:
1764 case BUILT_IN_GOMP_TASKGROUP_END:
1765 case BUILT_IN_GOMP_CRITICAL_START:
1766 case BUILT_IN_GOMP_CRITICAL_END:
1767 case BUILT_IN_GOMP_CRITICAL_NAME_START:
1768 case BUILT_IN_GOMP_CRITICAL_NAME_END:
1769 case BUILT_IN_GOMP_LOOP_END:
1770 case BUILT_IN_GOMP_LOOP_END_CANCEL:
1771 case BUILT_IN_GOMP_ORDERED_START:
1772 case BUILT_IN_GOMP_ORDERED_END:
1773 case BUILT_IN_GOMP_SECTIONS_END:
1774 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
1775 case BUILT_IN_GOMP_SINGLE_COPY_START:
1776 case BUILT_IN_GOMP_SINGLE_COPY_END:
1777 return true;
1778
1779 default:
1780 /* Fallthru to general call handling. */;
1781 }
1782
1783 /* Check if base is a global static variable that is not read
1784 by the function. */
1785 if (callee != NULL_TREE
1786 && TREE_CODE (base) == VAR_DECL
1787 && TREE_STATIC (base))
1788 {
1789 struct cgraph_node *node = cgraph_node::get (callee);
1790 bitmap not_read;
1791
1792 /* FIXME: Callee can be an OMP builtin that does not have a call graph
1793 node yet. We should enforce that there are nodes for all decls in the
1794 IL and remove this check instead. */
1795 if (node
1796 && (not_read = ipa_reference_get_not_read_global (node))
1797 && bitmap_bit_p (not_read, DECL_UID (base)))
1798 goto process_args;
1799 }
1800
1801 /* Check if the base variable is call-used. */
1802 if (DECL_P (base))
1803 {
1804 if (pt_solution_includes (gimple_call_use_set (call), base))
1805 return true;
1806 }
1807 else if ((TREE_CODE (base) == MEM_REF
1808 || TREE_CODE (base) == TARGET_MEM_REF)
1809 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1810 {
1811 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1812 if (!pi)
1813 return true;
1814
1815 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
1816 return true;
1817 }
1818 else
1819 return true;
1820
1821 /* Inspect call arguments for passed-by-value aliases. */
1822 process_args:
1823 for (i = 0; i < gimple_call_num_args (call); ++i)
1824 {
1825 tree op = gimple_call_arg (call, i);
1826 int flags = gimple_call_arg_flags (call, i);
1827
1828 if (flags & EAF_UNUSED)
1829 continue;
1830
1831 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1832 op = TREE_OPERAND (op, 0);
1833
1834 if (TREE_CODE (op) != SSA_NAME
1835 && !is_gimple_min_invariant (op))
1836 {
1837 ao_ref r;
1838 ao_ref_init (&r, op);
1839 if (refs_may_alias_p_1 (&r, ref, true))
1840 return true;
1841 }
1842 }
1843
1844 return false;
1845 }
1846
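/* Editorial note: a minimal sketch, not part of GCC, of how the worker
   above is queried through an ao_ref; STMT and REF are assumed to be
   supplied by the caller.  */
#if 0
  gcall *call = as_a <gcall *> (stmt);
  ao_ref r;
  ao_ref_init (&r, ref);
  if (ref_maybe_used_by_call_p_1 (call, &r))
    ;  /* The call may read from *REF.  */
#endif
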
1847 static bool
1848 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
1849 {
1850 bool res;
1851 res = ref_maybe_used_by_call_p_1 (call, ref);
1852 if (res)
1853 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1854 else
1855 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1856 return res;
1857 }
1858
1859
1860 /* If the statement STMT may use the memory reference REF return
1861 true, otherwise return false. */
1862
1863 bool
1864 ref_maybe_used_by_stmt_p (gimple stmt, ao_ref *ref)
1865 {
1866 if (is_gimple_assign (stmt))
1867 {
1868 tree rhs;
1869
1870 /* All assignments that reference memory are single assignments. */
1871 if (!gimple_assign_single_p (stmt))
1872 return false;
1873
1874 rhs = gimple_assign_rhs1 (stmt);
1875 if (is_gimple_reg (rhs)
1876 || is_gimple_min_invariant (rhs)
1877 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1878 return false;
1879
1880 return refs_may_alias_p (rhs, ref);
1881 }
1882 else if (is_gimple_call (stmt))
1883 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
1884 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
1885 {
1886 tree retval = gimple_return_retval (return_stmt);
1887 if (retval
1888 && TREE_CODE (retval) != SSA_NAME
1889 && !is_gimple_min_invariant (retval)
1890 && refs_may_alias_p (retval, ref))
1891 return true;
1892 /* If ref escapes the function then the return acts as a use. */
1893 tree base = ao_ref_base (ref);
1894 if (!base)
1895 ;
1896 else if (DECL_P (base))
1897 return is_global_var (base);
1898 else if (TREE_CODE (base) == MEM_REF
1899 || TREE_CODE (base) == TARGET_MEM_REF)
1900 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1901 return false;
1902 }
1903
1904 return true;
1905 }
1906
1907 bool
1908 ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
1909 {
1910 ao_ref r;
1911 ao_ref_init (&r, ref);
1912 return ref_maybe_used_by_stmt_p (stmt, &r);
1913 }
1914
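/* Editorial example: a hedged sketch of scanning a basic block BB for
   statements that may read the memory reference REF.  The gsi_*
   iterators come from gimple-iterator.h; BB and REF are assumed to be
   supplied by the caller.  */
#if 0
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    if (ref_maybe_used_by_stmt_p (gsi_stmt (gsi), ref))
      /* Some statement in BB may read *REF.  */
      break;
#endif
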
1915 /* If the call in statement CALL may clobber the memory reference REF
1916 return true, otherwise return false. */
1917
1918 bool
1919 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
1920 {
1921 tree base;
1922 tree callee;
1923
1924 /* If the call is pure or const it cannot clobber anything. */
1925 if (gimple_call_flags (call)
1926 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1927 return false;
1928 if (gimple_call_internal_p (call))
1929 switch (gimple_call_internal_fn (call))
1930 {
1931 /* Treat these internal calls like ECF_PURE for aliasing:
1932 they don't write to any memory the program should care about.
1933 They do have important other side-effects and they read memory,
1934 so they can't be ECF_NOVOPS. */
1935 case IFN_UBSAN_NULL:
1936 case IFN_UBSAN_BOUNDS:
1937 case IFN_UBSAN_VPTR:
1938 case IFN_UBSAN_OBJECT_SIZE:
1939 case IFN_ASAN_CHECK:
1940 return false;
1941 default:
1942 break;
1943 }
1944
1945 base = ao_ref_base (ref);
1946 if (!base)
1947 return true;
1948
1949 if (TREE_CODE (base) == SSA_NAME
1950 || CONSTANT_CLASS_P (base))
1951 return false;
1952
1953 /* A call that is not free of side-effects might involve volatile
1954 accesses and thus conflicts with all other volatile accesses. */
1955 if (ref->volatile_p)
1956 return true;
1957
1958 /* If the reference is based on a decl that is not aliased the call
1959 cannot possibly clobber it. */
1960 if (DECL_P (base)
1961 && !may_be_aliased (base)
1962 /* But local non-readonly statics can be modified through recursion
1963 or the call may implement a threading barrier which we must
1964 treat as may-def. */
1965 && (TREE_READONLY (base)
1966 || !is_global_var (base)))
1967 return false;
1968
1969 callee = gimple_call_fndecl (call);
1970
1971 /* Handle those builtin functions explicitly that do not act as
1972 escape points. See tree-ssa-structalias.c:find_func_aliases
1973 for the list of builtins we might need to handle here. */
1974 if (callee != NULL_TREE
1975 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1976 switch (DECL_FUNCTION_CODE (callee))
1977 {
1978 /* All the following functions clobber memory pointed to by
1979 their first argument. */
1980 case BUILT_IN_STRCPY:
1981 case BUILT_IN_STRNCPY:
1982 case BUILT_IN_MEMCPY:
1983 case BUILT_IN_MEMMOVE:
1984 case BUILT_IN_MEMPCPY:
1985 case BUILT_IN_STPCPY:
1986 case BUILT_IN_STPNCPY:
1987 case BUILT_IN_STRCAT:
1988 case BUILT_IN_STRNCAT:
1989 case BUILT_IN_MEMSET:
1990 case BUILT_IN_TM_MEMSET:
1991 CASE_BUILT_IN_TM_STORE (1):
1992 CASE_BUILT_IN_TM_STORE (2):
1993 CASE_BUILT_IN_TM_STORE (4):
1994 CASE_BUILT_IN_TM_STORE (8):
1995 CASE_BUILT_IN_TM_STORE (FLOAT):
1996 CASE_BUILT_IN_TM_STORE (DOUBLE):
1997 CASE_BUILT_IN_TM_STORE (LDOUBLE):
1998 CASE_BUILT_IN_TM_STORE (M64):
1999 CASE_BUILT_IN_TM_STORE (M128):
2000 CASE_BUILT_IN_TM_STORE (M256):
2001 case BUILT_IN_TM_MEMCPY:
2002 case BUILT_IN_TM_MEMMOVE:
2003 {
2004 ao_ref dref;
2005 tree size = NULL_TREE;
2006 /* Don't pass in a size for strncat, as the maximum size
2007 written is strlen (dest) + n + 1 rather than n; that is,
2008 n + 1 bytes are stored at dest + strlen (dest), and
2009 strlen (dest) isn't known. */
2010 if (gimple_call_num_args (call) == 3
2011 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2012 size = gimple_call_arg (call, 2);
2013 ao_ref_init_from_ptr_and_size (&dref,
2014 gimple_call_arg (call, 0),
2015 size);
2016 return refs_may_alias_p_1 (&dref, ref, false);
2017 }
2018 case BUILT_IN_STRCPY_CHK:
2019 case BUILT_IN_STRNCPY_CHK:
2020 case BUILT_IN_MEMCPY_CHK:
2021 case BUILT_IN_MEMMOVE_CHK:
2022 case BUILT_IN_MEMPCPY_CHK:
2023 case BUILT_IN_STPCPY_CHK:
2024 case BUILT_IN_STPNCPY_CHK:
2025 case BUILT_IN_STRCAT_CHK:
2026 case BUILT_IN_STRNCAT_CHK:
2027 case BUILT_IN_MEMSET_CHK:
2028 {
2029 ao_ref dref;
2030 tree size = NULL_TREE;
2031 /* Don't pass in a size for __strncat_chk, as the maximum size
2032 written is strlen (dest) + n + 1 rather than n; that is,
2033 n + 1 bytes are stored at dest + strlen (dest), and
2034 strlen (dest) isn't known. */
2035 if (gimple_call_num_args (call) == 4
2036 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2037 size = gimple_call_arg (call, 2);
2038 ao_ref_init_from_ptr_and_size (&dref,
2039 gimple_call_arg (call, 0),
2040 size);
2041 return refs_may_alias_p_1 (&dref, ref, false);
2042 }
2043 case BUILT_IN_BCOPY:
2044 {
2045 ao_ref dref;
2046 tree size = gimple_call_arg (call, 2);
2047 ao_ref_init_from_ptr_and_size (&dref,
2048 gimple_call_arg (call, 1),
2049 size);
2050 return refs_may_alias_p_1 (&dref, ref, false);
2051 }
2052 /* Allocating memory does not have any side-effects apart from
2053 being the definition point for the pointer. */
2054 case BUILT_IN_MALLOC:
2055 case BUILT_IN_ALIGNED_ALLOC:
2056 case BUILT_IN_CALLOC:
2057 case BUILT_IN_STRDUP:
2058 case BUILT_IN_STRNDUP:
2059 /* Unix98 specifies that errno is set on allocation failure. */
2060 if (flag_errno_math
2061 && targetm.ref_may_alias_errno (ref))
2062 return true;
2063 return false;
2064 case BUILT_IN_STACK_SAVE:
2065 case BUILT_IN_ALLOCA:
2066 case BUILT_IN_ALLOCA_WITH_ALIGN:
2067 case BUILT_IN_ASSUME_ALIGNED:
2068 return false;
2069 /* But posix_memalign stores a pointer into the memory pointed to
2070 by its first argument. */
2071 case BUILT_IN_POSIX_MEMALIGN:
2072 {
2073 tree ptrptr = gimple_call_arg (call, 0);
2074 ao_ref dref;
2075 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2076 TYPE_SIZE_UNIT (ptr_type_node));
2077 return (refs_may_alias_p_1 (&dref, ref, false)
2078 || (flag_errno_math
2079 && targetm.ref_may_alias_errno (ref)));
2080 }
2081 /* Freeing memory kills the pointed-to memory. More importantly
2082 the call has to serve as a barrier for moving loads and stores
2083 across it. */
2084 case BUILT_IN_FREE:
2085 case BUILT_IN_VA_END:
2086 {
2087 tree ptr = gimple_call_arg (call, 0);
2088 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2089 }
2090 /* Realloc serves both as allocation point and deallocation point. */
2091 case BUILT_IN_REALLOC:
2092 {
2093 tree ptr = gimple_call_arg (call, 0);
2094 /* Unix98 specifies that errno is set on allocation failure. */
2095 return ((flag_errno_math
2096 && targetm.ref_may_alias_errno (ref))
2097 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2098 }
2099 case BUILT_IN_GAMMA_R:
2100 case BUILT_IN_GAMMAF_R:
2101 case BUILT_IN_GAMMAL_R:
2102 case BUILT_IN_LGAMMA_R:
2103 case BUILT_IN_LGAMMAF_R:
2104 case BUILT_IN_LGAMMAL_R:
2105 {
2106 tree out = gimple_call_arg (call, 1);
2107 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2108 return true;
2109 if (flag_errno_math)
2110 break;
2111 return false;
2112 }
2113 case BUILT_IN_FREXP:
2114 case BUILT_IN_FREXPF:
2115 case BUILT_IN_FREXPL:
2116 case BUILT_IN_MODF:
2117 case BUILT_IN_MODFF:
2118 case BUILT_IN_MODFL:
2119 {
2120 tree out = gimple_call_arg (call, 1);
2121 return ptr_deref_may_alias_ref_p_1 (out, ref);
2122 }
2123 case BUILT_IN_REMQUO:
2124 case BUILT_IN_REMQUOF:
2125 case BUILT_IN_REMQUOL:
2126 {
2127 tree out = gimple_call_arg (call, 2);
2128 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2129 return true;
2130 if (flag_errno_math)
2131 break;
2132 return false;
2133 }
2134 case BUILT_IN_SINCOS:
2135 case BUILT_IN_SINCOSF:
2136 case BUILT_IN_SINCOSL:
2137 {
2138 tree sin = gimple_call_arg (call, 1);
2139 tree cos = gimple_call_arg (call, 2);
2140 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2141 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2142 }
2143 /* __sync_* builtins and some OpenMP builtins act as threading
2144 barriers. */
2145 #undef DEF_SYNC_BUILTIN
2146 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2147 #include "sync-builtins.def"
2148 #undef DEF_SYNC_BUILTIN
2149 case BUILT_IN_GOMP_ATOMIC_START:
2150 case BUILT_IN_GOMP_ATOMIC_END:
2151 case BUILT_IN_GOMP_BARRIER:
2152 case BUILT_IN_GOMP_BARRIER_CANCEL:
2153 case BUILT_IN_GOMP_TASKWAIT:
2154 case BUILT_IN_GOMP_TASKGROUP_END:
2155 case BUILT_IN_GOMP_CRITICAL_START:
2156 case BUILT_IN_GOMP_CRITICAL_END:
2157 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2158 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2159 case BUILT_IN_GOMP_LOOP_END:
2160 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2161 case BUILT_IN_GOMP_ORDERED_START:
2162 case BUILT_IN_GOMP_ORDERED_END:
2163 case BUILT_IN_GOMP_SECTIONS_END:
2164 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2165 case BUILT_IN_GOMP_SINGLE_COPY_START:
2166 case BUILT_IN_GOMP_SINGLE_COPY_END:
2167 return true;
2168 default:
2169 /* Fallthru to general call handling. */;
2170 }
2171
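/* Editorial note: in the switch above, several allocation and math
   builtins clobber memory only through errno; whether REF may be errno
   is delegated to the target hook targetm.ref_may_alias_errno and is
   only considered under -fmath-errno (flag_errno_math).  */
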
2172 /* Check if base is a global static variable that is not written
2173 by the function. */
2174 if (callee != NULL_TREE
2175 && TREE_CODE (base) == VAR_DECL
2176 && TREE_STATIC (base))
2177 {
2178 struct cgraph_node *node = cgraph_node::get (callee);
2179 bitmap not_written;
2180
2181 if (node
2182 && (not_written = ipa_reference_get_not_written_global (node))
2183 && bitmap_bit_p (not_written, DECL_UID (base)))
2184 return false;
2185 }
2186
2187 /* Check if the base variable is call-clobbered. */
2188 if (DECL_P (base))
2189 return pt_solution_includes (gimple_call_clobber_set (call), base);
2190 else if ((TREE_CODE (base) == MEM_REF
2191 || TREE_CODE (base) == TARGET_MEM_REF)
2192 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2193 {
2194 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2195 if (!pi)
2196 return true;
2197
2198 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2199 }
2200
2201 return true;
2202 }
2203
2204 /* If the call in statement CALL may clobber the memory reference REF
2205 return true, otherwise return false. */
2206
2207 bool
2208 call_may_clobber_ref_p (gcall *call, tree ref)
2209 {
2210 bool res;
2211 ao_ref r;
2212 ao_ref_init (&r, ref);
2213 res = call_may_clobber_ref_p_1 (call, &r);
2214 if (res)
2215 ++alias_stats.call_may_clobber_ref_p_may_alias;
2216 else
2217 ++alias_stats.call_may_clobber_ref_p_no_alias;
2218 return res;
2219 }
2220
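/* Editorial note: the ECF_PURE/ECF_CONST early-out above means a pure
   or const call is never reported as a clobber.  A minimal sketch,
   with CALL and REF assumed to be supplied by the caller:  */
#if 0
  if (gimple_call_flags (call) & (ECF_CONST | ECF_PURE))
    gcc_assert (!call_may_clobber_ref_p (call, ref));
#endif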
2221
2222 /* If the statement STMT may clobber the memory reference REF return true,
2223 otherwise return false. */
2224
2225 bool
2226 stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
2227 {
2228 if (is_gimple_call (stmt))
2229 {
2230 tree lhs = gimple_call_lhs (stmt);
2231 if (lhs
2232 && TREE_CODE (lhs) != SSA_NAME)
2233 {
2234 ao_ref r;
2235 ao_ref_init (&r, lhs);
2236 if (refs_may_alias_p_1 (ref, &r, true))
2237 return true;
2238 }
2239
2240 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2241 }
2242 else if (gimple_assign_single_p (stmt))
2243 {
2244 tree lhs = gimple_assign_lhs (stmt);
2245 if (TREE_CODE (lhs) != SSA_NAME)
2246 {
2247 ao_ref r;
2248 ao_ref_init (&r, lhs);
2249 return refs_may_alias_p_1 (ref, &r, true);
2250 }
2251 }
2252 else if (gimple_code (stmt) == GIMPLE_ASM)
2253 return true;
2254
2255 return false;
2256 }
2257
2258 bool
2259 stmt_may_clobber_ref_p (gimple stmt, tree ref)
2260 {
2261 ao_ref r;
2262 ao_ref_init (&r, ref);
2263 return stmt_may_clobber_ref_p_1 (stmt, &r);
2264 }
2265
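/* Editorial note: a statement is transparent for REF when it neither
   reads nor clobbers it; a minimal sketch combining the two main
   oracle queries (STMT and REF assumed to be supplied by the
   caller):  */
#if 0
  bool transparent = !ref_maybe_used_by_stmt_p (stmt, ref)
		     && !stmt_may_clobber_ref_p (stmt, ref);
#endif
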
2266 /* If STMT kills the memory reference REF return true, otherwise
2267 return false. */
2268
2269 bool
2270 stmt_kills_ref_p (gimple stmt, ao_ref *ref)
2271 {
2272 if (!ao_ref_base (ref))
2273 return false;
2274
2275 if (gimple_has_lhs (stmt)
2276 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2277 /* The assignment is not necessarily carried out if it can throw
2278 and we can catch it in the current function where we could inspect
2279 the previous value.
2280 ??? We only need to care about the RHS throwing. For aggregate
2281 assignments or similar calls and non-call exceptions the LHS
2282 might throw as well. */
2283 && !stmt_can_throw_internal (stmt))
2284 {
2285 tree lhs = gimple_get_lhs (stmt);
2286 /* If LHS is literally a base of the access we are done. */
2287 if (ref->ref)
2288 {
2289 tree base = ref->ref;
2290 if (handled_component_p (base))
2291 {
2292 tree saved_lhs0 = NULL_TREE;
2293 if (handled_component_p (lhs))
2294 {
2295 saved_lhs0 = TREE_OPERAND (lhs, 0);
2296 TREE_OPERAND (lhs, 0) = integer_zero_node;
2297 }
2298 do
2299 {
2300 /* Just compare the outermost handled component; if
2301 they are equal we have found a possible common
2302 base. */
2303 tree saved_base0 = TREE_OPERAND (base, 0);
2304 TREE_OPERAND (base, 0) = integer_zero_node;
2305 bool res = operand_equal_p (lhs, base, 0);
2306 TREE_OPERAND (base, 0) = saved_base0;
2307 if (res)
2308 break;
2309 /* Otherwise drop handled components of the access. */
2310 base = saved_base0;
2311 }
2312 while (handled_component_p (base));
2313 if (saved_lhs0)
2314 TREE_OPERAND (lhs, 0) = saved_lhs0;
2315 }
2316 /* Finally check if lhs is equal to the base candidate
2317 of the access. */
2318 if (operand_equal_p (lhs, base, 0))
2319 return true;
2320 }
2321
2322 /* Now look for non-literal equal bases, restricted to accesses
2323 with constant offset and size. */
2324 /* For a must-alias check we need to be able to constrain
2325 the access properly. */
2326 if (ref->max_size == -1)
2327 return false;
2328 HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
2329 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
2330 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2331 so base == ref->base does not always hold. */
2332 if (base != ref->base)
2333 {
2334 /* If both base and ref->base are MEM_REFs, only compare the
2335 first operand, and if the second operand isn't equal constant,
2336 try to add the offsets into offset and ref_offset. */
2337 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2338 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2339 {
2340 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2341 TREE_OPERAND (ref->base, 1)))
2342 {
2343 offset_int off1 = mem_ref_offset (base);
2344 off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT);
2345 off1 += offset;
2346 offset_int off2 = mem_ref_offset (ref->base);
2347 off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT);
2348 off2 += ref_offset;
2349 if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
2350 {
2351 offset = off1.to_shwi ();
2352 ref_offset = off2.to_shwi ();
2353 }
2354 else
2355 size = -1;
2356 }
2357 }
2358 else
2359 size = -1;
2360 }
2361 /* For a must-alias check we need to be able to constrain
2362 the access properly. */
2363 if (size != -1 && size == max_size)
2364 {
2365 if (offset <= ref_offset
2366 && offset + size >= ref_offset + ref->max_size)
2367 return true;
2368 }
2369 }
2370
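/* Editorial example: all offsets and sizes above are in bits.  For a
   store to bytes 4-7 of an array (offset 32, size = max_size = 32) and
   REF covering bytes 4-5 (ref_offset 32, max_size 16), the containment
   test offset <= ref_offset && offset + size >= ref_offset + ref->max_size
   becomes 32 <= 32 && 64 >= 48, so the store kills REF.  */
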
2371 if (is_gimple_call (stmt))
2372 {
2373 tree callee = gimple_call_fndecl (stmt);
2374 if (callee != NULL_TREE
2375 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2376 switch (DECL_FUNCTION_CODE (callee))
2377 {
2378 case BUILT_IN_FREE:
2379 {
2380 tree ptr = gimple_call_arg (stmt, 0);
2381 tree base = ao_ref_base (ref);
2382 if (base && TREE_CODE (base) == MEM_REF
2383 && TREE_OPERAND (base, 0) == ptr)
2384 return true;
2385 break;
2386 }
2387
2388 case BUILT_IN_MEMCPY:
2389 case BUILT_IN_MEMPCPY:
2390 case BUILT_IN_MEMMOVE:
2391 case BUILT_IN_MEMSET:
2392 case BUILT_IN_MEMCPY_CHK:
2393 case BUILT_IN_MEMPCPY_CHK:
2394 case BUILT_IN_MEMMOVE_CHK:
2395 case BUILT_IN_MEMSET_CHK:
2396 {
2397 /* For a must-alias check we need to be able to constrain
2398 the access properly. */
2399 if (ref->max_size == -1)
2400 return false;
2401 tree dest = gimple_call_arg (stmt, 0);
2402 tree len = gimple_call_arg (stmt, 2);
2403 if (!tree_fits_shwi_p (len))
2404 return false;
2405 tree rbase = ref->base;
2406 offset_int roffset = ref->offset;
2407 ao_ref dref;
2408 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2409 tree base = ao_ref_base (&dref);
2410 offset_int offset = dref.offset;
2411 if (!base || dref.size == -1)
2412 return false;
2413 if (TREE_CODE (base) == MEM_REF)
2414 {
2415 if (TREE_CODE (rbase) != MEM_REF)
2416 return false;
2417 /* Compare pointers. */
2418 offset += wi::lshift (mem_ref_offset (base),
2419 LOG2_BITS_PER_UNIT);
2420 roffset += wi::lshift (mem_ref_offset (rbase),
2421 LOG2_BITS_PER_UNIT);
2422 base = TREE_OPERAND (base, 0);
2423 rbase = TREE_OPERAND (rbase, 0);
2424 }
2425 if (base == rbase
2426 && wi::les_p (offset, roffset)
2427 && wi::les_p (roffset + ref->max_size,
2428 offset + wi::lshift (wi::to_offset (len),
2429 LOG2_BITS_PER_UNIT)))
2430 return true;
2431 break;
2432 }
2433
2434 case BUILT_IN_VA_END:
2435 {
2436 tree ptr = gimple_call_arg (stmt, 0);
2437 if (TREE_CODE (ptr) == ADDR_EXPR)
2438 {
2439 tree base = ao_ref_base (ref);
2440 if (TREE_OPERAND (ptr, 0) == base)
2441 return true;
2442 }
2443 break;
2444 }
2445
2446 default:;
2447 }
2448 }
2449 return false;
2450 }
2451
2452 bool
2453 stmt_kills_ref_p (gimple stmt, tree ref)
2454 {
2455 ao_ref r;
2456 ao_ref_init (&r, ref);
2457 return stmt_kills_ref_p (stmt, &r);
2458 }
2459
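/* Editorial example: stmt_kills_ref_p is the must-alias counterpart of
   stmt_may_clobber_ref_p and is the kind of query a dead-store style
   transform would make.  A minimal sketch; STORE and LATER are
   hypothetical statements where LATER executes whenever STORE does:  */
#if 0
  ao_ref r;
  ao_ref_init (&r, gimple_assign_lhs (store));
  if (stmt_kills_ref_p (later, &r))
    ;  /* Every byte STORE writes is overwritten again; STORE may
	  be removable.  */
#endif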
2460
2461 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2462 TARGET, or a statement clobbering the memory reference REF, in which
2463 case false is returned. The walk starts with VUSE, one argument of PHI. */
2464
2465 static bool
2466 maybe_skip_until (gimple phi, tree target, ao_ref *ref,
2467 tree vuse, unsigned int *cnt, bitmap *visited,
2468 bool abort_on_visited,
2469 void *(*translate)(ao_ref *, tree, void *, bool),
2470 void *data)
2471 {
2472 basic_block bb = gimple_bb (phi);
2473
2474 if (!*visited)
2475 *visited = BITMAP_ALLOC (NULL);
2476
2477 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2478
2479 /* Walk until we hit the target. */
2480 while (vuse != target)
2481 {
2482 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
2483 /* Recurse for PHI nodes. */
2484 if (gimple_code (def_stmt) == GIMPLE_PHI)
2485 {
2486 /* An already visited PHI node ends the walk successfully. */
2487 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2488 return !abort_on_visited;
2489 vuse = get_continuation_for_phi (def_stmt, ref, cnt,
2490 visited, abort_on_visited,
2491 translate, data);
2492 if (!vuse)
2493 return false;
2494 continue;
2495 }
2496 else if (gimple_nop_p (def_stmt))
2497 return false;
2498 else
2499 {
2500 /* A clobbering statement or the end of the IL ends the walk with failure. */
2501 ++*cnt;
2502 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2503 {
2504 if (translate
2505 && (*translate) (ref, vuse, data, true) == NULL)
2506 ;
2507 else
2508 return false;
2509 }
2510 }
2511 /* If we reach a new basic-block see if we already skipped it
2512 in a previous walk that ended successfully. */
2513 if (gimple_bb (def_stmt) != bb)
2514 {
2515 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2516 return !abort_on_visited;
2517 bb = gimple_bb (def_stmt);
2518 }
2519 vuse = gimple_vuse (def_stmt);
2520 }
2521 return true;
2522 }
2523
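/* Editorial illustration: starting from VUSE, one argument of PHI, the
   loop above follows SSA_NAME_DEF_STMT and gimple_vuse links backwards,

     VUSE -> def stmt -> vuse -> def stmt -> ... -> TARGET

   succeeding once TARGET is reached and failing on a clobber of REF
   that TRANSLATE cannot explain away.  */
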
2524 /* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
2525 until we hit the phi argument definition that dominates the other one.
2526 Return that, or NULL_TREE if there is no such definition. */
2527
2528 static tree
2529 get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
2530 ao_ref *ref, unsigned int *cnt,
2531 bitmap *visited, bool abort_on_visited,
2532 void *(*translate)(ao_ref *, tree, void *, bool),
2533 void *data)
2534 {
2535 gimple def0 = SSA_NAME_DEF_STMT (arg0);
2536 gimple def1 = SSA_NAME_DEF_STMT (arg1);
2537 tree common_vuse;
2538
2539 if (arg0 == arg1)
2540 return arg0;
2541 else if (gimple_nop_p (def0)
2542 || (!gimple_nop_p (def1)
2543 && dominated_by_p (CDI_DOMINATORS,
2544 gimple_bb (def1), gimple_bb (def0))))
2545 {
2546 if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
2547 visited, abort_on_visited, translate, data))
2548 return arg0;
2549 }
2550 else if (gimple_nop_p (def1)
2551 || dominated_by_p (CDI_DOMINATORS,
2552 gimple_bb (def0), gimple_bb (def1)))
2553 {
2554 if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
2555 visited, abort_on_visited, translate, data))
2556 return arg1;
2557 }
2558 /* Special case of a diamond:
2559 MEM_1 = ...
2560 goto (cond) ? L1 : L2
2561 L1: store1 = ... #MEM_2 = vuse(MEM_1)
2562 goto L3
2563 L2: store2 = ... #MEM_3 = vuse(MEM_1)
2564 L3: MEM_4 = PHI<MEM_2, MEM_3>
2565 We were called with the PHI at L3; MEM_2 and MEM_3 don't
2566 dominate each other, but we can still easily skip this PHI node
2567 if we recognize that the vuse MEM operand is the same for both,
2568 and that we can skip both statements (they don't clobber us).
2569 This is still linear. Don't use maybe_skip_until, which could
2570 potentially be slow. */
2571 else if ((common_vuse = gimple_vuse (def0))
2572 && common_vuse == gimple_vuse (def1))
2573 {
2574 *cnt += 2;
2575 if ((!stmt_may_clobber_ref_p_1 (def0, ref)
2576 || (translate
2577 && (*translate) (ref, arg0, data, true) == NULL))
2578 && (!stmt_may_clobber_ref_p_1 (def1, ref)
2579 || (translate
2580 && (*translate) (ref, arg1, data, true) == NULL)))
2581 return common_vuse;
2582 }
2583
2584 return NULL_TREE;
2585 }
2586
2587
2588 /* Starting from a PHI node for the virtual operand of the memory reference
2589 REF, find a continuation virtual operand that allows us to continue
2590 walking statements dominating PHI, skipping only statements that cannot
2591 possibly clobber REF. Increments *CNT for each alias disambiguation done.
2592 Returns NULL_TREE if no suitable virtual operand can be found. */
2593
2594 tree
2595 get_continuation_for_phi (gimple phi, ao_ref *ref,
2596 unsigned int *cnt, bitmap *visited,
2597 bool abort_on_visited,
2598 void *(*translate)(ao_ref *, tree, void *, bool),
2599 void *data)
2600 {
2601 unsigned nargs = gimple_phi_num_args (phi);
2602
2603 /* A single-argument PHI we can simply look through. */
2604 if (nargs == 1)
2605 return PHI_ARG_DEF (phi, 0);
2606
2607 /* For two or more arguments try to pairwise skip non-aliasing code
2608 until we hit the phi argument definition that dominates all the others. */
2609 else if (nargs >= 2)
2610 {
2611 tree arg0, arg1;
2612 unsigned i;
2613
2614 /* Find a candidate for the virtual operand whose definition
2615 dominates those of all others. */
2616 arg0 = PHI_ARG_DEF (phi, 0);
2617 if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
2618 for (i = 1; i < nargs; ++i)
2619 {
2620 arg1 = PHI_ARG_DEF (phi, i);
2621 if (SSA_NAME_IS_DEFAULT_DEF (arg1))
2622 {
2623 arg0 = arg1;
2624 break;
2625 }
2626 if (dominated_by_p (CDI_DOMINATORS,
2627 gimple_bb (SSA_NAME_DEF_STMT (arg0)),
2628 gimple_bb (SSA_NAME_DEF_STMT (arg1))))
2629 arg0 = arg1;
2630 }
2631
2632 /* Then pairwise reduce against the found candidate. */
2633 for (i = 0; i < nargs; ++i)
2634 {
2635 arg1 = PHI_ARG_DEF (phi, i);
2636 arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
2637 cnt, visited, abort_on_visited,
2638 translate, data);
2639 if (!arg0)
2640 return NULL_TREE;
2641 }
2642
2643 return arg0;
2644 }
2645
2646 return NULL_TREE;
2647 }
2648
2649 /* Based on the memory reference REF and its virtual use VUSE call
2650 WALKER for each virtual use that is equivalent to VUSE, including VUSE
2651 itself. That is, for each virtual use whose defining statement
2652 does not clobber REF.
2653
2654 WALKER is called with REF, the current virtual use and DATA. If
2655 WALKER returns non-NULL the walk stops and its result is returned.
2656 At the end of a non-successful walk NULL is returned.
2657
2658 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
2659 use whose definition is a statement that may clobber REF, and DATA.
2660 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2661 If TRANSLATE returns non-NULL the walk stops and its result is returned.
2662 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2663 to adjust REF and *DATA to make that valid.
2664
2665 VALUEIZE if non-NULL is called with the next VUSE that is considered
2666 and its return value is substituted for that VUSE. This can be used to
2667 implement optimistic value-numbering for example. Note that the
2668 VUSE argument is assumed to be valueized already.
2669
2670 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
2671
2672 void *
2673 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
2674 void *(*walker)(ao_ref *, tree, unsigned int, void *),
2675 void *(*translate)(ao_ref *, tree, void *, bool),
2676 tree (*valueize)(tree),
2677 void *data)
2678 {
2679 bitmap visited = NULL;
2680 void *res;
2681 unsigned int cnt = 0;
2682 bool translated = false;
2683
2684 timevar_push (TV_ALIAS_STMT_WALK);
2685
2686 do
2687 {
2688 gimple def_stmt;
2689
2690 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2691 res = (*walker) (ref, vuse, cnt, data);
2692 /* Abort walk. */
2693 if (res == (void *)-1)
2694 {
2695 res = NULL;
2696 break;
2697 }
2698 /* Lookup succeeded. */
2699 else if (res != NULL)
2700 break;
2701
2702 if (valueize)
2703 vuse = valueize (vuse);
2704 def_stmt = SSA_NAME_DEF_STMT (vuse);
2705 if (gimple_nop_p (def_stmt))
2706 break;
2707 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2708 vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
2709 &visited, translated, translate, data);
2710 else
2711 {
2712 cnt++;
2713 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2714 {
2715 if (!translate)
2716 break;
2717 res = (*translate) (ref, vuse, data, false);
2718 /* Failed lookup and translation. */
2719 if (res == (void *)-1)
2720 {
2721 res = NULL;
2722 break;
2723 }
2724 /* Lookup succeeded. */
2725 else if (res != NULL)
2726 break;
2727 /* Translation succeeded, continue walking. */
2728 translated = true;
2729 }
2730 vuse = gimple_vuse (def_stmt);
2731 }
2732 }
2733 while (vuse);
2734
2735 if (visited)
2736 BITMAP_FREE (visited);
2737
2738 timevar_pop (TV_ALIAS_STMT_WALK);
2739
2740 return res;
2741 }
2742
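/* Editorial example: a hedged sketch of a WALKER callback following the
   protocol documented above.  WALK_LIMIT and hypothetical_lookup are
   illustrative placeholders, not GCC APIs:  */
#if 0
static void *
example_walker (ao_ref *ref, tree vuse, unsigned int cnt, void *data)
{
  /* Give up once the walk becomes too expensive.  */
  if (cnt > WALK_LIMIT)
    return (void *) -1;
  /* Returning non-NULL stops the walk with that result.  */
  if (void *val = hypothetical_lookup (ref, vuse, data))
    return val;
  /* Returning NULL continues to the next non-clobbering vuse.  */
  return NULL;
}

  /* Called, e.g., as:  */
  res = walk_non_aliased_vuses (&r, vuse, example_walker,
				NULL, NULL, data);
#endif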
2743
2744 /* Based on the memory reference REF call WALKER for each vdef whose
2745 defining statement may clobber REF, starting with VDEF. If REF
2746 is NULL_TREE, each defining statement is visited.
2747
2748 WALKER is called with REF, the current vdef and DATA. If WALKER
2749 returns true the walk is stopped, otherwise it continues.
2750
2751 If the function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
2752 The pointer may be NULL, in which case this information is not tracked.
2753
2754 At PHI nodes walk_aliased_vdefs forks into one walk for each
2755 PHI argument (but only one walk continues on merge points); a fork
2756 ends as soon as WALKER returns true for one of its vdefs.
2757
2758 The function returns the number of statements walked. */
2759
2760 static unsigned int
2761 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2762 bool (*walker)(ao_ref *, tree, void *), void *data,
2763 bitmap *visited, unsigned int cnt,
2764 bool *function_entry_reached)
2765 {
2766 do
2767 {
2768 gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
2769
2770 if (*visited
2771 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2772 return cnt;
2773
2774 if (gimple_nop_p (def_stmt))
2775 {
2776 if (function_entry_reached)
2777 *function_entry_reached = true;
2778 return cnt;
2779 }
2780 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2781 {
2782 unsigned i;
2783 if (!*visited)
2784 *visited = BITMAP_ALLOC (NULL);
2785 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2786 cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
2787 walker, data, visited, 0,
2788 function_entry_reached);
2789 return cnt;
2790 }
2791
2792 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2793 cnt++;
2794 if ((!ref
2795 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
2796 && (*walker) (ref, vdef, data))
2797 return cnt;
2798
2799 vdef = gimple_vuse (def_stmt);
2800 }
2801 while (1);
2802 }
2803
2804 unsigned int
2805 walk_aliased_vdefs (ao_ref *ref, tree vdef,
2806 bool (*walker)(ao_ref *, tree, void *), void *data,
2807 bitmap *visited,
2808 bool *function_entry_reached)
2809 {
2810 bitmap local_visited = NULL;
2811 unsigned int ret;
2812
2813 timevar_push (TV_ALIAS_STMT_WALK);
2814
2815 if (function_entry_reached)
2816 *function_entry_reached = false;
2817
2818 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2819 visited ? visited : &local_visited, 0,
2820 function_entry_reached);
2821 if (local_visited)
2822 BITMAP_FREE (local_visited);
2823
2824 timevar_pop (TV_ALIAS_STMT_WALK);
2825
2826 return ret;
2827 }
2828
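/* Editorial example: a minimal sketch of asking whether anything on the
   virtual def chain may clobber REF; the callback name and the bool
   flag are illustrative, not GCC APIs:  */
#if 0
static bool
note_clobber (ao_ref *, tree, void *data)
{
  /* Returning true stops this fork of the walk at the first
     aliasing definition.  */
  *(bool *) data = true;
  return true;
}

  bool clobbered = false;
  walk_aliased_vdefs (&r, gimple_vuse (stmt), note_clobber,
		      &clobbered, NULL, NULL);
#endif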