/* Alias analysis for trees.
   Copyright (C) 2004-2020 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "dumpfile.h"
#include "tree-eh.h"
#include "tree-dfa.h"
#include "ipa-reference.h"
#include "varasm.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "attr-fnspec.h"
#include "errors.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */

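/* A minimal usage sketch (illustrative, not part of this file): a pass
   asking the oracle whether a statement STMT may interfere with the
   memory reference REF_TREE could do

     if (!stmt_may_clobber_ref_p (stmt, ref_tree)
	 && !ref_maybe_used_by_stmt_p (stmt, ref_tree))
       ;  here STMT neither writes nor reads *REF_TREE, so a load of
	  *REF_TREE may be moved across STMT

   STMT and REF_TREE are assumed names.  The ao_ref based variants allow
   the base and extent computation to be cached across queries.  */
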
static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);

/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
  unsigned HOST_WIDE_INT modref_use_may_alias;
  unsigned HOST_WIDE_INT modref_use_no_alias;
  unsigned HOST_WIDE_INT modref_clobber_may_alias;
  unsigned HOST_WIDE_INT modref_clobber_no_alias;
  unsigned HOST_WIDE_INT modref_tests;
  unsigned HOST_WIDE_INT modref_baseptr_tests;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  fprintf (s, "  nonoverlapping_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
  fprintf (s, "  nonoverlapping_refs_since_match_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" must overlaps, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias,
	   alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_may_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
  fprintf (s, "  aliasing_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.aliasing_component_refs_p_no_alias,
	   alias_stats.aliasing_component_refs_p_no_alias
	   + alias_stats.aliasing_component_refs_p_may_alias);
  dump_alias_stats_in_alias_c (s);
  fprintf (s, "\nModref stats:\n");
  fprintf (s, "  modref use: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.modref_use_no_alias,
	   alias_stats.modref_use_no_alias
	   + alias_stats.modref_use_may_alias);
  fprintf (s, "  modref clobber: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n"
	   "  " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
	   "  " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
	   alias_stats.modref_clobber_no_alias,
	   alias_stats.modref_clobber_no_alias
	   + alias_stats.modref_clobber_may_alias,
	   alias_stats.modref_tests,
	   ((double)alias_stats.modref_tests)
	   / (alias_stats.modref_clobber_no_alias
	      + alias_stats.modref_clobber_may_alias),
	   alias_stats.modref_baseptr_tests,
	   ((double)alias_stats.modref_baseptr_tests)
	   / (alias_stats.modref_clobber_no_alias
	      + alias_stats.modref_clobber_may_alias));
}


/* Return true if dereferencing PTR may alias with a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for the same pointer.
     In this case we still want to say the pointers alias, so shortcut
     that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to an SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that restrict-pointed-to
	 may be in fact obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}

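/* Example (illustrative): for

     static int a;
     int *p = &a;

   and an SSA pointer q whose points-to set provably does not contain a,
   ptrs_compare_unequal (&a, q) returns true and a comparison p != q may
   be folded to true.  The restrict and NULL caveats above are why all
   remaining cases conservatively return false.  */
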
/* Returns whether reference REF to BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage, do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}


/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}

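/* Usage sketch (illustrative): an ao_ref is typically initialized once
   and its derived data computed lazily by the accessors below:

     ao_ref r;
     ao_ref_init (&r, ref_tree);
     tree base = ao_ref_base (&r);
     alias_set_type set = ao_ref_alias_set (&r);

   The first accessor call fills in the cached fields (base, offset,
   size, alias sets); later calls reuse them.  REF_TREE is an assumed
   name for some memory reference tree.  */
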
/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  if (!ref->ref)
    return 0;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation *REF from a gimple pointer
   PTR and a range specified by OFFSET, SIZE and MAX_SIZE, which are
   meaningful only when RANGE_KNOWN is set.

   The access is assumed to be only to or after the pointer target adjusted
   by the offset, never before it (even in the case RANGE_KNOWN is
   false).  */

static void
ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
				bool range_known,
				poly_int64 offset,
				poly_int64 size,
				poly_int64 max_size)
{
  poly_int64 t, extra_offset = 0;

  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  range_known = false;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset + offset;
  if (range_known)
    {
      ref->max_size = max_size;
      ref->size = size;
    }
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 size_hwi;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    {
      size_hwi = size_hwi * BITS_PER_UNIT;
      ao_ref_init_from_ptr_and_range (ref, ptr, true, 0, size_hwi, size_hwi);
    }
  else
    ao_ref_init_from_ptr_and_range (ref, ptr, false, 0, -1, -1);
}

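/* For example (illustrative), the destination of a call
   memset (p, 0, 16) can be modelled as

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, p,
				    build_int_cst (size_type_node, 16));

   yielding a 128-bit access starting at *p whose ref_alias_set and
   base_alias_set are 0, i.e. conflicting with any alias set.  */
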
/* S1 and S2 are TYPE_SIZE or DECL_SIZE.  Compare them:
   Return -1 if S1 < S2
   Return 1 if S1 > S2
   Return 0 if equal or incomparable.  */

static int
compare_sizes (tree s1, tree s2)
{
  if (!s1 || !s2)
    return 0;

  poly_uint64 size1;
  poly_uint64 size2;

  if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
    return 0;
  if (known_lt (size1, size2))
    return -1;
  if (known_lt (size2, size1))
    return 1;
  return 0;
}

/* Compare TYPE1 and TYPE2 by their size.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if types are of equal sizes or we cannot compare them.  */

static int
compare_type_sizes (tree type1, tree type2)
{
  /* Be conservative for arrays and vectors.  We want to support partial
     overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c.  */
  while (TREE_CODE (type1) == ARRAY_TYPE
	 || TREE_CODE (type1) == VECTOR_TYPE)
    type1 = TREE_TYPE (type1);
  while (TREE_CODE (type2) == ARRAY_TYPE
	 || TREE_CODE (type2) == VECTOR_TYPE)
    type2 = TREE_TYPE (type2);
  return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
}

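/* For instance (illustrative): compare_type_sizes (int[3], int) strips
   the array layer and compares int with int, returning 0; this
   deliberately permits partial overlap of int[3] and int[5].  Comparing
   a 32-bit int with a 64-bit double returns -1.  */
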
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* Handle the most common case first.  */
  if (type1 == type2)
    return 1;

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  alias_set_type set1 = get_alias_set (type1);
  alias_set_type set2 = get_alias_set (type2);
  if (set1 == set2)
    return -1;

  /* Pointers to void are considered compatible with all other pointers,
     so for two pointers see what the alias set resolution thinks.  */
  if (POINTER_TYPE_P (type1)
      && POINTER_TYPE_P (type2)
      && alias_sets_conflict_p (set1, set2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}

/* Return true if TYPE is a composite type (i.e. we may apply one of the
   handled components to it).  */

static bool
type_has_components_p (tree type)
{
  return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
	 || TREE_CODE (type) == COMPLEX_TYPE;
}

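/* E.g. (illustrative): structures, unions, arrays, vectors and complex
   types are composite,

     struct S { int i; };   type_has_components_p -> true
     int v[4];              true
     _Complex double c;     true
     int i;                 false; no access path continues past a scalar.  */
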
/* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
   respectively, either point to the same address or are completely
   disjoint.  If PARTIAL_OVERLAP is true, assume that the outermost arrays
   may just partly overlap.

   Try to disambiguate using the access path starting from the match
   and return false if there is no conflict.

   Helper for aliasing_component_refs_p.  */

static bool
aliasing_matching_component_refs_p (tree match1, tree ref1,
				    poly_int64 offset1, poly_int64 max_size1,
				    tree match2, tree ref2,
				    poly_int64 offset2, poly_int64 max_size2,
				    bool partial_overlap)
{
  poly_int64 offadj, sztmp, msztmp;
  bool reverse;

  if (!partial_overlap)
    {
      get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	{
	  ++alias_stats.aliasing_component_refs_p_no_alias;
	  return false;
	}
    }

  int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
					       partial_overlap);
  if (cmp == 1
      || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
    {
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }
  ++alias_stats.aliasing_component_refs_p_may_alias;
  return true;
}

/* Return true if REF is a reference to a zero-sized trailing array, i.e.
     struct foo {int bar; int array[0];} *fooptr;
     fooptr->array.  */

static bool
component_ref_to_zero_sized_trailing_array_p (tree ref)
{
  return (TREE_CODE (ref) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
	      || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
	  && array_at_struct_end_p (ref));
}

/* Worker for aliasing_component_refs_p.  Most parameters match the
   parameters of aliasing_component_refs_p.

   Walk the access path REF2 and try to find a type matching TYPE1
   (which is the start of the possibly aliasing access path REF1).
   If a match is found, try to disambiguate.

   Return 0 for successful disambiguation.
   Return 1 if a match was found but disambiguation failed.
   Return -1 if there is no match.
   In this case MAYBE_MATCH is set to 0 if there is no type matching TYPE1
   in the access path REF2 and -1 if we are not sure.  */

static int
aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
			      poly_int64 offset1, poly_int64 max_size1,
			      tree end_struct_ref1,
			      tree ref2, tree base2,
			      poly_int64 offset2, poly_int64 max_size2,
			      bool *maybe_match)
{
  tree ref = ref2;
  int same_p = 0;

  while (true)
    {
      /* We walk from the inner type to the outer types.  If the type we
	 see is already too large to be part of TYPE1, terminate the
	 search.  */
      int cmp = compare_type_sizes (type1, TREE_TYPE (ref));

      if (cmp < 0
	  && (!end_struct_ref1
	      || compare_type_sizes (TREE_TYPE (end_struct_ref1),
				     TREE_TYPE (ref)) < 0))
	break;
      /* If the types may be of the same size, see if we can decide about
	 their equality.  */
      if (cmp == 0)
	{
	  same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
	  if (same_p == 1)
	    break;
	  /* In case we can't decide whether the types are the same, try
	     to continue looking for an exact match.
	     Remember however that we possibly saw a match so we can
	     bypass the access path continuation tests we do later.  */
	  if (same_p == -1)
	    *maybe_match = true;
	}
      if (!handled_component_p (ref))
	break;
      ref = TREE_OPERAND (ref, 0);
    }
  if (same_p == 1)
    {
      bool partial_overlap = false;

      /* We assume that arrays can overlap by a multiple of their element
	 size as tested in gcc.dg/torture/alias-2.c.
	 This partial overlap happens only when both arrays are bases of
	 the access and not contained within another component ref.
	 To be safe we also assume partial overlap for VLAs.  */
      if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (base1))
	      || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
	      || ref == base2))
	{
	  /* Setting maybe_match to true triggers the
	     nonoverlapping_component_refs_p test later that still may do
	     useful disambiguation.  */
	  *maybe_match = true;
	  partial_overlap = true;
	}
      return aliasing_matching_component_refs_p (base1, ref1,
						 offset1, max_size1,
						 ref, ref2,
						 offset2, max_size2,
						 partial_overlap);
    }
  return -1;
}

/* Consider access path1 base1....ref1 and access path2 base2...ref2.
   Return true if they can be composed to a single access path
   base1...ref1...base2...ref2.

   REF_TYPE1 is the type of REF1.  END_STRUCT_PAST_END1 is true if there is
   a trailing array access after REF1 in the non-TBAA part of the access.
   REF1_ALIAS_SET is the alias set of REF1.

   BASE_TYPE2 is the type of BASE2.  END_STRUCT_REF2 is non-NULL if there is
   a trailing array access in the TBAA part of access path2.
   BASE2_ALIAS_SET is the alias set of BASE2.  */

bool
access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
			    alias_set_type ref1_alias_set,
			    tree base_type2, tree end_struct_ref2,
			    alias_set_type base2_alias_set)
{
  /* Access paths cannot continue past types with no components.  */
  if (!type_has_components_p (ref_type1))
    return false;

  /* If the first access path ends with a type too small to hold the base
     of the second access path, the paths typically cannot continue.

     Punt if END_STRUCT_PAST_END1 is true.  We want to support arbitrary
     type punning past the first COMPONENT_REF to a union because redundant
     store elimination depends on this, see PR92152.  For this reason we
     cannot check the size of the reference because the types may partially
     overlap.  */
  if (!end_struct_past_end1)
    {
      if (compare_type_sizes (ref_type1, base_type2) < 0)
	return false;
      /* If path2 contains a trailing array access, we can strengthen the
	 check and verify that the size of the element of the trailing
	 array fits too.  In fact we could check offset + type_size, but
	 we do not track offsets and this is quite a corner case.  */
      if (end_struct_ref2
	  && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
	return false;
    }
  return (base2_alias_set == ref1_alias_set
	  || alias_set_subset_of (base2_alias_set, ref1_alias_set));
}

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2)
{
  /* If one reference is a component reference through pointers, try to
     find a common base and apply offset-based disambiguation.  This
     handles for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  bool maybe_match = false;
  tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
  bool end_struct_past_end1 = false;
  bool end_struct_past_end2 = false;

  /* Choose the bases and base types to search for.
     The access path is as follows:
       base....end_of_tbaa_ref...actual_ref
     At one place in the access path there may be a reference to a
     zero-sized or trailing array.

     We generally discard the segment after end_of_tbaa_ref, however
     we need to be careful in case it contains a zero-sized or trailing
     array.  These may happen after a reference to a union and in this
     case we need to not disambiguate type punning scenarios.

     We set:
	base1 to point to base

	ref1 to point to end_of_tbaa_ref

	end_struct_ref1 to point to the trailing reference (if it exists
	in the range base....end_of_tbaa_ref)

	end_struct_past_end1 is true if this trailing reference occurs in
	end_of_tbaa_ref...actual_ref.  */
  base1 = ref1;
  while (handled_component_p (base1))
    {
      /* Generally access paths are monotone in the size of the object.
	 The exception is trailing arrays of structures.  I.e.
	   struct a {int array[0];};
	 or
	   struct a {int array1[0]; int array[];};
	 Such a struct has size 0 but accesses to a.array may have
	 non-zero size.  In this case the size of TREE_TYPE (base1) is
	 smaller than the size of TREE_TYPE (TREE_OPERAND (base1, 0)).

	 Because we compare sizes of arrays just by the sizes of their
	 elements, we only need to care about zero-sized array fields
	 here.  */
      if (component_ref_to_zero_sized_trailing_array_p (base1))
	{
	  gcc_checking_assert (!end_struct_ref1);
	  end_struct_ref1 = base1;
	}
      if (ends_tbaa_access_path_p (base1))
	{
	  ref1 = TREE_OPERAND (base1, 0);
	  if (end_struct_ref1)
	    {
	      end_struct_past_end1 = true;
	      end_struct_ref1 = NULL;
	    }
	}
      base1 = TREE_OPERAND (base1, 0);
    }
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    {
      if (component_ref_to_zero_sized_trailing_array_p (base2))
	{
	  gcc_checking_assert (!end_struct_ref2);
	  end_struct_ref2 = base2;
	}
      if (ends_tbaa_access_path_p (base2))
	{
	  ref2 = TREE_OPERAND (base2, 0);
	  if (end_struct_ref2)
	    {
	      end_struct_past_end2 = true;
	      end_struct_ref2 = NULL;
	    }
	}
      base2 = TREE_OPERAND (base2, 0);
    }
  type2 = TREE_TYPE (base2);

  /* Now search for type1 in the access path of ref2.  This would be a
     common base for doing offset-based disambiguation on.  This however
     only makes sense if type2 is big enough to hold type1.  */
  int cmp_outer = compare_type_sizes (type2, type1);

  /* If type2 is big enough to contain type1, walk its access path.
     We also need to take care of arrays at the end of structs that may
     extend beyond the end of the structure.  If this occurs in the TBAA
     part of the access path, we need to consider the enlarged type as
     well.  */
  if (cmp_outer >= 0
      || (end_struct_ref2
	  && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
    {
      int res = aliasing_component_refs_walk (ref1, type1, base1,
					      offset1, max_size1,
					      end_struct_ref1,
					      ref2, base2, offset2, max_size2,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* If we didn't find a common base, try the other way around.  */
  if (cmp_outer <= 0
      || (end_struct_ref1
	  && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
    {
      int res = aliasing_component_refs_walk (ref2, type2, base2,
					      offset2, max_size2,
					      end_struct_ref2,
					      ref1, base1, offset1, max_size1,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* In the following code we make an assumption that the types in the
     access paths do not overlap and thus accesses alias only if one path
     can be a continuation of another.  If we were not able to decide
     about equivalence, we need to give up.  */
  if (maybe_match)
    {
      if (!nonoverlapping_component_refs_p (ref1, ref2))
	{
	  ++alias_stats.aliasing_component_refs_p_may_alias;
	  return true;
	}
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }

  if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
				  ref1_alias_set,
				  type2, end_struct_ref2,
				  base2_alias_set)
      || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
				     ref2_alias_set,
				     type1, end_struct_ref1,
				     base1_alias_set))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  ++alias_stats.aliasing_component_refs_p_no_alias;
  return false;
}

/* FIELD1 and FIELD2 are two fields of component refs.  We assume
   that the bases of both component refs are either equivalent or
   nonoverlapping.  We do not assume that the containers of FIELD1
   and FIELD2 are of the same type or size.

   Return 0 if, in case the base addresses of the component refs are
   the same, FIELD1 and FIELD2 have the same address too.  Note that
   FIELD1 and FIELD2 need not be of the same type or size.

   Return 1 if FIELD1 and FIELD2 are non-overlapping.

   Return -1 otherwise.

   The main difference between 0 and -1 is to let
   nonoverlapping_refs_since_match_p discover the semantically
   equivalent part of the access path.

   Note that this function is used even with -fno-strict-aliasing
   and makes use of no TBAA assumptions.  */

static int
nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
{
  /* If both fields are of the same type, we could save the hard work of
     comparing offsets.  */
  tree type1 = DECL_CONTEXT (field1);
  tree type2 = DECL_CONTEXT (field2);

  if (TREE_CODE (type1) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field1))
    field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
  if (TREE_CODE (type2) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field2))
    field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);

  /* ??? Bitfields can overlap at RTL level so punt on them.
     FIXME: RTL expansion should be fixed by adjusting the access path
     when producing MEM_ATTRs for MEMs which are wider than
     the bitfields similarly as done in set_mem_attrs_minus_bitpos.  */
  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
    return -1;

  /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE.  */
  if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
    return field1 != field2;

  /* In the common case the offsets and bit offsets will be the same.
     However if frontends do not agree on the alignment, they may be
     different even if they actually represent the same address.
     Try the common case first and if that fails calculate the
     actual bit offset.  */
  if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
			  DECL_FIELD_OFFSET (field2))
      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
			     DECL_FIELD_BIT_OFFSET (field2)))
    return 0;

  /* Note that it may be possible to use component_ref_field_offset,
     which would provide offsets as trees.  However constructing and
     folding trees is expensive and does not seem to be worth the compile
     time cost.  */

  poly_uint64 offset1, offset2;
  poly_uint64 bit_offset1, bit_offset2;

  if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
      && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
    {
      offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
      offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;

      if (known_eq (offset1, offset2))
	return 0;

      poly_uint64 size1, size2;

      if (poly_int_tree_p (DECL_SIZE (field1), &size1)
	  && poly_int_tree_p (DECL_SIZE (field2), &size2)
	  && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
	return 1;
    }
  /* Resort to slower overlap checking by looking for matching types in
     the middle of the access path.  */
  return -1;
}

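/* Example (illustrative) for the helper above: with

     struct S { int a; int b; } x, y;

   the FIELD_DECLs for a and b belong to the same RECORD_TYPE and differ,
   so comparing x.a against y.b returns 1 (disjoint whenever the bases
   are), while comparing x.a against y.a returns 0 (same offset from the
   base address).  */
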
/* Return the low bound of an array ref.  Do not produce new trees and
   thus do not care about the particular type of integer constants and
   placeholder exprs.  */

static tree
cheap_array_ref_low_bound (tree ref)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));

  /* Avoid the expensive array_ref_low_bound.
     The low bound is either stored in operand 2, is the TYPE_MIN_VALUE
     of the domain type, or is zero.  */
  if (TREE_OPERAND (ref, 2))
    return TREE_OPERAND (ref, 2);
  else if (domain_type && TYPE_MIN_VALUE (domain_type))
    return TYPE_MIN_VALUE (domain_type);
  else
    return integer_zero_node;
}

/* REF1 and REF2 are ARRAY_REFs with either the same base address or which
   are completely disjoint.

   Return 1 if the refs are non-overlapping.
   Return 0 if they are possibly overlapping but if so the overlap again
   starts on the same address.
   Return -1 otherwise.  */

int
nonoverlapping_array_refs_p (tree ref1, tree ref2)
{
  tree index1 = TREE_OPERAND (ref1, 1);
  tree index2 = TREE_OPERAND (ref2, 1);
  tree low_bound1 = cheap_array_ref_low_bound (ref1);
  tree low_bound2 = cheap_array_ref_low_bound (ref2);

  /* Handle zero offsets first: we do not need to match type size in this
     case.  */
  if (operand_equal_p (index1, low_bound1, 0)
      && operand_equal_p (index2, low_bound2, 0))
    return 0;

  /* If the type sizes are different, give up.

     Avoid the expensive array_ref_element_size.
     If operand 3 is present it denotes the size in alignment units.
     Otherwise the size is TYPE_SIZE of the element type.
     Handle only the common cases where the types are of the same "kind".  */
  if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
    return -1;

  tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
  tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));

  if (TREE_OPERAND (ref1, 3))
    {
      if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
	  || !operand_equal_p (TREE_OPERAND (ref1, 3),
			       TREE_OPERAND (ref2, 3), 0))
	return -1;
    }
  else
    {
      if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
			    TYPE_SIZE_UNIT (elmt_type2), 0))
	return -1;
    }

  /* Since we know that the type sizes are the same, there is no need to
     return -1 after this point.  Partial overlap cannot be introduced.  */

  /* We may need to fold trees in this case.
     TODO: Handle the integer constant case at least.  */
  if (!operand_equal_p (low_bound1, low_bound2, 0))
    return 0;

  if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
    {
      if (tree_int_cst_equal (index1, index2))
	return 0;
      return 1;
    }
  /* TODO: We can use VRP to further disambiguate here.  */
  return 0;
}

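/* Example (illustrative): for int a[10], comparing a[1] with a[2]
   returns 1 (constant indexes differ), while a[i] with a[j] returns 0,
   since once the element sizes match the two references either overlap
   exactly or not at all.  */
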
/* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
   MATCH2 either point to the same address or are disjoint.
   MATCH1 and MATCH2 are assumed to be refs in the access paths of REF1
   and REF2 respectively, or NULL in the case we established the
   equivalence of the bases.  If PARTIAL_OVERLAP is true, assume that the
   toplevel arrays may actually overlap by an exact multiple of their
   element size.

   This test works by matching the initial segments of the access paths
   and does not rely on TBAA; thus it is safe for !flag_strict_aliasing
   if the match was determined without use of the TBAA oracle.

   Return 1 if we can determine that the component references REF1 and
   REF2, that are within a common DECL, cannot overlap.

   Return 0 if the paths are the same and thus there is nothing more to
   disambiguate (i.e. there is a must-alias, assuming there is a
   must-alias between MATCH1 and MATCH2).

   Return -1 if we cannot determine 0 or 1; this happens when
   non-matching types were met in the path.  In this case it may make
   sense to continue with other disambiguation oracles.  */

static int
nonoverlapping_refs_since_match_p (tree match1, tree ref1,
				   tree match2, tree ref2,
				   bool partial_overlap)
{
  int ntbaa1 = 0, ntbaa2 = 0;
  /* Early return if there are no references to match; we do not need
     to walk the access paths.

     Do not consider this as may-alias for stats - it is more useful
     to have information how many disambiguations happened provided that
     the query was meaningful.  */

  if (match1 == ref1 || !handled_component_p (ref1)
      || match2 == ref2 || !handled_component_p (ref2))
    return -1;

  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1) && ref1 != match1)
    {
      /* We use TBAA only to re-synchronize after mismatched refs.  So we
	 do not need to truncate the access path after the TBAA part
	 ends.  */
      if (ends_tbaa_access_path_p (ref1))
	ntbaa1 = 0;
      else
	ntbaa1++;
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2) && ref2 != match2)
    {
      if (ends_tbaa_access_path_p (ref2))
	ntbaa2 = 0;
      else
	ntbaa2++;
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }

  if (!flag_strict_aliasing)
    {
      ntbaa1 = 0;
      ntbaa2 = 0;
    }

  bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
  bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;

  /* If only one of the access paths starts with a MEM_REF, check that
     the offset is 0 so the address stays the same after stripping it.
     TODO: In this case we may walk the other access path until we get
     the same offset.

     If both start with a MEM_REF, the offsets have to be the same.  */
  if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
      || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
      || (mem_ref1 && mem_ref2
	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
				  TREE_OPERAND (ref2, 1))))
    {
      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
      return -1;
    }

  /* TARGET_MEM_REFs are never wrapped in handled components, so we do
     not need to handle them here at all.  */
  gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
		       && TREE_CODE (ref2) != TARGET_MEM_REF);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      /* Track if we have seen an unmatched ref with a non-zero offset.
	 In this case we must look for partial overlaps.  */
      bool seen_unmatched_ref_p = false;

      /* First match ARRAY_REFs and try to disambiguate.  */
      if (!component_refs1.is_empty ()
	  && !component_refs2.is_empty ())
	{
	  unsigned int narray_refs1=0, narray_refs2=0;

	  /* We generally assume that both access paths start with the
	     same sequence of refs.  However if the numbers of array refs
	     are not in sync, try to recover and pop elts until the
	     numbers match.  This helps the case where one access path
	     starts with an array and the other with an element.  */
	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
	       narray_refs1++)
	    if (TREE_CODE (component_refs1 [component_refs1.length()
					    - 1 - narray_refs1]) != ARRAY_REF)
	      break;

	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
	       narray_refs2++)
	    if (TREE_CODE (component_refs2 [component_refs2.length()
					    - 1 - narray_refs2]) != ARRAY_REF)
	      break;
	  for (; narray_refs1 > narray_refs2; narray_refs1--)
	    {
	      ref1 = component_refs1.pop ();
	      ntbaa1--;

	      /* If the index is non-zero we need to check whether the
		 reference does not break the main invariant that bases
		 are either disjoint or equal.  Consider the example:

		   unsigned char out[][1];
		   out[1]="a";
		   out[i][0];

		 Here the bases out and out are the same, but after
		 removing the [i] index, this invariant no longer holds,
		 because out[i] points to the middle of the array out.

		 TODO: If the size of the type of the skipped reference
		 is an integer multiple of the size of the type of the
		 other reference, this invariant can be verified, but
		 even then it is not completely safe with
		 !flag_strict_aliasing if the other reference contains
		 unbounded array accesses.
		 See   */

	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
				    cheap_array_ref_low_bound (ref1), 0))
		return 0;
	    }
	  for (; narray_refs2 > narray_refs1; narray_refs2--)
	    {
	      ref2 = component_refs2.pop ();
	      ntbaa2--;
	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
				    cheap_array_ref_low_bound (ref2), 0))
		return 0;
	    }
	  /* Try to disambiguate the matched arrays.  */
	  for (unsigned int i = 0; i < narray_refs1; i++)
	    {
	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
						     component_refs2.pop ());
	      ntbaa1--;
	      ntbaa2--;
	      if (cmp == 1 && !partial_overlap)
		{
		  ++alias_stats
		    .nonoverlapping_refs_since_match_p_no_alias;
		  return 1;
		}
	      if (cmp == -1)
		{
		  seen_unmatched_ref_p = true;
		  /* We cannot maintain the invariant that bases are
		     either the same or completely disjoint.  However we
		     can still recover from type-based alias analysis if
		     we reach references of the same size.  We do not
		     attempt to match array sizes, so just finish the
		     array walking and look for component refs.  */
		  if (ntbaa1 < 0 || ntbaa2 < 0)
		    {
		      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
		      return -1;
		    }
		  for (i++; i < narray_refs1; i++)
		    {
		      component_refs1.pop ();
		      component_refs2.pop ();
		      ntbaa1--;
		      ntbaa2--;
		    }
		  break;
		}
	      partial_overlap = false;
	    }
	}

1663 /* Next look for component_refs. */
1664 do
1665 {
1666 if (component_refs1.is_empty ())
1667 {
1668 ++alias_stats
1669 .nonoverlapping_refs_since_match_p_must_overlap;
1670 return 0;
1671 }
1672 ref1 = component_refs1.pop ();
1673 ntbaa1--;
1674 if (TREE_CODE (ref1) != COMPONENT_REF)
1675 {
1676 seen_unmatched_ref_p = true;
1677 if (ntbaa1 < 0 || ntbaa2 < 0)
1678 {
1679 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1680 return -1;
1681 }
1682 }
1683 }
1684 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1685
1686 do
1687 {
1688 if (component_refs2.is_empty ())
1689 {
1690 ++alias_stats
1691 .nonoverlapping_refs_since_match_p_must_overlap;
1692 return 0;
1693 }
1694 ref2 = component_refs2.pop ();
1695 ntbaa2--;
1696 if (TREE_CODE (ref2) != COMPONENT_REF)
1697 {
1698 if (ntbaa1 < 0 || ntbaa2 < 0)
1699 {
1700 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1701 return -1;
1702 }
1703 seen_unmatched_ref_p = true;
1704 }
1705 }
1706 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1707
1708 /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
1709 earlier. */
1710 gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
1711 && TREE_CODE (ref2) == COMPONENT_REF);
1712
1713 tree field1 = TREE_OPERAND (ref1, 1);
1714 tree field2 = TREE_OPERAND (ref2, 1);
1715
1716 /* ??? We cannot simply use the type of operand #0 of the refs here
1717 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1718 for common blocks instead of using unions like everyone else. */
1719 tree type1 = DECL_CONTEXT (field1);
1720 tree type2 = DECL_CONTEXT (field2);
1721
1722 partial_overlap = false;
1723
1724 /* If we skipped array refs on types of different sizes, we can
1725 no longer be sure that there are no partial overlaps. */
1726 if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
1727 && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
1728 {
1729 ++alias_stats
1730 .nonoverlapping_refs_since_match_p_may_alias;
1731 return -1;
1732 }
1733
1734 int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
1735 if (cmp == -1)
1736 {
1737 ++alias_stats
1738 .nonoverlapping_refs_since_match_p_may_alias;
1739 return -1;
1740 }
1741 else if (cmp == 1)
1742 {
1743 ++alias_stats
1744 .nonoverlapping_refs_since_match_p_no_alias;
1745 return 1;
1746 }
1747 }
1748
1749 ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap;
1750 return 0;
1751 }
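/* A hedged sketch of how the tri-state result above is typically
   consumed (hypothetical caller; MATCH1 and MATCH2 are the already
   matched bases of REF1 and REF2, as at the call sites further down):

     int cmp = nonoverlapping_refs_since_match_p (match1, ref1,
						   match2, ref2, false);
     if (cmp == 1)
       return false;	// accesses provably do not overlap
     if (cmp == 0)
       return true;	// overlap was not ruled out
     // cmp == -1: paths were not comparable; fall back to other
     // disambiguators such as nonoverlapping_component_refs_p.  */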
1752
1753 /* Return a TYPE_UID which can be used to match record types we consider
1754 the same for TBAA purposes. */
1755
1756 static inline int
1757 ncr_type_uid (const_tree field)
1758 {
1759 /* ??? We cannot simply use the type of operand #0 of the refs here
1760 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1761 for common blocks instead of using unions like everyone else. */
1762 tree type = DECL_FIELD_CONTEXT (field);
1763 /* With LTO, types considered same_type_for_tbaa_p
1764 from different translation units may not have the same
1765 main variant. They do, however, have the same TYPE_CANONICAL. */
1766 if (TYPE_CANONICAL (type))
1767 return TYPE_UID (TYPE_CANONICAL (type));
1768 return TYPE_UID (type);
1769 }
1770
1771 /* qsort compare function to sort FIELD_DECLs by their
1772 DECL_FIELD_CONTEXT TYPE_UID. */
1773
1774 static inline int
1775 ncr_compar (const void *field1_, const void *field2_)
1776 {
1777 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1778 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1779 unsigned int uid1 = ncr_type_uid (field1);
1780 unsigned int uid2 = ncr_type_uid (field2);
1781
1782 if (uid1 < uid2)
1783 return -1;
1784 else if (uid1 > uid2)
1785 return 1;
1786 return 0;
1787 }
1788
1789 /* Return true if we can determine that the fields referenced cannot
1790 overlap for any pair of objects. This relies on TBAA. */
1791
1792 static bool
1793 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1794 {
1795 /* Early return if we have nothing to do.
1796
1797 Do not count this as may-alias in the stats - it is more useful
1798 to know how many disambiguations happened provided that
1799 the query was meaningful. */
1800 if (!flag_strict_aliasing
1801 || !x || !y
1802 || !handled_component_p (x)
1803 || !handled_component_p (y))
1804 return false;
1805
1806 auto_vec<const_tree, 16> fieldsx;
1807 while (handled_component_p (x))
1808 {
1809 if (TREE_CODE (x) == COMPONENT_REF)
1810 {
1811 tree field = TREE_OPERAND (x, 1);
1812 tree type = DECL_FIELD_CONTEXT (field);
1813 if (TREE_CODE (type) == RECORD_TYPE)
1814 fieldsx.safe_push (field);
1815 }
1816 else if (ends_tbaa_access_path_p (x))
1817 fieldsx.truncate (0);
1818 x = TREE_OPERAND (x, 0);
1819 }
1820 if (fieldsx.length () == 0)
1821 return false;
1822 auto_vec<const_tree, 16> fieldsy;
1823 while (handled_component_p (y))
1824 {
1825 if (TREE_CODE (y) == COMPONENT_REF)
1826 {
1827 tree field = TREE_OPERAND (y, 1);
1828 tree type = DECL_FIELD_CONTEXT (field);
1829 if (TREE_CODE (type) == RECORD_TYPE)
1830 fieldsy.safe_push (field);
1831 }
1832 else if (ends_tbaa_access_path_p (y))
1833 fieldsy.truncate (0);
1834 y = TREE_OPERAND (y, 0);
1835 }
1836 if (fieldsy.length () == 0)
1837 {
1838 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1839 return false;
1840 }
1841
1842 /* Most common case first. */
1843 if (fieldsx.length () == 1
1844 && fieldsy.length () == 1)
1845 {
1846 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1847 DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1848 && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
1849 {
1850 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1851 return true;
1852 }
1853 else
1854 {
1855 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1856 return false;
1857 }
1858 }
1859
1860 if (fieldsx.length () == 2)
1861 {
1862 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1863 std::swap (fieldsx[0], fieldsx[1]);
1864 }
1865 else
1866 fieldsx.qsort (ncr_compar);
1867
1868 if (fieldsy.length () == 2)
1869 {
1870 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1871 std::swap (fieldsy[0], fieldsy[1]);
1872 }
1873 else
1874 fieldsy.qsort (ncr_compar);
1875
1876 unsigned i = 0, j = 0;
1877 do
1878 {
1879 const_tree fieldx = fieldsx[i];
1880 const_tree fieldy = fieldsy[j];
1881
1882 /* We're left with accessing different fields of a structure,
1883 no possible overlap. */
1884 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
1885 DECL_FIELD_CONTEXT (fieldy)) == 1
1886 && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
1887 {
1888 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1889 return true;
1890 }
1891
1892 if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
1893 {
1894 i++;
1895 if (i == fieldsx.length ())
1896 break;
1897 }
1898 else
1899 {
1900 j++;
1901 if (j == fieldsy.length ())
1902 break;
1903 }
1904 }
1905 while (1);
1906
1907 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1908 return false;
1909 }
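/* Illustrative source-level case disambiguated above (hedged,
   hypothetical types):

     struct S { int x; int y; } *p, *q;
     ... p->x ...   vs   ... q->y ...

   Even if P and Q may point to the same object, X and Y are distinct
   fields of the same record type, so under strict aliasing the two
   accesses cannot overlap.  */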
1910
1911
1912 /* Return true if two memory references based on the variables BASE1
1913 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1914 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1915 if non-NULL are the complete memory reference trees. */
1916
1917 static bool
1918 decl_refs_may_alias_p (tree ref1, tree base1,
1919 poly_int64 offset1, poly_int64 max_size1,
1920 poly_int64 size1,
1921 tree ref2, tree base2,
1922 poly_int64 offset2, poly_int64 max_size2,
1923 poly_int64 size2)
1924 {
1925 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
1926
1927 /* If both references are based on different variables, they cannot alias. */
1928 if (compare_base_decls (base1, base2) == 0)
1929 return false;
1930
1931 /* If both references are based on the same variable, they cannot alias if
1932 the accesses do not overlap. */
1933 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1934 return false;
1935
1936 /* If there is must alias, there is no use disambiguating further. */
1937 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
1938 return true;
1939
1940 /* For components with variable position, the above test isn't sufficient,
1941 so we disambiguate component references manually. */
1942 if (ref1 && ref2
1943 && handled_component_p (ref1) && handled_component_p (ref2)
1944 && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
1945 return false;
1946
1947 return true;
1948 }
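/* For example (illustrative):

     int a, b;

   An access based on A never aliases one based on B since the decls
   compare unequal, and two accesses based on A alias only if their
   bit ranges [OFFSET, OFFSET + MAX_SIZE) overlap.  */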
1949
1950 /* Return true if an indirect reference based on *PTR1 constrained
1951 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1952 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
1953 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1954 in which case they are computed on-demand. REF1 and REF2
1955 if non-NULL are the complete memory reference trees. */
1956
1957 static bool
1958 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1959 poly_int64 offset1, poly_int64 max_size1,
1960 poly_int64 size1,
1961 alias_set_type ref1_alias_set,
1962 alias_set_type base1_alias_set,
1963 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1964 poly_int64 offset2, poly_int64 max_size2,
1965 poly_int64 size2,
1966 alias_set_type ref2_alias_set,
1967 alias_set_type base2_alias_set, bool tbaa_p)
1968 {
1969 tree ptr1;
1970 tree ptrtype1, dbase2;
1971
1972 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1973 || TREE_CODE (base1) == TARGET_MEM_REF)
1974 && DECL_P (base2));
1975
1976 ptr1 = TREE_OPERAND (base1, 0);
1977 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
1978
1979 /* If only one reference is based on a variable, they cannot alias if
1980 the pointer access is beyond the extent of the variable access.
1981 (the pointer base cannot validly point to an offset before the
1982 start of the variable).
1983 ??? IVOPTs creates bases that do not honor this restriction,
1984 so do not apply this optimization for TARGET_MEM_REFs. */
1985 if (TREE_CODE (base1) != TARGET_MEM_REF
1986 && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
1987 return false;
1988 /* They also cannot alias if the pointer may not point to the decl. */
1989 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
1990 return false;
1991
1992 /* Disambiguations that rely on strict aliasing rules follow. */
1993 if (!flag_strict_aliasing || !tbaa_p)
1994 return true;
1995
1996 /* If the alias set for a pointer access is zero all bets are off. */
1997 if (base1_alias_set == 0 || base2_alias_set == 0)
1998 return true;
1999
2000 /* When we are trying to disambiguate an access with a pointer dereference
2001 as base versus one with a decl as base we can use both the size
2002 of the decl and its dynamic type for extra disambiguation.
2003 ??? We do not know anything about the dynamic type of the decl
2004 other than that its alias-set contains base2_alias_set as a subset
2005 which does not help us here. */
2006 /* As we know nothing useful about the dynamic type of the decl just
2007 use the usual conflict check rather than a subset test.
2008 ??? We could introduce -fvery-strict-aliasing when the language
2009 does not allow decls to have a dynamic type that differs from their
2010 static type. Then we can check
2011 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
2012 if (base1_alias_set != base2_alias_set
2013 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2014 return false;
2015
2016 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2017
2018 /* If the size of the access relevant for TBAA through the pointer
2019 is bigger than the size of the decl we can't possibly access the
2020 decl via that pointer. */
2021 if (/* ??? This in turn may run afoul when a decl of type T which is
2022 a member of union type U is accessed through a pointer to
2023 type U and sizeof T is smaller than sizeof U. */
2024 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
2025 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
2026 && compare_sizes (DECL_SIZE (base2),
2027 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
2028 return false;
2029
2030 if (!ref2)
2031 return true;
2032
2033 /* If the decl is accessed via a MEM_REF, reconstruct the base
2034 we can use for TBAA and an appropriately adjusted offset. */
2035 dbase2 = ref2;
2036 while (handled_component_p (dbase2))
2037 dbase2 = TREE_OPERAND (dbase2, 0);
2038 poly_int64 doffset1 = offset1;
2039 poly_offset_int doffset2 = offset2;
2040 if (TREE_CODE (dbase2) == MEM_REF
2041 || TREE_CODE (dbase2) == TARGET_MEM_REF)
2042 {
2043 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
2044 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
2045 /* If second reference is view-converted, give up now. */
2046 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
2047 return true;
2048 }
2049
2050 /* If first reference is view-converted, give up now. */
2051 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
2052 return true;
2053
2054 /* If both references are through the same type, they do not alias
2055 if the accesses do not overlap. This does extra disambiguation
2056 for mixed/pointer accesses but requires strict aliasing.
2057 For MEM_REFs we require that the component-ref offset we computed
2058 is relative to the start of the type which we ensure by
2059 comparing rvalue and access type and disregarding the constant
2060 pointer offset.
2061
2062 But avoid treating variable-length arrays as "objects"; instead assume they
2063 can overlap by an exact multiple of their element size.
2064 See gcc.dg/torture/alias-2.c. */
2065 if (((TREE_CODE (base1) != TARGET_MEM_REF
2066 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2067 && (TREE_CODE (dbase2) != TARGET_MEM_REF
2068 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
2069 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
2070 {
2071 bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2072 && (TYPE_SIZE (TREE_TYPE (base1))
2073 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2074 != INTEGER_CST));
2075 if (!partial_overlap
2076 && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
2077 return false;
2078 if (!ref1 || !ref2
2079 /* If there is must alias, there is no use disambiguating further. */
2080 || (!partial_overlap
2081 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2082 return true;
2083 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2084 partial_overlap);
2085 if (res == -1)
2086 return !nonoverlapping_component_refs_p (ref1, ref2);
2087 return !res;
2088 }
2089
2090 /* Do access-path based disambiguation. */
2091 if (ref1 && ref2
2092 && (handled_component_p (ref1) || handled_component_p (ref2)))
2093 return aliasing_component_refs_p (ref1,
2094 ref1_alias_set, base1_alias_set,
2095 offset1, max_size1,
2096 ref2,
2097 ref2_alias_set, base2_alias_set,
2098 offset2, max_size2);
2099
2100 return true;
2101 }
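/* Hedged example of the DECL_SIZE based disambiguation above:

     short s;
     long *p;
     ... *p ...   vs   ... s ...

   An access of type long through *P cannot validly touch the smaller
   decl S, so the references do not alias; unions are exempted since
   a union member may be accessed through a pointer to the union.  */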
2102
2103 /* Return true if two indirect references based on *PTR1
2104 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2105 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2106 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2107 in which case they are computed on-demand. REF1 and REF2
2108 if non-NULL are the complete memory reference trees. */
2109
2110 static bool
2111 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2112 poly_int64 offset1, poly_int64 max_size1,
2113 poly_int64 size1,
2114 alias_set_type ref1_alias_set,
2115 alias_set_type base1_alias_set,
2116 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2117 poly_int64 offset2, poly_int64 max_size2,
2118 poly_int64 size2,
2119 alias_set_type ref2_alias_set,
2120 alias_set_type base2_alias_set, bool tbaa_p)
2121 {
2122 tree ptr1;
2123 tree ptr2;
2124 tree ptrtype1, ptrtype2;
2125
2126 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2127 || TREE_CODE (base1) == TARGET_MEM_REF)
2128 && (TREE_CODE (base2) == MEM_REF
2129 || TREE_CODE (base2) == TARGET_MEM_REF));
2130
2131 ptr1 = TREE_OPERAND (base1, 0);
2132 ptr2 = TREE_OPERAND (base2, 0);
2133
2134 /* If both bases are based on pointers they cannot alias if they may not
2135 point to the same memory object or if they point to the same object
2136 and the accesses do not overlap. */
2137 if ((!cfun || gimple_in_ssa_p (cfun))
2138 && operand_equal_p (ptr1, ptr2, 0)
2139 && (((TREE_CODE (base1) != TARGET_MEM_REF
2140 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2141 && (TREE_CODE (base2) != TARGET_MEM_REF
2142 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2143 || (TREE_CODE (base1) == TARGET_MEM_REF
2144 && TREE_CODE (base2) == TARGET_MEM_REF
2145 && (TMR_STEP (base1) == TMR_STEP (base2)
2146 || (TMR_STEP (base1) && TMR_STEP (base2)
2147 && operand_equal_p (TMR_STEP (base1),
2148 TMR_STEP (base2), 0)))
2149 && (TMR_INDEX (base1) == TMR_INDEX (base2)
2150 || (TMR_INDEX (base1) && TMR_INDEX (base2)
2151 && operand_equal_p (TMR_INDEX (base1),
2152 TMR_INDEX (base2), 0)))
2153 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2154 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2155 && operand_equal_p (TMR_INDEX2 (base1),
2156 TMR_INDEX2 (base2), 0))))))
2157 {
2158 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2159 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2160 if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
2161 offset2 + moff2, max_size2))
2162 return false;
2163 /* If there is must alias, there is no use disambiguating further. */
2164 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2165 return true;
2166 if (ref1 && ref2)
2167 {
2168 int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2169 false);
2170 if (res != -1)
2171 return !res;
2172 }
2173 }
2174 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2175 return false;
2176
2177 /* Disambiguations that rely on strict aliasing rules follow. */
2178 if (!flag_strict_aliasing || !tbaa_p)
2179 return true;
2180
2181 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2182 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2183
2184 /* If the alias set for a pointer access is zero all bets are off. */
2185 if (base1_alias_set == 0
2186 || base2_alias_set == 0)
2187 return true;
2188
2189 /* Do type-based disambiguation. */
2190 if (base1_alias_set != base2_alias_set
2191 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2192 return false;
2193
2194 /* If either reference is view-converted, give up now. */
2195 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2196 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2197 return true;
2198
2199 /* If both references are through the same type, they do not alias
2200 if the accesses do not overlap. This does extra disambiguation
2201 for mixed/pointer accesses but requires strict aliasing. */
2202 if ((TREE_CODE (base1) != TARGET_MEM_REF
2203 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2204 && (TREE_CODE (base2) != TARGET_MEM_REF
2205 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2206 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2207 TREE_TYPE (ptrtype2)) == 1)
2208 {
2209 /* But avoid treating arrays as "objects", instead assume they
2210 can overlap by an exact multiple of their element size.
2211 See gcc.dg/torture/alias-2.c. */
2212 bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2213
2214 if (!partial_overlap
2215 && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2216 return false;
2217 if (!ref1 || !ref2
2218 || (!partial_overlap
2219 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2220 return true;
2221 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2222 partial_overlap);
2223 if (res == -1)
2224 return !nonoverlapping_component_refs_p (ref1, ref2);
2225 return !res;
2226 }
2227
2228 /* Do access-path based disambiguation. */
2229 if (ref1 && ref2
2230 && (handled_component_p (ref1) || handled_component_p (ref2)))
2231 return aliasing_component_refs_p (ref1,
2232 ref1_alias_set, base1_alias_set,
2233 offset1, max_size1,
2234 ref2,
2235 ref2_alias_set, base2_alias_set,
2236 offset2, max_size2);
2237
2238 return true;
2239 }
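/* Hedged example of the equal-pointer fast path above:

     struct S { int a; int b; } *p;
     ... p->a ...   vs   ... p->b ...

   Both references use the same base pointer P and the bit ranges
   [0, 32) and [32, 64) do not overlap (assuming 32-bit int), so the
   accesses do not alias no matter what P points to.  */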
2240
2241 /* Return true, if the two memory references REF1 and REF2 may alias. */
2242
2243 static bool
2244 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2245 {
2246 tree base1, base2;
2247 poly_int64 offset1 = 0, offset2 = 0;
2248 poly_int64 max_size1 = -1, max_size2 = -1;
2249 bool var1_p, var2_p, ind1_p, ind2_p;
2250
2251 gcc_checking_assert ((!ref1->ref
2252 || TREE_CODE (ref1->ref) == SSA_NAME
2253 || DECL_P (ref1->ref)
2254 || TREE_CODE (ref1->ref) == STRING_CST
2255 || handled_component_p (ref1->ref)
2256 || TREE_CODE (ref1->ref) == MEM_REF
2257 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
2258 && (!ref2->ref
2259 || TREE_CODE (ref2->ref) == SSA_NAME
2260 || DECL_P (ref2->ref)
2261 || TREE_CODE (ref2->ref) == STRING_CST
2262 || handled_component_p (ref2->ref)
2263 || TREE_CODE (ref2->ref) == MEM_REF
2264 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
2265
2266 /* Decompose the references into their base objects and the access. */
2267 base1 = ao_ref_base (ref1);
2268 offset1 = ref1->offset;
2269 max_size1 = ref1->max_size;
2270 base2 = ao_ref_base (ref2);
2271 offset2 = ref2->offset;
2272 max_size2 = ref2->max_size;
2273
2274 /* We can end up with registers or constants as bases for example from
2275 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2276 which is seen as a struct copy. */
2277 if (TREE_CODE (base1) == SSA_NAME
2278 || TREE_CODE (base1) == CONST_DECL
2279 || TREE_CODE (base1) == CONSTRUCTOR
2280 || TREE_CODE (base1) == ADDR_EXPR
2281 || CONSTANT_CLASS_P (base1)
2282 || TREE_CODE (base2) == SSA_NAME
2283 || TREE_CODE (base2) == CONST_DECL
2284 || TREE_CODE (base2) == CONSTRUCTOR
2285 || TREE_CODE (base2) == ADDR_EXPR
2286 || CONSTANT_CLASS_P (base2))
2287 return false;
2288
2289 /* We can end up referring to code via function and label decls.
2290 As we likely do not properly track code aliases, conservatively
2291 bail out. */
2292 if (TREE_CODE (base1) == FUNCTION_DECL
2293 || TREE_CODE (base1) == LABEL_DECL
2294 || TREE_CODE (base2) == FUNCTION_DECL
2295 || TREE_CODE (base2) == LABEL_DECL)
2296 return true;
2297
2298 /* Two volatile accesses always conflict. */
2299 if (ref1->volatile_p
2300 && ref2->volatile_p)
2301 return true;
2302
2303 /* Defer to simple offset based disambiguation if we have
2304 references based on two decls. Do this before deferring to
2305 TBAA to handle must-alias cases in conformance with the
2306 GCC extension of allowing type-punning through unions. */
2307 var1_p = DECL_P (base1);
2308 var2_p = DECL_P (base2);
2309 if (var1_p && var2_p)
2310 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
2311 ref1->size,
2312 ref2->ref, base2, offset2, max_size2,
2313 ref2->size);
2314
2315 /* Handle restrict based accesses.
2316 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
2317 here. */
2318 tree rbase1 = base1;
2319 tree rbase2 = base2;
2320 if (var1_p)
2321 {
2322 rbase1 = ref1->ref;
2323 if (rbase1)
2324 while (handled_component_p (rbase1))
2325 rbase1 = TREE_OPERAND (rbase1, 0);
2326 }
2327 if (var2_p)
2328 {
2329 rbase2 = ref2->ref;
2330 if (rbase2)
2331 while (handled_component_p (rbase2))
2332 rbase2 = TREE_OPERAND (rbase2, 0);
2333 }
2334 if (rbase1 && rbase2
2335 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
2336 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
2337 /* If the accesses are in the same restrict clique... */
2338 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
2339 /* But based on different pointers they do not alias. */
2340 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
2341 return false;
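/* Source-level shape of the restrict disambiguation above
   (illustrative):

     void f (int *restrict p, int *restrict q)
     { *p = 1; *q = 2; }

   Both MEM_REFs carry the same MR_DEPENDENCE_CLIQUE but different
   MR_DEPENDENCE_BASEs, so the stores are known not to alias.  */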
2342
2343 ind1_p = (TREE_CODE (base1) == MEM_REF
2344 || TREE_CODE (base1) == TARGET_MEM_REF);
2345 ind2_p = (TREE_CODE (base2) == MEM_REF
2346 || TREE_CODE (base2) == TARGET_MEM_REF);
2347
2348 /* Canonicalize the pointer-vs-decl case. */
2349 if (ind1_p && var2_p)
2350 {
2351 std::swap (offset1, offset2);
2352 std::swap (max_size1, max_size2);
2353 std::swap (base1, base2);
2354 std::swap (ref1, ref2);
2355 var1_p = true;
2356 ind1_p = false;
2357 var2_p = false;
2358 ind2_p = true;
2359 }
2360
2361 /* First defer to TBAA if possible. */
2362 if (tbaa_p
2363 && flag_strict_aliasing
2364 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
2365 ao_ref_alias_set (ref2)))
2366 return false;
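/* E.g. (illustrative) an int access versus a float access:

     int *p; float *q;
     *p = 1;   vs   ... *q ...

   The alias sets of int and float do not conflict, so with
   -fstrict-aliasing the references are disambiguated right here.  */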
2367
2368 /* If the reference is based on a pointer that points to memory
2369 that may not be written to then the other reference cannot possibly
2370 clobber it. */
2371 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
2372 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
2373 || (ind1_p
2374 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
2375 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
2376 return false;
2377
2378 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2379 if (var1_p && ind2_p)
2380 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
2381 offset2, max_size2, ref2->size,
2382 ao_ref_alias_set (ref2),
2383 ao_ref_base_alias_set (ref2),
2384 ref1->ref, base1,
2385 offset1, max_size1, ref1->size,
2386 ao_ref_alias_set (ref1),
2387 ao_ref_base_alias_set (ref1),
2388 tbaa_p);
2389 else if (ind1_p && ind2_p)
2390 return indirect_refs_may_alias_p (ref1->ref, base1,
2391 offset1, max_size1, ref1->size,
2392 ao_ref_alias_set (ref1),
2393 ao_ref_base_alias_set (ref1),
2394 ref2->ref, base2,
2395 offset2, max_size2, ref2->size,
2396 ao_ref_alias_set (ref2),
2397 ao_ref_base_alias_set (ref2),
2398 tbaa_p);
2399
2400 gcc_unreachable ();
2401 }
2402
2403 /* Return true, if the two memory references REF1 and REF2 may alias
2404 and update statistics. */
2405
2406 bool
2407 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2408 {
2409 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2410 if (res)
2411 ++alias_stats.refs_may_alias_p_may_alias;
2412 else
2413 ++alias_stats.refs_may_alias_p_no_alias;
2414 return res;
2415 }
2416
2417 static bool
2418 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2419 {
2420 ao_ref r1;
2421 ao_ref_init (&r1, ref1);
2422 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
2423 }
2424
2425 bool
2426 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2427 {
2428 ao_ref r1, r2;
2429 ao_ref_init (&r1, ref1);
2430 ao_ref_init (&r2, ref2);
2431 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
2432 }
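/* Minimal usage sketch (hypothetical caller with reference trees
   REF_A and REF_B built elsewhere):

     if (!refs_may_alias_p (ref_a, ref_b, true))
       ;	// the two accesses can safely be reordered

   Passing TBAA_P = false restricts the query to points-to and
   offset based disambiguations.  */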
2433
2434 /* Returns true if there is an anti-dependence for the STORE that
2435 executes after the LOAD. */
2436
2437 bool
2438 refs_anti_dependent_p (tree load, tree store)
2439 {
2440 ao_ref r1, r2;
2441 ao_ref_init (&r1, load);
2442 ao_ref_init (&r2, store);
2443 return refs_may_alias_p_1 (&r1, &r2, false);
2444 }
2445
2446 /* Returns true if there is an output dependence for the stores
2447 STORE1 and STORE2. */
2448
2449 bool
2450 refs_output_dependent_p (tree store1, tree store2)
2451 {
2452 ao_ref r1, r2;
2453 ao_ref_init (&r1, store1);
2454 ao_ref_init (&r2, store2);
2455 return refs_may_alias_p_1 (&r1, &r2, false);
2456 }
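/* Note that both dependence predicates above pass TBAA_P = false:
   when ordering stores, a store may legally change the dynamic type
   of the memory, as in (illustrative)

     union U { int i; float f; } u;
     float x = u.f;	// load
     u.i = 0;		// store of a different effective type

   so type-based disambiguation is presumably not safe for these
   ordering queries.  */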
2457
2458 /* Returns true if and only if REF may alias any access stored in TT.
2459 If TBAA_P is true, use the TBAA oracle. */
2460
2461 static bool
2462 modref_may_conflict (const gimple *stmt,
2463 modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
2464 {
2465 alias_set_type base_set, ref_set;
2466 modref_base_node <alias_set_type> *base_node;
2467 modref_ref_node <alias_set_type> *ref_node;
2468 size_t i, j, k;
2469
2470 if (tt->every_base)
2471 return true;
2472
2473 base_set = ao_ref_base_alias_set (ref);
2474
2475 ref_set = ao_ref_alias_set (ref);
2476
2477 int num_tests = 0, max_tests = param_modref_max_tests;
2478 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, base_node)
2479 {
2480 if (tbaa_p && flag_strict_aliasing)
2481 {
2482 if (num_tests >= max_tests)
2483 return true;
2484 alias_stats.modref_tests++;
2485 if (!alias_sets_conflict_p (base_set, base_node->base))
2486 continue;
2487 num_tests++;
2488 }
2489
2490 if (base_node->every_ref)
2491 return true;
2492
2493 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
2494 {
2495 /* Do not repeat the same test as before. */
2496 if ((ref_set != base_set || base_node->base != ref_node->ref)
2497 && tbaa_p && flag_strict_aliasing)
2498 {
2499 if (num_tests >= max_tests)
2500 return true;
2501 alias_stats.modref_tests++;
2502 if (!alias_sets_conflict_p (ref_set, ref_node->ref))
2503 continue;
2504 num_tests++;
2505 }
2506
2507 /* The TBAA checks did not disambiguate; try to use the base pointer,
2508 for which we however need ref->ref. */
2509 if (ref_node->every_access || !ref->ref)
2510 return true;
2511
2512 modref_access_node *access_node;
2513 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
2514 {
2515 if (num_tests >= max_tests)
2516 return true;
2517
2518 if (access_node->parm_index == -1
2519 || (unsigned)access_node->parm_index
2520 >= gimple_call_num_args (stmt))
2521 return true;
2522
2523
2524 alias_stats.modref_baseptr_tests++;
2525
2526 if (ptr_deref_may_alias_ref_p_1
2527 (gimple_call_arg (stmt, access_node->parm_index), ref))
2528 return true;
2529 num_tests++;
2530 }
2531 }
2532 }
2533 return false;
2534 }
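/* The summary walked above is conceptually a small trie (hedged
   sketch; see ipa-modref-tree.h for the real structure):

     base alias set
       -> ref alias set
	    -> accesses keyed by the call argument (parm_index)
	       the access is based on

   every_base/every_ref/every_access act as wildcards, and the walk
   gives up once param_modref_max_tests queries were spent.  */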
2535
2536 /* If the call CALL may use the memory reference REF return true,
2537 otherwise return false. */
2538
2539 static bool
2540 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2541 {
2542 tree base, callee;
2543 unsigned i;
2544 int flags = gimple_call_flags (call);
2545
2546 /* Const functions without a static chain do not implicitly use memory. */
2547 if (!gimple_call_chain (call)
2548 && (flags & (ECF_CONST|ECF_NOVOPS)))
2549 goto process_args;
2550
2551 /* A call that is not without side-effects might involve volatile
2552 accesses and thus conflicts with all other volatile accesses. */
2553 if (ref->volatile_p)
2554 return true;
2555
2556 callee = gimple_call_fndecl (call);
2557
2558 if (!gimple_call_chain (call) && callee != NULL_TREE)
2559 {
2560 struct cgraph_node *node = cgraph_node::get (callee);
2561 /* We cannot safely optimize based on the summary of the callee if it
2562 does not always bind to the current def: it is possible that a memory
2563 load was optimized out earlier and the interposed variant may not be
2564 optimized this way. */
2565 if (node && node->binds_to_current_def_p ())
2566 {
2567 modref_summary *summary = get_modref_function_summary (node);
2568 if (summary)
2569 {
2570 if (!modref_may_conflict (call, summary->loads, ref, tbaa_p))
2571 {
2572 alias_stats.modref_use_no_alias++;
2573 if (dump_file && (dump_flags & TDF_DETAILS))
2574 {
2575 fprintf (dump_file, "ipa-modref: in %s,"
2576 " call to %s does not use ",
2577 cgraph_node::get
2578 (current_function_decl)->dump_name (),
2579 node->dump_name ());
2580 print_generic_expr (dump_file, ref->ref);
2581 fprintf (dump_file, " %i->%i\n",
2582 ao_ref_base_alias_set (ref),
2583 ao_ref_alias_set (ref));
2584 }
2585 goto process_args;
2586 }
2587 alias_stats.modref_use_may_alias++;
2588 }
2589 }
2590 }
2591
2592 base = ao_ref_base (ref);
2593 if (!base)
2594 return true;
2595
2596 /* If the reference is based on a decl that is not aliased the call
2597 cannot possibly use it. */
2598 if (DECL_P (base)
2599 && !may_be_aliased (base)
2600 /* But local statics can be used through recursion. */
2601 && !is_global_var (base))
2602 goto process_args;
2603
2604 /* Handle those builtin functions explicitly that do not act as
2605 escape points. See tree-ssa-structalias.c:find_func_aliases
2606 for the list of builtins we might need to handle here. */
2607 if (callee != NULL_TREE
2608 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2609 switch (DECL_FUNCTION_CODE (callee))
2610 {
2611 /* All the following functions read memory pointed to by
2612 their second argument. strcat/strncat additionally
2613 reads memory pointed to by the first argument. */
2614 case BUILT_IN_STRCAT:
2615 case BUILT_IN_STRNCAT:
2616 {
2617 ao_ref dref;
2618 ao_ref_init_from_ptr_and_size (&dref,
2619 gimple_call_arg (call, 0),
2620 NULL_TREE);
2621 if (refs_may_alias_p_1 (&dref, ref, false))
2622 return true;
2623 }
2624 /* FALLTHRU */
2625 case BUILT_IN_STRCPY:
2626 case BUILT_IN_STRNCPY:
2627 case BUILT_IN_MEMCPY:
2628 case BUILT_IN_MEMMOVE:
2629 case BUILT_IN_MEMPCPY:
2630 case BUILT_IN_STPCPY:
2631 case BUILT_IN_STPNCPY:
2632 case BUILT_IN_TM_MEMCPY:
2633 case BUILT_IN_TM_MEMMOVE:
2634 {
2635 ao_ref dref;
2636 tree size = NULL_TREE;
2637 if (gimple_call_num_args (call) == 3)
2638 size = gimple_call_arg (call, 2);
2639 ao_ref_init_from_ptr_and_size (&dref,
2640 gimple_call_arg (call, 1),
2641 size);
2642 return refs_may_alias_p_1 (&dref, ref, false);
2643 }
2644 case BUILT_IN_STRCAT_CHK:
2645 case BUILT_IN_STRNCAT_CHK:
2646 {
2647 ao_ref dref;
2648 ao_ref_init_from_ptr_and_size (&dref,
2649 gimple_call_arg (call, 0),
2650 NULL_TREE);
2651 if (refs_may_alias_p_1 (&dref, ref, false))
2652 return true;
2653 }
2654 /* FALLTHRU */
2655 case BUILT_IN_STRCPY_CHK:
2656 case BUILT_IN_STRNCPY_CHK:
2657 case BUILT_IN_MEMCPY_CHK:
2658 case BUILT_IN_MEMMOVE_CHK:
2659 case BUILT_IN_MEMPCPY_CHK:
2660 case BUILT_IN_STPCPY_CHK:
2661 case BUILT_IN_STPNCPY_CHK:
2662 {
2663 ao_ref dref;
2664 tree size = NULL_TREE;
2665 if (gimple_call_num_args (call) == 4)
2666 size = gimple_call_arg (call, 2);
2667 ao_ref_init_from_ptr_and_size (&dref,
2668 gimple_call_arg (call, 1),
2669 size);
2670 return refs_may_alias_p_1 (&dref, ref, false);
2671 }
2672 case BUILT_IN_BCOPY:
2673 {
2674 ao_ref dref;
2675 tree size = gimple_call_arg (call, 2);
2676 ao_ref_init_from_ptr_and_size (&dref,
2677 gimple_call_arg (call, 0),
2678 size);
2679 return refs_may_alias_p_1 (&dref, ref, false);
2680 }
2681
2682 /* The following functions read memory pointed to by their
2683 first argument. */
2684 CASE_BUILT_IN_TM_LOAD (1):
2685 CASE_BUILT_IN_TM_LOAD (2):
2686 CASE_BUILT_IN_TM_LOAD (4):
2687 CASE_BUILT_IN_TM_LOAD (8):
2688 CASE_BUILT_IN_TM_LOAD (FLOAT):
2689 CASE_BUILT_IN_TM_LOAD (DOUBLE):
2690 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
2691 CASE_BUILT_IN_TM_LOAD (M64):
2692 CASE_BUILT_IN_TM_LOAD (M128):
2693 CASE_BUILT_IN_TM_LOAD (M256):
2694 case BUILT_IN_TM_LOG:
2695 case BUILT_IN_TM_LOG_1:
2696 case BUILT_IN_TM_LOG_2:
2697 case BUILT_IN_TM_LOG_4:
2698 case BUILT_IN_TM_LOG_8:
2699 case BUILT_IN_TM_LOG_FLOAT:
2700 case BUILT_IN_TM_LOG_DOUBLE:
2701 case BUILT_IN_TM_LOG_LDOUBLE:
2702 case BUILT_IN_TM_LOG_M64:
2703 case BUILT_IN_TM_LOG_M128:
2704 case BUILT_IN_TM_LOG_M256:
2705 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
2706
2707 /* These read memory pointed to by the first argument. */
2708 case BUILT_IN_STRDUP:
2709 case BUILT_IN_STRNDUP:
2710 case BUILT_IN_REALLOC:
2711 {
2712 ao_ref dref;
2713 tree size = NULL_TREE;
2714 if (gimple_call_num_args (call) == 2)
2715 size = gimple_call_arg (call, 1);
2716 ao_ref_init_from_ptr_and_size (&dref,
2717 gimple_call_arg (call, 0),
2718 size);
2719 return refs_may_alias_p_1 (&dref, ref, false);
2720 }
2721 /* These read memory pointed to by the first argument. */
2722 case BUILT_IN_INDEX:
2723 case BUILT_IN_STRCHR:
2724 case BUILT_IN_STRRCHR:
2725 {
2726 ao_ref dref;
2727 ao_ref_init_from_ptr_and_size (&dref,
2728 gimple_call_arg (call, 0),
2729 NULL_TREE);
2730 return refs_may_alias_p_1 (&dref, ref, false);
2731 }
2732 /* These read memory pointed to by the first argument with size
2733 in the third argument. */
2734 case BUILT_IN_MEMCHR:
2735 {
2736 ao_ref dref;
2737 ao_ref_init_from_ptr_and_size (&dref,
2738 gimple_call_arg (call, 0),
2739 gimple_call_arg (call, 2));
2740 return refs_may_alias_p_1 (&dref, ref, false);
2741 }
2742 /* These read memory pointed to by the first and second arguments. */
2743 case BUILT_IN_STRSTR:
2744 case BUILT_IN_STRPBRK:
2745 {
2746 ao_ref dref;
2747 ao_ref_init_from_ptr_and_size (&dref,
2748 gimple_call_arg (call, 0),
2749 NULL_TREE);
2750 if (refs_may_alias_p_1 (&dref, ref, false))
2751 return true;
2752 ao_ref_init_from_ptr_and_size (&dref,
2753 gimple_call_arg (call, 1),
2754 NULL_TREE);
2755 return refs_may_alias_p_1 (&dref, ref, false);
2756 }
2757
2758 /* The following builtins do not read from memory. */
2759 case BUILT_IN_FREE:
2760 case BUILT_IN_MALLOC:
2761 case BUILT_IN_POSIX_MEMALIGN:
2762 case BUILT_IN_ALIGNED_ALLOC:
2763 case BUILT_IN_CALLOC:
2764 CASE_BUILT_IN_ALLOCA:
2765 case BUILT_IN_STACK_SAVE:
2766 case BUILT_IN_STACK_RESTORE:
2767 case BUILT_IN_MEMSET:
2768 case BUILT_IN_TM_MEMSET:
2769 case BUILT_IN_MEMSET_CHK:
2770 case BUILT_IN_FREXP:
2771 case BUILT_IN_FREXPF:
2772 case BUILT_IN_FREXPL:
2773 case BUILT_IN_GAMMA_R:
2774 case BUILT_IN_GAMMAF_R:
2775 case BUILT_IN_GAMMAL_R:
2776 case BUILT_IN_LGAMMA_R:
2777 case BUILT_IN_LGAMMAF_R:
2778 case BUILT_IN_LGAMMAL_R:
2779 case BUILT_IN_MODF:
2780 case BUILT_IN_MODFF:
2781 case BUILT_IN_MODFL:
2782 case BUILT_IN_REMQUO:
2783 case BUILT_IN_REMQUOF:
2784 case BUILT_IN_REMQUOL:
2785 case BUILT_IN_SINCOS:
2786 case BUILT_IN_SINCOSF:
2787 case BUILT_IN_SINCOSL:
2788 case BUILT_IN_ASSUME_ALIGNED:
2789 case BUILT_IN_VA_END:
2790 return false;
2791 /* __sync_* builtins and some OpenMP builtins act as threading
2792 barriers. */
2793 #undef DEF_SYNC_BUILTIN
2794 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2795 #include "sync-builtins.def"
2796 #undef DEF_SYNC_BUILTIN
2797 case BUILT_IN_GOMP_ATOMIC_START:
2798 case BUILT_IN_GOMP_ATOMIC_END:
2799 case BUILT_IN_GOMP_BARRIER:
2800 case BUILT_IN_GOMP_BARRIER_CANCEL:
2801 case BUILT_IN_GOMP_TASKWAIT:
2802 case BUILT_IN_GOMP_TASKGROUP_END:
2803 case BUILT_IN_GOMP_CRITICAL_START:
2804 case BUILT_IN_GOMP_CRITICAL_END:
2805 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2806 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2807 case BUILT_IN_GOMP_LOOP_END:
2808 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2809 case BUILT_IN_GOMP_ORDERED_START:
2810 case BUILT_IN_GOMP_ORDERED_END:
2811 case BUILT_IN_GOMP_SECTIONS_END:
2812 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2813 case BUILT_IN_GOMP_SINGLE_COPY_START:
2814 case BUILT_IN_GOMP_SINGLE_COPY_END:
2815 return true;
2816
2817 default:
2818 /* Fallthru to general call handling. */;
2819 }
2820
2821 /* Check if base is a global static variable that is not read
2822 by the function. */
2823 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2824 {
2825 struct cgraph_node *node = cgraph_node::get (callee);
2826 bitmap read;
2827 int id;
2828
2829 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2830 node yet. We should enforce that there are nodes for all decls in the
2831 IL and remove this check instead. */
2832 if (node
2833 && (id = ipa_reference_var_uid (base)) != -1
2834 && (read = ipa_reference_get_read_global (node))
2835 && !bitmap_bit_p (read, id))
2836 goto process_args;
2837 }
2838
2839 /* Check if the base variable is call-used. */
2840 if (DECL_P (base))
2841 {
2842 if (pt_solution_includes (gimple_call_use_set (call), base))
2843 return true;
2844 }
2845 else if ((TREE_CODE (base) == MEM_REF
2846 || TREE_CODE (base) == TARGET_MEM_REF)
2847 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2848 {
2849 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2850 if (!pi)
2851 return true;
2852
2853 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2854 return true;
2855 }
2856 else
2857 return true;
2858
2859 /* Inspect call arguments for passed-by-value aliases. */
2860 process_args:
2861 for (i = 0; i < gimple_call_num_args (call); ++i)
2862 {
2863 tree op = gimple_call_arg (call, i);
2864 int flags = gimple_call_arg_flags (call, i);
2865
2866 if (flags & EAF_UNUSED)
2867 continue;
2868
2869 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2870 op = TREE_OPERAND (op, 0);
2871
2872 if (TREE_CODE (op) != SSA_NAME
2873 && !is_gimple_min_invariant (op))
2874 {
2875 ao_ref r;
2876 ao_ref_init (&r, op);
2877 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2878 return true;
2879 }
2880 }
2881
2882 return false;
2883 }
2884
2885 static bool
2886 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2887 {
2888 bool res;
2889 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2890 if (res)
2891 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2892 else
2893 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2894 return res;
2895 }
2896
2897
2898 /* If the statement STMT may use the memory reference REF return
2899 true, otherwise return false. */
2900
2901 bool
2902 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2903 {
2904 if (is_gimple_assign (stmt))
2905 {
2906 tree rhs;
2907
2908 /* All assignments that touch memory are single (gimple_assign_single_p). */
2909 if (!gimple_assign_single_p (stmt))
2910 return false;
2911
2912 rhs = gimple_assign_rhs1 (stmt);
2913 if (is_gimple_reg (rhs)
2914 || is_gimple_min_invariant (rhs)
2915 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2916 return false;
2917
2918 return refs_may_alias_p (rhs, ref, tbaa_p);
2919 }
2920 else if (is_gimple_call (stmt))
2921 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2922 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2923 {
2924 tree retval = gimple_return_retval (return_stmt);
2925 if (retval
2926 && TREE_CODE (retval) != SSA_NAME
2927 && !is_gimple_min_invariant (retval)
2928 && refs_may_alias_p (retval, ref, tbaa_p))
2929 return true;
2930 /* If ref escapes the function then the return acts as a use. */
2931 tree base = ao_ref_base (ref);
2932 if (!base)
2933 ;
2934 else if (DECL_P (base))
2935 return is_global_var (base);
2936 else if (TREE_CODE (base) == MEM_REF
2937 || TREE_CODE (base) == TARGET_MEM_REF)
2938 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2939 return false;
2940 }
2941
2942 return true;
2943 }
2944
2945 bool
2946 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2947 {
2948 ao_ref r;
2949 ao_ref_init (&r, ref);
2950 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2951 }
2952
2953 /* If the call in statement CALL may clobber the memory reference REF
2954 return true, otherwise return false. */
2955
2956 bool
2957 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2958 {
2959 tree base;
2960 tree callee;
2961
2962 /* If the call is pure or const it cannot clobber anything. */
2963 if (gimple_call_flags (call)
2964 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2965 return false;
2966 if (gimple_call_internal_p (call))
2967 switch (gimple_call_internal_fn (call))
2968 {
2969 /* Treat these internal calls like ECF_PURE for aliasing,
2970 they don't write to any memory the program should care about.
2971 They have important other side-effects, and read memory,
2972 so can't be ECF_NOVOPS. */
2973 case IFN_UBSAN_NULL:
2974 case IFN_UBSAN_BOUNDS:
2975 case IFN_UBSAN_VPTR:
2976 case IFN_UBSAN_OBJECT_SIZE:
2977 case IFN_UBSAN_PTR:
2978 case IFN_ASAN_CHECK:
2979 return false;
2980 default:
2981 break;
2982 }
2983
2984 callee = gimple_call_fndecl (call);
2985
2986 if (callee != NULL_TREE && !ref->volatile_p)
2987 {
2988 struct cgraph_node *node = cgraph_node::get (callee);
2989 if (node)
2990 {
2991 modref_summary *summary = get_modref_function_summary (node);
2992 if (summary)
2993 {
2994 if (!modref_may_conflict (call, summary->stores, ref, tbaa_p))
2995 {
2996 alias_stats.modref_clobber_no_alias++;
2997 if (dump_file && (dump_flags & TDF_DETAILS))
2998 {
2999 fprintf (dump_file,
3000 "ipa-modref: in %s, "
3001 "call to %s does not clobber ",
3002 cgraph_node::get
3003 (current_function_decl)->dump_name (),
3004 node->dump_name ());
3005 print_generic_expr (dump_file, ref->ref);
3006 fprintf (dump_file, " %i->%i\n",
3007 ao_ref_base_alias_set (ref),
3008 ao_ref_alias_set (ref));
3009 }
3010 return false;
3011 }
3012 alias_stats.modref_clobber_may_alias++;
3013 }
3014 }
3015 }
3016
3017 base = ao_ref_base (ref);
3018 if (!base)
3019 return true;
3020
3021 if (TREE_CODE (base) == SSA_NAME
3022 || CONSTANT_CLASS_P (base))
3023 return false;
3024
3025 /* A call that is not without side-effects might involve volatile
3026 accesses and thus conflicts with all other volatile accesses. */
3027 if (ref->volatile_p)
3028 return true;
3029
3030 /* If the reference is based on a decl that is not aliased the call
3031 cannot possibly clobber it. */
3032 if (DECL_P (base)
3033 && !may_be_aliased (base)
3034 /* But local non-readonly statics can be modified through recursion
3035 or the call may implement a threading barrier which we must
3036 treat as may-def. */
3037 && (TREE_READONLY (base)
3038 || !is_global_var (base)))
3039 return false;
3040
3041 /* If the reference is based on a pointer that points to memory
3042 that may not be written to then the call cannot possibly clobber it. */
3043 if ((TREE_CODE (base) == MEM_REF
3044 || TREE_CODE (base) == TARGET_MEM_REF)
3045 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
3046 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
3047 return false;
3048
3049 /* Handle those builtin functions explicitly that do not act as
3050 escape points. See tree-ssa-structalias.c:find_func_aliases
3051 for the list of builtins we might need to handle here. */
3052 if (callee != NULL_TREE
3053 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
3054 switch (DECL_FUNCTION_CODE (callee))
3055 {
3056 /* All the following functions clobber memory pointed to by
3057 their first argument. */
3058 case BUILT_IN_STRCPY:
3059 case BUILT_IN_STRNCPY:
3060 case BUILT_IN_MEMCPY:
3061 case BUILT_IN_MEMMOVE:
3062 case BUILT_IN_MEMPCPY:
3063 case BUILT_IN_STPCPY:
3064 case BUILT_IN_STPNCPY:
3065 case BUILT_IN_STRCAT:
3066 case BUILT_IN_STRNCAT:
3067 case BUILT_IN_MEMSET:
3068 case BUILT_IN_TM_MEMSET:
3069 CASE_BUILT_IN_TM_STORE (1):
3070 CASE_BUILT_IN_TM_STORE (2):
3071 CASE_BUILT_IN_TM_STORE (4):
3072 CASE_BUILT_IN_TM_STORE (8):
3073 CASE_BUILT_IN_TM_STORE (FLOAT):
3074 CASE_BUILT_IN_TM_STORE (DOUBLE):
3075 CASE_BUILT_IN_TM_STORE (LDOUBLE):
3076 CASE_BUILT_IN_TM_STORE (M64):
3077 CASE_BUILT_IN_TM_STORE (M128):
3078 CASE_BUILT_IN_TM_STORE (M256):
3079 case BUILT_IN_TM_MEMCPY:
3080 case BUILT_IN_TM_MEMMOVE:
3081 {
3082 ao_ref dref;
3083 tree size = NULL_TREE;
3084 /* Don't pass in the size for strncat, as the maximum size
3085 written is strlen (dest) + n + 1 rather than n, that is,
3086 n + 1 bytes at dest + strlen (dest), but strlen (dest) isn't
3087 known. */
3088 if (gimple_call_num_args (call) == 3
3089 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
3090 size = gimple_call_arg (call, 2);
3091 ao_ref_init_from_ptr_and_size (&dref,
3092 gimple_call_arg (call, 0),
3093 size);
3094 return refs_may_alias_p_1 (&dref, ref, false);
3095 }
3096 case BUILT_IN_STRCPY_CHK:
3097 case BUILT_IN_STRNCPY_CHK:
3098 case BUILT_IN_MEMCPY_CHK:
3099 case BUILT_IN_MEMMOVE_CHK:
3100 case BUILT_IN_MEMPCPY_CHK:
3101 case BUILT_IN_STPCPY_CHK:
3102 case BUILT_IN_STPNCPY_CHK:
3103 case BUILT_IN_STRCAT_CHK:
3104 case BUILT_IN_STRNCAT_CHK:
3105 case BUILT_IN_MEMSET_CHK:
3106 {
3107 ao_ref dref;
3108 tree size = NULL_TREE;
3109 /* Don't pass in the size for __strncat_chk, as the maximum size
3110 written is strlen (dest) + n + 1 rather than n, that is,
3111 n + 1 bytes at dest + strlen (dest), but strlen (dest) isn't
3112 known. */
3113 if (gimple_call_num_args (call) == 4
3114 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
3115 size = gimple_call_arg (call, 2);
3116 ao_ref_init_from_ptr_and_size (&dref,
3117 gimple_call_arg (call, 0),
3118 size);
3119 return refs_may_alias_p_1 (&dref, ref, false);
3120 }
3121 case BUILT_IN_BCOPY:
3122 {
3123 ao_ref dref;
3124 tree size = gimple_call_arg (call, 2);
3125 ao_ref_init_from_ptr_and_size (&dref,
3126 gimple_call_arg (call, 1),
3127 size);
3128 return refs_may_alias_p_1 (&dref, ref, false);
3129 }
3130 /* Allocating memory does not have any side-effects apart from
3131 being the definition point for the pointer. */
3132 case BUILT_IN_MALLOC:
3133 case BUILT_IN_ALIGNED_ALLOC:
3134 case BUILT_IN_CALLOC:
3135 case BUILT_IN_STRDUP:
3136 case BUILT_IN_STRNDUP:
3137 /* Unix98 specifies that errno is set on allocation failure. */
3138 if (flag_errno_math
3139 && targetm.ref_may_alias_errno (ref))
3140 return true;
3141 return false;
3142 case BUILT_IN_STACK_SAVE:
3143 CASE_BUILT_IN_ALLOCA:
3144 case BUILT_IN_ASSUME_ALIGNED:
3145 return false;
3146 /* But posix_memalign stores a pointer into the memory pointed to
3147 by its first argument. */
3148 case BUILT_IN_POSIX_MEMALIGN:
3149 {
3150 tree ptrptr = gimple_call_arg (call, 0);
3151 ao_ref dref;
3152 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
3153 TYPE_SIZE_UNIT (ptr_type_node));
3154 return (refs_may_alias_p_1 (&dref, ref, false)
3155 || (flag_errno_math
3156 && targetm.ref_may_alias_errno (ref)));
3157 }
3158 /* Freeing memory kills the pointed-to memory. More importantly
3159 the call has to serve as a barrier for moving loads and stores
3160 across it. */
3161 case BUILT_IN_FREE:
3162 case BUILT_IN_VA_END:
3163 {
3164 tree ptr = gimple_call_arg (call, 0);
3165 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
3166 }
3167 /* Realloc serves both as allocation point and deallocation point. */
3168 case BUILT_IN_REALLOC:
3169 {
3170 tree ptr = gimple_call_arg (call, 0);
3171 /* Unix98 specifies that errno is set on allocation failure. */
3172 return ((flag_errno_math
3173 && targetm.ref_may_alias_errno (ref))
3174 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
3175 }
3176 case BUILT_IN_GAMMA_R:
3177 case BUILT_IN_GAMMAF_R:
3178 case BUILT_IN_GAMMAL_R:
3179 case BUILT_IN_LGAMMA_R:
3180 case BUILT_IN_LGAMMAF_R:
3181 case BUILT_IN_LGAMMAL_R:
3182 {
3183 tree out = gimple_call_arg (call, 1);
3184 if (ptr_deref_may_alias_ref_p_1 (out, ref))
3185 return true;
3186 if (flag_errno_math)
3187 break;
3188 return false;
3189 }
3190 case BUILT_IN_FREXP:
3191 case BUILT_IN_FREXPF:
3192 case BUILT_IN_FREXPL:
3193 case BUILT_IN_MODF:
3194 case BUILT_IN_MODFF:
3195 case BUILT_IN_MODFL:
3196 {
3197 tree out = gimple_call_arg (call, 1);
3198 return ptr_deref_may_alias_ref_p_1 (out, ref);
3199 }
3200 case BUILT_IN_REMQUO:
3201 case BUILT_IN_REMQUOF:
3202 case BUILT_IN_REMQUOL:
3203 {
3204 tree out = gimple_call_arg (call, 2);
3205 if (ptr_deref_may_alias_ref_p_1 (out, ref))
3206 return true;
3207 if (flag_errno_math)
3208 break;
3209 return false;
3210 }
3211 case BUILT_IN_SINCOS:
3212 case BUILT_IN_SINCOSF:
3213 case BUILT_IN_SINCOSL:
3214 {
3215 tree sin = gimple_call_arg (call, 1);
3216 tree cos = gimple_call_arg (call, 2);
3217 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
3218 || ptr_deref_may_alias_ref_p_1 (cos, ref));
3219 }
3220 /* __sync_* builtins and some OpenMP builtins act as threading
3221 barriers. */
3222 #undef DEF_SYNC_BUILTIN
3223 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
3224 #include "sync-builtins.def"
3225 #undef DEF_SYNC_BUILTIN
3226 case BUILT_IN_GOMP_ATOMIC_START:
3227 case BUILT_IN_GOMP_ATOMIC_END:
3228 case BUILT_IN_GOMP_BARRIER:
3229 case BUILT_IN_GOMP_BARRIER_CANCEL:
3230 case BUILT_IN_GOMP_TASKWAIT:
3231 case BUILT_IN_GOMP_TASKGROUP_END:
3232 case BUILT_IN_GOMP_CRITICAL_START:
3233 case BUILT_IN_GOMP_CRITICAL_END:
3234 case BUILT_IN_GOMP_CRITICAL_NAME_START:
3235 case BUILT_IN_GOMP_CRITICAL_NAME_END:
3236 case BUILT_IN_GOMP_LOOP_END:
3237 case BUILT_IN_GOMP_LOOP_END_CANCEL:
3238 case BUILT_IN_GOMP_ORDERED_START:
3239 case BUILT_IN_GOMP_ORDERED_END:
3240 case BUILT_IN_GOMP_SECTIONS_END:
3241 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
3242 case BUILT_IN_GOMP_SINGLE_COPY_START:
3243 case BUILT_IN_GOMP_SINGLE_COPY_END:
3244 return true;
3245 default:
3246 /* Fallthru to general call handling. */;
3247 }
3248
3249 /* Check if base is a global static variable that is not written
3250 by the function. */
3251 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
3252 {
3253 struct cgraph_node *node = cgraph_node::get (callee);
3254 bitmap written;
3255 int id;
3256
3257 if (node
3258 && (id = ipa_reference_var_uid (base)) != -1
3259 && (written = ipa_reference_get_written_global (node))
3260 && !bitmap_bit_p (written, id))
3261 return false;
3262 }
3263
3264 /* Check if the base variable is call-clobbered. */
3265 if (DECL_P (base))
3266 return pt_solution_includes (gimple_call_clobber_set (call), base);
3267 else if ((TREE_CODE (base) == MEM_REF
3268 || TREE_CODE (base) == TARGET_MEM_REF)
3269 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3270 {
3271 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
3272 if (!pi)
3273 return true;
3274
3275 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
3276 }
3277
3278 return true;
3279 }
3280
3281 /* If the call in statement CALL may clobber the memory reference REF
3282 return true, otherwise return false. */
3283
3284 bool
3285 call_may_clobber_ref_p (gcall *call, tree ref)
3286 {
3287 bool res;
3288 ao_ref r;
3289 ao_ref_init (&r, ref);
3290 res = call_may_clobber_ref_p_1 (call, &r, true);
3291 if (res)
3292 ++alias_stats.call_may_clobber_ref_p_may_alias;
3293 else
3294 ++alias_stats.call_may_clobber_ref_p_no_alias;
3295 return res;
3296 }
3297
3298
3299 /* If the statement STMT may clobber the memory reference REF return true,
3300 otherwise return false. */
3301
3302 bool
3303 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
3304 {
3305 if (is_gimple_call (stmt))
3306 {
3307 tree lhs = gimple_call_lhs (stmt);
3308 if (lhs
3309 && TREE_CODE (lhs) != SSA_NAME)
3310 {
3311 ao_ref r;
3312 ao_ref_init (&r, lhs);
3313 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
3314 return true;
3315 }
3316
3317 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref, tbaa_p);
3318 }
3319 else if (gimple_assign_single_p (stmt))
3320 {
3321 tree lhs = gimple_assign_lhs (stmt);
3322 if (TREE_CODE (lhs) != SSA_NAME)
3323 {
3324 ao_ref r;
3325 ao_ref_init (&r, lhs);
3326 return refs_may_alias_p_1 (ref, &r, tbaa_p);
3327 }
3328 }
3329 else if (gimple_code (stmt) == GIMPLE_ASM)
3330 return true;
3331
3332 return false;
3333 }
3334
3335 bool
3336 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
3337 {
3338 ao_ref r;
3339 ao_ref_init (&r, ref);
3340 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
3341 }
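/* A hedged sketch of a typical consumer, walking the virtual use-def
   chain towards the closest may-clobbering statement (USE_STMT and
   REF are hypothetical; virtual PHIs and the function-entry default
   definition are omitted for brevity):

     tree vuse = gimple_vuse (use_stmt);
     while (vuse && !SSA_NAME_IS_DEFAULT_DEF (vuse))
       {
	 gimple *def = SSA_NAME_DEF_STMT (vuse);
	 if (stmt_may_clobber_ref_p (def, ref, true))
	   break;	// DEF may provide the current value of REF
	 vuse = gimple_vuse (def);
       }

   The real walkers live in the walk_non_aliased_vuses machinery.  */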
3342
3343 /* Return true if store1 and store2 described by corresponding tuples
3344 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3345 address. */
3346
3347 static bool
3348 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
3349 poly_int64 max_size1,
3350 tree base2, poly_int64 offset2, poly_int64 size2,
3351 poly_int64 max_size2)
3352 {
3353 /* Offsets need to be 0. */
3354 if (maybe_ne (offset1, 0)
3355 || maybe_ne (offset2, 0))
3356 return false;
3357
3358 bool base1_obj_p = SSA_VAR_P (base1);
3359 bool base2_obj_p = SSA_VAR_P (base2);
3360
3361 /* We need one object. */
3362 if (base1_obj_p == base2_obj_p)
3363 return false;
3364 tree obj = base1_obj_p ? base1 : base2;
3365
3366 /* And we need one MEM_REF. */
3367 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
3368 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
3369 if (base1_memref_p == base2_memref_p)
3370 return false;
3371 tree memref = base1_memref_p ? base1 : base2;
3372
3373 /* Sizes need to be valid. */
3374 if (!known_size_p (max_size1)
3375 || !known_size_p (max_size2)
3376 || !known_size_p (size1)
3377 || !known_size_p (size2))
3378 return false;
3379
3380 /* Max_size needs to match size. */
3381 if (maybe_ne (max_size1, size1)
3382 || maybe_ne (max_size2, size2))
3383 return false;
3384
3385 /* Sizes need to match. */
3386 if (maybe_ne (size1, size2))
3387 return false;
3388
3389
3390 /* Check that memref is a store to a pointer with singleton points-to info. */
3391 if (!integer_zerop (TREE_OPERAND (memref, 1)))
3392 return false;
3393 tree ptr = TREE_OPERAND (memref, 0);
3394 if (TREE_CODE (ptr) != SSA_NAME)
3395 return false;
3396 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3397 unsigned int pt_uid;
3398 if (pi == NULL
3399 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
3400 return false;
3401
3402 /* Be conservative with non-call exceptions when the address might
3403 be NULL. */
3404 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
3405 return false;
3406
3407 /* Check that ptr points relative to obj. */
3408 unsigned int obj_uid = DECL_PT_UID (obj);
3409 if (obj_uid != pt_uid)
3410 return false;
3411
3412 /* Check that the object size is the same as the store size. That
3413 ensures that ptr points to the start of obj. */
3414 return (DECL_SIZE (obj)
3415 && poly_int_tree_p (DECL_SIZE (obj))
3416 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
3417 }
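/* Source-level shape this recognizes (illustrative):

     int x;		// the decl-based store
     int *p = &x;	// P has singleton points-to set {x}
     *p = 1;   vs   x = 2;

   Both stores start at offset zero and store exactly DECL_SIZE (x)
   bits, so they must write the same bytes.  */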
3418
3419 /* If STMT kills the memory reference REF return true, otherwise
3420 return false. */
3421
3422 bool
3423 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
3424 {
3425 if (!ao_ref_base (ref))
3426 return false;
3427
3428 if (gimple_has_lhs (stmt)
3429 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
3430 /* The assignment is not necessarily carried out if it can throw
3431 and we can catch it in the current function where we could inspect
3432 the previous value.
3433 ??? We only need to care about the RHS throwing. For aggregate
3434 assignments or similar calls and non-call exceptions the LHS
3435 might throw as well. */
3436 && !stmt_can_throw_internal (cfun, stmt))
3437 {
3438 tree lhs = gimple_get_lhs (stmt);
3439 /* If LHS is literally a base of the access we are done. */
3440 if (ref->ref)
3441 {
3442 tree base = ref->ref;
3443 tree innermost_dropped_array_ref = NULL_TREE;
3444 if (handled_component_p (base))
3445 {
3446 tree saved_lhs0 = NULL_TREE;
3447 if (handled_component_p (lhs))
3448 {
3449 saved_lhs0 = TREE_OPERAND (lhs, 0);
3450 TREE_OPERAND (lhs, 0) = integer_zero_node;
3451 }
3452 do
3453 {
3454 /* Just compare the outermost handled component, if
3455 they are equal we have found a possible common
3456 base. */
3457 tree saved_base0 = TREE_OPERAND (base, 0);
3458 TREE_OPERAND (base, 0) = integer_zero_node;
3459 bool res = operand_equal_p (lhs, base, 0);
3460 TREE_OPERAND (base, 0) = saved_base0;
3461 if (res)
3462 break;
3463 /* Remember if we drop an array-ref that we need to
3464 double-check not being at struct end. */
3465 if (TREE_CODE (base) == ARRAY_REF
3466 || TREE_CODE (base) == ARRAY_RANGE_REF)
3467 innermost_dropped_array_ref = base;
3468 /* Otherwise drop handled components of the access. */
3469 base = saved_base0;
3470 }
3471 while (handled_component_p (base));
3472 if (saved_lhs0)
3473 TREE_OPERAND (lhs, 0) = saved_lhs0;
3474 }
3475 /* Finally check if the lhs has the same address and size as the
3476 base candidate of the access. Watch out if we have dropped
3477 an array-ref that was at struct end; this means ref->ref may
3478 be outside of the TYPE_SIZE of its base. */
3479 if ((! innermost_dropped_array_ref
3480 || ! array_at_struct_end_p (innermost_dropped_array_ref))
3481 && (lhs == base
3482 || (((TYPE_SIZE (TREE_TYPE (lhs))
3483 == TYPE_SIZE (TREE_TYPE (base)))
3484 || (TYPE_SIZE (TREE_TYPE (lhs))
3485 && TYPE_SIZE (TREE_TYPE (base))
3486 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
3487 TYPE_SIZE (TREE_TYPE (base)),
3488 0)))
3489 && operand_equal_p (lhs, base,
3490 OEP_ADDRESS_OF
3491 | OEP_MATCH_SIDE_EFFECTS))))
3492 return true;
3493 }
3494
3495 /* Now look for non-literal equal bases with the restriction of
3496 handling constant offset and size. */
3497 /* For a must-alias check we need to be able to constrain
3498 the access properly. */
3499 if (!ref->max_size_known_p ())
3500 return false;
3501 poly_int64 size, offset, max_size, ref_offset = ref->offset;
3502 bool reverse;
3503 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
3504 &reverse);
3505 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3506 so base == ref->base does not always hold. */
3507 if (base != ref->base)
3508 {
3509 /* Try using points-to info. */
3510 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
3511 ref->offset, ref->size, ref->max_size))
3512 return true;
3513
3514 /* If both base and ref->base are MEM_REFs, only compare the
3515 first operand, and if the second operand isn't an equal constant,
3516 try to add the offsets into offset and ref_offset. */
3517 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
3518 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
3519 {
3520 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
3521 TREE_OPERAND (ref->base, 1)))
3522 {
3523 poly_offset_int off1 = mem_ref_offset (base);
3524 off1 <<= LOG2_BITS_PER_UNIT;
3525 off1 += offset;
3526 poly_offset_int off2 = mem_ref_offset (ref->base);
3527 off2 <<= LOG2_BITS_PER_UNIT;
3528 off2 += ref_offset;
3529 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
3530 size = -1;
3531 }
3532 }
3533 else
3534 size = -1;
3535 }
3536 /* For a must-alias check we need to be able to constrain
3537 the access properly. */
3538 if (known_eq (size, max_size)
3539 && known_subrange_p (ref_offset, ref->max_size, offset, size))
3540 return true;
3541 }
3542
3543 if (is_gimple_call (stmt))
3544 {
3545 tree callee = gimple_call_fndecl (stmt);
3546 if (callee != NULL_TREE
3547 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3548 switch (DECL_FUNCTION_CODE (callee))
3549 {
3550 case BUILT_IN_FREE:
3551 {
3552 tree ptr = gimple_call_arg (stmt, 0);
3553 tree base = ao_ref_base (ref);
3554 if (base && TREE_CODE (base) == MEM_REF
3555 && TREE_OPERAND (base, 0) == ptr)
3556 return true;
3557 break;
3558 }
3559
3560 case BUILT_IN_MEMCPY:
3561 case BUILT_IN_MEMPCPY:
3562 case BUILT_IN_MEMMOVE:
3563 case BUILT_IN_MEMSET:
3564 case BUILT_IN_MEMCPY_CHK:
3565 case BUILT_IN_MEMPCPY_CHK:
3566 case BUILT_IN_MEMMOVE_CHK:
3567 case BUILT_IN_MEMSET_CHK:
3568 case BUILT_IN_STRNCPY:
3569 case BUILT_IN_STPNCPY:
3570 case BUILT_IN_CALLOC:
3571 {
3572 /* For a must-alias check we need to be able to constrain
3573 the access properly. */
3574 if (!ref->max_size_known_p ())
3575 return false;
3576 tree dest;
3577 tree len;
3578
3579 /* In execution order a calloc call will never kill
3580 anything. However, DSE will (ab)use this interface
3581 to ask if a calloc call writes the same memory locations
3582 as a later assignment, memset, etc. So handle calloc
3583 in the expected way. */
3584 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
3585 {
3586 tree arg0 = gimple_call_arg (stmt, 0);
3587 tree arg1 = gimple_call_arg (stmt, 1);
3588 if (TREE_CODE (arg0) != INTEGER_CST
3589 || TREE_CODE (arg1) != INTEGER_CST)
3590 return false;
3591
3592 dest = gimple_call_lhs (stmt);
3593 if (!dest)
3594 return false;
3595 len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
3596 }
3597 else
3598 {
3599 dest = gimple_call_arg (stmt, 0);
3600 len = gimple_call_arg (stmt, 2);
3601 }
3602 if (!poly_int_tree_p (len))
3603 return false;
3604 tree rbase = ref->base;
3605 poly_offset_int roffset = ref->offset;
3606 ao_ref dref;
3607 ao_ref_init_from_ptr_and_size (&dref, dest, len);
3608 tree base = ao_ref_base (&dref);
3609 poly_offset_int offset = dref.offset;
3610 if (!base || !known_size_p (dref.size))
3611 return false;
3612 if (TREE_CODE (base) == MEM_REF)
3613 {
3614 if (TREE_CODE (rbase) != MEM_REF)
3615 return false;
3616 // Compare pointers.
3617 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
3618 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
3619 base = TREE_OPERAND (base, 0);
3620 rbase = TREE_OPERAND (rbase, 0);
3621 }
3622 if (base == rbase
3623 && known_subrange_p (roffset, ref->max_size, offset,
3624 wi::to_poly_offset (len)
3625 << LOG2_BITS_PER_UNIT))
3626 return true;
3627 break;
3628 }
3629
3630 case BUILT_IN_VA_END:
3631 {
3632 tree ptr = gimple_call_arg (stmt, 0);
3633 if (TREE_CODE (ptr) == ADDR_EXPR)
3634 {
3635 tree base = ao_ref_base (ref);
3636 if (TREE_OPERAND (ptr, 0) == base)
3637 return true;
3638 }
3639 break;
3640 }
3641
3642 default:;
3643 }
3644 }
3645 return false;
3646 }
3647
3648 bool
3649 stmt_kills_ref_p (gimple *stmt, tree ref)
3650 {
3651 ao_ref r;
3652 ao_ref_init (&r, ref);
3653 return stmt_kills_ref_p (stmt, &r);
3654 }
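/* A minimal caller sketch (hypothetical; real callers such as DSE
   take STMT and REF_TREE from the IL):

     ao_ref r;
     ao_ref_init (&r, ref_tree);
     if (stmt_kills_ref_p (stmt, &r))
       ...  STMT overwrites every byte of REF_TREE, so an earlier
            store of the same bytes is dead.

   The tree overload above performs exactly this expansion.  */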
3655
3656
3657 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3658 TARGET or a statement clobbering the memory reference REF, in which
3659 case false is returned. The walk starts with VUSE, one argument of PHI. */
3660
3661 static bool
3662 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
3663 ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
3664 bitmap *visited, bool abort_on_visited,
3665 void *(*translate)(ao_ref *, tree, void *, translate_flags *),
3666 translate_flags disambiguate_only,
3667 void *data)
3668 {
3669 basic_block bb = gimple_bb (phi);
3670
3671 if (!*visited)
3672 *visited = BITMAP_ALLOC (NULL);
3673
3674 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
3675
3676 /* Walk until we hit the target. */
3677 while (vuse != target)
3678 {
3679 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
3680 /* If we are searching for the target VUSE by walking up to
3681 TARGET_BB dominating the original PHI we are finished once
3682 we reach a default def or a definition in a block dominating
3683 that block. Update TARGET and return. */
3684 if (!target
3685 && (gimple_nop_p (def_stmt)
3686 || dominated_by_p (CDI_DOMINATORS,
3687 target_bb, gimple_bb (def_stmt))))
3688 {
3689 target = vuse;
3690 return true;
3691 }
3692
3693 /* Recurse for PHI nodes. */
3694 if (gimple_code (def_stmt) == GIMPLE_PHI)
3695 {
3696 /* An already visited PHI node ends the walk successfully. */
3697 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
3698 return !abort_on_visited;
3699 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3700 visited, abort_on_visited,
3701 translate, data, disambiguate_only);
3702 if (!vuse)
3703 return false;
3704 continue;
3705 }
3706 else if (gimple_nop_p (def_stmt))
3707 return false;
3708 else
3709 {
3710 /* A clobbering statement or the end of the IL ends the walk with failure. */
3711 if ((int)limit <= 0)
3712 return false;
3713 --limit;
3714 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3715 {
3716 translate_flags tf = disambiguate_only;
3717 if (translate
3718 && (*translate) (ref, vuse, data, &tf) == NULL)
3719 ;
3720 else
3721 return false;
3722 }
3723 }
3724 /* If we reach a new basic-block see if we already skipped it
3725 in a previous walk that ended successfully. */
3726 if (gimple_bb (def_stmt) != bb)
3727 {
3728 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
3729 return !abort_on_visited;
3730 bb = gimple_bb (def_stmt);
3731 }
3732 vuse = gimple_vuse (def_stmt);
3733 }
3734 return true;
3735 }
3736
3737
3738 /* Starting from a PHI node for the virtual operand of the memory reference
3739 REF, find a continuation virtual operand that allows the walk to continue
3740 over statements dominating PHI, skipping only statements that cannot possibly
3741 clobber REF. Decrements LIMIT for each alias disambiguation done
3742 and aborts the walk, returning NULL_TREE if it reaches zero.
3743 Returns NULL_TREE if no suitable virtual operand can be found. */
3744
3745 tree
3746 get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
3747 unsigned int &limit, bitmap *visited,
3748 bool abort_on_visited,
3749 void *(*translate)(ao_ref *, tree, void *,
3750 translate_flags *),
3751 void *data,
3752 translate_flags disambiguate_only)
3753 {
3754 unsigned nargs = gimple_phi_num_args (phi);
3755
3756 /* We can simply look through a single-argument PHI. */
3757 if (nargs == 1)
3758 return PHI_ARG_DEF (phi, 0);
3759
3760 /* For two or more arguments try to pairwise skip non-aliasing code
3761 until we hit the phi argument definition that dominates the other one. */
3762 basic_block phi_bb = gimple_bb (phi);
3763 tree arg0, arg1;
3764 unsigned i;
3765
3766 /* Find a candidate for the virtual operand whose definition
3767 dominates those of all others. */
3768 /* First check whether any of the args themselves satisfy this. */
3769 for (i = 0; i < nargs; ++i)
3770 {
3771 arg0 = PHI_ARG_DEF (phi, i);
3772 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3773 break;
3774 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3775 if (def_bb != phi_bb
3776 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3777 break;
3778 arg0 = NULL_TREE;
3779 }
3780 /* If not, check whether we can reach such a candidate by walking defs
3781 until we hit the immediate dominator. maybe_skip_until will
3782 do that for us. */
3783 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3784
3785 /* Then check against the (to be) found candidate. */
3786 for (i = 0; i < nargs; ++i)
3787 {
3788 arg1 = PHI_ARG_DEF (phi, i);
3789 if (arg1 == arg0)
3790 ;
3791 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
3792 limit, visited,
3793 abort_on_visited,
3794 translate,
3795 /* Do not valueize when walking over
3796 backedges. */
3797 dominated_by_p
3798 (CDI_DOMINATORS,
3799 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3800 phi_bb)
3801 ? TR_DISAMBIGUATE
3802 : disambiguate_only, data))
3803 return NULL_TREE;
3804 }
3805
3806 return arg0;
3807 }
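/* For example (a hypothetical diamond CFG, not taken from a real dump):

          .MEM_2 = ...   definition dominating the PHI
          /       \
    (no defs)      .MEM_3 = store that cannot alias REF
          \       /
       .MEM_4 = PHI <.MEM_2, .MEM_3>

   Starting from the PHI defining .MEM_4, .MEM_2 is the candidate
   argument whose definition dominates the definition of the other
   argument; maybe_skip_until then verifies that walking from .MEM_3
   up to .MEM_2 crosses only statements that cannot clobber REF, so
   the walk may continue above the PHI at .MEM_2.  */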
3808
3809 /* Based on the memory reference REF and its virtual use VUSE, call
3810 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3811 itself. That is, for each virtual use whose defining statement does
3812 not clobber REF.
3813
3814 WALKER is called with REF, the current virtual use and DATA. If
3815 WALKER returns non-NULL the walk stops and its result is returned.
3816 At the end of a non-successful walk NULL is returned.
3817
3818 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3819 use whose definition is a statement that may clobber REF, and DATA.
3820 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3821 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3822 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3823 to adjust REF and *DATA to make that valid.
3824
3825 VALUEIZE if non-NULL is called with the next VUSE that is considered
3826 and its return value is substituted for it. This can be used to
3827 implement optimistic value-numbering for example. Note that the
3828 VUSE argument is assumed to be valueized already.
3829
3830 LIMIT specifies the number of alias queries we are allowed to do;
3831 the walk stops when it reaches zero and NULL is returned. LIMIT
3832 is decremented by the number of alias queries (plus adjustments
3833 done by the callbacks) upon return.
3834
3835 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3836
3837 void *
3838 walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
3839 void *(*walker)(ao_ref *, tree, void *),
3840 void *(*translate)(ao_ref *, tree, void *,
3841 translate_flags *),
3842 tree (*valueize)(tree),
3843 unsigned &limit, void *data)
3844 {
3845 bitmap visited = NULL;
3846 void *res;
3847 bool translated = false;
3848
3849 timevar_push (TV_ALIAS_STMT_WALK);
3850
3851 do
3852 {
3853 gimple *def_stmt;
3854
3855 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3856 res = (*walker) (ref, vuse, data);
3857 /* Abort walk. */
3858 if (res == (void *)-1)
3859 {
3860 res = NULL;
3861 break;
3862 }
3863 /* Lookup succeeded. */
3864 else if (res != NULL)
3865 break;
3866
3867 if (valueize)
3868 {
3869 vuse = valueize (vuse);
3870 if (!vuse)
3871 {
3872 res = NULL;
3873 break;
3874 }
3875 }
3876 def_stmt = SSA_NAME_DEF_STMT (vuse);
3877 if (gimple_nop_p (def_stmt))
3878 break;
3879 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3880 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3881 &visited, translated, translate, data);
3882 else
3883 {
3884 if ((int)limit <= 0)
3885 {
3886 res = NULL;
3887 break;
3888 }
3889 --limit;
3890 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3891 {
3892 if (!translate)
3893 break;
3894 translate_flags disambiguate_only = TR_TRANSLATE;
3895 res = (*translate) (ref, vuse, data, &disambiguate_only);
3896 /* Failed lookup and translation. */
3897 if (res == (void *)-1)
3898 {
3899 res = NULL;
3900 break;
3901 }
3902 /* Lookup succeeded. */
3903 else if (res != NULL)
3904 break;
3905 /* Translation succeeded, continue walking. */
3906 translated = translated || disambiguate_only == TR_TRANSLATE;
3907 }
3908 vuse = gimple_vuse (def_stmt);
3909 }
3910 }
3911 while (vuse);
3912
3913 if (visited)
3914 BITMAP_FREE (visited);
3915
3916 timevar_pop (TV_ALIAS_STMT_WALK);
3917
3918 return res;
3919 }
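/* A minimal caller sketch (hypothetical; the walker body and the
   limit constant are placeholders, real callers like value-numbering
   thread their own state through DATA):

     static void *
     find_my_def (ao_ref *ref, tree vuse, void *data)
     {
       gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
       ...  inspect DEF_STMT; return non-NULL to stop with a result,
            (void *)-1 to abort, NULL to keep walking.
     }

     unsigned limit = 1000;
     void *res = walk_non_aliased_vuses (&r, vuse, true, find_my_def,
                                         NULL, NULL, limit, NULL);  */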
3920
3921
3922 /* Based on the memory reference REF, call WALKER for each vdef whose
3923 defining statement may clobber REF, starting with VDEF. If REF
3924 is NULL_TREE, each defining statement is visited.
3925
3926 WALKER is called with REF, the current vdef and DATA. If WALKER
3927 returns true the walk is stopped, otherwise it continues.
3928
3929 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3930 The pointer may be NULL and then we do not track this information.
3931
3932 At PHI nodes walk_aliased_vdefs forks into one walk for each
3933 PHI argument (but only one walk continues on merge points); the
3934 walk is considered successful if any of the forked walks was.
3935
3936 The function returns the number of statements walked or -1 if
3937 LIMIT stmts were walked and the walk was aborted at this point.
3938 If LIMIT is zero the walk is not aborted. */
3939
3940 static int
3941 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3942 bool (*walker)(ao_ref *, tree, void *), void *data,
3943 bitmap *visited, unsigned int cnt,
3944 bool *function_entry_reached, unsigned limit)
3945 {
3946 do
3947 {
3948 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3949
3950 if (*visited
3951 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3952 return cnt;
3953
3954 if (gimple_nop_p (def_stmt))
3955 {
3956 if (function_entry_reached)
3957 *function_entry_reached = true;
3958 return cnt;
3959 }
3960 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3961 {
3962 unsigned i;
3963 if (!*visited)
3964 *visited = BITMAP_ALLOC (NULL);
3965 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3966 {
3967 int res = walk_aliased_vdefs_1 (ref,
3968 gimple_phi_arg_def (def_stmt, i),
3969 walker, data, visited, cnt,
3970 function_entry_reached, limit);
3971 if (res == -1)
3972 return -1;
3973 cnt = res;
3974 }
3975 return cnt;
3976 }
3977
3978 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3979 cnt++;
3980 if (cnt == limit)
3981 return -1;
3982 if ((!ref
3983 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3984 && (*walker) (ref, vdef, data))
3985 return cnt;
3986
3987 vdef = gimple_vuse (def_stmt);
3988 }
3989 while (1);
3990 }
3991
3992 int
3993 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3994 bool (*walker)(ao_ref *, tree, void *), void *data,
3995 bitmap *visited,
3996 bool *function_entry_reached, unsigned int limit)
3997 {
3998 bitmap local_visited = NULL;
3999 int ret;
4000
4001 timevar_push (TV_ALIAS_STMT_WALK);
4002
4003 if (function_entry_reached)
4004 *function_entry_reached = false;
4005
4006 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
4007 visited ? visited : &local_visited, 0,
4008 function_entry_reached, limit);
4009 if (local_visited)
4010 BITMAP_FREE (local_visited);
4011
4012 timevar_pop (TV_ALIAS_STMT_WALK);
4013
4014 return ret;
4015 }
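/* A minimal caller sketch (hypothetical; the walker and the limit of
   100 are placeholders):

     static bool
     note_clobber (ao_ref *ref, tree vdef, void *data)
     {
       ...  record SSA_NAME_DEF_STMT (vdef) as a possible clobber;
       return false;   false continues the walk, true stops it.
     }

     int n = walk_aliased_vdefs (&r, gimple_vuse (stmt), note_clobber,
                                 NULL, NULL, NULL, 100);
     if (n == -1)
       ...  100 defs were visited without finishing, give up.  */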
4016
4017 /* Verify validity of the fnspec string.
4018 See attr-fnspec.h for details. */
4019
4020 void
4021 attr_fnspec::verify ()
4022 {
4023 bool err = false;
4024
4025 /* Check return value specifier. */
4026 if (len < return_desc_size)
4027 err = true;
4028 else if ((len - return_desc_size) % arg_desc_size)
4029 err = true;
4030 else if ((str[0] < '1' || str[0] > '4')
4031 && str[0] != '.' && str[0] != 'm'
4032 /* FIXME: Fortran trans-decl.c contains multiple wrong fnspec
4033 strings. The following characters have no meaning. */
4034 && str[0] != 'R' && str[0] != 'W')
4035 err = true;
4036
4037 if (str[1] != ' ')
4038 err = true;
4039
4040 /* Now check all parameters. */
4041 for (unsigned int i = 0; arg_specified_p (i); i++)
4042 {
4043 unsigned int idx = arg_idx (i);
4044 switch (str[idx])
4045 {
4046 case 'x':
4047 case 'X':
4048 case 'r':
4049 case 'R':
4050 case 'w':
4051 case 'W':
4052 case '.':
4053 break;
4054 default:
4055 err = true;
4056 }
4057 if (str[idx + 1] != ' ')
4058 err = true;
4059 }
4060 if (err)
4061 internal_error ("invalid fn spec attribute \"%s\"", str);
4062 }
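/* For illustration (made-up strings; see attr-fnspec.h for the exact
   meaning of the individual characters): ". R " passes the checks
   above, being one two-character return specifier ('.' plus the
   mandatory space) followed by one two-character argument specifier
   ('R' plus its space).  ".R" would be rejected because str[1] is not
   a space, and ". q " because 'q' is not a known argument
   specifier.  */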