gcc/tree-ssa-alias.c
1 /* Alias analysis for trees.
2 Copyright (C) 2004-2021 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "alias.h"
34 #include "fold-const.h"
35 #include "langhooks.h"
36 #include "dumpfile.h"
37 #include "tree-eh.h"
38 #include "tree-dfa.h"
39 #include "ipa-reference.h"
40 #include "varasm.h"
41 #include "ipa-modref-tree.h"
42 #include "ipa-modref.h"
43 #include "attr-fnspec.h"
44 #include "errors.h"
45 #include "dbgcnt.h"
46 #include "gimple-pretty-print.h"
47 #include "print-tree.h"
48 #include "tree-ssa-alias-compare.h"
49 #include "builtins.h"
50
51 /* Broad overview of how alias analysis on gimple works:
52
53 Statements clobbering or using memory are linked through the
54 virtual operand factored use-def chain. The virtual operand
55 is unique per function, its symbol is accessible via gimple_vop (cfun).
56 Virtual operands are used for efficiently walking memory statements
57 in the gimple IL and are useful for things like value-numbering as
58 a generation count for memory references.
59
60 SSA_NAME pointers may have associated points-to information
61 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
62 points-to information is (re-)computed by the TODO_rebuild_alias
63 pass manager todo. Points-to information is also used for more
64 precise tracking of call-clobbered and call-used variables and
65 related disambiguations.
66
67 This file contains functions for disambiguating memory references,
68 the so called alias-oracle and tools for walking of the gimple IL.
69
70 The main alias-oracle entry-points are
71
72 bool stmt_may_clobber_ref_p (gimple *, tree)
73
74 This function queries if a statement may invalidate (parts of)
75 the memory designated by the reference tree argument.
76
77 bool ref_maybe_used_by_stmt_p (gimple *, tree)
78
79 This function queries if a statement may need (parts of) the
80 memory designated by the reference tree argument.
81
82 There are variants of these functions that only handle the call
83 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
84 Note that these do not disambiguate against a possible call lhs.
85
86 bool refs_may_alias_p (tree, tree)
87
88 This function tries to disambiguate two reference trees.
89
90 bool ptr_deref_may_alias_global_p (tree)
91
92 This function queries if dereferencing a pointer variable may
93 alias global memory.
94
95 More low-level disambiguators are available and documented in
96 this file. Low-level disambiguators dealing with points-to
97 information are in tree-ssa-structalias.c. */
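/* As a purely illustrative sketch (not part of the oracle itself), a pass
   holding a store statement STORE and the reference tree READ_REF of a
   later load could consult the entry points above like so; STORE and
   READ_REF are hypothetical names:

     if (!stmt_may_clobber_ref_p (store, read_ref))
       ... the store cannot change *READ_REF, so the load may be hoisted
	   across it or CSEd with an earlier load of the same reference ...

   ref_maybe_used_by_stmt_p answers the analogous question for reads.  */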
98
99 static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
100 static bool nonoverlapping_component_refs_p (const_tree, const_tree);
101
102 /* Query statistics for the different low-level disambiguators.
103 A high-level query may trigger multiple of them. */
104
105 static struct {
106 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
107 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
108 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
109 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
110 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
111 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
112 unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
113 unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
114 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
115 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
116 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
117 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
118 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
119 unsigned HOST_WIDE_INT stmt_kills_ref_p_no;
120 unsigned HOST_WIDE_INT stmt_kills_ref_p_yes;
121 unsigned HOST_WIDE_INT modref_use_may_alias;
122 unsigned HOST_WIDE_INT modref_use_no_alias;
123 unsigned HOST_WIDE_INT modref_clobber_may_alias;
124 unsigned HOST_WIDE_INT modref_clobber_no_alias;
125 unsigned HOST_WIDE_INT modref_kill_no;
126 unsigned HOST_WIDE_INT modref_kill_yes;
127 unsigned HOST_WIDE_INT modref_tests;
128 unsigned HOST_WIDE_INT modref_baseptr_tests;
129 } alias_stats;
130
131 void
132 dump_alias_stats (FILE *s)
133 {
134 fprintf (s, "\nAlias oracle query stats:\n");
135 fprintf (s, " refs_may_alias_p: "
136 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
137 HOST_WIDE_INT_PRINT_DEC" queries\n",
138 alias_stats.refs_may_alias_p_no_alias,
139 alias_stats.refs_may_alias_p_no_alias
140 + alias_stats.refs_may_alias_p_may_alias);
141 fprintf (s, " ref_maybe_used_by_call_p: "
142 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
143 HOST_WIDE_INT_PRINT_DEC" queries\n",
144 alias_stats.ref_maybe_used_by_call_p_no_alias,
145 alias_stats.ref_maybe_used_by_call_p_no_alias
146 + alias_stats.ref_maybe_used_by_call_p_may_alias);
147 fprintf (s, " call_may_clobber_ref_p: "
148 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
149 HOST_WIDE_INT_PRINT_DEC" queries\n",
150 alias_stats.call_may_clobber_ref_p_no_alias,
151 alias_stats.call_may_clobber_ref_p_no_alias
152 + alias_stats.call_may_clobber_ref_p_may_alias);
153 fprintf (s, " stmt_kills_ref_p: "
154 HOST_WIDE_INT_PRINT_DEC" kills, "
155 HOST_WIDE_INT_PRINT_DEC" queries\n",
156 alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes,
157 alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes
158 + alias_stats.stmt_kills_ref_p_no + alias_stats.modref_kill_no);
159 fprintf (s, " nonoverlapping_component_refs_p: "
160 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
161 HOST_WIDE_INT_PRINT_DEC" queries\n",
162 alias_stats.nonoverlapping_component_refs_p_no_alias,
163 alias_stats.nonoverlapping_component_refs_p_no_alias
164 + alias_stats.nonoverlapping_component_refs_p_may_alias);
165 fprintf (s, " nonoverlapping_refs_since_match_p: "
166 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
167 HOST_WIDE_INT_PRINT_DEC" must overlaps, "
168 HOST_WIDE_INT_PRINT_DEC" queries\n",
169 alias_stats.nonoverlapping_refs_since_match_p_no_alias,
170 alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
171 alias_stats.nonoverlapping_refs_since_match_p_no_alias
172 + alias_stats.nonoverlapping_refs_since_match_p_may_alias
173 + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
174 fprintf (s, " aliasing_component_refs_p: "
175 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
176 HOST_WIDE_INT_PRINT_DEC" queries\n",
177 alias_stats.aliasing_component_refs_p_no_alias,
178 alias_stats.aliasing_component_refs_p_no_alias
179 + alias_stats.aliasing_component_refs_p_may_alias);
180 dump_alias_stats_in_alias_c (s);
181 fprintf (s, "\nModref stats:\n");
182 fprintf (s, " modref kill: "
183 HOST_WIDE_INT_PRINT_DEC" kills, "
184 HOST_WIDE_INT_PRINT_DEC" queries\n",
185 alias_stats.modref_kill_yes,
186 alias_stats.modref_kill_yes
187 + alias_stats.modref_kill_no);
188 fprintf (s, " modref use: "
189 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
190 HOST_WIDE_INT_PRINT_DEC" queries\n",
191 alias_stats.modref_use_no_alias,
192 alias_stats.modref_use_no_alias
193 + alias_stats.modref_use_may_alias);
194 fprintf (s, " modref clobber: "
195 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
196 HOST_WIDE_INT_PRINT_DEC" queries\n"
197 " " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
198 " " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
199 alias_stats.modref_clobber_no_alias,
200 alias_stats.modref_clobber_no_alias
201 + alias_stats.modref_clobber_may_alias,
202 alias_stats.modref_tests,
203 ((double)alias_stats.modref_tests)
204 / (alias_stats.modref_clobber_no_alias
205 + alias_stats.modref_clobber_may_alias),
206 alias_stats.modref_baseptr_tests,
207 ((double)alias_stats.modref_baseptr_tests)
208 / (alias_stats.modref_clobber_no_alias
209 + alias_stats.modref_clobber_may_alias));
210 }
211
212
213 /* Return true, if dereferencing PTR may alias with a global variable. */
214
215 bool
216 ptr_deref_may_alias_global_p (tree ptr)
217 {
218 struct ptr_info_def *pi;
219
220 /* If we end up with a pointer constant here, it may point
221 to global memory. */
222 if (TREE_CODE (ptr) != SSA_NAME)
223 return true;
224
225 pi = SSA_NAME_PTR_INFO (ptr);
226
227 /* If we do not have points-to information for this variable,
228 we have to punt. */
229 if (!pi)
230 return true;
231
232 /* ??? This does not use TBAA to prune globals ptr may not access. */
233 return pt_solution_includes_global (&pi->pt);
234 }
235
236 /* Return true if dereferencing PTR may alias DECL.
237 The caller is responsible for applying TBAA to see if PTR
238 may access DECL at all. */
239
240 static bool
241 ptr_deref_may_alias_decl_p (tree ptr, tree decl)
242 {
243 struct ptr_info_def *pi;
244
245 /* Conversions are irrelevant for points-to information and
246 data-dependence analysis can feed us those. */
247 STRIP_NOPS (ptr);
248
249 /* Anything we do not explicitly handle aliases. */
250 if ((TREE_CODE (ptr) != SSA_NAME
251 && TREE_CODE (ptr) != ADDR_EXPR
252 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
253 || !POINTER_TYPE_P (TREE_TYPE (ptr))
254 || (!VAR_P (decl)
255 && TREE_CODE (decl) != PARM_DECL
256 && TREE_CODE (decl) != RESULT_DECL))
257 return true;
258
259 /* Disregard pointer offsetting. */
260 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
261 {
262 do
263 {
264 ptr = TREE_OPERAND (ptr, 0);
265 }
266 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
267 return ptr_deref_may_alias_decl_p (ptr, decl);
268 }
269
270 /* ADDR_EXPR pointers either just offset another pointer or directly
271 specify the pointed-to set. */
272 if (TREE_CODE (ptr) == ADDR_EXPR)
273 {
274 tree base = get_base_address (TREE_OPERAND (ptr, 0));
275 if (base
276 && (TREE_CODE (base) == MEM_REF
277 || TREE_CODE (base) == TARGET_MEM_REF))
278 ptr = TREE_OPERAND (base, 0);
279 else if (base
280 && DECL_P (base))
281 return compare_base_decls (base, decl) != 0;
282 else if (base
283 && CONSTANT_CLASS_P (base))
284 return false;
285 else
286 return true;
287 }
288
289 /* Non-aliased variables cannot be pointed to. */
290 if (!may_be_aliased (decl))
291 return false;
292
293 /* If we do not have useful points-to information for this pointer
294 we cannot disambiguate anything else. */
295 pi = SSA_NAME_PTR_INFO (ptr);
296 if (!pi)
297 return true;
298
299 return pt_solution_includes (&pi->pt, decl);
300 }
301
302 /* Return true if dereferenced PTR1 and PTR2 may alias.
303 The caller is responsible for applying TBAA to see if accesses
304 through PTR1 and PTR2 may conflict at all. */
305
306 bool
307 ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
308 {
309 struct ptr_info_def *pi1, *pi2;
310
311 /* Conversions are irrelevant for points-to information and
312 data-dependence analysis can feed us those. */
313 STRIP_NOPS (ptr1);
314 STRIP_NOPS (ptr2);
315
316 /* Disregard pointer offsetting. */
317 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
318 {
319 do
320 {
321 ptr1 = TREE_OPERAND (ptr1, 0);
322 }
323 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
324 return ptr_derefs_may_alias_p (ptr1, ptr2);
325 }
326 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
327 {
328 do
329 {
330 ptr2 = TREE_OPERAND (ptr2, 0);
331 }
332 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
333 return ptr_derefs_may_alias_p (ptr1, ptr2);
334 }
335
336 /* ADDR_EXPR pointers either just offset another pointer or directly
337 specify the pointed-to set. */
338 if (TREE_CODE (ptr1) == ADDR_EXPR)
339 {
340 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
341 if (base
342 && (TREE_CODE (base) == MEM_REF
343 || TREE_CODE (base) == TARGET_MEM_REF))
344 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
345 else if (base
346 && DECL_P (base))
347 return ptr_deref_may_alias_decl_p (ptr2, base);
348 else
349 return true;
350 }
351 if (TREE_CODE (ptr2) == ADDR_EXPR)
352 {
353 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
354 if (base
355 && (TREE_CODE (base) == MEM_REF
356 || TREE_CODE (base) == TARGET_MEM_REF))
357 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
358 else if (base
359 && DECL_P (base))
360 return ptr_deref_may_alias_decl_p (ptr1, base);
361 else
362 return true;
363 }
364
365 /* From here we require SSA name pointers. Anything else aliases. */
366 if (TREE_CODE (ptr1) != SSA_NAME
367 || TREE_CODE (ptr2) != SSA_NAME
368 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
369 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
370 return true;
371
372 /* We may end up with two empty points-to solutions for the same pointer.
373 In this case we still want to say both pointers alias, so shortcut
374 that here. */
375 if (ptr1 == ptr2)
376 return true;
377
378 /* If we do not have useful points-to information for either pointer
379 we cannot disambiguate anything else. */
380 pi1 = SSA_NAME_PTR_INFO (ptr1);
381 pi2 = SSA_NAME_PTR_INFO (ptr2);
382 if (!pi1 || !pi2)
383 return true;
384
385 /* ??? This does not use TBAA to prune decls from the intersection
386 that not both pointers may access. */
387 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
388 }
389
390 /* Return true if dereferencing PTR may alias *REF.
391 The caller is responsible for applying TBAA to see if PTR
392 may access *REF at all. */
393
394 static bool
395 ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
396 {
397 tree base = ao_ref_base (ref);
398
399 if (TREE_CODE (base) == MEM_REF
400 || TREE_CODE (base) == TARGET_MEM_REF)
401 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
402 else if (DECL_P (base))
403 return ptr_deref_may_alias_decl_p (ptr, base);
404
405 return true;
406 }
407
408 /* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
409
410 bool
411 ptrs_compare_unequal (tree ptr1, tree ptr2)
412 {
413 /* First resolve the pointers down to a SSA name pointer base or
414 a VAR_DECL, PARM_DECL or RESULT_DECL. This explicitly does
415 not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
416 or STRING_CSTs which need points-to adjustments to track them
417 in the points-to sets. */
418 tree obj1 = NULL_TREE;
419 tree obj2 = NULL_TREE;
420 if (TREE_CODE (ptr1) == ADDR_EXPR)
421 {
422 tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
423 if (! tem)
424 return false;
425 if (VAR_P (tem)
426 || TREE_CODE (tem) == PARM_DECL
427 || TREE_CODE (tem) == RESULT_DECL)
428 obj1 = tem;
429 else if (TREE_CODE (tem) == MEM_REF)
430 ptr1 = TREE_OPERAND (tem, 0);
431 }
432 if (TREE_CODE (ptr2) == ADDR_EXPR)
433 {
434 tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
435 if (! tem)
436 return false;
437 if (VAR_P (tem)
438 || TREE_CODE (tem) == PARM_DECL
439 || TREE_CODE (tem) == RESULT_DECL)
440 obj2 = tem;
441 else if (TREE_CODE (tem) == MEM_REF)
442 ptr2 = TREE_OPERAND (tem, 0);
443 }
444
445 /* Canonicalize ptr vs. object. */
446 if (TREE_CODE (ptr1) == SSA_NAME && obj2)
447 {
448 std::swap (ptr1, ptr2);
449 std::swap (obj1, obj2);
450 }
451
452 if (obj1 && obj2)
453 /* Other code handles this correctly, no need to duplicate it here. */;
454 else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
455 {
456 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
457 /* We may not use restrict to optimize pointer comparisons.
458 See PR71062. So we have to assume that restrict-pointed-to
459 may be in fact obj1. */
460 if (!pi
461 || pi->pt.vars_contains_restrict
462 || pi->pt.vars_contains_interposable)
463 return false;
464 if (VAR_P (obj1)
465 && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
466 {
467 varpool_node *node = varpool_node::get (obj1);
468 /* If obj1 may bind to NULL give up (see below). */
469 if (! node
470 || ! node->nonzero_address ()
471 || ! decl_binds_to_current_def_p (obj1))
472 return false;
473 }
474 return !pt_solution_includes (&pi->pt, obj1);
475 }
476
477 /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
478 but those require pt.null to be conservatively correct. */
479
480 return false;
481 }
482
483 /* Returns whether a memory reference with base BASE may refer to global memory. */
484
485 static bool
486 ref_may_alias_global_p_1 (tree base)
487 {
488 if (DECL_P (base))
489 return is_global_var (base);
490 else if (TREE_CODE (base) == MEM_REF
491 || TREE_CODE (base) == TARGET_MEM_REF)
492 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
493 return true;
494 }
495
496 bool
497 ref_may_alias_global_p (ao_ref *ref)
498 {
499 tree base = ao_ref_base (ref);
500 return ref_may_alias_global_p_1 (base);
501 }
502
503 bool
504 ref_may_alias_global_p (tree ref)
505 {
506 tree base = get_base_address (ref);
507 return ref_may_alias_global_p_1 (base);
508 }
509
510 /* Return true if STMT may clobber global memory. */
511
512 bool
513 stmt_may_clobber_global_p (gimple *stmt)
514 {
515 tree lhs;
516
517 if (!gimple_vdef (stmt))
518 return false;
519
520 /* ??? We can ask the oracle whether an artificial pointer
521 dereference with a pointer with points-to information covering
522 all global memory (what about non-address taken memory?) may be
523 clobbered by this call. As there is at the moment no convenient
524 way of doing that without generating garbage do some manual
525 checking instead.
526 ??? We could make a NULL ao_ref argument to the various
527 predicates special, meaning any global memory. */
528
529 switch (gimple_code (stmt))
530 {
531 case GIMPLE_ASSIGN:
532 lhs = gimple_assign_lhs (stmt);
533 return (TREE_CODE (lhs) != SSA_NAME
534 && ref_may_alias_global_p (lhs));
535 case GIMPLE_CALL:
536 return true;
537 default:
538 return true;
539 }
540 }
541
542
543 /* Dump alias information on FILE. */
544
545 void
546 dump_alias_info (FILE *file)
547 {
548 unsigned i;
549 tree ptr;
550 const char *funcname
551 = lang_hooks.decl_printable_name (current_function_decl, 2);
552 tree var;
553
554 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
555
556 fprintf (file, "Aliased symbols\n\n");
557
558 FOR_EACH_LOCAL_DECL (cfun, i, var)
559 {
560 if (may_be_aliased (var))
561 dump_variable (file, var);
562 }
563
564 fprintf (file, "\nCall clobber information\n");
565
566 fprintf (file, "\nESCAPED");
567 dump_points_to_solution (file, &cfun->gimple_df->escaped);
568
569 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
570
571 FOR_EACH_SSA_NAME (i, ptr, cfun)
572 {
573 struct ptr_info_def *pi;
574
575 if (!POINTER_TYPE_P (TREE_TYPE (ptr))
576 || SSA_NAME_IN_FREE_LIST (ptr))
577 continue;
578
579 pi = SSA_NAME_PTR_INFO (ptr);
580 if (pi)
581 dump_points_to_info_for (file, ptr);
582 }
583
584 fprintf (file, "\n");
585 }
586
587
588 /* Dump alias information on stderr. */
589
590 DEBUG_FUNCTION void
591 debug_alias_info (void)
592 {
593 dump_alias_info (stderr);
594 }
595
596
597 /* Dump the points-to set *PT into FILE. */
598
599 void
600 dump_points_to_solution (FILE *file, struct pt_solution *pt)
601 {
602 if (pt->anything)
603 fprintf (file, ", points-to anything");
604
605 if (pt->nonlocal)
606 fprintf (file, ", points-to non-local");
607
608 if (pt->escaped)
609 fprintf (file, ", points-to escaped");
610
611 if (pt->ipa_escaped)
612 fprintf (file, ", points-to unit escaped");
613
614 if (pt->null)
615 fprintf (file, ", points-to NULL");
616
617 if (pt->vars)
618 {
619 fprintf (file, ", points-to vars: ");
620 dump_decl_set (file, pt->vars);
621 if (pt->vars_contains_nonlocal
622 || pt->vars_contains_escaped
623 || pt->vars_contains_escaped_heap
624 || pt->vars_contains_restrict)
625 {
626 const char *comma = "";
627 fprintf (file, " (");
628 if (pt->vars_contains_nonlocal)
629 {
630 fprintf (file, "nonlocal");
631 comma = ", ";
632 }
633 if (pt->vars_contains_escaped)
634 {
635 fprintf (file, "%sescaped", comma);
636 comma = ", ";
637 }
638 if (pt->vars_contains_escaped_heap)
639 {
640 fprintf (file, "%sescaped heap", comma);
641 comma = ", ";
642 }
643 if (pt->vars_contains_restrict)
644 {
645 fprintf (file, "%srestrict", comma);
646 comma = ", ";
647 }
648 if (pt->vars_contains_interposable)
649 fprintf (file, "%sinterposable", comma);
650 fprintf (file, ")");
651 }
652 }
653 }
654
655
656 /* Unified dump function for pt_solution. */
657
658 DEBUG_FUNCTION void
659 debug (pt_solution &ref)
660 {
661 dump_points_to_solution (stderr, &ref);
662 }
663
664 DEBUG_FUNCTION void
665 debug (pt_solution *ptr)
666 {
667 if (ptr)
668 debug (*ptr);
669 else
670 fprintf (stderr, "<nil>\n");
671 }
672
673
674 /* Dump points-to information for SSA_NAME PTR into FILE. */
675
676 void
677 dump_points_to_info_for (FILE *file, tree ptr)
678 {
679 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
680
681 print_generic_expr (file, ptr, dump_flags);
682
683 if (pi)
684 dump_points_to_solution (file, &pi->pt);
685 else
686 fprintf (file, ", points-to anything");
687
688 fprintf (file, "\n");
689 }
690
691
692 /* Dump points-to information for VAR into stderr. */
693
694 DEBUG_FUNCTION void
695 debug_points_to_info_for (tree var)
696 {
697 dump_points_to_info_for (stderr, var);
698 }
699
700
701 /* Initializes the alias-oracle reference representation *R from REF. */
702
703 void
704 ao_ref_init (ao_ref *r, tree ref)
705 {
706 r->ref = ref;
707 r->base = NULL_TREE;
708 r->offset = 0;
709 r->size = -1;
710 r->max_size = -1;
711 r->ref_alias_set = -1;
712 r->base_alias_set = -1;
713 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
714 }
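/* A minimal usage sketch (REF is a hypothetical reference tree): callers
   typically build the representation on the stack and let the accessors
   below fill in the base, extent and alias sets lazily:

     ao_ref r;
     ao_ref_init (&r, ref);
     tree base = ao_ref_base (&r);              base/offset computed on demand
     alias_set_type set = ao_ref_alias_set (&r);     cached in r.ref_alias_set  */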
715
716 /* Returns the base object of the memory reference *REF. */
717
718 tree
719 ao_ref_base (ao_ref *ref)
720 {
721 bool reverse;
722
723 if (ref->base)
724 return ref->base;
725 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
726 &ref->max_size, &reverse);
727 return ref->base;
728 }
729
730 /* Returns the base object alias set of the memory reference *REF. */
731
732 alias_set_type
733 ao_ref_base_alias_set (ao_ref *ref)
734 {
735 tree base_ref;
736 if (ref->base_alias_set != -1)
737 return ref->base_alias_set;
738 if (!ref->ref)
739 return 0;
740 base_ref = ref->ref;
741 if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
742 base_ref = TREE_OPERAND (base_ref, 0);
743 while (handled_component_p (base_ref))
744 base_ref = TREE_OPERAND (base_ref, 0);
745 ref->base_alias_set = get_alias_set (base_ref);
746 return ref->base_alias_set;
747 }
748
749 /* Returns the reference alias set of the memory reference *REF. */
750
751 alias_set_type
752 ao_ref_alias_set (ao_ref *ref)
753 {
754 if (ref->ref_alias_set != -1)
755 return ref->ref_alias_set;
756 if (!ref->ref)
757 return 0;
758 ref->ref_alias_set = get_alias_set (ref->ref);
759 return ref->ref_alias_set;
760 }
761
762 /* Returns a type satisfying
763 get_deref_alias_set (type) == ao_ref_base_alias_set (REF). */
764
765 tree
766 ao_ref_base_alias_ptr_type (ao_ref *ref)
767 {
768 tree base_ref;
769
770 if (!ref->ref)
771 return NULL_TREE;
772 base_ref = ref->ref;
773 if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
774 base_ref = TREE_OPERAND (base_ref, 0);
775 while (handled_component_p (base_ref))
776 base_ref = TREE_OPERAND (base_ref, 0);
777 tree ret = reference_alias_ptr_type (base_ref);
778 return ret;
779 }
780
781 /* Returns a type satisfying
782 get_deref_alias_set (type) == ao_ref_alias_set (REF). */
783
784 tree
785 ao_ref_alias_ptr_type (ao_ref *ref)
786 {
787 if (!ref->ref)
788 return NULL_TREE;
789 tree ret = reference_alias_ptr_type (ref->ref);
790 return ret;
791 }
792
793
794 /* Init an alias-oracle reference representation from a gimple pointer
795 PTR and a range specified by OFFSET, SIZE and MAX_SIZE under the assumption
796 that RANGE_KNOWN is set.
797
798 The access is assumed to be only to or after the pointer target adjusted
799 by the offset, not before it (even in the case RANGE_KNOWN is false). */
800
801 void
802 ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
803 bool range_known,
804 poly_int64 offset,
805 poly_int64 size,
806 poly_int64 max_size)
807 {
808 poly_int64 t, extra_offset = 0;
809
810 ref->ref = NULL_TREE;
811 if (TREE_CODE (ptr) == SSA_NAME)
812 {
813 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
814 if (gimple_assign_single_p (stmt)
815 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
816 ptr = gimple_assign_rhs1 (stmt);
817 else if (is_gimple_assign (stmt)
818 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
819 && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
820 {
821 ptr = gimple_assign_rhs1 (stmt);
822 extra_offset *= BITS_PER_UNIT;
823 }
824 }
825
826 if (TREE_CODE (ptr) == ADDR_EXPR)
827 {
828 ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
829 if (ref->base)
830 ref->offset = BITS_PER_UNIT * t;
831 else
832 {
833 range_known = false;
834 ref->offset = 0;
835 ref->base = get_base_address (TREE_OPERAND (ptr, 0));
836 }
837 }
838 else
839 {
840 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
841 ref->base = build2 (MEM_REF, char_type_node,
842 ptr, null_pointer_node);
843 ref->offset = 0;
844 }
845 ref->offset += extra_offset + offset;
846 if (range_known)
847 {
848 ref->max_size = max_size;
849 ref->size = size;
850 }
851 else
852 ref->max_size = ref->size = -1;
853 ref->ref_alias_set = 0;
854 ref->base_alias_set = 0;
855 ref->volatile_p = false;
856 }
857
858 /* Init an alias-oracle reference representation from a gimple pointer
859 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
860 size is assumed to be unknown. The access is assumed to be only
861 to or after the pointer target, not before it. */
862
863 void
864 ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
865 {
866 poly_int64 size_hwi;
867 if (size
868 && poly_int_tree_p (size, &size_hwi)
869 && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
870 {
871 size_hwi = size_hwi * BITS_PER_UNIT;
872 ao_ref_init_from_ptr_and_range (ref, ptr, true, 0, size_hwi, size_hwi);
873 }
874 else
875 ao_ref_init_from_ptr_and_range (ref, ptr, false, 0, -1, -1);
876 }
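/* For instance (an illustrative sketch; DEST and LEN stand for hypothetical
   gimple call arguments), the destination of a memset-like call can be
   modelled as

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, dest, len);

   If LEN is NULL_TREE or not constant, the extent simply stays unknown,
   i.e. size and max_size end up as -1.  */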
877
878 /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
879 Return -1 if S1 < S2
880 Return 1 if S1 > S2
881 Return 0 if equal or incomparable. */
882
883 static int
884 compare_sizes (tree s1, tree s2)
885 {
886 if (!s1 || !s2)
887 return 0;
888
889 poly_uint64 size1;
890 poly_uint64 size2;
891
892 if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
893 return 0;
894 if (known_lt (size1, size2))
895 return -1;
896 if (known_lt (size2, size1))
897 return 1;
898 return 0;
899 }
900
901 /* Compare TYPE1 and TYPE2 by their sizes.
902 Return -1 if size of TYPE1 < size of TYPE2
903 Return 1 if size of TYPE1 > size of TYPE2
904 Return 0 if types are of equal sizes or we can not compare them. */
905
906 static int
907 compare_type_sizes (tree type1, tree type2)
908 {
909 /* Be conservative for arrays and vectors. We want to support partial
910 overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
911 while (TREE_CODE (type1) == ARRAY_TYPE
912 || TREE_CODE (type1) == VECTOR_TYPE)
913 type1 = TREE_TYPE (type1);
914 while (TREE_CODE (type2) == ARRAY_TYPE
915 || TREE_CODE (type2) == VECTOR_TYPE)
916 type2 = TREE_TYPE (type2);
917 return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
918 }
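/* For example, on a target where int is wider than short,
   compare_type_sizes (int[3], short) strips the array down to its element
   and effectively compares the size of int with the size of short,
   returning 1; int[3] versus int[7] compares as 0 ("equal or incomparable")
   because only the element types are looked at.  */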
919
920 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
921 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
922 decide. */
923
924 static inline int
925 same_type_for_tbaa (tree type1, tree type2)
926 {
927 type1 = TYPE_MAIN_VARIANT (type1);
928 type2 = TYPE_MAIN_VARIANT (type2);
929
930 /* Handle the most common case first. */
931 if (type1 == type2)
932 return 1;
933
934 /* If we would have to do structural comparison bail out. */
935 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
936 || TYPE_STRUCTURAL_EQUALITY_P (type2))
937 return -1;
938
939 /* Compare the canonical types. */
940 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
941 return 1;
942
943 /* ??? Array types are not properly unified in all cases as we have
944 spurious changes in the index types for example. Removing this
945 causes all sorts of problems with the Fortran frontend. */
946 if (TREE_CODE (type1) == ARRAY_TYPE
947 && TREE_CODE (type2) == ARRAY_TYPE)
948 return -1;
949
950 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
951 object of one of its constrained subtypes, e.g. when a function with an
952 unconstrained parameter passed by reference is called on an object and
953 inlined. But, even in the case of a fixed size, type and subtypes are
954 not equivalent enough as to share the same TYPE_CANONICAL, since this
955 would mean that conversions between them are useless, whereas they are
956 not (e.g. type and subtypes can have different modes). So, in the end,
957 they are only guaranteed to have the same alias set. */
958 alias_set_type set1 = get_alias_set (type1);
959 alias_set_type set2 = get_alias_set (type2);
960 if (set1 == set2)
961 return -1;
962
963 /* Pointers to void are considered compatible with all other pointers,
964 so for two pointers see what the alias set resolution thinks. */
965 if (POINTER_TYPE_P (type1)
966 && POINTER_TYPE_P (type2)
967 && alias_sets_conflict_p (set1, set2))
968 return -1;
969
970 /* The types are known to be not equal. */
971 return 0;
972 }
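/* Roughly: "int" and "const int" share a main variant and compare as 1;
   two unrelated record types with distinct canonical types and
   non-conflicting alias sets compare as 0; and two array types that are
   not found equal by the checks above fall into the -1 (cannot decide)
   case.  */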
973
974 /* Return true if TYPE is a composite type (i.e. we may apply one of handled
975 components on it). */
976
977 static bool
978 type_has_components_p (tree type)
979 {
980 return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
981 || TREE_CODE (type) == COMPLEX_TYPE;
982 }
983
984 /* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
985 respectively, are either pointing to the same address or are completely
986 disjoint. If PARTIAL_OVERLAP is true, assume that outermost arrays may
987 just partly overlap.
988
989 Try to disambiguate using the access path starting from the match
990 and return false if there is no conflict.
991
992 Helper for aliasing_component_refs_p. */
993
994 static bool
995 aliasing_matching_component_refs_p (tree match1, tree ref1,
996 poly_int64 offset1, poly_int64 max_size1,
997 tree match2, tree ref2,
998 poly_int64 offset2, poly_int64 max_size2,
999 bool partial_overlap)
1000 {
1001 poly_int64 offadj, sztmp, msztmp;
1002 bool reverse;
1003
1004 if (!partial_overlap)
1005 {
1006 get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
1007 offset2 -= offadj;
1008 get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
1009 offset1 -= offadj;
1010 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1011 {
1012 ++alias_stats.aliasing_component_refs_p_no_alias;
1013 return false;
1014 }
1015 }
1016
1017 int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
1018 partial_overlap);
1019 if (cmp == 1
1020 || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
1021 {
1022 ++alias_stats.aliasing_component_refs_p_no_alias;
1023 return false;
1024 }
1025 ++alias_stats.aliasing_component_refs_p_may_alias;
1026 return true;
1027 }
1028
1029 /* Return true if REF is a reference to a zero-sized trailing array. I.e.
1030 struct foo {int bar; int array[0];} *fooptr;
1031 fooptr->array. */
1032
1033 static bool
1034 component_ref_to_zero_sized_trailing_array_p (tree ref)
1035 {
1036 return (TREE_CODE (ref) == COMPONENT_REF
1037 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
1038 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
1039 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
1040 && array_at_struct_end_p (ref));
1041 }
1042
1043 /* Worker for aliasing_component_refs_p. Most parameters match parameters of
1044 aliasing_component_refs_p.
1045
1046 Walk access path REF2 and try to find type matching TYPE1
1047 (which is a start of possibly aliasing access path REF1).
1048 If match is found, try to disambiguate.
1049
1050 Return 0 for successful disambiguation.
1051 Return 1 if a match was found but disambiguation failed.
1052 Return -1 if there is no match.
1053 In this case MAYBE_MATCH is set to 0 if there is no type matching TYPE1
1054 in access path REF2 and -1 if we are not sure. */
1055
1056 static int
1057 aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
1058 poly_int64 offset1, poly_int64 max_size1,
1059 tree end_struct_ref1,
1060 tree ref2, tree base2,
1061 poly_int64 offset2, poly_int64 max_size2,
1062 bool *maybe_match)
1063 {
1064 tree ref = ref2;
1065 int same_p = 0;
1066
1067 while (true)
1068 {
1069 /* We walk from the inner type to the outer types. If the type we see is
1070 already too large to be part of type1, terminate the search. */
1071 int cmp = compare_type_sizes (type1, TREE_TYPE (ref));
1072
1073 if (cmp < 0
1074 && (!end_struct_ref1
1075 || compare_type_sizes (TREE_TYPE (end_struct_ref1),
1076 TREE_TYPE (ref)) < 0))
1077 break;
1078 /* If types may be of same size, see if we can decide about their
1079 equality. */
1080 if (cmp == 0)
1081 {
1082 same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
1083 if (same_p == 1)
1084 break;
1085 /* In case we can't decide whether the types are the same try to
1086 continue looking for the exact match.
1087 Remember however that we possibly saw a match
1088 to bypass the access path continuation tests we do later. */
1089 if (same_p == -1)
1090 *maybe_match = true;
1091 }
1092 if (!handled_component_p (ref))
1093 break;
1094 ref = TREE_OPERAND (ref, 0);
1095 }
1096 if (same_p == 1)
1097 {
1098 bool partial_overlap = false;
1099
1100 /* We assume that arrays can overlap by a multiple of their element
1101 size as tested in gcc.dg/torture/alias-2.c.
1102 This partial overlap happens only when both arrays are bases of
1103 the access and not contained within another component ref.
1104 To be safe we also assume partial overlap for VLAs. */
1105 if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
1106 && (!TYPE_SIZE (TREE_TYPE (base1))
1107 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
1108 || ref == base2))
1109 {
1110 /* Setting maybe_match to true triggers
1111 nonoverlapping_component_refs_p test later that still may do
1112 useful disambiguation. */
1113 *maybe_match = true;
1114 partial_overlap = true;
1115 }
1116 return aliasing_matching_component_refs_p (base1, ref1,
1117 offset1, max_size1,
1118 ref, ref2,
1119 offset2, max_size2,
1120 partial_overlap);
1121 }
1122 return -1;
1123 }
1124
1125 /* Consider access path1 base1....ref1 and access path2 base2...ref2.
1126 Return true if they can be composed into a single access path
1127 base1...ref1...base2...ref2.
1128
1129 REF_TYPE1 is the type of REF1. END_STRUCT_PAST_END1 is true if there is
1130 a trailing array access after REF1 in the non-TBAA part of the access.
1131 REF1_ALIAS_SET is the alias set of REF1.
1132
1133 BASE_TYPE2 is the type of BASE2. END_STRUCT_REF2 is non-NULL if there is
1134 a trailing array access in the TBAA part of access path2.
1135 BASE2_ALIAS_SET is the alias set of base2. */
1136
1137 bool
1138 access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
1139 alias_set_type ref1_alias_set,
1140 tree base_type2, tree end_struct_ref2,
1141 alias_set_type base2_alias_set)
1142 {
1143 /* Access path can not continue past types with no components. */
1144 if (!type_has_components_p (ref_type1))
1145 return false;
1146
1147 /* If the first access path ends with a type too small to hold the base of
1148 the second access path, typically the paths can not continue.
1149
1150 Punt if end_struct_past_end1 is true. We want to support arbitrary
1151 type punning past the first COMPONENT_REF to a union because redundant store
1152 elimination depends on this, see PR92152. For this reason we can not
1153 check the size of the reference because types may partially overlap. */
1154 if (!end_struct_past_end1)
1155 {
1156 if (compare_type_sizes (ref_type1, base_type2) < 0)
1157 return false;
1158 /* If path2 contains a trailing array access we can strengthen the check
1159 to verify that the size of the trailing array element also fits.
1160 In fact we could check for offset + type_size, but we do not track
1161 offsets and this is quite a corner case. */
1162 if (end_struct_ref2
1163 && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
1164 return false;
1165 }
1166 return (base2_alias_set == ref1_alias_set
1167 || alias_set_subset_of (base2_alias_set, ref1_alias_set));
1168 }
1169
1170 /* Determine if the two component references REF1 and REF2 which are
1171 based on access types TYPE1 and TYPE2 and of which at least one is based
1172 on an indirect reference may alias.
1173 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
1174 are the respective alias sets. */
1175
1176 static bool
1177 aliasing_component_refs_p (tree ref1,
1178 alias_set_type ref1_alias_set,
1179 alias_set_type base1_alias_set,
1180 poly_int64 offset1, poly_int64 max_size1,
1181 tree ref2,
1182 alias_set_type ref2_alias_set,
1183 alias_set_type base2_alias_set,
1184 poly_int64 offset2, poly_int64 max_size2)
1185 {
1186 /* If one reference is a component reference through pointers, try to find a
1187 common base and apply offset based disambiguation. This handles
1188 for example
1189 struct A { int i; int j; } *q;
1190 struct B { struct A a; int k; } *p;
1191 disambiguating q->i and p->a.j. */
1192 tree base1, base2;
1193 tree type1, type2;
1194 bool maybe_match = false;
1195 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
1196 bool end_struct_past_end1 = false;
1197 bool end_struct_past_end2 = false;
1198
1199 /* Choose bases and base types to search for.
1200 The access path is as follows:
1201 base....end_of_tbaa_ref...actual_ref
1202 At one place in the access path there may be a reference to a zero-sized or
1203 trailing array.
1204
1205 We generally discard the segment after end_of_tbaa_ref however
1206 we need to be careful in case it contains a zero-sized or trailing array.
1207 These may happen after a reference to a union and in this case we must
1208 not disambiguate type punning scenarios.
1209
1210 We set:
1211 base1 to point to base
1212
1213 ref1 to point to end_of_tbaa_ref
1214
1215 end_struct_ref1 to point to the trailing reference (if it exists
1216 in the range base....end_of_tbaa_ref)
1217
1218 end_struct_past_end1 is true if this trailing reference occurs in
1219 end_of_tbaa_ref...actual_ref. */
1220 base1 = ref1;
1221 while (handled_component_p (base1))
1222 {
1223 /* Generally access paths are monotone in the size of the object. The
1224 exception is trailing arrays of structures. I.e.
1225 struct a {int array[0];};
1226 or
1227 struct a {int array1[0]; int array[];};
1228 Such struct has size 0 but accesses to a.array may have non-zero size.
1229 In this case the size of TREE_TYPE (base1) is smaller than
1230 size of TREE_TYPE (TREE_OPERAND (base1, 0)).
1231
1232 Because we compare sizes of arrays just by sizes of their elements,
1233 we only need to care about zero sized array fields here. */
1234 if (component_ref_to_zero_sized_trailing_array_p (base1))
1235 {
1236 gcc_checking_assert (!end_struct_ref1);
1237 end_struct_ref1 = base1;
1238 }
1239 if (ends_tbaa_access_path_p (base1))
1240 {
1241 ref1 = TREE_OPERAND (base1, 0);
1242 if (end_struct_ref1)
1243 {
1244 end_struct_past_end1 = true;
1245 end_struct_ref1 = NULL;
1246 }
1247 }
1248 base1 = TREE_OPERAND (base1, 0);
1249 }
1250 type1 = TREE_TYPE (base1);
1251 base2 = ref2;
1252 while (handled_component_p (base2))
1253 {
1254 if (component_ref_to_zero_sized_trailing_array_p (base2))
1255 {
1256 gcc_checking_assert (!end_struct_ref2);
1257 end_struct_ref2 = base2;
1258 }
1259 if (ends_tbaa_access_path_p (base2))
1260 {
1261 ref2 = TREE_OPERAND (base2, 0);
1262 if (end_struct_ref2)
1263 {
1264 end_struct_past_end2 = true;
1265 end_struct_ref2 = NULL;
1266 }
1267 }
1268 base2 = TREE_OPERAND (base2, 0);
1269 }
1270 type2 = TREE_TYPE (base2);
1271
1272 /* Now search for the type1 in the access path of ref2. This
1273 would be a common base for doing offset based disambiguation on.
1274 This however only makes sense if type2 is big enough to hold type1. */
1275 int cmp_outer = compare_type_sizes (type2, type1);
1276
1277 /* If type2 is big enough to contain type1 walk its access path.
1278 We also need to take care of arrays at the end of structs that may extend
1279 beyond the end of the structure. If this occurs in the TBAA part of the
1280 access path, we need to consider the increased type as well. */
1281 if (cmp_outer >= 0
1282 || (end_struct_ref2
1283 && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
1284 {
1285 int res = aliasing_component_refs_walk (ref1, type1, base1,
1286 offset1, max_size1,
1287 end_struct_ref1,
1288 ref2, base2, offset2, max_size2,
1289 &maybe_match);
1290 if (res != -1)
1291 return res;
1292 }
1293
1294 /* If we didn't find a common base, try the other way around. */
1295 if (cmp_outer <= 0
1296 || (end_struct_ref1
1297 && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
1298 {
1299 int res = aliasing_component_refs_walk (ref2, type2, base2,
1300 offset2, max_size2,
1301 end_struct_ref2,
1302 ref1, base1, offset1, max_size1,
1303 &maybe_match);
1304 if (res != -1)
1305 return res;
1306 }
1307
1308 /* In the following code we make an assumption that the types in access
1309 paths do not overlap and thus accesses alias only if one path can be a
1310 continuation of another. If we were not able to decide about equivalence,
1311 we need to give up. */
1312 if (maybe_match)
1313 {
1314 if (!nonoverlapping_component_refs_p (ref1, ref2))
1315 {
1316 ++alias_stats.aliasing_component_refs_p_may_alias;
1317 return true;
1318 }
1319 ++alias_stats.aliasing_component_refs_p_no_alias;
1320 return false;
1321 }
1322
1323 if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
1324 ref1_alias_set,
1325 type2, end_struct_ref2,
1326 base2_alias_set)
1327 || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
1328 ref2_alias_set,
1329 type1, end_struct_ref1,
1330 base1_alias_set))
1331 {
1332 ++alias_stats.aliasing_component_refs_p_may_alias;
1333 return true;
1334 }
1335 ++alias_stats.aliasing_component_refs_p_no_alias;
1336 return false;
1337 }
1338
1339 /* FIELD1 and FIELD2 are two fields of component refs. We assume
1340 that bases of both component refs are either equivalent or nonoverlapping.
1341 We do not assume that the containers of FIELD1 and FIELD2 are of the
1342 same type or size.
1343
1344 Return 0 if, assuming the base addresses of the component refs are the
1345 same, FIELD1 and FIELD2 have the same address. Note that FIELD1 and FIELD2
1346 may not be of the same type or size.
1347
1348 Return 1 if FIELD1 and FIELD2 are non-overlapping.
1349
1350 Return -1 otherwise.
1351
1352 The main difference between 0 and -1 is to let
1353 nonoverlapping_refs_since_match_p discover the semantically
1354 equivalent part of the access path.
1355
1356 Note that this function is used even with -fno-strict-aliasing
1357 and makes use of no TBAA assumptions. */
1358
1359 static int
1360 nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
1361 {
1362 /* If both fields are of the same type, we could save hard work of
1363 comparing offsets. */
1364 tree type1 = DECL_CONTEXT (field1);
1365 tree type2 = DECL_CONTEXT (field2);
1366
1367 if (TREE_CODE (type1) == RECORD_TYPE
1368 && DECL_BIT_FIELD_REPRESENTATIVE (field1))
1369 field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
1370 if (TREE_CODE (type2) == RECORD_TYPE
1371 && DECL_BIT_FIELD_REPRESENTATIVE (field2))
1372 field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);
1373
1374 /* ??? Bitfields can overlap at RTL level so punt on them.
1375 FIXME: RTL expansion should be fixed by adjusting the access path
1376 when producing MEM_ATTRs for MEMs which are wider than
1377 the bitfields similarly as done in set_mem_attrs_minus_bitpos. */
1378 if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
1379 return -1;
1380
1381 /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE. */
1382 if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
1383 return field1 != field2;
1384
1385 /* In the common case the offsets and bit offsets will be the same.
1386 However if frontends do not agree on the alignment, they may be
1387 different even if they actually represent the same address.
1388 Try the common case first and if that fails calculate the
1389 actual bit offset. */
1390 if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
1391 DECL_FIELD_OFFSET (field2))
1392 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
1393 DECL_FIELD_BIT_OFFSET (field2)))
1394 return 0;
1395
1396 /* Note that it may be possible to use component_ref_field_offset
1397 which would provide offsets as trees. However constructing and folding
1398 trees is expensive and does not seem to be worth the compile time
1399 cost. */
1400
1401 poly_uint64 offset1, offset2;
1402 poly_uint64 bit_offset1, bit_offset2;
1403
1404 if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
1405 && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
1406 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
1407 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
1408 {
1409 offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
1410 offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;
1411
1412 if (known_eq (offset1, offset2))
1413 return 0;
1414
1415 poly_uint64 size1, size2;
1416
1417 if (poly_int_tree_p (DECL_SIZE (field1), &size1)
1418 && poly_int_tree_p (DECL_SIZE (field2), &size2)
1419 && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
1420 return 1;
1421 }
1422 /* Resort to slower overlap checking by looking for matching types in
1423 the middle of access path. */
1424 return -1;
1425 }
1426
1427 /* Return low bound of array. Do not produce new trees
1428 and thus do not care about particular type of integer constant
1429 and placeholder exprs. */
1430
1431 static tree
1432 cheap_array_ref_low_bound (tree ref)
1433 {
1434 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
1435
1436 /* Avoid expensive array_ref_low_bound.
1437 low bound is either stored in operand2, or it is TYPE_MIN_VALUE of domain
1438 type or it is zero. */
1439 if (TREE_OPERAND (ref, 2))
1440 return TREE_OPERAND (ref, 2);
1441 else if (domain_type && TYPE_MIN_VALUE (domain_type))
1442 return TYPE_MIN_VALUE (domain_type);
1443 else
1444 return integer_zero_node;
1445 }
1446
1447 /* REF1 and REF2 are ARRAY_REFs that either have the same base address or are
1448 completely disjoint.
1449
1450 Return 1 if the refs are non-overlapping.
1451 Return 0 if they are possibly overlapping but if so the overlap again
1452 starts on the same address.
1453 Return -1 otherwise. */
1454
1455 int
1456 nonoverlapping_array_refs_p (tree ref1, tree ref2)
1457 {
1458 tree index1 = TREE_OPERAND (ref1, 1);
1459 tree index2 = TREE_OPERAND (ref2, 1);
1460 tree low_bound1 = cheap_array_ref_low_bound (ref1);
1461 tree low_bound2 = cheap_array_ref_low_bound (ref2);
1462
1463 /* Handle zero offsets first: we do not need to match type size in this
1464 case. */
1465 if (operand_equal_p (index1, low_bound1, 0)
1466 && operand_equal_p (index2, low_bound2, 0))
1467 return 0;
1468
1469 /* If type sizes are different, give up.
1470
1471 Avoid expensive array_ref_element_size.
1472 If operand 3 is present it denotes the size in alignment units.
1473 Otherwise size is TYPE_SIZE of the element type.
1474 Handle only common cases where types are of the same "kind". */
1475 if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
1476 return -1;
1477
1478 tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
1479 tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));
1480
1481 if (TREE_OPERAND (ref1, 3))
1482 {
1483 if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
1484 || !operand_equal_p (TREE_OPERAND (ref1, 3),
1485 TREE_OPERAND (ref2, 3), 0))
1486 return -1;
1487 }
1488 else
1489 {
1490 if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
1491 TYPE_SIZE_UNIT (elmt_type2), 0))
1492 return -1;
1493 }
1494
1495 /* Since we know that type sizes are the same, there is no need to return
1496 -1 after this point. Partial overlap can not be introduced. */
1497
1498 /* We may need to fold trees in this case.
1499 TODO: Handle integer constant case at least. */
1500 if (!operand_equal_p (low_bound1, low_bound2, 0))
1501 return 0;
1502
1503 if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
1504 {
1505 if (tree_int_cst_equal (index1, index2))
1506 return 0;
1507 return 1;
1508 }
1509 /* TODO: We can use VRP to further disambiguate here. */
1510 return 0;
1511 }
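/* Illustrative behaviour: for a[2] and a[4] with equal element sizes the
   constant indices differ, so the function returns 1 (disjoint); for a[i]
   and a[j] with non-constant indices it returns 0, meaning any overlap
   would again start at the same address; and when the element sizes do not
   match it typically gives up with -1.  */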
1512
1513 /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
1514 MATCH2 either point to the same address or are disjoint.
1515 MATCH1 and MATCH2 are assumed to be refs in the access paths of REF1 and REF2
1516 respectively, or NULL in the case we established equivalence of the bases.
1517 If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
1518 overlap by an exact multiple of their element size.
1519
1520 This test works by matching the initial segment of the access path
1521 and does not rely on TBAA thus is safe for !flag_strict_aliasing if
1522 match was determined without use of TBAA oracle.
1523
1524 Return 1 if we can determine that component references REF1 and REF2,
1525 that are within a common DECL, cannot overlap.
1526
1527 Return 0 if the paths are the same and thus there is nothing more to
1528 disambiguate (i.e. there is a must alias, assuming there is a must alias
1529 between MATCH1 and MATCH2).
1530
1531 Return -1 if we can not determine 0 or 1 - this happens when a
1532 non-matching type was met in the path.
1533 In this case it may make sense to continue with other disambiguation
1534 oracles. */
1535
1536 static int
1537 nonoverlapping_refs_since_match_p (tree match1, tree ref1,
1538 tree match2, tree ref2,
1539 bool partial_overlap)
1540 {
1541 int ntbaa1 = 0, ntbaa2 = 0;
1542 /* Early return if there are no references to match, we do not need
1543 to walk the access paths.
1544
1545 Do not consider this as may-alias for stats - it is more useful
1546 to have information how many disambiguations happened provided that
1547 the query was meaningful. */
1548
1549 if (match1 == ref1 || !handled_component_p (ref1)
1550 || match2 == ref2 || !handled_component_p (ref2))
1551 return -1;
1552
1553 auto_vec<tree, 16> component_refs1;
1554 auto_vec<tree, 16> component_refs2;
1555
1556 /* Create the stack of handled components for REF1. */
1557 while (handled_component_p (ref1) && ref1 != match1)
1558 {
1559 /* We use TBAA only to re-synchronize after mismatched refs. So we
1560 do not need to truncate access path after TBAA part ends. */
1561 if (ends_tbaa_access_path_p (ref1))
1562 ntbaa1 = 0;
1563 else
1564 ntbaa1++;
1565 component_refs1.safe_push (ref1);
1566 ref1 = TREE_OPERAND (ref1, 0);
1567 }
1568
1569 /* Create the stack of handled components for REF2. */
1570 while (handled_component_p (ref2) && ref2 != match2)
1571 {
1572 if (ends_tbaa_access_path_p (ref2))
1573 ntbaa2 = 0;
1574 else
1575 ntbaa2++;
1576 component_refs2.safe_push (ref2);
1577 ref2 = TREE_OPERAND (ref2, 0);
1578 }
1579
1580 if (!flag_strict_aliasing)
1581 {
1582 ntbaa1 = 0;
1583 ntbaa2 = 0;
1584 }
1585
1586 bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
1587 bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
1588
1589 /* If only one of the access paths starts with MEM_REF, check that the offset
1590 is 0 so the address stays the same after stripping it.
1591 TODO: In this case we may walk the other access path until we get the same
1592 offset.
1593
1594 If both start with MEM_REF, the offsets have to be the same. */
1595 if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
1596 || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
1597 || (mem_ref1 && mem_ref2
1598 && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
1599 TREE_OPERAND (ref2, 1))))
1600 {
1601 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1602 return -1;
1603 }
1604
1605 /* TARGET_MEM_REFs are never wrapped in handled components, so we do not need
1606 to handle them here at all. */
1607 gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
1608 && TREE_CODE (ref2) != TARGET_MEM_REF);
1609
1610 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1611 rank. This is sufficient because we start from the same DECL and you
1612 cannot reference several fields at a time with COMPONENT_REFs (unlike
1613 with ARRAY_RANGE_REFs for arrays) so you always need the same number
1614 of them to access a sub-component, unless you're in a union, in which
1615 case the return value will precisely be false. */
1616 while (true)
1617 {
1618 /* Track whether we have seen an unmatched ref with a non-zero offset. In
1619 this case we must look for partial overlaps. */
1620 bool seen_unmatched_ref_p = false;
1621
1622 /* First match ARRAY_REFs and try to disambiguate. */
1623 if (!component_refs1.is_empty ()
1624 && !component_refs2.is_empty ())
1625 {
1626 unsigned int narray_refs1=0, narray_refs2=0;
1627
1628 /* We generally assume that both access paths start with the same sequence
1629 of refs. However if the number of array refs is not in sync, try
1630 to recover and pop elts until the numbers match. This helps the case
1631 where one access path starts with an array and the other with an element. */
1632 for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
1633 narray_refs1++)
1634 if (TREE_CODE (component_refs1 [component_refs1.length()
1635 - 1 - narray_refs1]) != ARRAY_REF)
1636 break;
1637
1638 for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
1639 narray_refs2++)
1640 if (TREE_CODE (component_refs2 [component_refs2.length()
1641 - 1 - narray_refs2]) != ARRAY_REF)
1642 break;
1643 for (; narray_refs1 > narray_refs2; narray_refs1--)
1644 {
1645 ref1 = component_refs1.pop ();
1646 ntbaa1--;
1647
1648 /* If index is non-zero we need to check whether the reference
1649 does not break the main invariant that bases are either
1650 disjoint or equal. Consider the example:
1651
1652 unsigned char out[][1];
1653 out[1]="a";
1654 out[i][0];
1655
1656 Here both bases are the same array out, but after removing the
1657 [i] index, this invariant no longer holds, because
1658 out[i] points to the middle of the array out.
1659
1660 TODO: If the size of the type of the skipped reference is an integer
1661 multiple of the size of the type of the other reference this
1662 invariant can be verified, but even then it is not completely
1663 safe with !flag_strict_aliasing if the other reference contains
1664 unbounded array accesses.
1665 See */
1666
1667 if (!operand_equal_p (TREE_OPERAND (ref1, 1),
1668 cheap_array_ref_low_bound (ref1), 0))
1669 return 0;
1670 }
1671 for (; narray_refs2 > narray_refs1; narray_refs2--)
1672 {
1673 ref2 = component_refs2.pop ();
1674 ntbaa2--;
1675 if (!operand_equal_p (TREE_OPERAND (ref2, 1),
1676 cheap_array_ref_low_bound (ref2), 0))
1677 return 0;
1678 }
1679 /* Try to disambiguate matched arrays. */
1680 for (unsigned int i = 0; i < narray_refs1; i++)
1681 {
1682 int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
1683 component_refs2.pop ());
1684 ntbaa1--;
1685 ntbaa2--;
1686 if (cmp == 1 && !partial_overlap)
1687 {
1688 ++alias_stats
1689 .nonoverlapping_refs_since_match_p_no_alias;
1690 return 1;
1691 }
1692 if (cmp == -1)
1693 {
1694 seen_unmatched_ref_p = true;
1695 /* We can not maintain the invariant that bases are either
1696 the same or completely disjoint. However we can still recover
1697 from type based alias analysis if we reach references of the
1698 same size. We do not attempt to match array sizes, so
1699 just finish array walking and look for component refs. */
1700 if (ntbaa1 < 0 || ntbaa2 < 0)
1701 {
1702 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1703 return -1;
1704 }
1705 for (i++; i < narray_refs1; i++)
1706 {
1707 component_refs1.pop ();
1708 component_refs2.pop ();
1709 ntbaa1--;
1710 ntbaa2--;
1711 }
1712 break;
1713 }
1714 partial_overlap = false;
1715 }
1716 }
1717
1718 /* Next look for component_refs. */
1719 do
1720 {
1721 if (component_refs1.is_empty ())
1722 {
1723 ++alias_stats
1724 .nonoverlapping_refs_since_match_p_must_overlap;
1725 return 0;
1726 }
1727 ref1 = component_refs1.pop ();
1728 ntbaa1--;
1729 if (TREE_CODE (ref1) != COMPONENT_REF)
1730 {
1731 seen_unmatched_ref_p = true;
1732 if (ntbaa1 < 0 || ntbaa2 < 0)
1733 {
1734 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1735 return -1;
1736 }
1737 }
1738 }
1739 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1740
1741 do
1742 {
1743 if (component_refs2.is_empty ())
1744 {
1745 ++alias_stats
1746 .nonoverlapping_refs_since_match_p_must_overlap;
1747 return 0;
1748 }
1749 ref2 = component_refs2.pop ();
1750 ntbaa2--;
1751 if (TREE_CODE (ref2) != COMPONENT_REF)
1752 {
1753 if (ntbaa1 < 0 || ntbaa2 < 0)
1754 {
1755 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1756 return -1;
1757 }
1758 seen_unmatched_ref_p = true;
1759 }
1760 }
1761 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1762
1763 /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
1764 earlier. */
1765 gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
1766 && TREE_CODE (ref2) == COMPONENT_REF);
1767
1768 tree field1 = TREE_OPERAND (ref1, 1);
1769 tree field2 = TREE_OPERAND (ref2, 1);
1770
1771 /* ??? We cannot simply use the type of operand #0 of the refs here
1772 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1773 for common blocks instead of using unions like everyone else. */
1774 tree type1 = DECL_CONTEXT (field1);
1775 tree type2 = DECL_CONTEXT (field2);
1776
1777 partial_overlap = false;
1778
1779 /* If we skipped array refs on types of different sizes, we can
1780 no longer be sure that there are no partial overlaps. */
1781 if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
1782 && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
1783 {
1784 ++alias_stats
1785 .nonoverlapping_refs_since_match_p_may_alias;
1786 return -1;
1787 }
1788
1789 int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
1790 if (cmp == -1)
1791 {
1792 ++alias_stats
1793 .nonoverlapping_refs_since_match_p_may_alias;
1794 return -1;
1795 }
1796 else if (cmp == 1)
1797 {
1798 ++alias_stats
1799 .nonoverlapping_refs_since_match_p_no_alias;
1800 return 1;
1801 }
1802 }
1803
1804 ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap;
1805 return 0;
1806 }
1807
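/* As an illustration of the matching above, consider a hypothetical pair
   of accesses sharing the base *p:

     struct S { int a[10]; int b[10]; } *p;
     ... = p->a[i];
     p->b[j] = ...;

   The access paths first differ in COMPONENT_REFs for the distinct
   fields a and b of the same record, which is enough to prove the two
   accesses disjoint for any values of i and j; the array indexes
   themselves never need to be compared.  */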
1808 /* Return a TYPE_UID which can be used to match record types we consider
1809 the same for TBAA purposes. */
1810
1811 static inline int
1812 ncr_type_uid (const_tree field)
1813 {
1814 /* ??? We cannot simply use the type of operand #0 of the refs here
1815 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1816 for common blocks instead of using unions like everyone else. */
1817 tree type = DECL_FIELD_CONTEXT (field);
1818 /* With LTO, types considered same_type_for_tbaa_p
1819 from different translation units may not have the same
1820 main variant.  They do, however, have the same TYPE_CANONICAL. */
1821 if (TYPE_CANONICAL (type))
1822 return TYPE_UID (TYPE_CANONICAL (type));
1823 return TYPE_UID (type);
1824 }
1825
1826 /* qsort compare function to sort FIELD_DECLs by their
1827 DECL_FIELD_CONTEXT TYPE_UID. */
1828
1829 static inline int
1830 ncr_compar (const void *field1_, const void *field2_)
1831 {
1832 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1833 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1834 unsigned int uid1 = ncr_type_uid (field1);
1835 unsigned int uid2 = ncr_type_uid (field2);
1836
1837 if (uid1 < uid2)
1838 return -1;
1839 else if (uid1 > uid2)
1840 return 1;
1841 return 0;
1842 }
1843
1844 /* Return true if we can determine that the fields referenced cannot
1845 overlap for any pair of objects. This relies on TBAA. */
1846
1847 static bool
1848 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1849 {
1850 /* Early return if we have nothing to do.
1851
1852 Do not consider this as may-alias for stats - it is more useful
1853 to have information on how many disambiguations happened provided that
1854 the query was meaningful. */
1855 if (!flag_strict_aliasing
1856 || !x || !y
1857 || !handled_component_p (x)
1858 || !handled_component_p (y))
1859 return false;
1860
1861 auto_vec<const_tree, 16> fieldsx;
1862 while (handled_component_p (x))
1863 {
1864 if (TREE_CODE (x) == COMPONENT_REF)
1865 {
1866 tree field = TREE_OPERAND (x, 1);
1867 tree type = DECL_FIELD_CONTEXT (field);
1868 if (TREE_CODE (type) == RECORD_TYPE)
1869 fieldsx.safe_push (field);
1870 }
1871 else if (ends_tbaa_access_path_p (x))
1872 fieldsx.truncate (0);
1873 x = TREE_OPERAND (x, 0);
1874 }
1875 if (fieldsx.length () == 0)
1876 return false;
1877 auto_vec<const_tree, 16> fieldsy;
1878 while (handled_component_p (y))
1879 {
1880 if (TREE_CODE (y) == COMPONENT_REF)
1881 {
1882 tree field = TREE_OPERAND (y, 1);
1883 tree type = DECL_FIELD_CONTEXT (field);
1884 if (TREE_CODE (type) == RECORD_TYPE)
1885 fieldsy.safe_push (TREE_OPERAND (y, 1));
1886 }
1887 else if (ends_tbaa_access_path_p (y))
1888 fieldsy.truncate (0);
1889 y = TREE_OPERAND (y, 0);
1890 }
1891 if (fieldsy.length () == 0)
1892 {
1893 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1894 return false;
1895 }
1896
1897 /* Most common case first. */
1898 if (fieldsx.length () == 1
1899 && fieldsy.length () == 1)
1900 {
1901 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1902 DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1903 && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
1904 {
1905 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1906 return true;
1907 }
1908 else
1909 {
1910 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1911 return false;
1912 }
1913 }
1914
1915 if (fieldsx.length () == 2)
1916 {
1917 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1918 std::swap (fieldsx[0], fieldsx[1]);
1919 }
1920 else
1921 fieldsx.qsort (ncr_compar);
1922
1923 if (fieldsy.length () == 2)
1924 {
1925 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1926 std::swap (fieldsy[0], fieldsy[1]);
1927 }
1928 else
1929 fieldsy.qsort (ncr_compar);
1930
1931 unsigned i = 0, j = 0;
1932 do
1933 {
1934 const_tree fieldx = fieldsx[i];
1935 const_tree fieldy = fieldsy[j];
1936
1937 /* We're left with accessing different fields of a structure,
1938 no possible overlap. */
1939 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
1940 DECL_FIELD_CONTEXT (fieldy)) == 1
1941 && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
1942 {
1943 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1944 return true;
1945 }
1946
1947 if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
1948 {
1949 i++;
1950 if (i == fieldsx.length ())
1951 break;
1952 }
1953 else
1954 {
1955 j++;
1956 if (j == fieldsy.length ())
1957 break;
1958 }
1959 }
1960 while (1);
1961
1962 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1963 return false;
1964 }
1965
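/* A sketch of the kind of query the routine above can answer:

     struct S { int a; int b; };
     void g (struct S *p, struct S *q) { p->a = 1; int t = q->b; (void) t; }

   Both access paths contain COMPONENT_REFs of fields of the same record
   type S, and the fields a and b do not overlap; under strict aliasing
   rules the S objects involved are either identical or disjoint, so the
   two accesses cannot overlap whatever p and q point to.  */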
1966
1967 /* Return true if two memory references based on the variables BASE1
1968 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1969 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1970 if non-NULL are the complete memory reference trees. */
1971
1972 static bool
1973 decl_refs_may_alias_p (tree ref1, tree base1,
1974 poly_int64 offset1, poly_int64 max_size1,
1975 poly_int64 size1,
1976 tree ref2, tree base2,
1977 poly_int64 offset2, poly_int64 max_size2,
1978 poly_int64 size2)
1979 {
1980 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
1981
1982 /* If both references are based on different variables, they cannot alias. */
1983 if (compare_base_decls (base1, base2) == 0)
1984 return false;
1985
1986 /* If both references are based on the same variable, they cannot alias if
1987 the accesses do not overlap. */
1988 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1989 return false;
1990
1991 /* If there is must alias, there is no use disambiguating further. */
1992 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
1993 return true;
1994
1995 /* For components with variable position, the above test isn't sufficient,
1996 so we disambiguate component references manually. */
1997 if (ref1 && ref2
1998 && handled_component_p (ref1) && handled_component_p (ref2)
1999 && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
2000 return false;
2001
2002 return true;
2003 }
2004
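/* For example, with

     int x, y;
     void h (void) { x = 1; int t = y; (void) t; }

   the two references have the distinct decls x and y as bases, so
   compare_base_decls reports them as different and the refs cannot
   alias.  Accesses based on the same decl are instead disambiguated by
   their [offset, offset + max_size) ranges.  */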
2005 /* Return true if an access with base BASE is view-converted.
2006 The inner MEM_REF (&decl) must not have been stripped from BASE
2007 (ao_ref_base does that stripping), and thus one extra walk
2008 of handled components is needed. */
2009
2010 static bool
2011 view_converted_memref_p (tree base)
2012 {
2013 if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
2014 return false;
2015 return same_type_for_tbaa (TREE_TYPE (base),
2016 TREE_TYPE (TREE_OPERAND (base, 1))) != 1;
2017 }
2018
2019 /* Return true if an indirect reference based on *PTR1 constrained
2020 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
2021 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
2022 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2023 in which case they are computed on-demand. REF1 and REF2
2024 if non-NULL are the complete memory reference trees. */
2025
2026 static bool
2027 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2028 poly_int64 offset1, poly_int64 max_size1,
2029 poly_int64 size1,
2030 alias_set_type ref1_alias_set,
2031 alias_set_type base1_alias_set,
2032 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2033 poly_int64 offset2, poly_int64 max_size2,
2034 poly_int64 size2,
2035 alias_set_type ref2_alias_set,
2036 alias_set_type base2_alias_set, bool tbaa_p)
2037 {
2038 tree ptr1;
2039 tree ptrtype1, dbase2;
2040
2041 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2042 || TREE_CODE (base1) == TARGET_MEM_REF)
2043 && DECL_P (base2));
2044
2045 ptr1 = TREE_OPERAND (base1, 0);
2046 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2047
2048 /* If only one reference is based on a variable, they cannot alias if
2049 the pointer access is beyond the extent of the variable access.
2050 (the pointer base cannot validly point to an offset less than zero
2051 of the variable).
2052 ??? IVOPTs creates bases that do not honor this restriction,
2053 so do not apply this optimization for TARGET_MEM_REFs. */
2054 if (TREE_CODE (base1) != TARGET_MEM_REF
2055 && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
2056 return false;
2057
2058 /* If the pointer based access is bigger than the variable they cannot
2059 alias. This is similar to the check below where we use TBAA to
2060 increase the size of the pointer based access based on the dynamic
2061 type of a containing object we can infer from it. */
2062 poly_int64 dsize2;
2063 if (known_size_p (size1)
2064 && poly_int_tree_p (DECL_SIZE (base2), &dsize2)
2065 && known_lt (dsize2, size1))
2066 return false;
2067
2068 /* They also cannot alias if the pointer may not point to the decl. */
2069 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
2070 return false;
2071
2072 /* Disambiguations that rely on strict aliasing rules follow. */
2073 if (!flag_strict_aliasing || !tbaa_p)
2074 return true;
2075
2076 /* If the alias set for a pointer access is zero all bets are off. */
2077 if (base1_alias_set == 0 || base2_alias_set == 0)
2078 return true;
2079
2080 /* When we are trying to disambiguate an access with a pointer dereference
2081 as base versus one with a decl as base we can use both the size
2082 of the decl and its dynamic type for extra disambiguation.
2083 ??? We do not know anything about the dynamic type of the decl
2084 other than that its alias-set contains base2_alias_set as a subset
2085 which does not help us here. */
2086 /* As we know nothing useful about the dynamic type of the decl just
2087 use the usual conflict check rather than a subset test.
2088 ??? We could introduce -fvery-strict-aliasing when the language
2089 does not allow decls to have a dynamic type that differs from their
2090 static type. Then we can check
2091 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
2092 if (base1_alias_set != base2_alias_set
2093 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2094 return false;
2095
2096 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2097
2098 /* If the size of the access relevant for TBAA through the pointer
2099 is bigger than the size of the decl we can't possibly access the
2100 decl via that pointer. */
2101 if (/* ??? This in turn may run afoul when a decl of type T which is
2102 a member of union type U is accessed through a pointer to
2103 type U and sizeof T is smaller than sizeof U. */
2104 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
2105 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
2106 && compare_sizes (DECL_SIZE (base2),
2107 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
2108 return false;
2109
2110 if (!ref2)
2111 return true;
2112
2113 /* If the decl is accessed via a MEM_REF, reconstruct the base
2114 we can use for TBAA and an appropriately adjusted offset. */
2115 dbase2 = ref2;
2116 while (handled_component_p (dbase2))
2117 dbase2 = TREE_OPERAND (dbase2, 0);
2118 poly_int64 doffset1 = offset1;
2119 poly_offset_int doffset2 = offset2;
2120 if (TREE_CODE (dbase2) == MEM_REF
2121 || TREE_CODE (dbase2) == TARGET_MEM_REF)
2122 {
2123 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
2124 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
2125 /* If second reference is view-converted, give up now. */
2126 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
2127 return true;
2128 }
2129
2130 /* If first reference is view-converted, give up now. */
2131 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
2132 return true;
2133
2134 /* If both references are through the same type, they do not alias
2135 if the accesses do not overlap. This does extra disambiguation
2136 for mixed/pointer accesses but requires strict aliasing.
2137 For MEM_REFs we require that the component-ref offset we computed
2138 is relative to the start of the type which we ensure by
2139 comparing rvalue and access type and disregarding the constant
2140 pointer offset.
2141
2142 But avoid treating variable length arrays as "objects", instead assume they
2143 can overlap by an exact multiple of their element size.
2144 See gcc.dg/torture/alias-2.c. */
2145 if (((TREE_CODE (base1) != TARGET_MEM_REF
2146 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2147 && (TREE_CODE (dbase2) != TARGET_MEM_REF
2148 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
2149 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
2150 {
2151 bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2152 && (TYPE_SIZE (TREE_TYPE (base1))
2153 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2154 != INTEGER_CST));
2155 if (!partial_overlap
2156 && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
2157 return false;
2158 if (!ref1 || !ref2
2159 /* If there is must alias, there is no use disambiguating further. */
2160 || (!partial_overlap
2161 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2162 return true;
2163 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2164 partial_overlap);
2165 if (res == -1)
2166 return !nonoverlapping_component_refs_p (ref1, ref2);
2167 return !res;
2168 }
2169
2170 /* Do access-path based disambiguation. */
2171 if (ref1 && ref2
2172 && (handled_component_p (ref1) || handled_component_p (ref2)))
2173 return aliasing_component_refs_p (ref1,
2174 ref1_alias_set, base1_alias_set,
2175 offset1, max_size1,
2176 ref2,
2177 ref2_alias_set, base2_alias_set,
2178 offset2, max_size2);
2179
2180 return true;
2181 }
2182
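/* A small example of the pointer-vs-decl reasoning above:

     char c;
     int *p;
     int use (void) { c = 0; return *p; }

   On a typical target the *p access is wider than DECL_SIZE (c), so the
   pointer-based access cannot possibly be an access of c and the two
   references can be reported as not aliasing without even looking at
   points-to information.  */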
2183 /* Return true if two indirect references based on *PTR1
2184 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2185 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2186 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2187 in which case they are computed on-demand. REF1 and REF2
2188 if non-NULL are the complete memory reference trees. */
2189
2190 static bool
2191 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2192 poly_int64 offset1, poly_int64 max_size1,
2193 poly_int64 size1,
2194 alias_set_type ref1_alias_set,
2195 alias_set_type base1_alias_set,
2196 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2197 poly_int64 offset2, poly_int64 max_size2,
2198 poly_int64 size2,
2199 alias_set_type ref2_alias_set,
2200 alias_set_type base2_alias_set, bool tbaa_p)
2201 {
2202 tree ptr1;
2203 tree ptr2;
2204 tree ptrtype1, ptrtype2;
2205
2206 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2207 || TREE_CODE (base1) == TARGET_MEM_REF)
2208 && (TREE_CODE (base2) == MEM_REF
2209 || TREE_CODE (base2) == TARGET_MEM_REF));
2210
2211 ptr1 = TREE_OPERAND (base1, 0);
2212 ptr2 = TREE_OPERAND (base2, 0);
2213
2214 /* If both bases are based on pointers they cannot alias if they may not
2215 point to the same memory object or if they point to the same object
2216 and the accesses do not overlap. */
2217 if ((!cfun || gimple_in_ssa_p (cfun))
2218 && operand_equal_p (ptr1, ptr2, 0)
2219 && (((TREE_CODE (base1) != TARGET_MEM_REF
2220 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2221 && (TREE_CODE (base2) != TARGET_MEM_REF
2222 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2223 || (TREE_CODE (base1) == TARGET_MEM_REF
2224 && TREE_CODE (base2) == TARGET_MEM_REF
2225 && (TMR_STEP (base1) == TMR_STEP (base2)
2226 || (TMR_STEP (base1) && TMR_STEP (base2)
2227 && operand_equal_p (TMR_STEP (base1),
2228 TMR_STEP (base2), 0)))
2229 && (TMR_INDEX (base1) == TMR_INDEX (base2)
2230 || (TMR_INDEX (base1) && TMR_INDEX (base2)
2231 && operand_equal_p (TMR_INDEX (base1),
2232 TMR_INDEX (base2), 0)))
2233 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2234 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2235 && operand_equal_p (TMR_INDEX2 (base1),
2236 TMR_INDEX2 (base2), 0))))))
2237 {
2238 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2239 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2240 if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
2241 offset2 + moff2, max_size2))
2242 return false;
2243 /* If there is must alias, there is no use disambiguating further. */
2244 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2245 return true;
2246 if (ref1 && ref2)
2247 {
2248 int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2249 false);
2250 if (res != -1)
2251 return !res;
2252 }
2253 }
2254 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2255 return false;
2256
2257 /* Disambiguations that rely on strict aliasing rules follow. */
2258 if (!flag_strict_aliasing || !tbaa_p)
2259 return true;
2260
2261 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2262 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2263
2264 /* If the alias set for a pointer access is zero all bets are off. */
2265 if (base1_alias_set == 0
2266 || base2_alias_set == 0)
2267 return true;
2268
2269 /* Do type-based disambiguation. */
2270 if (base1_alias_set != base2_alias_set
2271 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2272 return false;
2273
2274 /* If either reference is view-converted, give up now. */
2275 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2276 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2277 return true;
2278
2279 /* If both references are through the same type, they do not alias
2280 if the accesses do not overlap. This does extra disambiguation
2281 for mixed/pointer accesses but requires strict aliasing. */
2282 if ((TREE_CODE (base1) != TARGET_MEM_REF
2283 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2284 && (TREE_CODE (base2) != TARGET_MEM_REF
2285 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2286 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2287 TREE_TYPE (ptrtype2)) == 1)
2288 {
2289 /* But avoid treating arrays as "objects", instead assume they
2290 can overlap by an exact multiple of their element size.
2291 See gcc.dg/torture/alias-2.c. */
2292 bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2293
2294 if (!partial_overlap
2295 && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2296 return false;
2297 if (!ref1 || !ref2
2298 || (!partial_overlap
2299 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2300 return true;
2301 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2302 partial_overlap);
2303 if (res == -1)
2304 return !nonoverlapping_component_refs_p (ref1, ref2);
2305 return !res;
2306 }
2307
2308 /* Do access-path based disambiguation. */
2309 if (ref1 && ref2
2310 && (handled_component_p (ref1) || handled_component_p (ref2)))
2311 return aliasing_component_refs_p (ref1,
2312 ref1_alias_set, base1_alias_set,
2313 offset1, max_size1,
2314 ref2,
2315 ref2_alias_set, base2_alias_set,
2316 offset2, max_size2);
2317
2318 return true;
2319 }
2320
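/* For instance, for two accesses through the same SSA pointer,

     struct pair { int first; int second; };
     void g (struct pair *p) { int t = p->second; p->first = t; }

   both bases are MEM_REF [p] and the component offsets give disjoint
   [offset, offset + max_size) ranges, so the load and the store can be
   disambiguated without any points-to or TBAA information.  */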
2321 /* Return true, if the two memory references REF1 and REF2 may alias. */
2322
2323 static bool
2324 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2325 {
2326 tree base1, base2;
2327 poly_int64 offset1 = 0, offset2 = 0;
2328 poly_int64 max_size1 = -1, max_size2 = -1;
2329 bool var1_p, var2_p, ind1_p, ind2_p;
2330
2331 gcc_checking_assert ((!ref1->ref
2332 || TREE_CODE (ref1->ref) == SSA_NAME
2333 || DECL_P (ref1->ref)
2334 || TREE_CODE (ref1->ref) == STRING_CST
2335 || handled_component_p (ref1->ref)
2336 || TREE_CODE (ref1->ref) == MEM_REF
2337 || TREE_CODE (ref1->ref) == TARGET_MEM_REF
2338 || TREE_CODE (ref1->ref) == WITH_SIZE_EXPR)
2339 && (!ref2->ref
2340 || TREE_CODE (ref2->ref) == SSA_NAME
2341 || DECL_P (ref2->ref)
2342 || TREE_CODE (ref2->ref) == STRING_CST
2343 || handled_component_p (ref2->ref)
2344 || TREE_CODE (ref2->ref) == MEM_REF
2345 || TREE_CODE (ref2->ref) == TARGET_MEM_REF
2346 || TREE_CODE (ref2->ref) == WITH_SIZE_EXPR));
2347
2348 /* Decompose the references into their base objects and the access. */
2349 base1 = ao_ref_base (ref1);
2350 offset1 = ref1->offset;
2351 max_size1 = ref1->max_size;
2352 base2 = ao_ref_base (ref2);
2353 offset2 = ref2->offset;
2354 max_size2 = ref2->max_size;
2355
2356 /* We can end up with registers or constants as bases for example from
2357 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2358 which is seen as a struct copy. */
2359 if (TREE_CODE (base1) == SSA_NAME
2360 || TREE_CODE (base1) == CONST_DECL
2361 || TREE_CODE (base1) == CONSTRUCTOR
2362 || TREE_CODE (base1) == ADDR_EXPR
2363 || CONSTANT_CLASS_P (base1)
2364 || TREE_CODE (base2) == SSA_NAME
2365 || TREE_CODE (base2) == CONST_DECL
2366 || TREE_CODE (base2) == CONSTRUCTOR
2367 || TREE_CODE (base2) == ADDR_EXPR
2368 || CONSTANT_CLASS_P (base2))
2369 return false;
2370
2371 /* We can end up referring to code via function and label decls.
2372 As we likely do not properly track code aliases, conservatively
2373 bail out. */
2374 if (TREE_CODE (base1) == FUNCTION_DECL
2375 || TREE_CODE (base1) == LABEL_DECL
2376 || TREE_CODE (base2) == FUNCTION_DECL
2377 || TREE_CODE (base2) == LABEL_DECL)
2378 return true;
2379
2380 /* Two volatile accesses always conflict. */
2381 if (ref1->volatile_p
2382 && ref2->volatile_p)
2383 return true;
2384
2385 /* refN->ref may convey size information, do not confuse our workers
2386 with that but strip it - ao_ref_base took it into account already. */
2387 tree ref1ref = ref1->ref;
2388 if (ref1ref && TREE_CODE (ref1ref) == WITH_SIZE_EXPR)
2389 ref1ref = TREE_OPERAND (ref1ref, 0);
2390 tree ref2ref = ref2->ref;
2391 if (ref2ref && TREE_CODE (ref2ref) == WITH_SIZE_EXPR)
2392 ref2ref = TREE_OPERAND (ref2ref, 0);
2393
2394 /* Defer to simple offset based disambiguation if we have
2395 references based on two decls.  Do this before deferring to
2396 TBAA to handle must-alias cases in conformance with the
2397 GCC extension of allowing type-punning through unions. */
2398 var1_p = DECL_P (base1);
2399 var2_p = DECL_P (base2);
2400 if (var1_p && var2_p)
2401 return decl_refs_may_alias_p (ref1ref, base1, offset1, max_size1,
2402 ref1->size,
2403 ref2ref, base2, offset2, max_size2,
2404 ref2->size);
2405
2406 /* Handle restrict based accesses.
2407 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
2408 here. */
2409 tree rbase1 = base1;
2410 tree rbase2 = base2;
2411 if (var1_p)
2412 {
2413 rbase1 = ref1ref;
2414 if (rbase1)
2415 while (handled_component_p (rbase1))
2416 rbase1 = TREE_OPERAND (rbase1, 0);
2417 }
2418 if (var2_p)
2419 {
2420 rbase2 = ref2ref;
2421 if (rbase2)
2422 while (handled_component_p (rbase2))
2423 rbase2 = TREE_OPERAND (rbase2, 0);
2424 }
2425 if (rbase1 && rbase2
2426 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
2427 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
2428 /* If the accesses are in the same restrict clique... */
2429 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
2430 /* But based on different pointers they do not alias. */
2431 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
2432 return false;
2433
2434 ind1_p = (TREE_CODE (base1) == MEM_REF
2435 || TREE_CODE (base1) == TARGET_MEM_REF);
2436 ind2_p = (TREE_CODE (base2) == MEM_REF
2437 || TREE_CODE (base2) == TARGET_MEM_REF);
2438
2439 /* Canonicalize the pointer-vs-decl case. */
2440 if (ind1_p && var2_p)
2441 {
2442 std::swap (offset1, offset2);
2443 std::swap (max_size1, max_size2);
2444 std::swap (base1, base2);
2445 std::swap (ref1, ref2);
2446 std::swap (ref1ref, ref2ref);
2447 var1_p = true;
2448 ind1_p = false;
2449 var2_p = false;
2450 ind2_p = true;
2451 }
2452
2453 /* First defer to TBAA if possible. */
2454 if (tbaa_p
2455 && flag_strict_aliasing
2456 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
2457 ao_ref_alias_set (ref2)))
2458 return false;
2459
2460 /* If the reference is based on a pointer that points to memory
2461 that may not be written to then the other reference cannot possibly
2462 clobber it. */
2463 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
2464 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
2465 || (ind1_p
2466 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
2467 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
2468 return false;
2469
2470 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2471 if (var1_p && ind2_p)
2472 return indirect_ref_may_alias_decl_p (ref2ref, base2,
2473 offset2, max_size2, ref2->size,
2474 ao_ref_alias_set (ref2),
2475 ao_ref_base_alias_set (ref2),
2476 ref1ref, base1,
2477 offset1, max_size1, ref1->size,
2478 ao_ref_alias_set (ref1),
2479 ao_ref_base_alias_set (ref1),
2480 tbaa_p);
2481 else if (ind1_p && ind2_p)
2482 return indirect_refs_may_alias_p (ref1ref, base1,
2483 offset1, max_size1, ref1->size,
2484 ao_ref_alias_set (ref1),
2485 ao_ref_base_alias_set (ref1),
2486 ref2ref, base2,
2487 offset2, max_size2, ref2->size,
2488 ao_ref_alias_set (ref2),
2489 ao_ref_base_alias_set (ref2),
2490 tbaa_p);
2491
2492 gcc_unreachable ();
2493 }
2494
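/* The restrict clique check above is what allows, e.g.,

     void scale (int *restrict dst, const int *restrict src, int n)
     {
       for (int i = 0; i < n; i++)
         dst[i] = src[i] * 2;
     }

   to be disambiguated: once restrict information has been computed the
   MEM_REFs based on dst and src carry the same MR_DEPENDENCE_CLIQUE but
   different MR_DEPENDENCE_BASEs, so the loads and stores in the loop are
   known not to alias.  */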
2495 /* Return true, if the two memory references REF1 and REF2 may alias
2496 and update statistics. */
2497
2498 bool
2499 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2500 {
2501 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2502 if (res)
2503 ++alias_stats.refs_may_alias_p_may_alias;
2504 else
2505 ++alias_stats.refs_may_alias_p_no_alias;
2506 return res;
2507 }
2508
2509 static bool
2510 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2511 {
2512 ao_ref r1;
2513 ao_ref_init (&r1, ref1);
2514 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
2515 }
2516
2517 bool
2518 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2519 {
2520 ao_ref r1, r2;
2521 ao_ref_init (&r1, ref1);
2522 ao_ref_init (&r2, ref2);
2523 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
2524 }
2525
2526 /* Returns true if there is an anti-dependence for the STORE that
2527 executes after the LOAD. */
2528
2529 bool
2530 refs_anti_dependent_p (tree load, tree store)
2531 {
2532 ao_ref r1, r2;
2533 ao_ref_init (&r1, load);
2534 ao_ref_init (&r2, store);
2535 return refs_may_alias_p_1 (&r1, &r2, false);
2536 }
2537
2538 /* Returns true if there is an output dependence for the stores
2539 STORE1 and STORE2. */
2540
2541 bool
2542 refs_output_dependent_p (tree store1, tree store2)
2543 {
2544 ao_ref r1, r2;
2545 ao_ref_init (&r1, store1);
2546 ao_ref_init (&r2, store2);
2547 return refs_may_alias_p_1 (&r1, &r2, false);
2548 }
2549
2550 /* Returns true if and only if REF may alias any access stored in TT.
2551 If TBAA_P is true, use the TBAA oracle. */
2552
2553 static bool
2554 modref_may_conflict (const gcall *stmt,
2555 modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
2556 {
2557 alias_set_type base_set, ref_set;
2558
2559 if (tt->every_base)
2560 return true;
2561
2562 if (!dbg_cnt (ipa_mod_ref))
2563 return true;
2564
2565 base_set = ao_ref_base_alias_set (ref);
2566
2567 ref_set = ao_ref_alias_set (ref);
2568
2569 int num_tests = 0, max_tests = param_modref_max_tests;
2570 for (auto base_node : tt->bases)
2571 {
2572 if (tbaa_p && flag_strict_aliasing)
2573 {
2574 if (num_tests >= max_tests)
2575 return true;
2576 alias_stats.modref_tests++;
2577 if (!alias_sets_conflict_p (base_set, base_node->base))
2578 continue;
2579 num_tests++;
2580 }
2581
2582 if (base_node->every_ref)
2583 return true;
2584
2585 for (auto ref_node : base_node->refs)
2586 {
2587 /* Do not repeat the same test as before. */
2588 if ((ref_set != base_set || base_node->base != ref_node->ref)
2589 && tbaa_p && flag_strict_aliasing)
2590 {
2591 if (num_tests >= max_tests)
2592 return true;
2593 alias_stats.modref_tests++;
2594 if (!alias_sets_conflict_p (ref_set, ref_node->ref))
2595 continue;
2596 num_tests++;
2597 }
2598
2599 if (ref_node->every_access)
2600 return true;
2601
2602 /* TBAA checks did not disambiguate, try individual accesses. */
2603 for (auto access_node : ref_node->accesses)
2604 {
2605 if (num_tests >= max_tests)
2606 return true;
2607
2608 tree arg = access_node.get_call_arg (stmt);
2609 if (!arg)
2610 return true;
2611
2612 alias_stats.modref_baseptr_tests++;
2613
2614 if (integer_zerop (arg) && flag_delete_null_pointer_checks)
2615 continue;
2616
2617 /* The PTA oracle will be unhappy if arg is not a pointer. */
2618 if (!POINTER_TYPE_P (TREE_TYPE (arg)))
2619 return true;
2620
2621 /* If we don't have a base pointer, give up. */
2622 if (!ref->ref && !ref->base)
2623 continue;
2624
2625 ao_ref ref2;
2626 if (access_node.get_ao_ref (stmt, &ref2))
2627 {
2628 ref2.ref_alias_set = ref_node->ref;
2629 ref2.base_alias_set = base_node->base;
2630 if (refs_may_alias_p_1 (&ref2, ref, tbaa_p))
2631 return true;
2632 }
2633 else if (ptr_deref_may_alias_ref_p_1 (arg, ref))
2634 return true;
2635
2636 num_tests++;
2637 }
2638 }
2639 }
2640 return false;
2641 }
2642
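/* As an example of a modref query, assume a callee whose summary records
   that it only stores through its first pointer argument:

     void set (int *p) { *p = 0; }

   For a call set (&l) and a reference based on some unrelated variable g,
   the per-access test above compares the actual argument &l against the
   reference (via refs_may_alias_p_1 or ptr_deref_may_alias_ref_p_1) and
   finds no conflict, so the call's stores need not be treated as
   clobbering g.  */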
2643 /* Check if REF conflicts with call using "fn spec" attribute.
2644 If CLOBBER is true we are checking for writes, otherwise check loads.
2645
2646 Return 0 if there are no conflicts (except for possible function call
2647 argument reads), 1 if there are conflicts and -1 if we cannot decide by
2648 fn spec. */
2649
2650 static int
2651 check_fnspec (gcall *call, ao_ref *ref, bool clobber)
2652 {
2653 attr_fnspec fnspec = gimple_call_fnspec (call);
2654 if (fnspec.known_p ())
2655 {
2656 if (clobber
2657 ? !fnspec.global_memory_written_p ()
2658 : !fnspec.global_memory_read_p ())
2659 {
2660 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
2661 if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))
2662 && (!fnspec.arg_specified_p (i)
2663 || (clobber ? fnspec.arg_maybe_written_p (i)
2664 : fnspec.arg_maybe_read_p (i))))
2665 {
2666 ao_ref dref;
2667 tree size = NULL_TREE;
2668 unsigned int size_arg;
2669
2670 if (!fnspec.arg_specified_p (i))
2671 ;
2672 else if (fnspec.arg_max_access_size_given_by_arg_p
2673 (i, &size_arg))
2674 size = gimple_call_arg (call, size_arg);
2675 else if (fnspec.arg_access_size_given_by_type_p (i))
2676 {
2677 tree callee = gimple_call_fndecl (call);
2678 tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
2679
2680 for (unsigned int p = 0; p < i; p++)
2681 t = TREE_CHAIN (t);
2682 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
2683 }
2684 ao_ref_init_from_ptr_and_size (&dref,
2685 gimple_call_arg (call, i),
2686 size);
2687 if (refs_may_alias_p_1 (&dref, ref, false))
2688 return 1;
2689 }
2690 if (clobber
2691 && fnspec.errno_maybe_written_p ()
2692 && flag_errno_math
2693 && targetm.ref_may_alias_errno (ref))
2694 return 1;
2695 return 0;
2696 }
2697 }
2698
2699 /* FIXME: we should handle barriers more consistently, but for now leave the
2700 check here. */
2701 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2702 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
2703 {
2704 /* __sync_* builtins and some OpenMP builtins act as threading
2705 barriers. */
2706 #undef DEF_SYNC_BUILTIN
2707 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2708 #include "sync-builtins.def"
2709 #undef DEF_SYNC_BUILTIN
2710 case BUILT_IN_GOMP_ATOMIC_START:
2711 case BUILT_IN_GOMP_ATOMIC_END:
2712 case BUILT_IN_GOMP_BARRIER:
2713 case BUILT_IN_GOMP_BARRIER_CANCEL:
2714 case BUILT_IN_GOMP_TASKWAIT:
2715 case BUILT_IN_GOMP_TASKGROUP_END:
2716 case BUILT_IN_GOMP_CRITICAL_START:
2717 case BUILT_IN_GOMP_CRITICAL_END:
2718 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2719 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2720 case BUILT_IN_GOMP_LOOP_END:
2721 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2722 case BUILT_IN_GOMP_ORDERED_START:
2723 case BUILT_IN_GOMP_ORDERED_END:
2724 case BUILT_IN_GOMP_SECTIONS_END:
2725 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2726 case BUILT_IN_GOMP_SINGLE_COPY_START:
2727 case BUILT_IN_GOMP_SINGLE_COPY_END:
2728 return 1;
2729
2730 default:
2731 return -1;
2732 }
2733 return -1;
2734 }
2735
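/* For instance, for a call such as memset (p, 0, n) the "fn spec" data of
   the builtin describes that no global memory is read and that writes
   happen only through the first argument, with the access bounded by the
   length argument.  The loop above then builds an ao_ref from p and n,
   and a reference that cannot alias that region - for example one based
   on a different decl - is reported as unaffected (return value 0).  */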
2736 /* If the call CALL may use the memory reference REF return true,
2737 otherwise return false. */
2738
2739 static bool
2740 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2741 {
2742 tree base, callee;
2743 unsigned i;
2744 int flags = gimple_call_flags (call);
2745
2746 /* Const functions without a static chain do not implicitly use memory. */
2747 if (!gimple_call_chain (call)
2748 && (flags & (ECF_CONST|ECF_NOVOPS)))
2749 goto process_args;
2750
2751 /* A call that is not without side-effects might involve volatile
2752 accesses and thus conflicts with all other volatile accesses. */
2753 if (ref->volatile_p)
2754 return true;
2755
2756 callee = gimple_call_fndecl (call);
2757
2758 if (!gimple_call_chain (call) && callee != NULL_TREE)
2759 {
2760 struct cgraph_node *node = cgraph_node::get (callee);
2761 /* We cannot safely optimize based on the summary of the callee if it
2762 does not always bind to the current def: it is possible that a memory
2763 load was optimized out earlier and the interposed variant may not be
2764 optimized this way. */
2765 if (node && node->binds_to_current_def_p ())
2766 {
2767 modref_summary *summary = get_modref_function_summary (node);
2768 if (summary && !summary->calls_interposable)
2769 {
2770 if (!modref_may_conflict (call, summary->loads, ref, tbaa_p))
2771 {
2772 alias_stats.modref_use_no_alias++;
2773 if (dump_file && (dump_flags & TDF_DETAILS))
2774 {
2775 fprintf (dump_file,
2776 "ipa-modref: call stmt ");
2777 print_gimple_stmt (dump_file, call, 0);
2778 fprintf (dump_file,
2779 "ipa-modref: call to %s does not use ",
2780 node->dump_name ());
2781 if (!ref->ref && ref->base)
2782 {
2783 fprintf (dump_file, "base: ");
2784 print_generic_expr (dump_file, ref->base);
2785 }
2786 else if (ref->ref)
2787 {
2788 fprintf (dump_file, "ref: ");
2789 print_generic_expr (dump_file, ref->ref);
2790 }
2791 fprintf (dump_file, " alias sets: %i->%i\n",
2792 ao_ref_base_alias_set (ref),
2793 ao_ref_alias_set (ref));
2794 }
2795 goto process_args;
2796 }
2797 alias_stats.modref_use_may_alias++;
2798 }
2799 }
2800 }
2801
2802 base = ao_ref_base (ref);
2803 if (!base)
2804 return true;
2805
2806 /* If the reference is based on a decl that is not aliased the call
2807 cannot possibly use it. */
2808 if (DECL_P (base)
2809 && !may_be_aliased (base)
2810 /* But local statics can be used through recursion. */
2811 && !is_global_var (base))
2812 goto process_args;
2813
2814 if (int res = check_fnspec (call, ref, false))
2815 {
2816 if (res == 1)
2817 return true;
2818 }
2819 else
2820 goto process_args;
2821
2822 /* Check if base is a global static variable that is not read
2823 by the function. */
2824 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2825 {
2826 struct cgraph_node *node = cgraph_node::get (callee);
2827 bitmap read;
2828 int id;
2829
2830 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2831 node yet. We should enforce that there are nodes for all decls in the
2832 IL and remove this check instead. */
2833 if (node
2834 && (id = ipa_reference_var_uid (base)) != -1
2835 && (read = ipa_reference_get_read_global (node))
2836 && !bitmap_bit_p (read, id))
2837 goto process_args;
2838 }
2839
2840 /* Check if the base variable is call-used. */
2841 if (DECL_P (base))
2842 {
2843 if (pt_solution_includes (gimple_call_use_set (call), base))
2844 return true;
2845 }
2846 else if ((TREE_CODE (base) == MEM_REF
2847 || TREE_CODE (base) == TARGET_MEM_REF)
2848 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2849 {
2850 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2851 if (!pi)
2852 return true;
2853
2854 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2855 return true;
2856 }
2857 else
2858 return true;
2859
2860 /* Inspect call arguments for passed-by-value aliases. */
2861 process_args:
2862 for (i = 0; i < gimple_call_num_args (call); ++i)
2863 {
2864 tree op = gimple_call_arg (call, i);
2865 int flags = gimple_call_arg_flags (call, i);
2866
2867 if (flags & (EAF_UNUSED | EAF_NO_DIRECT_READ))
2868 continue;
2869
2870 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2871 op = TREE_OPERAND (op, 0);
2872
2873 if (TREE_CODE (op) != SSA_NAME
2874 && !is_gimple_min_invariant (op))
2875 {
2876 ao_ref r;
2877 ao_ref_init (&r, op);
2878 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2879 return true;
2880 }
2881 }
2882
2883 return false;
2884 }
2885
2886 static bool
2887 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2888 {
2889 bool res;
2890 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2891 if (res)
2892 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2893 else
2894 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2895 return res;
2896 }
2897
2898
2899 /* If the statement STMT may use the memory reference REF return
2900 true, otherwise return false. */
2901
2902 bool
2903 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2904 {
2905 if (is_gimple_assign (stmt))
2906 {
2907 tree rhs;
2908
2909 /* All assignments that touch memory are "single" assignments. */
2910 if (!gimple_assign_single_p (stmt))
2911 return false;
2912
2913 rhs = gimple_assign_rhs1 (stmt);
2914 if (is_gimple_reg (rhs)
2915 || is_gimple_min_invariant (rhs)
2916 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2917 return false;
2918
2919 return refs_may_alias_p (rhs, ref, tbaa_p);
2920 }
2921 else if (is_gimple_call (stmt))
2922 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2923 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2924 {
2925 tree retval = gimple_return_retval (return_stmt);
2926 if (retval
2927 && TREE_CODE (retval) != SSA_NAME
2928 && !is_gimple_min_invariant (retval)
2929 && refs_may_alias_p (retval, ref, tbaa_p))
2930 return true;
2931 /* If ref escapes the function then the return acts as a use. */
2932 tree base = ao_ref_base (ref);
2933 if (!base)
2934 ;
2935 else if (DECL_P (base))
2936 return is_global_var (base);
2937 else if (TREE_CODE (base) == MEM_REF
2938 || TREE_CODE (base) == TARGET_MEM_REF)
2939 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2940 return false;
2941 }
2942
2943 return true;
2944 }
2945
2946 bool
2947 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2948 {
2949 ao_ref r;
2950 ao_ref_init (&r, ref);
2951 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2952 }
2953
2954 /* If the call in statement CALL may clobber the memory reference REF
2955 return true, otherwise return false. */
2956
2957 bool
2958 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2959 {
2960 tree base;
2961 tree callee;
2962
2963 /* If the call is pure or const it cannot clobber anything. */
2964 if (gimple_call_flags (call)
2965 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2966 return false;
2967 if (gimple_call_internal_p (call))
2968 switch (gimple_call_internal_fn (call))
2969 {
2970 /* Treat these internal calls like ECF_PURE for aliasing,
2971 they don't write to any memory the program should care about.
2972 They have important other side-effects, and read memory,
2973 so can't be ECF_NOVOPS. */
2974 case IFN_UBSAN_NULL:
2975 case IFN_UBSAN_BOUNDS:
2976 case IFN_UBSAN_VPTR:
2977 case IFN_UBSAN_OBJECT_SIZE:
2978 case IFN_UBSAN_PTR:
2979 case IFN_ASAN_CHECK:
2980 return false;
2981 default:
2982 break;
2983 }
2984
2985 callee = gimple_call_fndecl (call);
2986
2987 if (callee != NULL_TREE && !ref->volatile_p)
2988 {
2989 struct cgraph_node *node = cgraph_node::get (callee);
2990 if (node)
2991 {
2992 modref_summary *summary = get_modref_function_summary (node);
2993 if (summary)
2994 {
2995 if (!modref_may_conflict (call, summary->stores, ref, tbaa_p)
2996 && (!summary->writes_errno
2997 || !targetm.ref_may_alias_errno (ref)))
2998 {
2999 alias_stats.modref_clobber_no_alias++;
3000 if (dump_file && (dump_flags & TDF_DETAILS))
3001 {
3002 fprintf (dump_file,
3003 "ipa-modref: call stmt ");
3004 print_gimple_stmt (dump_file, call, 0);
3005 fprintf (dump_file,
3006 "ipa-modref: call to %s does not clobber ",
3007 node->dump_name ());
3008 if (!ref->ref && ref->base)
3009 {
3010 fprintf (dump_file, "base: ");
3011 print_generic_expr (dump_file, ref->base);
3012 }
3013 else if (ref->ref)
3014 {
3015 fprintf (dump_file, "ref: ");
3016 print_generic_expr (dump_file, ref->ref);
3017 }
3018 fprintf (dump_file, " alias sets: %i->%i\n",
3019 ao_ref_base_alias_set (ref),
3020 ao_ref_alias_set (ref));
3021 }
3022 return false;
3023 }
3024 alias_stats.modref_clobber_may_alias++;
3025 }
3026 }
3027 }
3028
3029 base = ao_ref_base (ref);
3030 if (!base)
3031 return true;
3032
3033 if (TREE_CODE (base) == SSA_NAME
3034 || CONSTANT_CLASS_P (base))
3035 return false;
3036
3037 /* A call that is not without side-effects might involve volatile
3038 accesses and thus conflicts with all other volatile accesses. */
3039 if (ref->volatile_p)
3040 return true;
3041
3042 /* If the reference is based on a decl that is not aliased the call
3043 cannot possibly clobber it. */
3044 if (DECL_P (base)
3045 && !may_be_aliased (base)
3046 /* But local non-readonly statics can be modified through recursion
3047 or the call may implement a threading barrier which we must
3048 treat as may-def. */
3049 && (TREE_READONLY (base)
3050 || !is_global_var (base)))
3051 return false;
3052
3053 /* If the reference is based on a pointer that points to memory
3054 that may not be written to then the call cannot possibly clobber it. */
3055 if ((TREE_CODE (base) == MEM_REF
3056 || TREE_CODE (base) == TARGET_MEM_REF)
3057 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
3058 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
3059 return false;
3060
3061 if (int res = check_fnspec (call, ref, true))
3062 {
3063 if (res == 1)
3064 return true;
3065 }
3066 else
3067 return false;
3068
3069 /* Check if base is a global static variable that is not written
3070 by the function. */
3071 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
3072 {
3073 struct cgraph_node *node = cgraph_node::get (callee);
3074 bitmap written;
3075 int id;
3076
3077 if (node
3078 && (id = ipa_reference_var_uid (base)) != -1
3079 && (written = ipa_reference_get_written_global (node))
3080 && !bitmap_bit_p (written, id))
3081 return false;
3082 }
3083
3084 /* Check if the base variable is call-clobbered. */
3085 if (DECL_P (base))
3086 return pt_solution_includes (gimple_call_clobber_set (call), base);
3087 else if ((TREE_CODE (base) == MEM_REF
3088 || TREE_CODE (base) == TARGET_MEM_REF)
3089 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3090 {
3091 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
3092 if (!pi)
3093 return true;
3094
3095 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
3096 }
3097
3098 return true;
3099 }
3100
3101 /* If the call in statement CALL may clobber the memory reference REF
3102 return true, otherwise return false. */
3103
3104 bool
3105 call_may_clobber_ref_p (gcall *call, tree ref, bool tbaa_p)
3106 {
3107 bool res;
3108 ao_ref r;
3109 ao_ref_init (&r, ref);
3110 res = call_may_clobber_ref_p_1 (call, &r, tbaa_p);
3111 if (res)
3112 ++alias_stats.call_may_clobber_ref_p_may_alias;
3113 else
3114 ++alias_stats.call_may_clobber_ref_p_no_alias;
3115 return res;
3116 }
3117
3118
3119 /* If the statement STMT may clobber the memory reference REF return true,
3120 otherwise return false. */
3121
3122 bool
3123 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
3124 {
3125 if (is_gimple_call (stmt))
3126 {
3127 tree lhs = gimple_call_lhs (stmt);
3128 if (lhs
3129 && TREE_CODE (lhs) != SSA_NAME)
3130 {
3131 ao_ref r;
3132 ao_ref_init (&r, lhs);
3133 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
3134 return true;
3135 }
3136
3137 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref, tbaa_p);
3138 }
3139 else if (gimple_assign_single_p (stmt))
3140 {
3141 tree lhs = gimple_assign_lhs (stmt);
3142 if (TREE_CODE (lhs) != SSA_NAME)
3143 {
3144 ao_ref r;
3145 ao_ref_init (&r, lhs);
3146 return refs_may_alias_p_1 (ref, &r, tbaa_p);
3147 }
3148 }
3149 else if (gimple_code (stmt) == GIMPLE_ASM)
3150 return true;
3151
3152 return false;
3153 }
3154
3155 bool
3156 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
3157 {
3158 ao_ref r;
3159 ao_ref_init (&r, ref);
3160 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
3161 }
3162
3163 /* Return true if store1 and store2 described by corresponding tuples
3164 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3165 address. */
3166
3167 static bool
3168 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
3169 poly_int64 max_size1,
3170 tree base2, poly_int64 offset2, poly_int64 size2,
3171 poly_int64 max_size2)
3172 {
3173 /* Offsets need to be 0. */
3174 if (maybe_ne (offset1, 0)
3175 || maybe_ne (offset2, 0))
3176 return false;
3177
3178 bool base1_obj_p = SSA_VAR_P (base1);
3179 bool base2_obj_p = SSA_VAR_P (base2);
3180
3181 /* We need one object. */
3182 if (base1_obj_p == base2_obj_p)
3183 return false;
3184 tree obj = base1_obj_p ? base1 : base2;
3185
3186 /* And we need one MEM_REF. */
3187 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
3188 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
3189 if (base1_memref_p == base2_memref_p)
3190 return false;
3191 tree memref = base1_memref_p ? base1 : base2;
3192
3193 /* Sizes need to be valid. */
3194 if (!known_size_p (max_size1)
3195 || !known_size_p (max_size2)
3196 || !known_size_p (size1)
3197 || !known_size_p (size2))
3198 return false;
3199
3200 /* Max_size needs to match size. */
3201 if (maybe_ne (max_size1, size1)
3202 || maybe_ne (max_size2, size2))
3203 return false;
3204
3205 /* Sizes need to match. */
3206 if (maybe_ne (size1, size2))
3207 return false;
3208
3209
3210 /* Check that memref is a store through a pointer with singleton points-to info. */
3211 if (!integer_zerop (TREE_OPERAND (memref, 1)))
3212 return false;
3213 tree ptr = TREE_OPERAND (memref, 0);
3214 if (TREE_CODE (ptr) != SSA_NAME)
3215 return false;
3216 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3217 unsigned int pt_uid;
3218 if (pi == NULL
3219 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
3220 return false;
3221
3222 /* Be conservative with non-call exceptions when the address might
3223 be NULL. */
3224 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
3225 return false;
3226
3227 /* Check that ptr points relative to obj. */
3228 unsigned int obj_uid = DECL_PT_UID (obj);
3229 if (obj_uid != pt_uid)
3230 return false;
3231
3232 /* Check that the object size is the same as the store size.  That ensures
3233 that ptr points to the start of obj. */
3234 return (DECL_SIZE (obj)
3235 && poly_int_tree_p (DECL_SIZE (obj))
3236 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
3237 }
3238
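/* An example of the situation handled above:

     int x;
     int *p = &x;
     *p = 1;
     x = 2;

   Here p has singleton points-to information ({ x }), both stores start
   at offset zero, and their size equals DECL_SIZE (x), so the MEM_REF
   store through p and the store to the decl x can be recognized as
   writing the same address.  */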
3239 /* Return true if REF is killed by a store described by
3240 BASE, OFFSET, SIZE and MAX_SIZE. */
3241
3242 static bool
3243 store_kills_ref_p (tree base, poly_int64 offset, poly_int64 size,
3244 poly_int64 max_size, ao_ref *ref)
3245 {
3246 poly_int64 ref_offset = ref->offset;
3247 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3248 so base == ref->base does not always hold. */
3249 if (base != ref->base)
3250 {
3251 /* Try using points-to info. */
3252 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
3253 ref->offset, ref->size, ref->max_size))
3254 return true;
3255
3256 /* If both base and ref->base are MEM_REFs, only compare the
3257 first operand, and if the second operand isn't equal constant,
3258 try to add the offsets into offset and ref_offset. */
3259 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
3260 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
3261 {
3262 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
3263 TREE_OPERAND (ref->base, 1)))
3264 {
3265 poly_offset_int off1 = mem_ref_offset (base);
3266 off1 <<= LOG2_BITS_PER_UNIT;
3267 off1 += offset;
3268 poly_offset_int off2 = mem_ref_offset (ref->base);
3269 off2 <<= LOG2_BITS_PER_UNIT;
3270 off2 += ref_offset;
3271 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
3272 size = -1;
3273 }
3274 }
3275 else
3276 size = -1;
3277 }
3278 /* For a must-alias check we need to be able to constrain
3279 the access properly. */
3280 return (known_eq (size, max_size)
3281 && known_subrange_p (ref_offset, ref->max_size, offset, size));
3282 }
3283
3284 /* If STMT kills the memory reference REF return true, otherwise
3285 return false. */
3286
3287 bool
3288 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
3289 {
3290 if (!ao_ref_base (ref))
3291 return false;
3292
3293 if (gimple_has_lhs (stmt)
3294 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
3295 /* The assignment is not necessarily carried out if it can throw
3296 and we can catch it in the current function where we could inspect
3297 the previous value.
3298 ??? We only need to care about the RHS throwing. For aggregate
3299 assignments or similar calls and non-call exceptions the LHS
3300 might throw as well. */
3301 && !stmt_can_throw_internal (cfun, stmt))
3302 {
3303 tree lhs = gimple_get_lhs (stmt);
3304 /* If LHS is literally a base of the access we are done. */
3305 if (ref->ref)
3306 {
3307 tree base = ref->ref;
3308 tree innermost_dropped_array_ref = NULL_TREE;
3309 if (handled_component_p (base))
3310 {
3311 tree saved_lhs0 = NULL_TREE;
3312 if (handled_component_p (lhs))
3313 {
3314 saved_lhs0 = TREE_OPERAND (lhs, 0);
3315 TREE_OPERAND (lhs, 0) = integer_zero_node;
3316 }
3317 do
3318 {
3319 /* Just compare the outermost handled component, if
3320 they are equal we have found a possible common
3321 base. */
3322 tree saved_base0 = TREE_OPERAND (base, 0);
3323 TREE_OPERAND (base, 0) = integer_zero_node;
3324 bool res = operand_equal_p (lhs, base, 0);
3325 TREE_OPERAND (base, 0) = saved_base0;
3326 if (res)
3327 break;
3328 /* Remember if we drop an array-ref that we need to
3329 double-check not being at struct end. */
3330 if (TREE_CODE (base) == ARRAY_REF
3331 || TREE_CODE (base) == ARRAY_RANGE_REF)
3332 innermost_dropped_array_ref = base;
3333 /* Otherwise drop handled components of the access. */
3334 base = saved_base0;
3335 }
3336 while (handled_component_p (base));
3337 if (saved_lhs0)
3338 TREE_OPERAND (lhs, 0) = saved_lhs0;
3339 }
3340 /* Finally check if the lhs has the same address and size as the
3341 base candidate of the access. Watch out if we have dropped
3342 an array-ref that was at struct end, this means ref->ref may
3343 be outside of the TYPE_SIZE of its base. */
3344 if ((! innermost_dropped_array_ref
3345 || ! array_at_struct_end_p (innermost_dropped_array_ref))
3346 && (lhs == base
3347 || (((TYPE_SIZE (TREE_TYPE (lhs))
3348 == TYPE_SIZE (TREE_TYPE (base)))
3349 || (TYPE_SIZE (TREE_TYPE (lhs))
3350 && TYPE_SIZE (TREE_TYPE (base))
3351 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
3352 TYPE_SIZE (TREE_TYPE (base)),
3353 0)))
3354 && operand_equal_p (lhs, base,
3355 OEP_ADDRESS_OF
3356 | OEP_MATCH_SIDE_EFFECTS))))
3357 {
3358 ++alias_stats.stmt_kills_ref_p_yes;
3359 return true;
3360 }
3361 }
3362
3363 /* Now look for non-literal equal bases with the restriction of
3364 handling constant offset and size. */
3365 /* For a must-alias check we need to be able to constrain
3366 the access properly. */
3367 if (!ref->max_size_known_p ())
3368 {
3369 ++alias_stats.stmt_kills_ref_p_no;
3370 return false;
3371 }
3372 poly_int64 size, offset, max_size;
3373 bool reverse;
3374 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
3375 &reverse);
3376 if (store_kills_ref_p (base, offset, size, max_size, ref))
3377 {
3378 ++alias_stats.stmt_kills_ref_p_yes;
3379 return true;
3380 }
3381 }
3382
3383 if (is_gimple_call (stmt))
3384 {
3385 tree callee = gimple_call_fndecl (stmt);
3386 struct cgraph_node *node;
3387 modref_summary *summary;
3388
3389 /* Try to disambiguate using the modref summary.  Modref records a vector
3390 of stores with known offsets relative to function parameters that must
3391 happen on every execution of the function.  Find if we have a matching
3392 store and verify that the function cannot use the value. */
3393 if (callee != NULL_TREE
3394 && (node = cgraph_node::get (callee)) != NULL
3395 && node->binds_to_current_def_p ()
3396 && (summary = get_modref_function_summary (node)) != NULL
3397 && summary->kills.length ()
3398 && (!cfun->can_throw_non_call_exceptions
3399 || !stmt_can_throw_internal (cfun, stmt)))
3400 {
3401 for (auto kill : summary->kills)
3402 {
3403 ao_ref dref;
3404
3405 /* We can only do useful compares if we know the access range
3406 precisely. */
3407 if (!kill.get_ao_ref (as_a <gcall *> (stmt), &dref))
3408 continue;
3409 if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
3410 dref.size, dref.max_size, ref))
3411 {
3412 /* For the store to kill REF, REF must not be used
3413 earlier by the call. */
3414 if (ref_maybe_used_by_call_p_1 (as_a <gcall *> (stmt), ref,
3415 true)
3416 || !dbg_cnt (ipa_mod_ref))
3417 break;
3418 if (dump_file && (dump_flags & TDF_DETAILS))
3419 {
3420 fprintf (dump_file,
3421 "ipa-modref: call stmt ");
3422 print_gimple_stmt (dump_file, stmt, 0);
3423 fprintf (dump_file,
3424 "ipa-modref: call to %s kills ",
3425 node->dump_name ());
3426 print_generic_expr (dump_file, ref->base);
3427 }
3428 ++alias_stats.modref_kill_yes;
3429 return true;
3430 }
3431 }
3432 ++alias_stats.modref_kill_no;
3433 }
3434 if (callee != NULL_TREE
3435 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3436 switch (DECL_FUNCTION_CODE (callee))
3437 {
3438 case BUILT_IN_FREE:
3439 {
3440 tree ptr = gimple_call_arg (stmt, 0);
3441 tree base = ao_ref_base (ref);
3442 if (base && TREE_CODE (base) == MEM_REF
3443 && TREE_OPERAND (base, 0) == ptr)
3444 {
3445 ++alias_stats.stmt_kills_ref_p_yes;
3446 return true;
3447 }
3448 break;
3449 }
3450
3451 case BUILT_IN_MEMCPY:
3452 case BUILT_IN_MEMPCPY:
3453 case BUILT_IN_MEMMOVE:
3454 case BUILT_IN_MEMSET:
3455 case BUILT_IN_MEMCPY_CHK:
3456 case BUILT_IN_MEMPCPY_CHK:
3457 case BUILT_IN_MEMMOVE_CHK:
3458 case BUILT_IN_MEMSET_CHK:
3459 case BUILT_IN_STRNCPY:
3460 case BUILT_IN_STPNCPY:
3461 case BUILT_IN_CALLOC:
3462 {
3463 /* For a must-alias check we need to be able to constrain
3464 the access properly. */
3465 if (!ref->max_size_known_p ())
3466 {
3467 ++alias_stats.stmt_kills_ref_p_no;
3468 return false;
3469 }
3470 tree dest;
3471 tree len;
3472
3473 /* In execution order a calloc call will never kill
3474 anything. However, DSE will (ab)use this interface
3475 to ask if a calloc call writes the same memory locations
3476 as a later assignment, memset, etc. So handle calloc
3477 in the expected way. */
3478 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
3479 {
3480 tree arg0 = gimple_call_arg (stmt, 0);
3481 tree arg1 = gimple_call_arg (stmt, 1);
3482 if (TREE_CODE (arg0) != INTEGER_CST
3483 || TREE_CODE (arg1) != INTEGER_CST)
3484 {
3485 ++alias_stats.stmt_kills_ref_p_no;
3486 return false;
3487 }
3488
3489 dest = gimple_call_lhs (stmt);
3490 if (!dest)
3491 {
3492 ++alias_stats.stmt_kills_ref_p_no;
3493 return false;
3494 }
3495 len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
3496 }
3497 else
3498 {
3499 dest = gimple_call_arg (stmt, 0);
3500 len = gimple_call_arg (stmt, 2);
3501 }
3502 if (!poly_int_tree_p (len))
3503 return false;
3504 ao_ref dref;
3505 ao_ref_init_from_ptr_and_size (&dref, dest, len);
3506 if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
3507 dref.size, dref.max_size, ref))
3508 {
3509 ++alias_stats.stmt_kills_ref_p_yes;
3510 return true;
3511 }
3512 break;
3513 }
3514
3515 case BUILT_IN_VA_END:
3516 {
3517 tree ptr = gimple_call_arg (stmt, 0);
3518 if (TREE_CODE (ptr) == ADDR_EXPR)
3519 {
3520 tree base = ao_ref_base (ref);
3521 if (TREE_OPERAND (ptr, 0) == base)
3522 {
3523 ++alias_stats.stmt_kills_ref_p_yes;
3524 return true;
3525 }
3526 }
3527 break;
3528 }
3529
3530 default:;
3531 }
3532 }
3533 ++alias_stats.stmt_kills_ref_p_no;
3534 return false;
3535 }
3536
3537 bool
3538 stmt_kills_ref_p (gimple *stmt, tree ref)
3539 {
3540 ao_ref r;
3541 ao_ref_init (&r, ref);
3542 return stmt_kills_ref_p (stmt, &r);
3543 }
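
/* A minimal usage sketch (example_earlier_store_dead_p is hypothetical, not
   part of GCC, and only illustrates the entry point above): the classic
   dead-store question is whether a later statement LATER completely
   overwrites the lhs of an earlier store EARLIER; the separate check that
   nothing reads the stored value in between is not shown here.  */

static bool
example_earlier_store_dead_p (gimple *earlier, gimple *later)
{
  tree lhs = gimple_get_lhs (earlier);
  if (!lhs)
    return false;
  ao_ref r;
  ao_ref_init (&r, lhs);
  /* True if LATER is guaranteed to overwrite all of LHS.  */
  return stmt_kills_ref_p (later, &r);
}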
3544
3545
3546 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3547 TARGET or a statement clobbering the memory reference REF, in which
3548 case false is returned. The walk starts with VUSE, one argument of PHI. */
3549
3550 static bool
3551 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
3552 ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
3553 bitmap *visited, bool abort_on_visited,
3554 void *(*translate)(ao_ref *, tree, void *, translate_flags *),
3555 translate_flags disambiguate_only,
3556 void *data)
3557 {
3558 basic_block bb = gimple_bb (phi);
3559
3560 if (!*visited)
3561 *visited = BITMAP_ALLOC (NULL);
3562
3563 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
3564
3565 /* Walk until we hit the target. */
3566 while (vuse != target)
3567 {
3568 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
3569 /* If we are searching for the target VUSE by walking up to
3570 TARGET_BB dominating the original PHI we are finished once
3571 we reach a default def or a definition in a block dominating
3572 that block. Update TARGET and return. */
3573 if (!target
3574 && (gimple_nop_p (def_stmt)
3575 || dominated_by_p (CDI_DOMINATORS,
3576 target_bb, gimple_bb (def_stmt))))
3577 {
3578 target = vuse;
3579 return true;
3580 }
3581
3582 /* Recurse for PHI nodes. */
3583 if (gimple_code (def_stmt) == GIMPLE_PHI)
3584 {
3585 /* An already visited PHI node ends the walk successfully. */
3586 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
3587 return !abort_on_visited;
3588 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3589 visited, abort_on_visited,
3590 translate, data, disambiguate_only);
3591 if (!vuse)
3592 return false;
3593 continue;
3594 }
3595 else if (gimple_nop_p (def_stmt))
3596 return false;
3597 else
3598 {
3599 /* A clobbering statement or the end of the IL ends the walk with failure. */
3600 if ((int)limit <= 0)
3601 return false;
3602 --limit;
3603 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3604 {
3605 translate_flags tf = disambiguate_only;
3606 if (translate
3607 && (*translate) (ref, vuse, data, &tf) == NULL)
3608 ;
3609 else
3610 return false;
3611 }
3612 }
3613 /* If we reach a new basic-block see if we already skipped it
3614 in a previous walk that ended successfully. */
3615 if (gimple_bb (def_stmt) != bb)
3616 {
3617 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
3618 return !abort_on_visited;
3619 bb = gimple_bb (def_stmt);
3620 }
3621 vuse = gimple_vuse (def_stmt);
3622 }
3623 return true;
3624 }
3625
3626
3627 /* Starting from a PHI node for the virtual operand of the memory reference
3628 REF find a continuation virtual operand that allows us to continue walking
3629 statements dominating PHI, skipping only statements that cannot possibly
3630 clobber REF. Decrements LIMIT for each alias disambiguation done
3631 and aborts the walk, returning NULL_TREE if it reaches zero.
3632 Returns NULL_TREE if no suitable virtual operand can be found. */
3633
3634 tree
3635 get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
3636 unsigned int &limit, bitmap *visited,
3637 bool abort_on_visited,
3638 void *(*translate)(ao_ref *, tree, void *,
3639 translate_flags *),
3640 void *data,
3641 translate_flags disambiguate_only)
3642 {
3643 unsigned nargs = gimple_phi_num_args (phi);
3644
3645 /* We can simply look through a single-argument PHI. */
3646 if (nargs == 1)
3647 return PHI_ARG_DEF (phi, 0);
3648
3649 /* For two or more arguments try to pairwise skip non-aliasing code
3650 until we hit the phi argument definition that dominates the others. */
3651 basic_block phi_bb = gimple_bb (phi);
3652 tree arg0, arg1;
3653 unsigned i;
3654
3655 /* Find a candidate for the virtual operand whose definition
3656 dominates those of all others. */
3657 /* First look if any of the args themselves satisfy this. */
3658 for (i = 0; i < nargs; ++i)
3659 {
3660 arg0 = PHI_ARG_DEF (phi, i);
3661 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3662 break;
3663 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3664 if (def_bb != phi_bb
3665 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3666 break;
3667 arg0 = NULL_TREE;
3668 }
3669 /* If not, look if we can reach such candidate by walking defs
3670 until we hit the immediate dominator. maybe_skip_until will
3671 do that for us. */
3672 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3673
3674 /* Then check against the (to be) found candidate. */
3675 for (i = 0; i < nargs; ++i)
3676 {
3677 arg1 = PHI_ARG_DEF (phi, i);
3678 if (arg1 == arg0)
3679 ;
3680 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
3681 limit, visited,
3682 abort_on_visited,
3683 translate,
3684 /* Do not valueize when walking over
3685 backedges. */
3686 dominated_by_p
3687 (CDI_DOMINATORS,
3688 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3689 phi_bb)
3690 ? TR_DISAMBIGUATE
3691 : disambiguate_only, data))
3692 return NULL_TREE;
3693 }
3694
3695 return arg0;
3696 }
3697
3698 /* Based on the memory reference REF and its virtual use VUSE call
3699 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3700 itself. That is, for each virtual use whose defining statement
3701 does not clobber REF.
3702
3703 WALKER is called with REF, the current virtual use and DATA. If
3704 WALKER returns non-NULL the walk stops and its result is returned.
3705 At the end of a non-successful walk NULL is returned.
3706
3707 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3708 use whose defining statement may clobber REF, and DATA.
3709 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3710 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3711 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3712 to adjust REF and *DATA to make that valid.
3713
3714 VALUEIZE if non-NULL is called with the next VUSE that is considered
3715 and its return value is substituted for it. This can be used to
3716 implement optimistic value-numbering for example. Note that the
3717 VUSE argument is assumed to be valueized already.
3718
3719 LIMIT specifies the number of alias queries we are allowed to do;
3720 the walk stops when it reaches zero and NULL is returned. LIMIT
3721 is decremented by the number of alias queries (plus adjustments
3722 done by the callbacks) upon return.
3723
3724 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3725
3726 void *
3727 walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
3728 void *(*walker)(ao_ref *, tree, void *),
3729 void *(*translate)(ao_ref *, tree, void *,
3730 translate_flags *),
3731 tree (*valueize)(tree),
3732 unsigned &limit, void *data)
3733 {
3734 bitmap visited = NULL;
3735 void *res;
3736 bool translated = false;
3737
3738 timevar_push (TV_ALIAS_STMT_WALK);
3739
3740 do
3741 {
3742 gimple *def_stmt;
3743
3744 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3745 res = (*walker) (ref, vuse, data);
3746 /* Abort walk. */
3747 if (res == (void *)-1)
3748 {
3749 res = NULL;
3750 break;
3751 }
3752 /* Lookup succeeded. */
3753 else if (res != NULL)
3754 break;
3755
3756 if (valueize)
3757 {
3758 vuse = valueize (vuse);
3759 if (!vuse)
3760 {
3761 res = NULL;
3762 break;
3763 }
3764 }
3765 def_stmt = SSA_NAME_DEF_STMT (vuse);
3766 if (gimple_nop_p (def_stmt))
3767 break;
3768 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3769 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3770 &visited, translated, translate, data);
3771 else
3772 {
3773 if ((int)limit <= 0)
3774 {
3775 res = NULL;
3776 break;
3777 }
3778 --limit;
3779 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3780 {
3781 if (!translate)
3782 break;
3783 translate_flags disambiguate_only = TR_TRANSLATE;
3784 res = (*translate) (ref, vuse, data, &disambiguate_only);
3785 /* Failed lookup and translation. */
3786 if (res == (void *)-1)
3787 {
3788 res = NULL;
3789 break;
3790 }
3791 /* Lookup succeeded. */
3792 else if (res != NULL)
3793 break;
3794 /* Translation succeeded, continue walking. */
3795 translated = translated || disambiguate_only == TR_TRANSLATE;
3796 }
3797 vuse = gimple_vuse (def_stmt);
3798 }
3799 }
3800 while (vuse);
3801
3802 if (visited)
3803 BITMAP_FREE (visited);
3804
3805 timevar_pop (TV_ALIAS_STMT_WALK);
3806
3807 return res;
3808 }
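
/* A minimal usage sketch (example_find_defining_store and the call shown in
   the comment below are hypothetical, not part of GCC): a walker callback
   that stops the walk when it finds a statement storing to exactly the
   location REF, assuming REF was initialized from a GENERIC tree so that
   ref->ref is non-NULL.  Returning non-NULL stops walk_non_aliased_vuses
   and that value is propagated back to the caller.  */

static void *
example_find_defining_store (ao_ref *ref, tree vuse, void *)
{
  gimple *def = SSA_NAME_DEF_STMT (vuse);
  if (gimple_assign_single_p (def)
      && operand_equal_p (gimple_assign_lhs (def), ref->ref, 0))
    return def;
  /* NULL continues the walk to the next non-clobbering virtual use.  */
  return NULL;
}

/* A caller loading from EXPR at LOAD_STMT could then do something like:

     ao_ref r;
     ao_ref_init (&r, expr);
     unsigned limit = 100;	(arbitrary alias-query budget)
     gimple *def
       = (gimple *) walk_non_aliased_vuses (&r, gimple_vuse (load_stmt), true,
					    example_find_defining_store,
					    NULL, NULL, limit, NULL);  */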
3809
3810
3811 /* Based on the memory reference REF call WALKER for each vdef whose
3812 defining statement may clobber REF, starting with VDEF. If REF
3813 is NULL_TREE, each defining statement is visited.
3814
3815 WALKER is called with REF, the current vdef and DATA. If WALKER
3816 returns true the walk is stopped, otherwise it continues.
3817
3818 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3819 The pointer may be NULL and then we do not track this information.
3820
3821 At PHI nodes walk_aliased_vdefs forks into one walk for each
3822 PHI argument (but only one walk continues at merge points); the
3823 return value is true if any of the walks was successful.
3824
3825 The function returns the number of statements walked or -1 if
3826 LIMIT stmts were walked and the walk was aborted at this point.
3827 If LIMIT is zero the walk is not aborted. */
3828
3829 static int
3830 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3831 bool (*walker)(ao_ref *, tree, void *), void *data,
3832 bitmap *visited, unsigned int cnt,
3833 bool *function_entry_reached, unsigned limit)
3834 {
3835 do
3836 {
3837 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3838
3839 if (*visited
3840 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3841 return cnt;
3842
3843 if (gimple_nop_p (def_stmt))
3844 {
3845 if (function_entry_reached)
3846 *function_entry_reached = true;
3847 return cnt;
3848 }
3849 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3850 {
3851 unsigned i;
3852 if (!*visited)
3853 *visited = BITMAP_ALLOC (NULL);
3854 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3855 {
3856 int res = walk_aliased_vdefs_1 (ref,
3857 gimple_phi_arg_def (def_stmt, i),
3858 walker, data, visited, cnt,
3859 function_entry_reached, limit);
3860 if (res == -1)
3861 return -1;
3862 cnt = res;
3863 }
3864 return cnt;
3865 }
3866
3867 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3868 cnt++;
3869 if (cnt == limit)
3870 return -1;
3871 if ((!ref
3872 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3873 && (*walker) (ref, vdef, data))
3874 return cnt;
3875
3876 vdef = gimple_vuse (def_stmt);
3877 }
3878 while (1);
3879 }
3880
3881 int
3882 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3883 bool (*walker)(ao_ref *, tree, void *), void *data,
3884 bitmap *visited,
3885 bool *function_entry_reached, unsigned int limit)
3886 {
3887 bitmap local_visited = NULL;
3888 int ret;
3889
3890 timevar_push (TV_ALIAS_STMT_WALK);
3891
3892 if (function_entry_reached)
3893 *function_entry_reached = false;
3894
3895 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3896 visited ? visited : &local_visited, 0,
3897 function_entry_reached, limit);
3898 if (local_visited)
3899 BITMAP_FREE (local_visited);
3900
3901 timevar_pop (TV_ALIAS_STMT_WALK);
3902
3903 return ret;
3904 }
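
/* A minimal usage sketch (example_collect_clobbers is hypothetical, not part
   of GCC): a walker that records every statement that may clobber REF while
   walking the vdef chain backwards.  DATA is assumed to point to a
   vec<gimple *> owned by the caller.  */

static bool
example_collect_clobbers (ao_ref *ref ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  vec<gimple *> *clobbers = (vec<gimple *> *) data;
  clobbers->safe_push (SSA_NAME_DEF_STMT (vdef));
  /* Returning false keeps walking towards the function entry.  */
  return false;
}

/* A caller would invoke it as, e.g.:

     auto_vec<gimple *> clobbers;
     walk_aliased_vdefs (&r, gimple_vuse (use_stmt), example_collect_clobbers,
			 &clobbers, NULL, NULL, 0);  */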
3905
3906 /* Verify validity of the fnspec string.
3907 See attr-fnspec.h for details. */
3908
3909 void
3910 attr_fnspec::verify ()
3911 {
3912 bool err = false;
3913 if (!len)
3914 return;
3915
3916 /* Check return value specifier. */
3917 if (len < return_desc_size)
3918 err = true;
3919 else if ((len - return_desc_size) % arg_desc_size)
3920 err = true;
3921 else if ((str[0] < '1' || str[0] > '4')
3922 && str[0] != '.' && str[0] != 'm')
3923 err = true;
3924
3925 switch (str[1])
3926 {
3927 case ' ':
3928 case 'p':
3929 case 'P':
3930 case 'c':
3931 case 'C':
3932 break;
3933 default:
3934 err = true;
3935 }
3936 if (err)
3937 internal_error ("invalid fn spec attribute \"%s\"", str);
3938
3939 /* Now check all parameters. */
3940 for (unsigned int i = 0; arg_specified_p (i); i++)
3941 {
3942 unsigned int idx = arg_idx (i);
3943 switch (str[idx])
3944 {
3945 case 'x':
3946 case 'X':
3947 case 'r':
3948 case 'R':
3949 case 'o':
3950 case 'O':
3951 case 'w':
3952 case 'W':
3953 case '.':
3954 if ((str[idx + 1] >= '1' && str[idx + 1] <= '9')
3955 || str[idx + 1] == 't')
3956 {
3957 if (str[idx] != 'r' && str[idx] != 'R'
3958 && str[idx] != 'w' && str[idx] != 'W'
3959 && str[idx] != 'o' && str[idx] != 'O')
3960 err = true;
3961 if (str[idx + 1] != 't'
3962 /* Size specified is scalar, so it should be described
3963 by ". " if specified at all. */
3964 && (arg_specified_p (str[idx + 1] - '1')
3965 && str[arg_idx (str[idx + 1] - '1')] != '.'))
3966 err = true;
3967 }
3968 else if (str[idx + 1] != ' ')
3969 err = true;
3970 break;
3971 default:
3972 if (str[idx] < '1' || str[idx] > '9')
3973 err = true;
3974 }
3975 if (err)
3976 internal_error ("invalid fn spec attribute \"%s\" arg %i", str, i);
3977 }
3978 }
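
/* An illustrative example (example_fnspec is hypothetical, not used by GCC):
   a string shaped so that the structural checks above accept it -- a
   two-character return descriptor ('.' then ' ') followed by one
   two-character argument descriptor ('R' then ' ').  The meaning of the
   individual letters is documented in attr-fnspec.h.  */

static const char example_fnspec[] = ". R ";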
3979
3980 /* Return true if TYPE1 and TYPE2 will always give the same answer
3981 when compared with other types using same_type_for_tbaa_p. */
3982
3983 static bool
3984 types_equal_for_same_type_for_tbaa_p (tree type1, tree type2,
3985 bool lto_streaming_safe)
3986 {
3987 /* We use same_type_for_tbaa_p to match types in the access path.
3988 This check is overly conservative. */
3989 type1 = TYPE_MAIN_VARIANT (type1);
3990 type2 = TYPE_MAIN_VARIANT (type2);
3991
3992 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
3993 != TYPE_STRUCTURAL_EQUALITY_P (type2))
3994 return false;
3995 if (TYPE_STRUCTURAL_EQUALITY_P (type1))
3996 return true;
3997
3998 if (lto_streaming_safe)
3999 return type1 == type2;
4000 else
4001 return TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2);
4002 }
4003
4004 /* Compare REF1 and REF2 and return flags specifying their differences.
4005 If LTO_STREAMING_SAFE is true do not use alias sets and canonical
4006 types that are going to be recomputed.
4007 If TBAA is true also compare TBAA metadata. */
4008
4009 int
4010 ao_compare::compare_ao_refs (ao_ref *ref1, ao_ref *ref2,
4011 bool lto_streaming_safe,
4012 bool tbaa)
4013 {
4014 if (TREE_THIS_VOLATILE (ref1->ref) != TREE_THIS_VOLATILE (ref2->ref))
4015 return SEMANTICS;
4016 tree base1 = ao_ref_base (ref1);
4017 tree base2 = ao_ref_base (ref2);
4018
4019 if (!known_eq (ref1->offset, ref2->offset)
4020 || !known_eq (ref1->size, ref2->size)
4021 || !known_eq (ref1->max_size, ref2->max_size))
4022 return SEMANTICS;
4023
4024 /* For variable accesses we need to compare the actual access paths
4025 to check that both refs access the same address and have the same size. */
4026 if (!known_eq (ref1->size, ref1->max_size))
4027 {
4028 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (ref1->ref)),
4029 TYPE_SIZE (TREE_TYPE (ref2->ref)), 0))
4030 return SEMANTICS;
4031 tree r1 = ref1->ref;
4032 tree r2 = ref2->ref;
4033
4034 /* Handle toplevel COMPONENT_REFs of bitfields.
4035 Those are special since they are not allowed in
4036 ADDR_EXPR. */
4037 if (TREE_CODE (r1) == COMPONENT_REF
4038 && DECL_BIT_FIELD (TREE_OPERAND (r1, 1)))
4039 {
4040 if (TREE_CODE (r2) != COMPONENT_REF
4041 || !DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4042 return SEMANTICS;
4043 tree field1 = TREE_OPERAND (r1, 1);
4044 tree field2 = TREE_OPERAND (r2, 1);
4045 if (!operand_equal_p (DECL_FIELD_OFFSET (field1),
4046 DECL_FIELD_OFFSET (field2), 0)
4047 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field1),
4048 DECL_FIELD_BIT_OFFSET (field2), 0)
4049 || !operand_equal_p (DECL_SIZE (field1), DECL_SIZE (field2), 0)
4050 || !types_compatible_p (TREE_TYPE (r1),
4051 TREE_TYPE (r2)))
4052 return SEMANTICS;
4053 r1 = TREE_OPERAND (r1, 0);
4054 r2 = TREE_OPERAND (r2, 0);
4055 }
4056 else if (TREE_CODE (r2) == COMPONENT_REF
4057 && DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4058 return SEMANTICS;
4059
4060 /* Similarly for bit field refs. */
4061 if (TREE_CODE (r1) == BIT_FIELD_REF)
4062 {
4063 if (TREE_CODE (r2) != BIT_FIELD_REF
4064 || !operand_equal_p (TREE_OPERAND (r1, 1),
4065 TREE_OPERAND (r2, 1), 0)
4066 || !operand_equal_p (TREE_OPERAND (r1, 2),
4067 TREE_OPERAND (r2, 2), 0)
4068 || !types_compatible_p (TREE_TYPE (r1),
4069 TREE_TYPE (r2)))
4070 return SEMANTICS;
4071 r1 = TREE_OPERAND (r1, 0);
4072 r2 = TREE_OPERAND (r2, 0);
4073 }
4074 else if (TREE_CODE (r2) == BIT_FIELD_REF)
4075 return SEMANTICS;
4076
4077 /* Now we can compare the addresses of the actual memory accesses. */
4078 if (!operand_equal_p (r1, r2, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4079 return SEMANTICS;
4080 }
4081 /* For constant accesses we get more matches by comparing offset only. */
4082 else if (!operand_equal_p (base1, base2,
4083 OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4084 return SEMANTICS;
4085
4086 /* We can't simply use get_object_alignment_1 on the full
4087 reference since for accesses with variable indexes it reports
4088 an overly conservative alignment. */
4089 unsigned int align1, align2;
4090 unsigned HOST_WIDE_INT bitpos1, bitpos2;
4091 bool known1 = get_object_alignment_1 (base1, &align1, &bitpos1);
4092 bool known2 = get_object_alignment_1 (base2, &align2, &bitpos2);
4093 /* ??? For a MEM_REF get_object_alignment_1 determines the alignment from
4094 TYPE_ALIGN but still returns false. This seems to contradict
4095 its description. So compare even if the alignment is unknown. */
4096 if (known1 != known2
4097 || (bitpos1 != bitpos2 || align1 != align2))
4098 return SEMANTICS;
4099
4100 /* Now we know that the accesses are semantically the same. */
4101 int flags = 0;
4102
4103 /* ao_ref_base strips inner MEM_REF [&decl], recover from that here. */
4104 tree rbase1 = ref1->ref;
4105 if (rbase1)
4106 while (handled_component_p (rbase1))
4107 rbase1 = TREE_OPERAND (rbase1, 0);
4108 tree rbase2 = ref2->ref;
4109 while (handled_component_p (rbase2))
4110 rbase2 = TREE_OPERAND (rbase2, 0);
4111
4112 /* MEM_REFs and TARGET_MEM_REFs record dependence cliques which are used to
4113 implement restrict pointers. MR_DEPENDENCE_CLIQUE 0 means no information.
4114 Otherwise we need to match bases and cliques. */
4115 if ((((TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
4116 && MR_DEPENDENCE_CLIQUE (rbase1))
4117 || ((TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
4118 && MR_DEPENDENCE_CLIQUE (rbase2)))
4119 && (TREE_CODE (rbase1) != TREE_CODE (rbase2)
4120 || MR_DEPENDENCE_CLIQUE (rbase1) != MR_DEPENDENCE_CLIQUE (rbase2)
4121 || (MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))))
4122 flags |= DEPENDENCE_CLIQUE;
4123
4124 if (!tbaa)
4125 return flags;
4126
4127 /* Alias sets are not stable across LTO streaming; be conservative here
4128 and compare the types the alias sets are ultimately based on. */
4129 if (lto_streaming_safe)
4130 {
4131 tree t1 = ao_ref_alias_ptr_type (ref1);
4132 tree t2 = ao_ref_alias_ptr_type (ref2);
4133 if (!alias_ptr_types_compatible_p (t1, t2))
4134 flags |= REF_ALIAS_SET;
4135
4136 t1 = ao_ref_base_alias_ptr_type (ref1);
4137 t2 = ao_ref_base_alias_ptr_type (ref2);
4138 if (!alias_ptr_types_compatible_p (t1, t2))
4139 flags |= BASE_ALIAS_SET;
4140 }
4141 else
4142 {
4143 if (ao_ref_alias_set (ref1) != ao_ref_alias_set (ref2))
4144 flags |= REF_ALIAS_SET;
4145 if (ao_ref_base_alias_set (ref1) != ao_ref_base_alias_set (ref2))
4146 flags |= BASE_ALIAS_SET;
4147 }
4148
4149 /* The access path is used only for non-view-converted references. */
4150 bool view_converted = view_converted_memref_p (rbase1);
4151 if (view_converted_memref_p (rbase2) != view_converted)
4152 return flags | ACCESS_PATH;
4153 else if (view_converted)
4154 return flags;
4155
4156
4157 /* Find start of access paths and look for trailing arrays. */
4158 tree c1 = ref1->ref, c2 = ref2->ref;
4159 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
4160 int nskipped1 = 0, nskipped2 = 0;
4161 int i = 0;
4162
4163 for (tree p1 = ref1->ref; handled_component_p (p1); p1 = TREE_OPERAND (p1, 0))
4164 {
4165 if (component_ref_to_zero_sized_trailing_array_p (p1))
4166 end_struct_ref1 = p1;
4167 if (ends_tbaa_access_path_p (p1))
4168 c1 = p1, nskipped1 = i;
4169 i++;
4170 }
4171 for (tree p2 = ref2->ref; handled_component_p (p2); p2 = TREE_OPERAND (p2, 0))
4172 {
4173 if (component_ref_to_zero_sized_trailing_array_p (p2))
4174 end_struct_ref2 = p2;
4175 if (ends_tbaa_access_path_p (p2))
4176 c2 = p2, nskipped2 = i;
4177 i++;
4178 }
4179
4180 /* For variable accesses we cannot rely on the offset match below.
4181 We know that the paths are structurally the same, so only check that
4182 the starts of the TBAA paths did not diverge. */
4183 if (!known_eq (ref1->size, ref1->max_size)
4184 && nskipped1 != nskipped2)
4185 return flags | ACCESS_PATH;
4186
4187 /* Information about trailing refs is used by
4188 aliasing_component_refs_p, which is applied only if the paths
4189 have handled components. */
4190 if (!handled_component_p (c1) && !handled_component_p (c2))
4191 ;
4192 else if ((end_struct_ref1 != NULL) != (end_struct_ref2 != NULL))
4193 return flags | ACCESS_PATH;
4194 if (end_struct_ref1
4195 && TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref1))
4196 != TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref2)))
4197 return flags | ACCESS_PATH;
4198
4199 /* Now compare all handled components of the access path.
4200 We have three oracles that care about access paths:
4201 - aliasing_component_refs_p
4202 - nonoverlapping_refs_since_match_p
4203 - nonoverlapping_component_refs_p
4204 We need to match the things these oracles compare.
4205
4206 It is only necessary to check types for compatibility
4207 and offsets. The rest of what the oracles compare are actual
4208 addresses. Those are already known to be the same:
4209 - for constant accesses we check offsets
4210 - for variable accesses we already matched
4211 the path lexically with operand_equal_p. */
4212 while (true)
4213 {
4214 bool comp1 = handled_component_p (c1);
4215 bool comp2 = handled_component_p (c2);
4216
4217 if (comp1 != comp2)
4218 return flags | ACCESS_PATH;
4219 if (!comp1)
4220 break;
4221
4222 if (TREE_CODE (c1) != TREE_CODE (c2))
4223 return flags | ACCESS_PATH;
4224
4225 /* aliasing_component_refs_p attempts to find a type match within
4226 the paths. For that reason both types need to be equal
4227 with respect to same_type_for_tbaa_p. */
4228 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4229 TREE_TYPE (c2),
4230 lto_streaming_safe))
4231 return flags | ACCESS_PATH;
4232 if (component_ref_to_zero_sized_trailing_array_p (c1)
4233 != component_ref_to_zero_sized_trailing_array_p (c2))
4234 return flags | ACCESS_PATH;
4235
4236 /* aliasing_matching_component_refs_p compares
4237 offsets within the path. Other properties are ignored.
4238 Do not bother to verify offsets in variable accesses. Here we
4239 already compared them with operand_equal_p, so they are
4240 structurally the same. */
4241 if (!known_eq (ref1->size, ref1->max_size))
4242 {
4243 poly_int64 offadj1, sztmc1, msztmc1;
4244 bool reverse1;
4245 get_ref_base_and_extent (c1, &offadj1, &sztmc1, &msztmc1, &reverse1);
4246 poly_int64 offadj2, sztmc2, msztmc2;
4247 bool reverse2;
4248 get_ref_base_and_extent (c2, &offadj2, &sztmc2, &msztmc2, &reverse2);
4249 if (!known_eq (offadj1, offadj2))
4250 return flags | ACCESS_PATH;
4251 }
4252 c1 = TREE_OPERAND (c1, 0);
4253 c2 = TREE_OPERAND (c2, 0);
4254 }
4255 /* Finally test the access type. */
4256 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4257 TREE_TYPE (c2),
4258 lto_streaming_safe))
4259 return flags | ACCESS_PATH;
4260 return flags;
4261 }
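
/* A minimal usage sketch (example_refs_interchangeable_p is hypothetical,
   not part of GCC): deciding whether two GENERIC memory references MEM1 and
   MEM2 can be treated as interchangeable, as a pass like IPA ICF would.
   A return value of 0 from compare_ao_refs means no semantic, TBAA or
   dependence-clique differences were found.  */

static bool
example_refs_interchangeable_p (tree mem1, tree mem2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, mem1);
  ao_ref_init (&r2, mem2);
  ao_compare cmp;
  /* Compare within a single compilation unit (not LTO-streaming safe),
     including TBAA metadata.  */
  return cmp.compare_ao_refs (&r1, &r2, false, true) == 0;
}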
4262
4263 /* Hash REF to HSTATE. If LTO_STREAMING_SAFE do not use alias sets
4264 and canonical types. */
4265 void
4266 ao_compare::hash_ao_ref (ao_ref *ref, bool lto_streaming_safe, bool tbaa,
4267 inchash::hash &hstate)
4268 {
4269 tree base = ao_ref_base (ref);
4270 tree tbase = base;
4271
4272 if (!known_eq (ref->size, ref->max_size))
4273 {
4274 tree r = ref->ref;
4275 if (TREE_CODE (r) == COMPONENT_REF
4276 && DECL_BIT_FIELD (TREE_OPERAND (r, 1)))
4277 {
4278 tree field = TREE_OPERAND (r, 1);
4279 hash_operand (DECL_FIELD_OFFSET (field), hstate, 0);
4280 hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, 0);
4281 hash_operand (DECL_SIZE (field), hstate, 0);
4282 r = TREE_OPERAND (r, 0);
4283 }
4284 if (TREE_CODE (r) == BIT_FIELD_REF)
4285 {
4286 hash_operand (TREE_OPERAND (r, 1), hstate, 0);
4287 hash_operand (TREE_OPERAND (r, 2), hstate, 0);
4288 r = TREE_OPERAND (r, 0);
4289 }
4290 hash_operand (TYPE_SIZE (TREE_TYPE (ref->ref)), hstate, 0);
4291 hash_operand (r, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4292 }
4293 else
4294 {
4295 hash_operand (tbase, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4296 hstate.add_poly_int (ref->offset);
4297 hstate.add_poly_int (ref->size);
4298 hstate.add_poly_int (ref->max_size);
4299 }
4300 if (!lto_streaming_safe && tbaa)
4301 {
4302 hstate.add_int (ao_ref_alias_set (ref));
4303 hstate.add_int (ao_ref_base_alias_set (ref));
4304 }
4305 }
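
/* A minimal usage sketch (example_hash_memory_ref is hypothetical, not part
   of GCC): hashing a reference so that two refs for which compare_ao_refs
   returns 0 hash identically, e.g. as the hash function of a deduplicating
   hash table keyed on memory accesses.  */

static hashval_t
example_hash_memory_ref (tree mem)
{
  ao_ref r;
  ao_ref_init (&r, mem);
  ao_compare cmp;
  inchash::hash hstate;
  cmp.hash_ao_ref (&r, false, true, hstate);
  return hstate.end ();
}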