]>
Commit | Line | Data |
---|---|---|
4ee9c684 | 1 | /* Alias analysis for trees. |
fbd26352 | 2 | Copyright (C) 2004-2019 Free Software Foundation, Inc. |
4ee9c684 | 3 | Contributed by Diego Novillo <dnovillo@redhat.com> |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
8c4c00c1 | 9 | the Free Software Foundation; either version 3, or (at your option) |
4ee9c684 | 10 | any later version. |
11 | ||
12 | GCC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ | |
4ee9c684 | 20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
9ef16211 | 24 | #include "backend.h" |
7c29e30e | 25 | #include "target.h" |
26 | #include "rtl.h" | |
4ee9c684 | 27 | #include "tree.h" |
9ef16211 | 28 | #include "gimple.h" |
7c29e30e | 29 | #include "timevar.h" /* for TV_ALIAS_STMT_WALK */ |
9ef16211 | 30 | #include "ssa.h" |
7c29e30e | 31 | #include "cgraph.h" |
32 | #include "tree-pretty-print.h" | |
9ef16211 | 33 | #include "alias.h" |
b20a8bb4 | 34 | #include "fold-const.h" |
94ea8568 | 35 | #include "langhooks.h" |
b9ed1410 | 36 | #include "dumpfile.h" |
bc61cadb | 37 | #include "tree-eh.h" |
073c1fd5 | 38 | #include "tree-dfa.h" |
dd9784d4 | 39 | #include "ipa-reference.h" |
76bc343a | 40 | #include "varasm.h" |
a55dc2cd | 41 | |
dd277d48 | 42 | /* Broad overview of how alias analysis on gimple works: |
4fb5e5ca | 43 | |
dd277d48 | 44 | Statements clobbering or using memory are linked through the |
45 | virtual operand factored use-def chain. The virtual operand | |
46 | is unique per function, its symbol is accessible via gimple_vop (cfun). | |
47 | Virtual operands are used for efficiently walking memory statements | |
48 | in the gimple IL and are useful for things like value-numbering as | |
49 | a generation count for memory references. | |
339d7079 | 50 | |
dd277d48 | 51 | SSA_NAME pointers may have associated points-to information |
52 | accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive | |
53 | points-to information is (re-)computed by the TODO_rebuild_alias | |
54 | pass manager todo. Points-to information is also used for more | |
55 | precise tracking of call-clobbered and call-used variables and | |
56 | related disambiguations. | |
339d7079 | 57 | |
dd277d48 | 58 | This file contains functions for disambiguating memory references, |
59 | the so called alias-oracle and tools for walking of the gimple IL. | |
339d7079 | 60 | |
dd277d48 | 61 | The main alias-oracle entry-points are |
339d7079 | 62 | |
24500bba | 63 | bool stmt_may_clobber_ref_p (gimple *, tree) |
339d7079 | 64 | |
dd277d48 | 65 | This function queries if a statement may invalidate (parts of) |
66 | the memory designated by the reference tree argument. | |
339d7079 | 67 | |
24500bba | 68 | bool ref_maybe_used_by_stmt_p (gimple *, tree) |
339d7079 | 69 | |
dd277d48 | 70 | This function queries if a statement may need (parts of) the |
71 | memory designated by the reference tree argument. | |
339d7079 | 72 | |
dd277d48 | 73 | There are variants of these functions that only handle the call |
74 | part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p. | |
75 | Note that these do not disambiguate against a possible call lhs. | |
339d7079 | 76 | |
dd277d48 | 77 | bool refs_may_alias_p (tree, tree) |
339d7079 | 78 | |
dd277d48 | 79 | This function tries to disambiguate two reference trees. |
05931b00 | 80 | |
dd277d48 | 81 | bool ptr_deref_may_alias_global_p (tree) |
339d7079 | 82 | |
dd277d48 | 83 | This function queries if dereferencing a pointer variable may |
84 | alias global memory. | |
339d7079 | 85 | |
dd277d48 | 86 | More low-level disambiguators are available and documented in |
87 | this file. Low-level disambiguators dealing with points-to | |
88 | information are in tree-ssa-structalias.c. */ | |
339d7079 | 89 | |
/* Forward declarations of low-level disambiguators defined later in
   this file.  */
static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);

/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.
   Each predicate gets a pair (or triple) of counters: the number of
   queries answered "no alias" (a successful disambiguation) and the
   number answered "may alias".  Dumped by dump_alias_stats below.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
} alias_stats;
339d7079 | 111 | |
dd277d48 | 112 | void |
113 | dump_alias_stats (FILE *s) | |
114 | { | |
115 | fprintf (s, "\nAlias oracle query stats:\n"); | |
116 | fprintf (s, " refs_may_alias_p: " | |
117 | HOST_WIDE_INT_PRINT_DEC" disambiguations, " | |
118 | HOST_WIDE_INT_PRINT_DEC" queries\n", | |
119 | alias_stats.refs_may_alias_p_no_alias, | |
120 | alias_stats.refs_may_alias_p_no_alias | |
121 | + alias_stats.refs_may_alias_p_may_alias); | |
122 | fprintf (s, " ref_maybe_used_by_call_p: " | |
123 | HOST_WIDE_INT_PRINT_DEC" disambiguations, " | |
124 | HOST_WIDE_INT_PRINT_DEC" queries\n", | |
125 | alias_stats.ref_maybe_used_by_call_p_no_alias, | |
126 | alias_stats.refs_may_alias_p_no_alias | |
127 | + alias_stats.ref_maybe_used_by_call_p_may_alias); | |
128 | fprintf (s, " call_may_clobber_ref_p: " | |
129 | HOST_WIDE_INT_PRINT_DEC" disambiguations, " | |
130 | HOST_WIDE_INT_PRINT_DEC" queries\n", | |
131 | alias_stats.call_may_clobber_ref_p_no_alias, | |
132 | alias_stats.call_may_clobber_ref_p_no_alias | |
133 | + alias_stats.call_may_clobber_ref_p_may_alias); | |
bac90ef8 | 134 | fprintf (s, " nonoverlapping_component_refs_p: " |
135 | HOST_WIDE_INT_PRINT_DEC" disambiguations, " | |
136 | HOST_WIDE_INT_PRINT_DEC" queries\n", | |
137 | alias_stats.nonoverlapping_component_refs_p_no_alias, | |
138 | alias_stats.nonoverlapping_component_refs_p_no_alias | |
139 | + alias_stats.nonoverlapping_component_refs_p_may_alias); | |
3bcb81b2 | 140 | fprintf (s, " nonoverlapping_refs_since_match_p: " |
bac90ef8 | 141 | HOST_WIDE_INT_PRINT_DEC" disambiguations, " |
f9d6698b | 142 | HOST_WIDE_INT_PRINT_DEC" must overlaps, " |
bac90ef8 | 143 | HOST_WIDE_INT_PRINT_DEC" queries\n", |
3bcb81b2 | 144 | alias_stats.nonoverlapping_refs_since_match_p_no_alias, |
145 | alias_stats.nonoverlapping_refs_since_match_p_must_overlap, | |
146 | alias_stats.nonoverlapping_refs_since_match_p_no_alias | |
147 | + alias_stats.nonoverlapping_refs_since_match_p_may_alias | |
148 | + alias_stats.nonoverlapping_refs_since_match_p_must_overlap); | |
bac90ef8 | 149 | fprintf (s, " aliasing_component_refs_p: " |
ba73fec8 | 150 | HOST_WIDE_INT_PRINT_DEC" disambiguations, " |
151 | HOST_WIDE_INT_PRINT_DEC" queries\n", | |
152 | alias_stats.aliasing_component_refs_p_no_alias, | |
153 | alias_stats.aliasing_component_refs_p_no_alias | |
154 | + alias_stats.aliasing_component_refs_p_may_alias); | |
5b6bf098 | 155 | dump_alias_stats_in_alias_c (s); |
dd277d48 | 156 | } |
157 | ||
158 | ||
159 | /* Return true, if dereferencing PTR may alias with a global variable. */ | |
4ee9c684 | 160 | |
dd277d48 | 161 | bool |
162 | ptr_deref_may_alias_global_p (tree ptr) | |
4ee9c684 | 163 | { |
dd277d48 | 164 | struct ptr_info_def *pi; |
4fb5e5ca | 165 | |
dd277d48 | 166 | /* If we end up with a pointer constant here that may point |
167 | to global memory. */ | |
168 | if (TREE_CODE (ptr) != SSA_NAME) | |
169 | return true; | |
4ee9c684 | 170 | |
dd277d48 | 171 | pi = SSA_NAME_PTR_INFO (ptr); |
4ee9c684 | 172 | |
dd277d48 | 173 | /* If we do not have points-to information for this variable, |
174 | we have to punt. */ | |
175 | if (!pi) | |
176 | return true; | |
4ee9c684 | 177 | |
2a3ebafa | 178 | /* ??? This does not use TBAA to prune globals ptr may not access. */ |
dd277d48 | 179 | return pt_solution_includes_global (&pi->pt); |
4ee9c684 | 180 | } |
181 | ||
/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  Strip all POINTER_PLUS_EXPRs and
     recurse on the underlying pointer.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	/* &*p or &TMR -- continue with the inner pointer below.  */
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	/* &decl aliases DECL iff the two decls are the same object.  */
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	/* The address of a constant cannot alias a decl.  */
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
4ee9c684 | 247 | |
/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  Strip POINTER_PLUS_EXPRs from
     either side and recurse.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	/* &decl vs. ptr2 reduces to a pointer-vs-decl query.  */
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two same pointers.
     In this case we still want to say both pointers alias, so shortcut
     that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
335 | ||
047fdd47 | 336 | /* Return true if dereferencing PTR may alias *REF. |
337 | The caller is responsible for applying TBAA to see if PTR | |
338 | may access *REF at all. */ | |
339 | ||
340 | static bool | |
341 | ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref) | |
342 | { | |
343 | tree base = ao_ref_base (ref); | |
344 | ||
2622064f | 345 | if (TREE_CODE (base) == MEM_REF |
346 | || TREE_CODE (base) == TARGET_MEM_REF) | |
047fdd47 | 347 | return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0)); |
2622064f | 348 | else if (DECL_P (base)) |
047fdd47 | 349 | return ptr_deref_may_alias_decl_p (ptr, base); |
350 | ||
351 | return true; | |
352 | } | |
353 | ||
/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which needs points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object so that the SSA name, if any, is PTR2.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that restrict-pointed-to
	 may be in fact obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}
428 | ||
258bd648 | 429 | /* Returns whether reference REF to BASE may refer to global memory. */ |
8763c223 | 430 | |
258bd648 | 431 | static bool |
432 | ref_may_alias_global_p_1 (tree base) | |
8763c223 | 433 | { |
8763c223 | 434 | if (DECL_P (base)) |
435 | return is_global_var (base); | |
436 | else if (TREE_CODE (base) == MEM_REF | |
437 | || TREE_CODE (base) == TARGET_MEM_REF) | |
438 | return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0)); | |
439 | return true; | |
440 | } | |
441 | ||
258bd648 | 442 | bool |
443 | ref_may_alias_global_p (ao_ref *ref) | |
444 | { | |
445 | tree base = ao_ref_base (ref); | |
446 | return ref_may_alias_global_p_1 (base); | |
447 | } | |
448 | ||
449 | bool | |
450 | ref_may_alias_global_p (tree ref) | |
451 | { | |
452 | tree base = get_base_address (ref); | |
453 | return ref_may_alias_global_p_1 (base); | |
454 | } | |
455 | ||
8763c223 | 456 | /* Return true whether STMT may clobber global memory. */ |
457 | ||
458 | bool | |
42acab1c | 459 | stmt_may_clobber_global_p (gimple *stmt) |
8763c223 | 460 | { |
461 | tree lhs; | |
462 | ||
463 | if (!gimple_vdef (stmt)) | |
464 | return false; | |
465 | ||
466 | /* ??? We can ask the oracle whether an artificial pointer | |
467 | dereference with a pointer with points-to information covering | |
468 | all global memory (what about non-address taken memory?) maybe | |
469 | clobbered by this call. As there is at the moment no convenient | |
470 | way of doing that without generating garbage do some manual | |
471 | checking instead. | |
472 | ??? We could make a NULL ao_ref argument to the various | |
473 | predicates special, meaning any global memory. */ | |
474 | ||
475 | switch (gimple_code (stmt)) | |
476 | { | |
477 | case GIMPLE_ASSIGN: | |
478 | lhs = gimple_assign_lhs (stmt); | |
479 | return (TREE_CODE (lhs) != SSA_NAME | |
480 | && ref_may_alias_global_p (lhs)); | |
481 | case GIMPLE_CALL: | |
482 | return true; | |
483 | default: | |
484 | return true; | |
485 | } | |
486 | } | |
487 | ||
4ee9c684 | 488 | |
/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  /* Dump every local decl whose address may escape.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  /* Dump points-to info for every live SSA pointer that has any.  */
  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}
532 | ||
533 | ||
/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
541 | ||
542 | ||
7f81b5ee | 543 | /* Dump the points-to set *PT into FILE. */ |
4ee9c684 | 544 | |
d793732c | 545 | void |
7f81b5ee | 546 | dump_points_to_solution (FILE *file, struct pt_solution *pt) |
4ee9c684 | 547 | { |
7f81b5ee | 548 | if (pt->anything) |
549 | fprintf (file, ", points-to anything"); | |
4ee9c684 | 550 | |
7f81b5ee | 551 | if (pt->nonlocal) |
552 | fprintf (file, ", points-to non-local"); | |
4ee9c684 | 553 | |
7f81b5ee | 554 | if (pt->escaped) |
555 | fprintf (file, ", points-to escaped"); | |
556 | ||
1a981e1a | 557 | if (pt->ipa_escaped) |
558 | fprintf (file, ", points-to unit escaped"); | |
559 | ||
7f81b5ee | 560 | if (pt->null) |
561 | fprintf (file, ", points-to NULL"); | |
562 | ||
563 | if (pt->vars) | |
4ee9c684 | 564 | { |
7f81b5ee | 565 | fprintf (file, ", points-to vars: "); |
566 | dump_decl_set (file, pt->vars); | |
420582bc | 567 | if (pt->vars_contains_nonlocal |
5ffb4a0d | 568 | || pt->vars_contains_escaped |
569 | || pt->vars_contains_escaped_heap | |
570 | || pt->vars_contains_restrict) | |
571 | { | |
572 | const char *comma = ""; | |
573 | fprintf (file, " ("); | |
574 | if (pt->vars_contains_nonlocal) | |
575 | { | |
576 | fprintf (file, "nonlocal"); | |
577 | comma = ", "; | |
578 | } | |
579 | if (pt->vars_contains_escaped) | |
580 | { | |
581 | fprintf (file, "%sescaped", comma); | |
582 | comma = ", "; | |
583 | } | |
584 | if (pt->vars_contains_escaped_heap) | |
585 | { | |
586 | fprintf (file, "%sescaped heap", comma); | |
587 | comma = ", "; | |
588 | } | |
589 | if (pt->vars_contains_restrict) | |
76bc343a | 590 | { |
591 | fprintf (file, "%srestrict", comma); | |
592 | comma = ", "; | |
593 | } | |
594 | if (pt->vars_contains_interposable) | |
595 | fprintf (file, "%sinterposable", comma); | |
5ffb4a0d | 596 | fprintf (file, ")"); |
597 | } | |
7f81b5ee | 598 | } |
599 | } | |
4ee9c684 | 600 | |
c7d89805 | 601 | |
/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}
609 | ||
610 | DEBUG_FUNCTION void | |
611 | debug (pt_solution *ptr) | |
612 | { | |
613 | if (ptr) | |
614 | debug (*ptr); | |
615 | else | |
616 | fprintf (stderr, "<nil>\n"); | |
617 | } | |
618 | ||
619 | ||
7f81b5ee | 620 | /* Dump points-to information for SSA_NAME PTR into FILE. */ |
4ee9c684 | 621 | |
7f81b5ee | 622 | void |
623 | dump_points_to_info_for (FILE *file, tree ptr) | |
624 | { | |
625 | struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr); | |
4ee9c684 | 626 | |
7f81b5ee | 627 | print_generic_expr (file, ptr, dump_flags); |
1e1a4c8c | 628 | |
7f81b5ee | 629 | if (pi) |
630 | dump_points_to_solution (file, &pi->pt); | |
631 | else | |
632 | fprintf (file, ", points-to anything"); | |
4ee9c684 | 633 | |
634 | fprintf (file, "\n"); | |
635 | } | |
636 | ||
637 | ||
/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}
645 | ||
3918bd18 | 646 | |
647 | /* Initializes the alias-oracle reference representation *R from REF. */ | |
648 | ||
649 | void | |
650 | ao_ref_init (ao_ref *r, tree ref) | |
651 | { | |
652 | r->ref = ref; | |
653 | r->base = NULL_TREE; | |
654 | r->offset = 0; | |
655 | r->size = -1; | |
656 | r->max_size = -1; | |
657 | r->ref_alias_set = -1; | |
658 | r->base_alias_set = -1; | |
3787db52 | 659 | r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false; |
3918bd18 | 660 | } |
661 | ||
662 | /* Returns the base object of the memory reference *REF. */ | |
663 | ||
664 | tree | |
665 | ao_ref_base (ao_ref *ref) | |
666 | { | |
292237f3 | 667 | bool reverse; |
668 | ||
3918bd18 | 669 | if (ref->base) |
670 | return ref->base; | |
f3c2a387 | 671 | ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size, |
672 | &ref->max_size, &reverse); | |
3918bd18 | 673 | return ref->base; |
674 | } | |
675 | ||
676 | /* Returns the base object alias set of the memory reference *REF. */ | |
677 | ||
8e92a5ee | 678 | alias_set_type |
3918bd18 | 679 | ao_ref_base_alias_set (ao_ref *ref) |
680 | { | |
182cf5a9 | 681 | tree base_ref; |
3918bd18 | 682 | if (ref->base_alias_set != -1) |
683 | return ref->base_alias_set; | |
182cf5a9 | 684 | if (!ref->ref) |
685 | return 0; | |
686 | base_ref = ref->ref; | |
687 | while (handled_component_p (base_ref)) | |
688 | base_ref = TREE_OPERAND (base_ref, 0); | |
689 | ref->base_alias_set = get_alias_set (base_ref); | |
3918bd18 | 690 | return ref->base_alias_set; |
691 | } | |
692 | ||
693 | /* Returns the reference alias set of the memory reference *REF. */ | |
694 | ||
695 | alias_set_type | |
696 | ao_ref_alias_set (ao_ref *ref) | |
697 | { | |
698 | if (ref->ref_alias_set != -1) | |
699 | return ref->ref_alias_set; | |
700 | ref->ref_alias_set = get_alias_set (ref->ref); | |
701 | return ref->ref_alias_set; | |
702 | } | |
703 | ||
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  /* Look through a defining ADDR_EXPR or a constant POINTER_PLUS_EXPR
     to get a more precise base; the constant offset is accumulated in
     EXTRA_OFFSET (in bits).  */
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  /* Variable offset within the object: base is known but the
	     position is not, so the size becomes unknown too.  */
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      /* Build an artificial *ptr MEM_REF as the base.  */
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  /* Only accept sizes that do not overflow when converted to bits.  */
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
759 | ||
ce7b4f26 | 760 | /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them: |
761 | Return -1 if S1 < S2 | |
762 | Return 1 if S1 > S2 | |
763 | Return 0 if equal or incomparable. */ | |
764 | ||
765 | static int | |
766 | compare_sizes (tree s1, tree s2) | |
767 | { | |
768 | if (!s1 || !s2) | |
769 | return 0; | |
770 | ||
50709af0 | 771 | poly_uint64 size1; |
772 | poly_uint64 size2; | |
ce7b4f26 | 773 | |
774 | if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2)) | |
775 | return 0; | |
776 | if (known_lt (size1, size2)) | |
777 | return -1; | |
778 | if (known_lt (size2, size1)) | |
779 | return 1; | |
780 | return 0; | |
781 | } | |
782 | ||
783 | /* Compare TYPE1 and TYPE2 by its size. | |
784 | Return -1 if size of TYPE1 < size of TYPE2 | |
785 | Return 1 if size of TYPE1 > size of TYPE2 | |
786 | Return 0 if types are of equal sizes or we can not compare them. */ | |
787 | ||
788 | static int | |
789 | compare_type_sizes (tree type1, tree type2) | |
790 | { | |
791 | /* Be conservative for arrays and vectors. We want to support partial | |
792 | overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */ | |
793 | while (TREE_CODE (type1) == ARRAY_TYPE | |
794 | || TREE_CODE (type1) == VECTOR_TYPE) | |
795 | type1 = TREE_TYPE (type1); | |
796 | while (TREE_CODE (type2) == ARRAY_TYPE | |
797 | || TREE_CODE (type2) == VECTOR_TYPE) | |
798 | type2 = TREE_TYPE (type2); | |
799 | return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2)); | |
800 | } | |
801 | ||
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* Handle the most common case first.  */
  if (type1 == type2)
    return 1;

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
846 | ||
df33f9b5 | 847 | /* Return true if TYPE is a composite type (i.e. we may apply one of handled |
848 | components on it). */ | |
849 | ||
850 | static bool | |
851 | type_has_components_p (tree type) | |
852 | { | |
853 | return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type) | |
854 | || TREE_CODE (type) == COMPLEX_TYPE; | |
855 | } | |
856 | ||
/* MATCH1 and MATCH2 which are part of access path of REF1 and REF2
   respectively are either pointing to same address or are completely
   disjoint.  If PARTIAL_OVERLAP is true, assume that outermost arrays may
   just partly overlap.

   Try to disambiguate using the access path starting from the match
   and return false if there is no conflict.

   Helper for aliasing_component_refs_p.  */

static bool
aliasing_matching_component_refs_p (tree match1, tree ref1,
				    poly_int64 offset1, poly_int64 max_size1,
				    tree match2, tree ref2,
				    poly_int64 offset2, poly_int64 max_size2,
				    bool partial_overlap)
{
  poly_int64 offadj, sztmp, msztmp;
  bool reverse;

  if (!partial_overlap)
    {
      /* Rebase OFFSET1 and OFFSET2 to be relative to the matches and do
	 a simple range overlap check; with no partial overlap allowed a
	 disjoint range proves no alias.  */
      get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	{
	  ++alias_stats.aliasing_component_refs_p_no_alias;
	  return false;
	}
    }

  /* Otherwise compare the access paths past the match point; a result of
     -1 (undecided) may still be resolved by the TBAA-based
     nonoverlapping_component_refs_p test.  */
  int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
					       partial_overlap);
  if (cmp == 1
      || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
    {
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }
  ++alias_stats.aliasing_component_refs_p_may_alias;
  return true;
}
901 | ||
2bb8562d | 902 | /* Return true if REF is reference to zero sized trailing array. I.e. |
903 | struct foo {int bar; int array[0];} *fooptr; | |
904 | fooptr->array. */ | |
905 | ||
906 | static bool | |
907 | component_ref_to_zero_sized_trailing_array_p (tree ref) | |
908 | { | |
909 | return (TREE_CODE (ref) == COMPONENT_REF | |
910 | && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE | |
911 | && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1))) | |
912 | || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1))))) | |
913 | && array_at_struct_end_p (ref)); | |
914 | } | |
915 | ||
/* Worker for aliasing_component_refs_p.  Most parameters match parameters of
   aliasing_component_refs_p.

   Walk access path REF2 and try to find type matching TYPE1
   (which is a start of possibly aliasing access path REF1).
   If match is found, try to disambiguate.

   Return 0 for successful disambiguation.
   Return 1 if match was found but disambiguation failed
   Return -1 if there is no match.
   In this case MAYBE_MATCH is set to 0 if there is no type matching TYPE1
   in access path REF2 and -1 if we are not sure.  */

static int
aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
			      poly_int64 offset1, poly_int64 max_size1,
			      tree end_struct_ref1,
			      tree ref2, tree base2,
			      poly_int64 offset2, poly_int64 max_size2,
			      bool *maybe_match)
{
  tree ref = ref2;
  int same_p = 0;

  while (true)
    {
      /* We walk from inner type to the outer types.  If type we see is
	 already too large to be part of type1, terminate the search.
	 (END_STRUCT_REF1, a trailing zero-sized array, may still extend
	 past TYPE1's nominal size, so also compare against its type.)  */
      int cmp = compare_type_sizes (type1, TREE_TYPE (ref));

      if (cmp < 0
	  && (!end_struct_ref1
	      || compare_type_sizes (TREE_TYPE (end_struct_ref1),
				     TREE_TYPE (ref)) < 0))
	break;
      /* If types may be of same size, see if we can decide about their
	 equality.  */
      if (cmp == 0)
	{
	  same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
	  if (same_p == 1)
	    break;
	  /* In case we can't decide whether types are same try to
	     continue looking for the exact match.
	     Remember however that we possibly saw a match
	     to bypass the access path continuations tests we do later.  */
	  if (same_p == -1)
	    *maybe_match = true;
	}
      if (!handled_component_p (ref))
	break;
      ref = TREE_OPERAND (ref, 0);
    }
  if (same_p == 1)
    {
      bool partial_overlap = false;

      /* We assume that arrays can overlap by multiple of their elements
	 size as tested in gcc.dg/torture/alias-2.c.
	 This partial overlap happen only when both arrays are bases of
	 the access and not contained within another component ref.
	 To be safe we also assume partial overlap for VLAs.  */
      if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (base1))
	      || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
	      || ref == base2))
	{
	  /* Setting maybe_match to true triggers
	     nonoverlapping_component_refs_p test later that still may do
	     useful disambiguation.  */
	  *maybe_match = true;
	  partial_overlap = true;
	}
      return aliasing_matching_component_refs_p (base1, ref1,
						 offset1, max_size1,
						 ref, ref2,
						 offset2, max_size2,
						 partial_overlap);
    }
  return -1;
}
997 | ||
/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2)
{
  /* If one reference is a component references through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  bool maybe_match = false;
  tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    {
      /* Generally access paths are monotonous in the size of object.  The
	 exception are trailing arrays of structures.  I.e.
	   struct a {int array[0];};
	 or
	   struct a {int array1[0]; int array[];};
	 Such struct has size 0 but accesses to a.array may have non-zero size.
	 In this case the size of TREE_TYPE (base1) is smaller than
	 size of TREE_TYPE (TREE_OPERAND (base1, 0)).

	 Because we compare sizes of arrays just by sizes of their elements,
	 we only need to care about zero sized array fields here.  */
      if (component_ref_to_zero_sized_trailing_array_p (base1))
	{
	  gcc_checking_assert (!end_struct_ref1);
	  end_struct_ref1 = base1;
	}
      if (TREE_CODE (base1) == VIEW_CONVERT_EXPR
	  || TREE_CODE (base1) == BIT_FIELD_REF)
	ref1 = TREE_OPERAND (base1, 0);
      base1 = TREE_OPERAND (base1, 0);
    }
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    {
      if (component_ref_to_zero_sized_trailing_array_p (base2))
	{
	  gcc_checking_assert (!end_struct_ref2);
	  end_struct_ref2 = base2;
	}
      if (TREE_CODE (base2) == VIEW_CONVERT_EXPR
	  || TREE_CODE (base2) == BIT_FIELD_REF)
	ref2 = TREE_OPERAND (base2, 0);
      base2 = TREE_OPERAND (base2, 0);
    }
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.
     This however only makes sense if type2 is big enough to hold type1.  */
  int cmp_outer = compare_type_sizes (type2, type1);

  /* If type2 is big enough to contain type1 walk its access path.
     We also need to care of arrays at the end of structs that may extend
     beyond the end of structure.  */
  if (cmp_outer >= 0
      || (end_struct_ref2
	  && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
    {
      int res = aliasing_component_refs_walk (ref1, type1, base1,
					      offset1, max_size1,
					      end_struct_ref1,
					      ref2, base2, offset2, max_size2,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* If we didn't find a common base, try the other way around.  */
  if (cmp_outer <= 0
      || (end_struct_ref1
	  && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
    {
      int res = aliasing_component_refs_walk (ref2, type2, base2,
					      offset2, max_size2,
					      end_struct_ref2,
					      ref1, base1, offset1, max_size1,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* In the following code we make an assumption that the types in access
     paths do not overlap and thus accesses alias only if one path can be
     continuation of another.  If we were not able to decide about equivalence,
     we need to give up.  */
  if (maybe_match)
    {
      if (!nonoverlapping_component_refs_p (ref1, ref2))
	{
	  ++alias_stats.aliasing_component_refs_p_may_alias;
	  return true;
	}
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (compare_type_sizes (TREE_TYPE (ref2), type1) >= 0
      && (!end_struct_ref1
	  || compare_type_sizes (TREE_TYPE (ref2),
				 TREE_TYPE (end_struct_ref1)) >= 0)
      && type_has_components_p (TREE_TYPE (ref2))
      && (base1_alias_set == ref2_alias_set
	  || alias_set_subset_of (base1_alias_set, ref2_alias_set)))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (compare_type_sizes (TREE_TYPE (ref1), type2) >= 0
      && (!end_struct_ref2
	  || compare_type_sizes (TREE_TYPE (ref1),
				 TREE_TYPE (end_struct_ref2)) >= 0)
      && type_has_components_p (TREE_TYPE (ref1))
      && (base2_alias_set == ref1_alias_set
	  || alias_set_subset_of (base2_alias_set, ref1_alias_set)))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  ++alias_stats.aliasing_component_refs_p_no_alias;
  return false;
}
1148 | ||
/* FIELD1 and FIELD2 are two fields of component refs.  We assume
   that bases of both component refs are either equivalent or nonoverlapping.
   We do not assume that the containers of FIELD1 and FIELD2 are of the
   same type or size.

   Return 0 in case the base address of component_refs are same then
   FIELD1 and FIELD2 have same address.  Note that FIELD1 and FIELD2
   may not be of same type or size.

   Return 1 if FIELD1 and FIELD2 are non-overlapping.

   Return -1 otherwise.

   Main difference between 0 and -1 is to let
   nonoverlapping_component_refs_since_match_p discover the semantically
   equivalent part of the access path.

   Note that this function is used even with -fno-strict-aliasing
   and makes use of no TBAA assumptions.  */

static int
nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
{
  /* If both fields are of the same type, we could save hard work of
     comparing offsets.  */
  tree type1 = DECL_CONTEXT (field1);
  tree type2 = DECL_CONTEXT (field2);

  /* Bitfield offsets are tracked on the representative field, so compare
     those instead of the bitfields themselves.  */
  if (TREE_CODE (type1) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field1))
    field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
  if (TREE_CODE (type2) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field2))
    field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);

  /* ??? Bitfields can overlap at RTL level so punt on them.
     FIXME: RTL expansion should be fixed by adjusting the access path
     when producing MEM_ATTRs for MEMs which are wider than
     the bitfields similarly as done in set_mem_attrs_minus_bitpos.  */
  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
    return -1;

  /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE.  */
  if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
    return field1 != field2;

  /* In common case the offsets and bit offsets will be the same.
     However if frontends do not agree on the alignment, they may be
     different even if they actually represent same address.
     Try the common case first and if that fails calculate the
     actual bit offset.  */
  if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
			  DECL_FIELD_OFFSET (field2))
      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
			     DECL_FIELD_BIT_OFFSET (field2)))
    return 0;

  /* Note that it may be possible to use component_ref_field_offset
     which would provide offsets as trees.  However constructing and folding
     trees is expensive and does not seem to be worth the compile time
     cost.  */

  poly_uint64 offset1, offset2;
  poly_uint64 bit_offset1, bit_offset2;

  if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
      && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
    {
      /* Combine byte and bit offsets into a single bit offset.  */
      offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
      offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;

      if (known_eq (offset1, offset2))
	return 0;

      poly_uint64 size1, size2;

      if (poly_int_tree_p (DECL_SIZE (field1), &size1)
	  && poly_int_tree_p (DECL_SIZE (field2), &size2)
	  && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
	return 1;
    }
  /* Resort to slower overlap checking by looking for matching types in
     the middle of access path.  */
  return -1;
}
1236 | ||
3bcb81b2 | 1237 | /* Return low bound of array. Do not produce new trees |
1238 | and thus do not care about particular type of integer constant | |
1239 | and placeholder exprs. */ | |
1240 | ||
1241 | static tree | |
1242 | cheap_array_ref_low_bound (tree ref) | |
1243 | { | |
1244 | tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0))); | |
1245 | ||
1246 | /* Avoid expensive array_ref_low_bound. | |
1247 | low bound is either stored in operand2, or it is TYPE_MIN_VALUE of domain | |
1248 | type or it is zero. */ | |
1249 | if (TREE_OPERAND (ref, 2)) | |
1250 | return TREE_OPERAND (ref, 2); | |
1251 | else if (domain_type && TYPE_MIN_VALUE (domain_type)) | |
1252 | return TYPE_MIN_VALUE (domain_type); | |
1253 | else | |
1254 | return integer_zero_node; | |
1255 | } | |
1256 | ||
1257 | /* REF1 and REF2 are ARRAY_REFs with either same base address or which are | |
1258 | completely disjoint. | |
1259 | ||
1260 | Return 1 if the refs are non-overlapping. | |
1261 | Return 0 if they are possibly overlapping but if so the overlap again | |
1262 | starts on the same address. | |
1263 | Return -1 otherwise. */ | |
1264 | ||
1265 | int | |
1266 | nonoverlapping_array_refs_p (tree ref1, tree ref2) | |
1267 | { | |
1268 | tree index1 = TREE_OPERAND (ref1, 1); | |
1269 | tree index2 = TREE_OPERAND (ref2, 1); | |
1270 | tree low_bound1 = cheap_array_ref_low_bound(ref1); | |
1271 | tree low_bound2 = cheap_array_ref_low_bound(ref2); | |
1272 | ||
1273 | /* Handle zero offsets first: we do not need to match type size in this | |
1274 | case. */ | |
1275 | if (operand_equal_p (index1, low_bound1, 0) | |
1276 | && operand_equal_p (index2, low_bound2, 0)) | |
1277 | return 0; | |
1278 | ||
1279 | /* If type sizes are different, give up. | |
1280 | ||
1281 | Avoid expensive array_ref_element_size. | |
1282 | If operand 3 is present it denotes size in the alignmnet units. | |
1283 | Otherwise size is TYPE_SIZE of the element type. | |
1284 | Handle only common cases where types are of the same "kind". */ | |
1285 | if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL)) | |
1286 | return -1; | |
1287 | ||
1288 | tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0))); | |
1289 | tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0))); | |
1290 | ||
1291 | if (TREE_OPERAND (ref1, 3)) | |
1292 | { | |
1293 | if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2) | |
1294 | || !operand_equal_p (TREE_OPERAND (ref1, 3), | |
1295 | TREE_OPERAND (ref2, 3), 0)) | |
1296 | return -1; | |
1297 | } | |
1298 | else | |
1299 | { | |
1300 | if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1), | |
1301 | TYPE_SIZE_UNIT (elmt_type2), 0)) | |
1302 | return -1; | |
1303 | } | |
1304 | ||
1305 | /* Since we know that type sizes are the same, there is no need to return | |
1306 | -1 after this point. Partial overlap can not be introduced. */ | |
1307 | ||
1308 | /* We may need to fold trees in this case. | |
1309 | TODO: Handle integer constant case at least. */ | |
1310 | if (!operand_equal_p (low_bound1, low_bound2, 0)) | |
1311 | return 0; | |
1312 | ||
1313 | if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST) | |
1314 | { | |
1315 | if (tree_int_cst_equal (index1, index2)) | |
1316 | return 0; | |
1317 | return 1; | |
1318 | } | |
1319 | /* TODO: We can use VRP to further disambiguate here. */ | |
1320 | return 0; | |
1321 | } | |
1322 | ||
/* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
   MATCH2 either point to the same address or are disjoint.
   MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
   respectively or NULL in the case we established equivalence of bases.
   If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
   overlap by exact multiple of their element size.

   This test works by matching the initial segment of the access path
   and does not rely on TBAA thus is safe for !flag_strict_aliasing if
   match was determined without use of TBAA oracle.

   Return 1 if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.

   Return 0 if paths are same and thus there is nothing to disambiguate more
   (i.e. there is must alias assuming there is must alias between MATCH1 and
   MATCH2)

   Return -1 if we can not determine 0 or 1 - this happens when non-matching
   types were met in the path.
   In this case it may make sense to continue by other disambiguation
   oracles.  */

static int
nonoverlapping_refs_since_match_p (tree match1, tree ref1,
				   tree match2, tree ref2,
				   bool partial_overlap)
{
  /* Early return if there are no references to match, we do not need
     to walk the access paths.

     Do not consider this as may-alias for stats - it is more useful
     to have information how many disambiguations happened provided that
     the query was meaningful.  */

  if (match1 == ref1 || !handled_component_p (ref1)
      || match2 == ref2 || !handled_component_p (ref2))
    return -1;

  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1) && ref1 != match1)
    {
      /* A VIEW_CONVERT_EXPR or BIT_FIELD_REF reinterprets the base, so
	 the components collected so far do not describe the address;
	 discard them.  */
      if (TREE_CODE (ref1) == VIEW_CONVERT_EXPR
	  || TREE_CODE (ref1) == BIT_FIELD_REF)
	component_refs1.truncate (0);
      else
	component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2) && ref2 != match2)
    {
      if (TREE_CODE (ref2) == VIEW_CONVERT_EXPR
	  || TREE_CODE (ref2) == BIT_FIELD_REF)
	component_refs2.truncate (0);
      else
	component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }

  bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
  bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;

  /* If only one of access path starts with MEM_REF check that offset is 0
     so the addresses stays the same after stripping it.
     TODO: In this case we may walk the other access path until we get same
     offset.

     If both starts with MEM_REF, offset has to be same.  */
  if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
      || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
      || (mem_ref1 && mem_ref2
	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
				  TREE_OPERAND (ref2, 1))))
    {
      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
      return -1;
    }

  /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
     to handle them here at all.  */
  gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
		       && TREE_CODE (ref2) != TARGET_MEM_REF);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      /* Track whether we have seen an unmatched ref with non-zero offset.
	 In this case we must look for partial overlaps.  */
      bool seen_unmatched_ref_p = false;

      /* First match ARRAY_REFs and try to disambiguate.  */
      if (!component_refs1.is_empty ()
	  && !component_refs2.is_empty ())
	{
	  unsigned int narray_refs1=0, narray_refs2=0;

	  /* We generally assume that both access paths starts by same sequence
	     of refs.  However if number of array refs is not in sync, try
	     to recover and pop elts until number match.  This helps the case
	     where one access path starts by array and other by element.  */
	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
	       narray_refs1++)
	    if (TREE_CODE (component_refs1 [component_refs1.length()
					    - 1 - narray_refs1]) != ARRAY_REF)
	      break;

	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
	       narray_refs2++)
	    if (TREE_CODE (component_refs2 [component_refs2.length()
					    - 1 - narray_refs2]) != ARRAY_REF)
	      break;
	  for (; narray_refs1 > narray_refs2; narray_refs1--)
	    {
	      ref1 = component_refs1.pop ();
	      /* Track whether we possibly introduced partial overlap assuming
		 that innermost type sizes does not match.  This only can
		 happen if the offset introduced by the ARRAY_REF
		 is non-zero.  */
	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
				    cheap_array_ref_low_bound (ref1), 0))
		seen_unmatched_ref_p = true;
	    }
	  for (; narray_refs2 > narray_refs1; narray_refs2--)
	    {
	      ref2 = component_refs2.pop ();
	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
				    cheap_array_ref_low_bound (ref2), 0))
		seen_unmatched_ref_p = true;
	    }
	  /* Try to disambiguate matched arrays.  */
	  for (unsigned int i = 0; i < narray_refs1; i++)
	    {
	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
						     component_refs2.pop ());
	      if (cmp == 1 && !partial_overlap)
		{
		  ++alias_stats
		    .nonoverlapping_refs_since_match_p_no_alias;
		  return 1;
		}
	      /* PARTIAL_OVERLAP only applies to the outermost pair of
		 arrays; clear it once the first pair was examined.  */
	      partial_overlap = false;
	      if (cmp == -1)
		seen_unmatched_ref_p = true;
	    }
	}

      /* Next look for component_refs.  */
      do
	{
	  if (component_refs1.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_refs_since_match_p_must_overlap;
	      return 0;
	    }
	  ref1 = component_refs1.pop ();
	  if (TREE_CODE (ref1) != COMPONENT_REF)
	    seen_unmatched_ref_p = true;
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_refs_since_match_p_must_overlap;
	      return 0;
	    }
	  ref2 = component_refs2.pop ();
	  if (TREE_CODE (ref2) != COMPONENT_REF)
	    seen_unmatched_ref_p = true;
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
	 earlier.  */
      gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
			   && TREE_CODE (ref2) == COMPONENT_REF);

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      partial_overlap = false;

      /* If we skipped array refs on type of different sizes, we can
	 no longer be sure that there are not partial overlaps.  */
      if (seen_unmatched_ref_p
	  && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_may_alias;
	  return -1;
	}

      int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
      if (cmp == -1)
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_may_alias;
	  return -1;
	}
      else if (cmp == 1)
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_no_alias;
	  return 1;
	}
    }

  ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap;
  return 0;
}
1552 | ||
0c8f993c | 1553 | /* Return TYPE_UID which can be used to match record types we consider |
1554 | same for TBAA purposes. */ | |
1555 | ||
1556 | static inline int | |
1557 | ncr_type_uid (const_tree field) | |
1558 | { | |
1559 | /* ??? We cannot simply use the type of operand #0 of the refs here | |
1560 | as the Fortran compiler smuggles type punning into COMPONENT_REFs | |
1561 | for common blocks instead of using unions like everyone else. */ | |
1562 | tree type = DECL_FIELD_CONTEXT (field); | |
1563 | /* With LTO types considered same_type_for_tbaa_p | |
1564 | from different translation unit may not have same | |
1565 | main variant. They however have same TYPE_CANONICAL. */ | |
1566 | if (TYPE_CANONICAL (type)) | |
1567 | return TYPE_UID (TYPE_CANONICAL (type)); | |
1568 | return TYPE_UID (type); | |
1569 | } | |
1570 | ||
f270d2a0 | 1571 | /* qsort compare function to sort FIELD_DECLs after their |
1572 | DECL_FIELD_CONTEXT TYPE_UID. */ | |
1573 | ||
1574 | static inline int | |
1575 | ncr_compar (const void *field1_, const void *field2_) | |
1576 | { | |
1577 | const_tree field1 = *(const_tree *) const_cast <void *>(field1_); | |
1578 | const_tree field2 = *(const_tree *) const_cast <void *>(field2_); | |
0c8f993c | 1579 | unsigned int uid1 = ncr_type_uid (field1); |
1580 | unsigned int uid2 = ncr_type_uid (field2); | |
1581 | ||
f270d2a0 | 1582 | if (uid1 < uid2) |
1583 | return -1; | |
1584 | else if (uid1 > uid2) | |
1585 | return 1; | |
1586 | return 0; | |
1587 | } | |
1588 | ||
1589 | /* Return true if we can determine that the fields referenced cannot | |
dc2ef903 | 1590 | overlap for any pair of objects. This relies on TBAA. */ |
f270d2a0 | 1591 | |
1592 | static bool | |
1593 | nonoverlapping_component_refs_p (const_tree x, const_tree y) | |
1594 | { | |
f9d6698b | 1595 | /* Early return if we have nothing to do. |
1596 | ||
1597 | Do not consider this as may-alias for stats - it is more useful | |
1598 | to have information how many disambiguations happened provided that | |
1599 | the query was meaningful. */ | |
f270d2a0 | 1600 | if (!flag_strict_aliasing |
1601 | || !x || !y | |
bac90ef8 | 1602 | || !handled_component_p (x) |
1603 | || !handled_component_p (y)) | |
f9d6698b | 1604 | return false; |
f270d2a0 | 1605 | |
1606 | auto_vec<const_tree, 16> fieldsx; | |
bac90ef8 | 1607 | while (handled_component_p (x)) |
f270d2a0 | 1608 | { |
bac90ef8 | 1609 | if (TREE_CODE (x) == COMPONENT_REF) |
1610 | { | |
1611 | tree field = TREE_OPERAND (x, 1); | |
1612 | tree type = DECL_FIELD_CONTEXT (field); | |
1613 | if (TREE_CODE (type) == RECORD_TYPE) | |
1614 | fieldsx.safe_push (field); | |
1615 | } | |
bab20733 | 1616 | else if (TREE_CODE (x) == VIEW_CONVERT_EXPR |
1617 | || TREE_CODE (x) == BIT_FIELD_REF) | |
bac90ef8 | 1618 | fieldsx.truncate (0); |
f270d2a0 | 1619 | x = TREE_OPERAND (x, 0); |
1620 | } | |
1621 | if (fieldsx.length () == 0) | |
1622 | return false; | |
1623 | auto_vec<const_tree, 16> fieldsy; | |
bac90ef8 | 1624 | while (handled_component_p (y)) |
f270d2a0 | 1625 | { |
bac90ef8 | 1626 | if (TREE_CODE (y) == COMPONENT_REF) |
1627 | { | |
1628 | tree field = TREE_OPERAND (y, 1); | |
1629 | tree type = DECL_FIELD_CONTEXT (field); | |
1630 | if (TREE_CODE (type) == RECORD_TYPE) | |
1631 | fieldsy.safe_push (TREE_OPERAND (y, 1)); | |
1632 | } | |
bab20733 | 1633 | else if (TREE_CODE (y) == VIEW_CONVERT_EXPR |
1634 | || TREE_CODE (y) == BIT_FIELD_REF) | |
90e13ff5 | 1635 | fieldsy.truncate (0); |
f270d2a0 | 1636 | y = TREE_OPERAND (y, 0); |
1637 | } | |
1638 | if (fieldsy.length () == 0) | |
bac90ef8 | 1639 | { |
1640 | ++alias_stats.nonoverlapping_component_refs_p_may_alias; | |
1641 | return false; | |
1642 | } | |
f270d2a0 | 1643 | |
1644 | /* Most common case first. */ | |
1645 | if (fieldsx.length () == 1 | |
1646 | && fieldsy.length () == 1) | |
bac90ef8 | 1647 | { |
0c8f993c | 1648 | if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]), |
1649 | DECL_FIELD_CONTEXT (fieldsy[0])) == 1 | |
1650 | && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1) | |
bac90ef8 | 1651 | { |
1652 | ++alias_stats.nonoverlapping_component_refs_p_no_alias; | |
1653 | return true; | |
1654 | } | |
1655 | else | |
1656 | { | |
1657 | ++alias_stats.nonoverlapping_component_refs_p_may_alias; | |
1658 | return false; | |
1659 | } | |
1660 | } | |
f270d2a0 | 1661 | |
1662 | if (fieldsx.length () == 2) | |
1663 | { | |
1664 | if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1) | |
a4f59596 | 1665 | std::swap (fieldsx[0], fieldsx[1]); |
f270d2a0 | 1666 | } |
1667 | else | |
1668 | fieldsx.qsort (ncr_compar); | |
1669 | ||
1670 | if (fieldsy.length () == 2) | |
1671 | { | |
1672 | if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1) | |
a4f59596 | 1673 | std::swap (fieldsy[0], fieldsy[1]); |
f270d2a0 | 1674 | } |
1675 | else | |
1676 | fieldsy.qsort (ncr_compar); | |
1677 | ||
1678 | unsigned i = 0, j = 0; | |
1679 | do | |
1680 | { | |
1681 | const_tree fieldx = fieldsx[i]; | |
1682 | const_tree fieldy = fieldsy[j]; | |
0c8f993c | 1683 | |
1684 | /* We're left with accessing different fields of a structure, | |
1685 | no possible overlap. */ | |
1686 | if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx), | |
1687 | DECL_FIELD_CONTEXT (fieldy)) == 1 | |
1688 | && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1) | |
f270d2a0 | 1689 | { |
0c8f993c | 1690 | ++alias_stats.nonoverlapping_component_refs_p_no_alias; |
1691 | return true; | |
f270d2a0 | 1692 | } |
0c8f993c | 1693 | |
1694 | if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy)) | |
f270d2a0 | 1695 | { |
1696 | i++; | |
1697 | if (i == fieldsx.length ()) | |
1698 | break; | |
1699 | } | |
1700 | else | |
1701 | { | |
1702 | j++; | |
1703 | if (j == fieldsy.length ()) | |
1704 | break; | |
1705 | } | |
1706 | } | |
1707 | while (1); | |
1708 | ||
bac90ef8 | 1709 | ++alias_stats.nonoverlapping_component_refs_p_may_alias; |
f270d2a0 | 1710 | return false; |
1711 | } | |
1712 | ||
1713 | ||
/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.
   SIZE1 and SIZE2 are the actual access sizes; an access whose size
   equals its maximum size has an exactly-known extent.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       poly_int64 offset1, poly_int64 max_size1,
		       poly_int64 size1,
		       tree ref2, tree base2,
		       poly_int64 offset2, poly_int64 max_size2,
		       poly_int64 size2)
{
  /* Both bases must be declarations; pointer-based accesses are handled
     by the indirect_* disambiguators below.  */
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (compare_base_decls (base1, base2) == 0)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* If there is must alias, there is no use disambiguating further.
     (Exact extents that passed the overlap test above do overlap.)  */
  if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
    return true;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
    return false;

  return true;
}
1751 | ||
/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.
   SIZE1 and SIZE2 are the actual access sizes; a size equal to the
   corresponding maximum size marks an exactly-known extent.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       poly_int64 offset1, poly_int64 max_size1,
			       poly_int64 size1,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       poly_int64 offset2, poly_int64 max_size2,
			       poly_int64 size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;

  /* BASE1 is a (TARGET_)MEM_REF, BASE2 a plain declaration.  */
  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);
  /* The constant pointer offset of the MEM_REF, in bits.  */
  poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0 || base2_alias_set == 0)
    return true;

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* The declared pointed-to type of the access through PTR1.  */
  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (/* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && compare_sizes (DECL_SIZE (base2),
			TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  poly_int64 doffset1 = offset1;
  poly_offset_int doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      /* Make the offset relative to the start of the accessed type by
	 subtracting the constant pointer offset (in bits).  */
      doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
      tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
      /* If second reference is view-converted, give up now.  */
      if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
	return true;
    }

  /* If first reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.

     But avoid treating variable length arrays as "objects", instead assume they
     can overlap by an exact multiple of their element size.
     See gcc.dg/torture/alias-2.c.  */
  if (((TREE_CODE (base1) != TARGET_MEM_REF
	|| (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
       && (TREE_CODE (dbase2) != TARGET_MEM_REF
	   || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    {
      /* A variable-length array type (no constant TYPE_SIZE) may
	 partially overlap itself at a multiple of the element size.  */
      bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
			      && (TYPE_SIZE (TREE_TYPE (base1))
			      && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
				 != INTEGER_CST));
      if (!partial_overlap
	  && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
	return false;
      if (!ref1 || !ref2
	  /* If there is must alias, there is no use disambiguating further.  */
	  || (!partial_overlap
	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
	return true;
      /* nonoverlapping_refs_since_match_p returns 1 for no overlap,
	 0 for must overlap and -1 for "don't know".  */
      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
						   partial_overlap);
      if (res == -1)
	return !nonoverlapping_component_refs_p (ref1, ref2);
      return !res;
    }

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2);

  return true;
}
1904 | ||
/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.
   SIZE1 and SIZE2 are the actual access sizes; a size equal to the
   corresponding maximum size marks an exactly-known extent.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   poly_int64 offset1, poly_int64 max_size1,
			   poly_int64 size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   poly_int64 offset2, poly_int64 max_size2,
			   poly_int64 size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  /* Both bases must be (TARGET_)MEM_REFs here.  */
  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.
     This fast path applies when both accesses use the same pointer SSA
     name and any TARGET_MEM_REF step/index operands match, so only the
     constant offsets can differ.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      /* Constant pointer offsets of the MEM_REFs, in bits.  */
      poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
      poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
      if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
				   offset2 + moff2, max_size2))
	return false;
      /* If there is must alias, there is no use disambiguating further.  */
      if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
	return true;
      if (ref1 && ref2)
	{
	  int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
						       false);
	  /* res is 1 for no overlap, 0 for overlap; -1 means unknown,
	     in which case fall through to the generic checks below.  */
	  if (res != -1)
	    return !res;
	}
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  /* The declared pointed-to types of the two accesses.  */
  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0
      || base2_alias_set == 0)
    return true;

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1)
    {
      /* But avoid treating arrays as "objects", instead assume they
	 can overlap by an exact multiple of their element size.
	 See gcc.dg/torture/alias-2.c.  */
      bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;

      if (!partial_overlap
	  && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	return false;
      if (!ref1 || !ref2
	  || (!partial_overlap
	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
	return true;
      /* nonoverlapping_refs_since_match_p returns 1 for no overlap,
	 0 for must overlap and -1 for "don't know".  */
      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
						   partial_overlap);
      if (res == -1)
	return !nonoverlapping_component_refs_p (ref1, ref2);
      return !res;
    }

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2);

  return true;
}
2042 | ||
/* Return true, if the two memory references REF1 and REF2 may alias.
   This is the central dispatcher: it decomposes both references into
   base + offset + size, handles degenerate bases, then forwards to
   the decl-vs-decl, pointer-vs-decl or pointer-vs-pointer
   disambiguators.  TBAA_P enables type-based disambiguation.  */

static bool
refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  poly_int64 offset1 = 0, offset2 = 0;
  poly_int64 max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  /* Sanity-check the shape of the reference trees we accept.  */
  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
	*D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before defering to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref1->size,
				  ref2->ref, base2, offset2, max_size2,
				  ref2->size);

  /* Handle restrict based accesses.
     ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1->ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2->ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
      && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique... */
      && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case so the decl is always
     the first operand of the dispatch below.  */
  if (ind1_p && var2_p)
    {
      std::swap (offset1, offset2);
      std::swap (max_size1, max_size2);
      std::swap (base1, base2);
      std::swap (ref1, ref2);
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* If the reference is based on a pointer that points to memory
     that may not be written to then the other reference cannot possibly
     clobber it.
     After the canonicalization above base2 is always a (TARGET_)MEM_REF
     here, so its operand 0 is a pointer; base1's is only checked when
     ind1_p.  */
  if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
       && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
      || (ind1_p
	  && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
	  && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2, ref2->size,
					  ao_ref_alias_set (ref2),
					  ao_ref_base_alias_set (ref2),
					  ref1->ref, base1,
					  offset1, max_size1, ref1->size,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1, ref1->size,
				      ao_ref_alias_set (ref1),
				      ao_ref_base_alias_set (ref1),
				      ref2->ref, base2,
				      offset2, max_size2, ref2->size,
				      ao_ref_alias_set (ref2),
				      ao_ref_base_alias_set (ref2),
				      tbaa_p);

  gcc_unreachable ();
}
2204 | ||
3bc5893a | 2205 | /* Return true, if the two memory references REF1 and REF2 may alias |
2206 | and update statistics. */ | |
2207 | ||
2208 | bool | |
2209 | refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p) | |
2210 | { | |
2211 | bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p); | |
2212 | if (res) | |
2213 | ++alias_stats.refs_may_alias_p_may_alias; | |
2214 | else | |
2215 | ++alias_stats.refs_may_alias_p_no_alias; | |
2216 | return res; | |
2217 | } | |
2218 | ||
258bd648 | 2219 | static bool |
58cfef6b | 2220 | refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p) |
258bd648 | 2221 | { |
2222 | ao_ref r1; | |
2223 | ao_ref_init (&r1, ref1); | |
58cfef6b | 2224 | return refs_may_alias_p_1 (&r1, ref2, tbaa_p); |
258bd648 | 2225 | } |
2226 | ||
dd277d48 | 2227 | bool |
58cfef6b | 2228 | refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p) |
dd277d48 | 2229 | { |
3918bd18 | 2230 | ao_ref r1, r2; |
3918bd18 | 2231 | ao_ref_init (&r1, ref1); |
2232 | ao_ref_init (&r2, ref2); | |
3bc5893a | 2233 | return refs_may_alias_p_1 (&r1, &r2, tbaa_p); |
dd277d48 | 2234 | } |
2235 | ||
2a3ebafa | 2236 | /* Returns true if there is a anti-dependence for the STORE that |
2237 | executes after the LOAD. */ | |
2238 | ||
2239 | bool | |
2240 | refs_anti_dependent_p (tree load, tree store) | |
2241 | { | |
3918bd18 | 2242 | ao_ref r1, r2; |
2243 | ao_ref_init (&r1, load); | |
2244 | ao_ref_init (&r2, store); | |
2245 | return refs_may_alias_p_1 (&r1, &r2, false); | |
2a3ebafa | 2246 | } |
2247 | ||
2248 | /* Returns true if there is a output dependence for the stores | |
2249 | STORE1 and STORE2. */ | |
2250 | ||
2251 | bool | |
2252 | refs_output_dependent_p (tree store1, tree store2) | |
2253 | { | |
3918bd18 | 2254 | ao_ref r1, r2; |
2255 | ao_ref_init (&r1, store1); | |
2256 | ao_ref_init (&r2, store2); | |
2257 | return refs_may_alias_p_1 (&r1, &r2, false); | |
2a3ebafa | 2258 | } |
dd277d48 | 2259 | |
2260 | /* If the call CALL may use the memory reference REF return true, | |
2261 | otherwise return false. */ | |
2262 | ||
2263 | static bool | |
58cfef6b | 2264 | ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p) |
dd277d48 | 2265 | { |
047fdd47 | 2266 | tree base, callee; |
dd277d48 | 2267 | unsigned i; |
2268 | int flags = gimple_call_flags (call); | |
2269 | ||
2270 | /* Const functions without a static chain do not implicitly use memory. */ | |
2271 | if (!gimple_call_chain (call) | |
2272 | && (flags & (ECF_CONST|ECF_NOVOPS))) | |
2273 | goto process_args; | |
2274 | ||
134899ac | 2275 | base = ao_ref_base (ref); |
917f08fa | 2276 | if (!base) |
dd277d48 | 2277 | return true; |
2278 | ||
3787db52 | 2279 | /* A call that is not without side-effects might involve volatile |
2280 | accesses and thus conflicts with all other volatile accesses. */ | |
2281 | if (ref->volatile_p) | |
2282 | return true; | |
2283 | ||
09347e88 | 2284 | /* If the reference is based on a decl that is not aliased the call |
2285 | cannot possibly use it. */ | |
2286 | if (DECL_P (base) | |
2287 | && !may_be_aliased (base) | |
7f7f16d4 | 2288 | /* But local statics can be used through recursion. */ |
2289 | && !is_global_var (base)) | |
09347e88 | 2290 | goto process_args; |
2291 | ||
047fdd47 | 2292 | callee = gimple_call_fndecl (call); |
2293 | ||
2294 | /* Handle those builtin functions explicitly that do not act as | |
2295 | escape points. See tree-ssa-structalias.c:find_func_aliases | |
2296 | for the list of builtins we might need to handle here. */ | |
2297 | if (callee != NULL_TREE | |
6189000c | 2298 | && gimple_call_builtin_p (call, BUILT_IN_NORMAL)) |
047fdd47 | 2299 | switch (DECL_FUNCTION_CODE (callee)) |
2300 | { | |
77efe819 | 2301 | /* All the following functions read memory pointed to by |
2302 | their second argument. strcat/strncat additionally | |
2303 | reads memory pointed to by the first argument. */ | |
2304 | case BUILT_IN_STRCAT: | |
2305 | case BUILT_IN_STRNCAT: | |
2306 | { | |
2307 | ao_ref dref; | |
2308 | ao_ref_init_from_ptr_and_size (&dref, | |
2309 | gimple_call_arg (call, 0), | |
2310 | NULL_TREE); | |
2311 | if (refs_may_alias_p_1 (&dref, ref, false)) | |
2312 | return true; | |
2313 | } | |
2314 | /* FALLTHRU */ | |
047fdd47 | 2315 | case BUILT_IN_STRCPY: |
2316 | case BUILT_IN_STRNCPY: | |
047fdd47 | 2317 | case BUILT_IN_MEMCPY: |
2318 | case BUILT_IN_MEMMOVE: | |
2319 | case BUILT_IN_MEMPCPY: | |
2320 | case BUILT_IN_STPCPY: | |
2321 | case BUILT_IN_STPNCPY: | |
4c0315d0 | 2322 | case BUILT_IN_TM_MEMCPY: |
2323 | case BUILT_IN_TM_MEMMOVE: | |
047fdd47 | 2324 | { |
134899ac | 2325 | ao_ref dref; |
2326 | tree size = NULL_TREE; | |
2327 | if (gimple_call_num_args (call) == 3) | |
2328 | size = gimple_call_arg (call, 2); | |
2329 | ao_ref_init_from_ptr_and_size (&dref, | |
2330 | gimple_call_arg (call, 1), | |
2331 | size); | |
2332 | return refs_may_alias_p_1 (&dref, ref, false); | |
047fdd47 | 2333 | } |
77efe819 | 2334 | case BUILT_IN_STRCAT_CHK: |
2335 | case BUILT_IN_STRNCAT_CHK: | |
2336 | { | |
2337 | ao_ref dref; | |
2338 | ao_ref_init_from_ptr_and_size (&dref, | |
2339 | gimple_call_arg (call, 0), | |
2340 | NULL_TREE); | |
2341 | if (refs_may_alias_p_1 (&dref, ref, false)) | |
2342 | return true; | |
2343 | } | |
2344 | /* FALLTHRU */ | |
939514e9 | 2345 | case BUILT_IN_STRCPY_CHK: |
2346 | case BUILT_IN_STRNCPY_CHK: | |
2347 | case BUILT_IN_MEMCPY_CHK: | |
2348 | case BUILT_IN_MEMMOVE_CHK: | |
2349 | case BUILT_IN_MEMPCPY_CHK: | |
2350 | case BUILT_IN_STPCPY_CHK: | |
1063acde | 2351 | case BUILT_IN_STPNCPY_CHK: |
939514e9 | 2352 | { |
2353 | ao_ref dref; | |
2354 | tree size = NULL_TREE; | |
2355 | if (gimple_call_num_args (call) == 4) | |
2356 | size = gimple_call_arg (call, 2); | |
2357 | ao_ref_init_from_ptr_and_size (&dref, | |
2358 | gimple_call_arg (call, 1), | |
2359 | size); | |
2360 | return refs_may_alias_p_1 (&dref, ref, false); | |
2361 | } | |
119147d1 | 2362 | case BUILT_IN_BCOPY: |
2363 | { | |
2364 | ao_ref dref; | |
2365 | tree size = gimple_call_arg (call, 2); | |
2366 | ao_ref_init_from_ptr_and_size (&dref, | |
2367 | gimple_call_arg (call, 0), | |
2368 | size); | |
2369 | return refs_may_alias_p_1 (&dref, ref, false); | |
2370 | } | |
4c0315d0 | 2371 | |
2372 | /* The following functions read memory pointed to by their | |
2373 | first argument. */ | |
2374 | CASE_BUILT_IN_TM_LOAD (1): | |
2375 | CASE_BUILT_IN_TM_LOAD (2): | |
2376 | CASE_BUILT_IN_TM_LOAD (4): | |
2377 | CASE_BUILT_IN_TM_LOAD (8): | |
2378 | CASE_BUILT_IN_TM_LOAD (FLOAT): | |
2379 | CASE_BUILT_IN_TM_LOAD (DOUBLE): | |
2380 | CASE_BUILT_IN_TM_LOAD (LDOUBLE): | |
2381 | CASE_BUILT_IN_TM_LOAD (M64): | |
2382 | CASE_BUILT_IN_TM_LOAD (M128): | |
2383 | CASE_BUILT_IN_TM_LOAD (M256): | |
2384 | case BUILT_IN_TM_LOG: | |
2385 | case BUILT_IN_TM_LOG_1: | |
2386 | case BUILT_IN_TM_LOG_2: | |
2387 | case BUILT_IN_TM_LOG_4: | |
2388 | case BUILT_IN_TM_LOG_8: | |
2389 | case BUILT_IN_TM_LOG_FLOAT: | |
2390 | case BUILT_IN_TM_LOG_DOUBLE: | |
2391 | case BUILT_IN_TM_LOG_LDOUBLE: | |
2392 | case BUILT_IN_TM_LOG_M64: | |
2393 | case BUILT_IN_TM_LOG_M128: | |
2394 | case BUILT_IN_TM_LOG_M256: | |
2395 | return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref); | |
2396 | ||
77efe819 | 2397 | /* These read memory pointed to by the first argument. */ |
2398 | case BUILT_IN_STRDUP: | |
2399 | case BUILT_IN_STRNDUP: | |
ee890734 | 2400 | case BUILT_IN_REALLOC: |
77efe819 | 2401 | { |
2402 | ao_ref dref; | |
2403 | tree size = NULL_TREE; | |
2404 | if (gimple_call_num_args (call) == 2) | |
2405 | size = gimple_call_arg (call, 1); | |
2406 | ao_ref_init_from_ptr_and_size (&dref, | |
2407 | gimple_call_arg (call, 0), | |
2408 | size); | |
2409 | return refs_may_alias_p_1 (&dref, ref, false); | |
2410 | } | |
4953672f | 2411 | /* These read memory pointed to by the first argument. */ |
2412 | case BUILT_IN_INDEX: | |
2413 | case BUILT_IN_STRCHR: | |
2414 | case BUILT_IN_STRRCHR: | |
2415 | { | |
2416 | ao_ref dref; | |
2417 | ao_ref_init_from_ptr_and_size (&dref, | |
2418 | gimple_call_arg (call, 0), | |
2419 | NULL_TREE); | |
2420 | return refs_may_alias_p_1 (&dref, ref, false); | |
2421 | } | |
2422 | /* These read memory pointed to by the first argument with size | |
2423 | in the third argument. */ | |
2424 | case BUILT_IN_MEMCHR: | |
2425 | { | |
2426 | ao_ref dref; | |
2427 | ao_ref_init_from_ptr_and_size (&dref, | |
2428 | gimple_call_arg (call, 0), | |
2429 | gimple_call_arg (call, 2)); | |
2430 | return refs_may_alias_p_1 (&dref, ref, false); | |
2431 | } | |
2432 | /* These read memory pointed to by the first and second arguments. */ | |
2433 | case BUILT_IN_STRSTR: | |
2434 | case BUILT_IN_STRPBRK: | |
2435 | { | |
2436 | ao_ref dref; | |
2437 | ao_ref_init_from_ptr_and_size (&dref, | |
2438 | gimple_call_arg (call, 0), | |
2439 | NULL_TREE); | |
2440 | if (refs_may_alias_p_1 (&dref, ref, false)) | |
2441 | return true; | |
2442 | ao_ref_init_from_ptr_and_size (&dref, | |
2443 | gimple_call_arg (call, 1), | |
2444 | NULL_TREE); | |
2445 | return refs_may_alias_p_1 (&dref, ref, false); | |
2446 | } | |
2447 | ||
09d9c774 | 2448 | /* The following builtins do not read from memory. */ |
2449 | case BUILT_IN_FREE: | |
0d3ca08e | 2450 | case BUILT_IN_MALLOC: |
be2c7f8f | 2451 | case BUILT_IN_POSIX_MEMALIGN: |
060fc206 | 2452 | case BUILT_IN_ALIGNED_ALLOC: |
4ef43bbd | 2453 | case BUILT_IN_CALLOC: |
2b34677f | 2454 | CASE_BUILT_IN_ALLOCA: |
ca793672 | 2455 | case BUILT_IN_STACK_SAVE: |
2456 | case BUILT_IN_STACK_RESTORE: | |
09d9c774 | 2457 | case BUILT_IN_MEMSET: |
4c0315d0 | 2458 | case BUILT_IN_TM_MEMSET: |
939514e9 | 2459 | case BUILT_IN_MEMSET_CHK: |
09d9c774 | 2460 | case BUILT_IN_FREXP: |
2461 | case BUILT_IN_FREXPF: | |
2462 | case BUILT_IN_FREXPL: | |
2463 | case BUILT_IN_GAMMA_R: | |
2464 | case BUILT_IN_GAMMAF_R: | |
2465 | case BUILT_IN_GAMMAL_R: | |
2466 | case BUILT_IN_LGAMMA_R: | |
2467 | case BUILT_IN_LGAMMAF_R: | |
2468 | case BUILT_IN_LGAMMAL_R: | |
2469 | case BUILT_IN_MODF: | |
2470 | case BUILT_IN_MODFF: | |
2471 | case BUILT_IN_MODFL: | |
2472 | case BUILT_IN_REMQUO: | |
2473 | case BUILT_IN_REMQUOF: | |
2474 | case BUILT_IN_REMQUOL: | |
2475 | case BUILT_IN_SINCOS: | |
2476 | case BUILT_IN_SINCOSF: | |
2477 | case BUILT_IN_SINCOSL: | |
fca0886c | 2478 | case BUILT_IN_ASSUME_ALIGNED: |
9d17b4a8 | 2479 | case BUILT_IN_VA_END: |
09d9c774 | 2480 | return false; |
a44814b9 | 2481 | /* __sync_* builtins and some OpenMP builtins act as threading |
2482 | barriers. */ | |
2483 | #undef DEF_SYNC_BUILTIN | |
2484 | #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM: | |
2485 | #include "sync-builtins.def" | |
2486 | #undef DEF_SYNC_BUILTIN | |
2487 | case BUILT_IN_GOMP_ATOMIC_START: | |
2488 | case BUILT_IN_GOMP_ATOMIC_END: | |
2489 | case BUILT_IN_GOMP_BARRIER: | |
bc7bff74 | 2490 | case BUILT_IN_GOMP_BARRIER_CANCEL: |
a44814b9 | 2491 | case BUILT_IN_GOMP_TASKWAIT: |
bc7bff74 | 2492 | case BUILT_IN_GOMP_TASKGROUP_END: |
a44814b9 | 2493 | case BUILT_IN_GOMP_CRITICAL_START: |
2494 | case BUILT_IN_GOMP_CRITICAL_END: | |
2495 | case BUILT_IN_GOMP_CRITICAL_NAME_START: | |
2496 | case BUILT_IN_GOMP_CRITICAL_NAME_END: | |
2497 | case BUILT_IN_GOMP_LOOP_END: | |
bc7bff74 | 2498 | case BUILT_IN_GOMP_LOOP_END_CANCEL: |
a44814b9 | 2499 | case BUILT_IN_GOMP_ORDERED_START: |
2500 | case BUILT_IN_GOMP_ORDERED_END: | |
a44814b9 | 2501 | case BUILT_IN_GOMP_SECTIONS_END: |
bc7bff74 | 2502 | case BUILT_IN_GOMP_SECTIONS_END_CANCEL: |
a44814b9 | 2503 | case BUILT_IN_GOMP_SINGLE_COPY_START: |
2504 | case BUILT_IN_GOMP_SINGLE_COPY_END: | |
2505 | return true; | |
09d9c774 | 2506 | |
047fdd47 | 2507 | default: |
2508 | /* Fallthru to general call handling. */; | |
2509 | } | |
2510 | ||
dd277d48 | 2511 | /* Check if base is a global static variable that is not read |
2512 | by the function. */ | |
53e9c5c4 | 2513 | if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base)) |
4ee9c684 | 2514 | { |
415d1b9a | 2515 | struct cgraph_node *node = cgraph_node::get (callee); |
b39960e1 | 2516 | bitmap not_read; |
dd277d48 | 2517 | |
924de091 | 2518 | /* FIXME: Callee can be an OMP builtin that does not have a call graph |
2519 | node yet. We should enforce that there are nodes for all decls in the | |
2520 | IL and remove this check instead. */ | |
b39960e1 | 2521 | if (node |
2522 | && (not_read = ipa_reference_get_not_read_global (node)) | |
2523 | && bitmap_bit_p (not_read, ipa_reference_var_uid (base))) | |
2524 | goto process_args; | |
4ee9c684 | 2525 | } |
2526 | ||
cb245216 | 2527 | /* Check if the base variable is call-used. */ |
2528 | if (DECL_P (base)) | |
4ee9c684 | 2529 | { |
cb245216 | 2530 | if (pt_solution_includes (gimple_call_use_set (call), base)) |
dd277d48 | 2531 | return true; |
2532 | } | |
2622064f | 2533 | else if ((TREE_CODE (base) == MEM_REF |
2534 | || TREE_CODE (base) == TARGET_MEM_REF) | |
cb245216 | 2535 | && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME) |
dd277d48 | 2536 | { |
cb245216 | 2537 | struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)); |
2538 | if (!pi) | |
2539 | return true; | |
917f08fa | 2540 | |
cb245216 | 2541 | if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt)) |
dd277d48 | 2542 | return true; |
2543 | } | |
cb245216 | 2544 | else |
2545 | return true; | |
dd277d48 | 2546 | |
2547 | /* Inspect call arguments for passed-by-value aliases. */ | |
2548 | process_args: | |
2549 | for (i = 0; i < gimple_call_num_args (call); ++i) | |
2550 | { | |
2551 | tree op = gimple_call_arg (call, i); | |
8ce86007 | 2552 | int flags = gimple_call_arg_flags (call, i); |
2553 | ||
2554 | if (flags & EAF_UNUSED) | |
2555 | continue; | |
dd277d48 | 2556 | |
dd277d48 | 2557 | if (TREE_CODE (op) == WITH_SIZE_EXPR) |
2558 | op = TREE_OPERAND (op, 0); | |
4ee9c684 | 2559 | |
dd277d48 | 2560 | if (TREE_CODE (op) != SSA_NAME |
134899ac | 2561 | && !is_gimple_min_invariant (op)) |
2562 | { | |
2563 | ao_ref r; | |
2564 | ao_ref_init (&r, op); | |
58cfef6b | 2565 | if (refs_may_alias_p_1 (&r, ref, tbaa_p)) |
134899ac | 2566 | return true; |
2567 | } | |
4ee9c684 | 2568 | } |
2569 | ||
dd277d48 | 2570 | return false; |
2571 | } | |
2572 | ||
2573 | static bool | |
58cfef6b | 2574 | ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p) |
dd277d48 | 2575 | { |
134899ac | 2576 | bool res; |
58cfef6b | 2577 | res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p); |
dd277d48 | 2578 | if (res) |
2579 | ++alias_stats.ref_maybe_used_by_call_p_may_alias; | |
2580 | else | |
2581 | ++alias_stats.ref_maybe_used_by_call_p_no_alias; | |
2582 | return res; | |
4ee9c684 | 2583 | } |
2584 | ||
2585 | ||
dd277d48 | 2586 | /* If the statement STMT may use the memory reference REF return |
2587 | true, otherwise return false. */ | |
4ee9c684 | 2588 | |
dd277d48 | 2589 | bool |
58cfef6b | 2590 | ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p) |
4ee9c684 | 2591 | { |
dd277d48 | 2592 | if (is_gimple_assign (stmt)) |
2593 | { | |
2594 | tree rhs; | |
2595 | ||
2596 | /* All memory assign statements are single. */ | |
2597 | if (!gimple_assign_single_p (stmt)) | |
2598 | return false; | |
2599 | ||
2600 | rhs = gimple_assign_rhs1 (stmt); | |
2601 | if (is_gimple_reg (rhs) | |
2602 | || is_gimple_min_invariant (rhs) | |
2603 | || gimple_assign_rhs_code (stmt) == CONSTRUCTOR) | |
2604 | return false; | |
2605 | ||
58cfef6b | 2606 | return refs_may_alias_p (rhs, ref, tbaa_p); |
dd277d48 | 2607 | } |
2608 | else if (is_gimple_call (stmt)) | |
58cfef6b | 2609 | return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p); |
1a91d914 | 2610 | else if (greturn *return_stmt = dyn_cast <greturn *> (stmt)) |
91234829 | 2611 | { |
1a91d914 | 2612 | tree retval = gimple_return_retval (return_stmt); |
91234829 | 2613 | if (retval |
2614 | && TREE_CODE (retval) != SSA_NAME | |
2615 | && !is_gimple_min_invariant (retval) | |
58cfef6b | 2616 | && refs_may_alias_p (retval, ref, tbaa_p)) |
91234829 | 2617 | return true; |
2618 | /* If ref escapes the function then the return acts as a use. */ | |
258bd648 | 2619 | tree base = ao_ref_base (ref); |
91234829 | 2620 | if (!base) |
2621 | ; | |
2622 | else if (DECL_P (base)) | |
2623 | return is_global_var (base); | |
2624 | else if (TREE_CODE (base) == MEM_REF | |
2625 | || TREE_CODE (base) == TARGET_MEM_REF) | |
2626 | return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0)); | |
2627 | return false; | |
2628 | } | |
dd277d48 | 2629 | |
2630 | return true; | |
4ee9c684 | 2631 | } |
2632 | ||
258bd648 | 2633 | bool |
58cfef6b | 2634 | ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p) |
258bd648 | 2635 | { |
2636 | ao_ref r; | |
2637 | ao_ref_init (&r, ref); | |
58cfef6b | 2638 | return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p); |
258bd648 | 2639 | } |
2640 | ||
dd277d48 | 2641 | /* If the call in statement CALL may clobber the memory reference REF |
2642 | return true, otherwise return false. */ | |
4ee9c684 | 2643 | |
38168b16 | 2644 | bool |
1a91d914 | 2645 | call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref) |
4ee9c684 | 2646 | { |
7f7f16d4 | 2647 | tree base; |
047fdd47 | 2648 | tree callee; |
dd277d48 | 2649 | |
2650 | /* If the call is pure or const it cannot clobber anything. */ | |
2651 | if (gimple_call_flags (call) | |
2652 | & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS)) | |
2653 | return false; | |
32cf7025 | 2654 | if (gimple_call_internal_p (call)) |
2655 | switch (gimple_call_internal_fn (call)) | |
2656 | { | |
2657 | /* Treat these internal calls like ECF_PURE for aliasing, | |
2658 | they don't write to any memory the program should care about. | |
2659 | They have important other side-effects, and read memory, | |
2660 | so can't be ECF_NOVOPS. */ | |
2661 | case IFN_UBSAN_NULL: | |
2662 | case IFN_UBSAN_BOUNDS: | |
2663 | case IFN_UBSAN_VPTR: | |
2664 | case IFN_UBSAN_OBJECT_SIZE: | |
b4fce8f9 | 2665 | case IFN_UBSAN_PTR: |
32cf7025 | 2666 | case IFN_ASAN_CHECK: |
2667 | return false; | |
2668 | default: | |
2669 | break; | |
2670 | } | |
dd277d48 | 2671 | |
3918bd18 | 2672 | base = ao_ref_base (ref); |
dd277d48 | 2673 | if (!base) |
2674 | return true; | |
2675 | ||
2676 | if (TREE_CODE (base) == SSA_NAME | |
2677 | || CONSTANT_CLASS_P (base)) | |
2678 | return false; | |
2679 | ||
3787db52 | 2680 | /* A call that is not without side-effects might involve volatile |
2681 | accesses and thus conflicts with all other volatile accesses. */ | |
2682 | if (ref->volatile_p) | |
2683 | return true; | |
2684 | ||
09347e88 | 2685 | /* If the reference is based on a decl that is not aliased the call |
2686 | cannot possibly clobber it. */ | |
2687 | if (DECL_P (base) | |
2688 | && !may_be_aliased (base) | |
7f7f16d4 | 2689 | /* But local non-readonly statics can be modified through recursion |
2690 | or the call may implement a threading barrier which we must | |
2691 | treat as may-def. */ | |
09347e88 | 2692 | && (TREE_READONLY (base) |
7f7f16d4 | 2693 | || !is_global_var (base))) |
09347e88 | 2694 | return false; |
2695 | ||
5477dab8 | 2696 | /* If the reference is based on a pointer that points to memory |
2697 | that may not be written to then the call cannot possibly clobber it. */ | |
2698 | if ((TREE_CODE (base) == MEM_REF | |
2699 | || TREE_CODE (base) == TARGET_MEM_REF) | |
2700 | && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME | |
2701 | && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0))) | |
2702 | return false; | |
2703 | ||
047fdd47 | 2704 | callee = gimple_call_fndecl (call); |
2705 | ||
2706 | /* Handle those builtin functions explicitly that do not act as | |
2707 | escape points. See tree-ssa-structalias.c:find_func_aliases | |
2708 | for the list of builtins we might need to handle here. */ | |
2709 | if (callee != NULL_TREE | |
6189000c | 2710 | && gimple_call_builtin_p (call, BUILT_IN_NORMAL)) |
047fdd47 | 2711 | switch (DECL_FUNCTION_CODE (callee)) |
2712 | { | |
2713 | /* All the following functions clobber memory pointed to by | |
2714 | their first argument. */ | |
2715 | case BUILT_IN_STRCPY: | |
2716 | case BUILT_IN_STRNCPY: | |
047fdd47 | 2717 | case BUILT_IN_MEMCPY: |
2718 | case BUILT_IN_MEMMOVE: | |
2719 | case BUILT_IN_MEMPCPY: | |
2720 | case BUILT_IN_STPCPY: | |
2721 | case BUILT_IN_STPNCPY: | |
2722 | case BUILT_IN_STRCAT: | |
2723 | case BUILT_IN_STRNCAT: | |
134899ac | 2724 | case BUILT_IN_MEMSET: |
4c0315d0 | 2725 | case BUILT_IN_TM_MEMSET: |
2726 | CASE_BUILT_IN_TM_STORE (1): | |
2727 | CASE_BUILT_IN_TM_STORE (2): | |
2728 | CASE_BUILT_IN_TM_STORE (4): | |
2729 | CASE_BUILT_IN_TM_STORE (8): | |
2730 | CASE_BUILT_IN_TM_STORE (FLOAT): | |
2731 | CASE_BUILT_IN_TM_STORE (DOUBLE): | |
2732 | CASE_BUILT_IN_TM_STORE (LDOUBLE): | |
2733 | CASE_BUILT_IN_TM_STORE (M64): | |
2734 | CASE_BUILT_IN_TM_STORE (M128): | |
2735 | CASE_BUILT_IN_TM_STORE (M256): | |
2736 | case BUILT_IN_TM_MEMCPY: | |
2737 | case BUILT_IN_TM_MEMMOVE: | |
047fdd47 | 2738 | { |
134899ac | 2739 | ao_ref dref; |
2740 | tree size = NULL_TREE; | |
77efe819 | 2741 | /* Don't pass in size for strncat, as the maximum size |
2742 | is strlen (dest) + n + 1 instead of n, resp. | |
2743 | n + 1 at dest + strlen (dest), but strlen (dest) isn't | |
2744 | known. */ | |
2745 | if (gimple_call_num_args (call) == 3 | |
2746 | && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT) | |
134899ac | 2747 | size = gimple_call_arg (call, 2); |
2748 | ao_ref_init_from_ptr_and_size (&dref, | |
2749 | gimple_call_arg (call, 0), | |
2750 | size); | |
2751 | return refs_may_alias_p_1 (&dref, ref, false); | |
047fdd47 | 2752 | } |
939514e9 | 2753 | case BUILT_IN_STRCPY_CHK: |
2754 | case BUILT_IN_STRNCPY_CHK: | |
2755 | case BUILT_IN_MEMCPY_CHK: | |
2756 | case BUILT_IN_MEMMOVE_CHK: | |
2757 | case BUILT_IN_MEMPCPY_CHK: | |
2758 | case BUILT_IN_STPCPY_CHK: | |
1063acde | 2759 | case BUILT_IN_STPNCPY_CHK: |
939514e9 | 2760 | case BUILT_IN_STRCAT_CHK: |
2761 | case BUILT_IN_STRNCAT_CHK: | |
2762 | case BUILT_IN_MEMSET_CHK: | |
2763 | { | |
2764 | ao_ref dref; | |
2765 | tree size = NULL_TREE; | |
77efe819 | 2766 | /* Don't pass in size for __strncat_chk, as the maximum size |
2767 | is strlen (dest) + n + 1 instead of n, resp. | |
2768 | n + 1 at dest + strlen (dest), but strlen (dest) isn't | |
2769 | known. */ | |
2770 | if (gimple_call_num_args (call) == 4 | |
2771 | && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK) | |
939514e9 | 2772 | size = gimple_call_arg (call, 2); |
2773 | ao_ref_init_from_ptr_and_size (&dref, | |
2774 | gimple_call_arg (call, 0), | |
2775 | size); | |
2776 | return refs_may_alias_p_1 (&dref, ref, false); | |
2777 | } | |
119147d1 | 2778 | case BUILT_IN_BCOPY: |
2779 | { | |
2780 | ao_ref dref; | |
2781 | tree size = gimple_call_arg (call, 2); | |
2782 | ao_ref_init_from_ptr_and_size (&dref, | |
2783 | gimple_call_arg (call, 1), | |
2784 | size); | |
2785 | return refs_may_alias_p_1 (&dref, ref, false); | |
2786 | } | |
0d3ca08e | 2787 | /* Allocating memory does not have any side-effects apart from |
2788 | being the definition point for the pointer. */ | |
2789 | case BUILT_IN_MALLOC: | |
060fc206 | 2790 | case BUILT_IN_ALIGNED_ALLOC: |
4ef43bbd | 2791 | case BUILT_IN_CALLOC: |
77efe819 | 2792 | case BUILT_IN_STRDUP: |
2793 | case BUILT_IN_STRNDUP: | |
be97d4b6 | 2794 | /* Unix98 specifies that errno is set on allocation failure. */ |
a29c5f9c | 2795 | if (flag_errno_math |
be97d4b6 | 2796 | && targetm.ref_may_alias_errno (ref)) |
2797 | return true; | |
0d3ca08e | 2798 | return false; |
ca793672 | 2799 | case BUILT_IN_STACK_SAVE: |
2b34677f | 2800 | CASE_BUILT_IN_ALLOCA: |
fca0886c | 2801 | case BUILT_IN_ASSUME_ALIGNED: |
ca793672 | 2802 | return false; |
be2c7f8f | 2803 | /* But posix_memalign stores a pointer into the memory pointed to |
2804 | by its first argument. */ | |
2805 | case BUILT_IN_POSIX_MEMALIGN: | |
2806 | { | |
2807 | tree ptrptr = gimple_call_arg (call, 0); | |
2808 | ao_ref dref; | |
2809 | ao_ref_init_from_ptr_and_size (&dref, ptrptr, | |
2810 | TYPE_SIZE_UNIT (ptr_type_node)); | |
2811 | return (refs_may_alias_p_1 (&dref, ref, false) | |
2812 | || (flag_errno_math | |
2813 | && targetm.ref_may_alias_errno (ref))); | |
2814 | } | |
047fdd47 | 2815 | /* Freeing memory kills the pointed-to memory. More importantly |
2816 | the call has to serve as a barrier for moving loads and stores | |
134899ac | 2817 | across it. */ |
047fdd47 | 2818 | case BUILT_IN_FREE: |
9d17b4a8 | 2819 | case BUILT_IN_VA_END: |
047fdd47 | 2820 | { |
2821 | tree ptr = gimple_call_arg (call, 0); | |
2822 | return ptr_deref_may_alias_ref_p_1 (ptr, ref); | |
2823 | } | |
ee890734 | 2824 | /* Realloc serves both as allocation point and deallocation point. */ |
2825 | case BUILT_IN_REALLOC: | |
2826 | { | |
2827 | tree ptr = gimple_call_arg (call, 0); | |
2828 | /* Unix98 specifies that errno is set on allocation failure. */ | |
2829 | return ((flag_errno_math | |
2830 | && targetm.ref_may_alias_errno (ref)) | |
2831 | || ptr_deref_may_alias_ref_p_1 (ptr, ref)); | |
2832 | } | |
047fdd47 | 2833 | case BUILT_IN_GAMMA_R: |
2834 | case BUILT_IN_GAMMAF_R: | |
2835 | case BUILT_IN_GAMMAL_R: | |
2836 | case BUILT_IN_LGAMMA_R: | |
2837 | case BUILT_IN_LGAMMAF_R: | |
2838 | case BUILT_IN_LGAMMAL_R: | |
09d9c774 | 2839 | { |
2840 | tree out = gimple_call_arg (call, 1); | |
2841 | if (ptr_deref_may_alias_ref_p_1 (out, ref)) | |
2842 | return true; | |
2843 | if (flag_errno_math) | |
2844 | break; | |
2845 | return false; | |
2846 | } | |
2847 | case BUILT_IN_FREXP: | |
2848 | case BUILT_IN_FREXPF: | |
2849 | case BUILT_IN_FREXPL: | |
047fdd47 | 2850 | case BUILT_IN_MODF: |
2851 | case BUILT_IN_MODFF: | |
2852 | case BUILT_IN_MODFL: | |
2853 | { | |
2854 | tree out = gimple_call_arg (call, 1); | |
2855 | return ptr_deref_may_alias_ref_p_1 (out, ref); | |
2856 | } | |
2857 | case BUILT_IN_REMQUO: | |
2858 | case BUILT_IN_REMQUOF: | |
2859 | case BUILT_IN_REMQUOL: | |
2860 | { | |
2861 | tree out = gimple_call_arg (call, 2); | |
09d9c774 | 2862 | if (ptr_deref_may_alias_ref_p_1 (out, ref)) |
2863 | return true; | |
2864 | if (flag_errno_math) | |
2865 | break; | |
2866 | return false; | |
047fdd47 | 2867 | } |
2868 | case BUILT_IN_SINCOS: | |
2869 | case BUILT_IN_SINCOSF: | |
2870 | case BUILT_IN_SINCOSL: | |
2871 | { | |
2872 | tree sin = gimple_call_arg (call, 1); | |
2873 | tree cos = gimple_call_arg (call, 2); | |
2874 | return (ptr_deref_may_alias_ref_p_1 (sin, ref) | |
2875 | || ptr_deref_may_alias_ref_p_1 (cos, ref)); | |
2876 | } | |
a44814b9 | 2877 | /* __sync_* builtins and some OpenMP builtins act as threading |
2878 | barriers. */ | |
2879 | #undef DEF_SYNC_BUILTIN | |
2880 | #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM: | |
2881 | #include "sync-builtins.def" | |
2882 | #undef DEF_SYNC_BUILTIN | |
2883 | case BUILT_IN_GOMP_ATOMIC_START: | |
2884 | case BUILT_IN_GOMP_ATOMIC_END: | |
2885 | case BUILT_IN_GOMP_BARRIER: | |
bc7bff74 | 2886 | case BUILT_IN_GOMP_BARRIER_CANCEL: |
a44814b9 | 2887 | case BUILT_IN_GOMP_TASKWAIT: |
bc7bff74 | 2888 | case BUILT_IN_GOMP_TASKGROUP_END: |
a44814b9 | 2889 | case BUILT_IN_GOMP_CRITICAL_START: |
2890 | case BUILT_IN_GOMP_CRITICAL_END: | |
2891 | case BUILT_IN_GOMP_CRITICAL_NAME_START: | |
2892 | case BUILT_IN_GOMP_CRITICAL_NAME_END: | |
2893 | case BUILT_IN_GOMP_LOOP_END: | |
bc7bff74 | 2894 | case BUILT_IN_GOMP_LOOP_END_CANCEL: |
a44814b9 | 2895 | case BUILT_IN_GOMP_ORDERED_START: |
2896 | case BUILT_IN_GOMP_ORDERED_END: | |
a44814b9 | 2897 | case BUILT_IN_GOMP_SECTIONS_END: |
bc7bff74 | 2898 | case BUILT_IN_GOMP_SECTIONS_END_CANCEL: |
a44814b9 | 2899 | case BUILT_IN_GOMP_SINGLE_COPY_START: |
2900 | case BUILT_IN_GOMP_SINGLE_COPY_END: | |
2901 | return true; | |
047fdd47 | 2902 | default: |
2903 | /* Fallthru to general call handling. */; | |
2904 | } | |
2905 | ||
dd277d48 | 2906 | /* Check if base is a global static variable that is not written |
2907 | by the function. */ | |
53e9c5c4 | 2908 | if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base)) |
4ee9c684 | 2909 | { |
415d1b9a | 2910 | struct cgraph_node *node = cgraph_node::get (callee); |
b39960e1 | 2911 | bitmap not_written; |
0eaf9bd7 | 2912 | |
b39960e1 | 2913 | if (node |
2914 | && (not_written = ipa_reference_get_not_written_global (node)) | |
2915 | && bitmap_bit_p (not_written, ipa_reference_var_uid (base))) | |
2916 | return false; | |
4ee9c684 | 2917 | } |
4ee9c684 | 2918 | |
cb245216 | 2919 | /* Check if the base variable is call-clobbered. */ |
dd277d48 | 2920 | if (DECL_P (base)) |
cb245216 | 2921 | return pt_solution_includes (gimple_call_clobber_set (call), base); |
2622064f | 2922 | else if ((TREE_CODE (base) == MEM_REF |
2923 | || TREE_CODE (base) == TARGET_MEM_REF) | |
917f08fa | 2924 | && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME) |
2925 | { | |
2926 | struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)); | |
2927 | if (!pi) | |
2928 | return true; | |
2929 | ||
cb245216 | 2930 | return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt); |
917f08fa | 2931 | } |
4ee9c684 | 2932 | |
dd277d48 | 2933 | return true; |
2934 | } | |
4ee9c684 | 2935 | |
4debfcfc | 2936 | /* If the call in statement CALL may clobber the memory reference REF |
2937 | return true, otherwise return false. */ | |
2938 | ||
2939 | bool | |
1a91d914 | 2940 | call_may_clobber_ref_p (gcall *call, tree ref) |
4ee9c684 | 2941 | { |
3918bd18 | 2942 | bool res; |
2943 | ao_ref r; | |
2944 | ao_ref_init (&r, ref); | |
2945 | res = call_may_clobber_ref_p_1 (call, &r); | |
dd277d48 | 2946 | if (res) |
2947 | ++alias_stats.call_may_clobber_ref_p_may_alias; | |
2948 | else | |
2949 | ++alias_stats.call_may_clobber_ref_p_no_alias; | |
2950 | return res; | |
4ee9c684 | 2951 | } |
94e6573f | 2952 | |
dd277d48 | 2953 | |
2954 | /* If the statement STMT may clobber the memory reference REF return true, | |
2955 | otherwise return false. */ | |
94e6573f | 2956 | |
2957 | bool | |
58cfef6b | 2958 | stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p) |
94e6573f | 2959 | { |
dd277d48 | 2960 | if (is_gimple_call (stmt)) |
2961 | { | |
2962 | tree lhs = gimple_call_lhs (stmt); | |
2963 | if (lhs | |
66b86a74 | 2964 | && TREE_CODE (lhs) != SSA_NAME) |
3918bd18 | 2965 | { |
2966 | ao_ref r; | |
2967 | ao_ref_init (&r, lhs); | |
58cfef6b | 2968 | if (refs_may_alias_p_1 (ref, &r, tbaa_p)) |
3918bd18 | 2969 | return true; |
2970 | } | |
94e6573f | 2971 | |
1a91d914 | 2972 | return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref); |
dd277d48 | 2973 | } |
8e7c2b9c | 2974 | else if (gimple_assign_single_p (stmt)) |
3918bd18 | 2975 | { |
8e7c2b9c | 2976 | tree lhs = gimple_assign_lhs (stmt); |
66b86a74 | 2977 | if (TREE_CODE (lhs) != SSA_NAME) |
8e7c2b9c | 2978 | { |
2979 | ao_ref r; | |
66b86a74 | 2980 | ao_ref_init (&r, lhs); |
58cfef6b | 2981 | return refs_may_alias_p_1 (ref, &r, tbaa_p); |
8e7c2b9c | 2982 | } |
3918bd18 | 2983 | } |
dd277d48 | 2984 | else if (gimple_code (stmt) == GIMPLE_ASM) |
94e6573f | 2985 | return true; |
2986 | ||
6329636b | 2987 | return false; |
94e6573f | 2988 | } |
2be14d8b | 2989 | |
3918bd18 | 2990 | bool |
58cfef6b | 2991 | stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p) |
3918bd18 | 2992 | { |
2993 | ao_ref r; | |
2994 | ao_ref_init (&r, ref); | |
58cfef6b | 2995 | return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p); |
3918bd18 | 2996 | } |
2997 | ||
38cfd088 | 2998 | /* Return true if store1 and store2 described by corresponding tuples |
2999 | <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same | |
3000 | address. */ | |
3001 | ||
3002 | static bool | |
2c7ce661 | 3003 | same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1, |
3004 | poly_int64 max_size1, | |
3005 | tree base2, poly_int64 offset2, poly_int64 size2, | |
3006 | poly_int64 max_size2) | |
38cfd088 | 3007 | { |
9355477b | 3008 | /* Offsets need to be 0. */ |
2c7ce661 | 3009 | if (maybe_ne (offset1, 0) |
3010 | || maybe_ne (offset2, 0)) | |
9355477b | 3011 | return false; |
3012 | ||
3013 | bool base1_obj_p = SSA_VAR_P (base1); | |
3014 | bool base2_obj_p = SSA_VAR_P (base2); | |
38cfd088 | 3015 | |
3016 | /* We need one object. */ | |
3017 | if (base1_obj_p == base2_obj_p) | |
3018 | return false; | |
3019 | tree obj = base1_obj_p ? base1 : base2; | |
3020 | ||
3021 | /* And we need one MEM_REF. */ | |
3022 | bool base1_memref_p = TREE_CODE (base1) == MEM_REF; | |
3023 | bool base2_memref_p = TREE_CODE (base2) == MEM_REF; | |
3024 | if (base1_memref_p == base2_memref_p) | |
3025 | return false; | |
3026 | tree memref = base1_memref_p ? base1 : base2; | |
3027 | ||
3028 | /* Sizes need to be valid. */ | |
2c7ce661 | 3029 | if (!known_size_p (max_size1) |
3030 | || !known_size_p (max_size2) | |
3031 | || !known_size_p (size1) | |
3032 | || !known_size_p (size2)) | |
38cfd088 | 3033 | return false; |
3034 | ||
3035 | /* Max_size needs to match size. */ | |
2c7ce661 | 3036 | if (maybe_ne (max_size1, size1) |
3037 | || maybe_ne (max_size2, size2)) | |
38cfd088 | 3038 | return false; |
3039 | ||
3040 | /* Sizes need to match. */ | |
2c7ce661 | 3041 | if (maybe_ne (size1, size2)) |
38cfd088 | 3042 | return false; |
3043 | ||
38cfd088 | 3044 | |
3045 | /* Check that memref is a store to pointer with singleton points-to info. */ | |
9355477b | 3046 | if (!integer_zerop (TREE_OPERAND (memref, 1))) |
38cfd088 | 3047 | return false; |
3048 | tree ptr = TREE_OPERAND (memref, 0); | |
3049 | if (TREE_CODE (ptr) != SSA_NAME) | |
3050 | return false; | |
3051 | struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr); | |
3052 | unsigned int pt_uid; | |
3053 | if (pi == NULL | |
3054 | || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid)) | |
3055 | return false; | |
3056 | ||
1922120c | 3057 | /* Be conservative with non-call exceptions when the address might |
3058 | be NULL. */ | |
7c1cc03c | 3059 | if (cfun->can_throw_non_call_exceptions && pi->pt.null) |
9355477b | 3060 | return false; |
3061 | ||
38cfd088 | 3062 | /* Check that ptr points relative to obj. */ |
9355477b | 3063 | unsigned int obj_uid = DECL_PT_UID (obj); |
38cfd088 | 3064 | if (obj_uid != pt_uid) |
3065 | return false; | |
3066 | ||
3067 | /* Check that the object size is the same as the store size. That ensures us | |
3068 | that ptr points to the start of obj. */ | |
2c7ce661 | 3069 | return (DECL_SIZE (obj) |
3070 | && poly_int_tree_p (DECL_SIZE (obj)) | |
3071 | && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1)); | |
38cfd088 | 3072 | } |
3073 | ||
/* If STMT kills the memory reference REF return true, otherwise
   return false.  "Kills" means STMT definitely overwrites every byte
   that REF may access (so an earlier store of REF's bytes is dead).  */

bool
stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
{
  /* Without a base for REF we cannot constrain the access.  */
  if (!ao_ref_base (ref))
    return false;

  /* First case: STMT is an assignment to memory (non-SSA LHS) whose
     destination may cover REF.  */
  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ???  We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (cfun, stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      /* If LHS is literally a base of the access we are done.  */
      if (ref->ref)
	{
	  tree base = ref->ref;
	  tree innermost_dropped_array_ref = NULL_TREE;
	  if (handled_component_p (base))
	    {
	      /* Temporarily replace the outermost operand of LHS with a
		 marker so component chains compare positionally; restored
		 below.  */
	      tree saved_lhs0 = NULL_TREE;
	      if (handled_component_p (lhs))
		{
		  saved_lhs0 = TREE_OPERAND (lhs, 0);
		  TREE_OPERAND (lhs, 0) = integer_zero_node;
		}
	      do
		{
		  /* Just compare the outermost handled component, if
		     they are equal we have found a possible common
		     base.  */
		  tree saved_base0 = TREE_OPERAND (base, 0);
		  TREE_OPERAND (base, 0) = integer_zero_node;
		  bool res = operand_equal_p (lhs, base, 0);
		  TREE_OPERAND (base, 0) = saved_base0;
		  if (res)
		    break;
		  /* Remember if we drop an array-ref that we need to
		     double-check not being at struct end.  */
		  if (TREE_CODE (base) == ARRAY_REF
		      || TREE_CODE (base) == ARRAY_RANGE_REF)
		    innermost_dropped_array_ref = base;
		  /* Otherwise drop handled components of the access.  */
		  base = saved_base0;
		}
	      while (handled_component_p (base));
	      if (saved_lhs0)
		TREE_OPERAND (lhs, 0) = saved_lhs0;
	    }
	  /* Finally check if the lhs has the same address and size as the
	     base candidate of the access.  Watch out if we have dropped
	     an array-ref that was at struct end, this means ref->ref may
	     be outside of the TYPE_SIZE of its base.  */
	  if ((! innermost_dropped_array_ref
	       || ! array_at_struct_end_p (innermost_dropped_array_ref))
	      && (lhs == base
		  || (((TYPE_SIZE (TREE_TYPE (lhs))
			== TYPE_SIZE (TREE_TYPE (base)))
		       || (TYPE_SIZE (TREE_TYPE (lhs))
			   && TYPE_SIZE (TREE_TYPE (base))
			   && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
					       TYPE_SIZE (TREE_TYPE (base)),
					       0)))
		      && operand_equal_p (lhs, base,
					  OEP_ADDRESS_OF
					  | OEP_MATCH_SIDE_EFFECTS))))
	    return true;
	}

      /* Now look for non-literal equal bases with the restriction of
         handling constant offset and size.  */
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (!ref->max_size_known_p ())
	return false;
      poly_int64 size, offset, max_size, ref_offset = ref->offset;
      bool reverse;
      tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
					   &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base != ref->base)
	{
	  /* Try using points-to info.  */
	  if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
				       ref->offset, ref->size, ref->max_size))
	    return true;

	  /* If both base and ref->base are MEM_REFs, only compare the
	     first operand, and if the second operand isn't equal constant,
	     try to add the offsets into offset and ref_offset.  */
	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
	    {
	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
				       TREE_OPERAND (ref->base, 1)))
		{
		  /* Fold each MEM_REF byte offset (scaled to bits) into
		     the respective extent offset; if either overflows a
		     shwi give up by invalidating SIZE.  */
		  poly_offset_int off1 = mem_ref_offset (base);
		  off1 <<= LOG2_BITS_PER_UNIT;
		  off1 += offset;
		  poly_offset_int off2 = mem_ref_offset (ref->base);
		  off2 <<= LOG2_BITS_PER_UNIT;
		  off2 += ref_offset;
		  if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
		    size = -1;
		}
	    }
	  else
	    size = -1;
	}
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (known_eq (size, max_size)
	  && known_subrange_p (ref_offset, ref->max_size, offset, size))
	return true;
    }

  /* Second case: selected normal builtins whose written (or freed)
     region can be computed.  */
  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
	  && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_FREE:
	    {
	      /* free (PTR) kills any reference based directly on PTR.  */
	      tree ptr = gimple_call_arg (stmt, 0);
	      tree base = ao_ref_base (ref);
	      if (base && TREE_CODE (base) == MEM_REF
		  && TREE_OPERAND (base, 0) == ptr)
		return true;
	      break;
	    }

	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMSET_CHK:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_CALLOC:
	    {
	      /* Memory-writing builtins with a known destination and
		 length kill REF when [DEST, DEST+LEN) covers REF's
		 full extent.  */
	      /* For a must-alias check we need to be able to constrain
		 the access properly.  */
	      if (!ref->max_size_known_p ())
		return false;
	      tree dest;
	      tree len;

	      /* In execution order a calloc call will never kill
		 anything.  However, DSE will (ab)use this interface
		 to ask if a calloc call writes the same memory locations
		 as a later assignment, memset, etc.  So handle calloc
		 in the expected way.  */
	      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree arg1 = gimple_call_arg (stmt, 1);
		  if (TREE_CODE (arg0) != INTEGER_CST
		      || TREE_CODE (arg1) != INTEGER_CST)
		    return false;

		  /* The written region starts at the call LHS and spans
		     nmemb * size bytes.  */
		  dest = gimple_call_lhs (stmt);
		  len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
		}
	      else
		{
		  dest = gimple_call_arg (stmt, 0);
		  len = gimple_call_arg (stmt, 2);
		}
	      if (!poly_int_tree_p (len))
		return false;
	      tree rbase = ref->base;
	      poly_offset_int roffset = ref->offset;
	      ao_ref dref;
	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
	      tree base = ao_ref_base (&dref);
	      poly_offset_int offset = dref.offset;
	      if (!base || !known_size_p (dref.size))
		return false;
	      if (TREE_CODE (base) == MEM_REF)
		{
		  if (TREE_CODE (rbase) != MEM_REF)
		    return false;
		  // Compare pointers.
		  offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
		  roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
		  base = TREE_OPERAND (base, 0);
		  rbase = TREE_OPERAND (rbase, 0);
		}
	      if (base == rbase
		  && known_subrange_p (roffset, ref->max_size, offset,
				       wi::to_poly_offset (len)
				       << LOG2_BITS_PER_UNIT))
		return true;
	      break;
	    }

	  case BUILT_IN_VA_END:
	    {
	      /* va_end (&AP) kills references whose base is AP itself.  */
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == ADDR_EXPR)
		{
		  tree base = ao_ref_base (ref);
		  if (TREE_OPERAND (ptr, 0) == base)
		    return true;
		}
	      break;
	    }

	  /* Other builtins are not known to kill anything.  */
	  default:;
	  }
    }
  return false;
}
3300 | ||
3301 | bool | |
42acab1c | 3302 | stmt_kills_ref_p (gimple *stmt, tree ref) |
ab450cc6 | 3303 | { |
3304 | ao_ref r; | |
3305 | ao_ref_init (&r, ref); | |
258bd648 | 3306 | return stmt_kills_ref_p (stmt, &r); |
ab450cc6 | 3307 | } |
3308 | ||
3918bd18 | 3309 | |
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET or a statement clobbering the memory reference REF in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.

   TARGET is passed by reference: when it comes in as NULL_TREE the walk
   instead searches for a vuse whose definition is a default def or lies
   in a block dominating TARGET_BB, and stores that vuse back into TARGET.
   LIMIT (by reference) is decremented for each alias query; when it is
   exhausted the walk fails.  *VISITED records SSA versions of PHI results
   and vuses already seen (allocated lazily).  If ABORT_ON_VISITED,
   reaching an already-visited PHI or block ends the walk unsuccessfully
   instead of successfully.  TRANSLATE/DATA, if non-NULL, may disambiguate
   an otherwise-clobbering statement (see walk_non_aliased_vuses).  */

static bool
maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
		  ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
		  bitmap *visited, bool abort_on_visited,
		  void *(*translate)(ao_ref *, tree, void *, translate_flags *),
		  translate_flags disambiguate_only,
		  void *data)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* If we are searching for the target VUSE by walking up to
         TARGET_BB dominating the original PHI we are finished once
	 we reach a default def or a definition in a block dominating
	 that block.  Update TARGET and return.  */
      if (!target
	  && (gimple_nop_p (def_stmt)
	      || dominated_by_p (CDI_DOMINATORS,
				 target_bb, gimple_bb (def_stmt))))
	{
	  target = vuse;
	  return true;
	}

      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* An already visited PHI node ends the walk successfully.  */
	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
	    return !abort_on_visited;
	  vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
					   visited, abort_on_visited,
					   translate, data, disambiguate_only);
	  if (!vuse)
	    return false;
	  continue;
	}
      else if (gimple_nop_p (def_stmt))
	return false;
      else
	{
	  /* A clobbering statement or the end of the IL ends it failing.  */
	  /* LIMIT is unsigned; the cast to int also treats values that
	     wrapped past zero as exhausted.  */
	  if ((int)limit <= 0)
	    return false;
	  --limit;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
	    {
	      /* Give TRANSLATE a chance to disambiguate the clobber;
		 only a NULL return lets the walk continue.  */
	      translate_flags tf = disambiguate_only;
	      if (translate
		  && (*translate) (ref, vuse, data, &tf) == NULL)
		;
	      else
		return false;
	    }
	}
      /* If we reach a new basic-block see if we already skipped it
         in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}
ac926412 | 3389 | |
d7f69727 | 3390 | |
/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows to continue walking
   statements dominating PHI skipping only statements that cannot possibly
   clobber REF.  Decrements LIMIT for each alias disambiguation done
   and aborts the walk, returning NULL_TREE if it reaches zero.
   Returns NULL_TREE if no suitable virtual operand can be found.

   VISITED, ABORT_ON_VISITED, TRANSLATE/DATA and DISAMBIGUATE_ONLY are
   threaded through to maybe_skip_until; see there and
   walk_non_aliased_vuses for their semantics.  */

tree
get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
			  unsigned int &limit, bitmap *visited,
			  bool abort_on_visited,
			  void *(*translate)(ao_ref *, tree, void *,
					     translate_flags *),
			  void *data,
			  translate_flags disambiguate_only)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* Through a single-argument PHI we can simply look through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  basic_block phi_bb = gimple_bb (phi);
  tree arg0, arg1;
  unsigned i;

  /* Find a candidate for the virtual operand which definition
     dominates those of all others.  */
  /* First look if any of the args themselves satisfy this.  */
  for (i = 0; i < nargs; ++i)
    {
      arg0 = PHI_ARG_DEF (phi, i);
      /* A default def trivially dominates everything.  */
      if (SSA_NAME_IS_DEFAULT_DEF (arg0))
	break;
      basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
      if (def_bb != phi_bb
	  && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
	break;
      arg0 = NULL_TREE;
    }
  /* If not, look if we can reach such candidate by walking defs
     until we hit the immediate dominator.  maybe_skip_until will
     do that for us.  */
  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);

  /* Then check against the (to be) found candidate.  */
  for (i = 0; i < nargs; ++i)
    {
      arg1 = PHI_ARG_DEF (phi, i);
      if (arg1 == arg0)
	;
      else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
				   limit, visited,
				   abort_on_visited,
				   translate,
				   /* Do not valueize when walking over
				      backedges.  */
				   dominated_by_p
				     (CDI_DOMINATORS,
				      gimple_bb (SSA_NAME_DEF_STMT (arg1)),
				      phi_bb)
				   ? TR_DISAMBIGUATE
				   : disambiguate_only, data))
	return NULL_TREE;
    }

  return arg0;
}
516849c7 | 3461 | |
/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use for which its defining statement
   does not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE if non-NULL is called with a pointer to REF, the virtual
   use which definition is a statement that may clobber REF and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   VALUEIZE if non-NULL is called with the next VUSE that is considered
   and return value is substituted for that.  This can be used to
   implement optimistic value-numbering for example.  Note that the
   VUSE argument is assumed to be valueized already.

   LIMIT specifies the number of alias queries we are allowed to do,
   the walk stops when it reaches zero and NULL is returned.  LIMIT
   is decremented by the number of alias queries (plus adjustments
   done by the callbacks) upon return.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
			void *(*walker)(ao_ref *, tree, void *),
			void *(*translate)(ao_ref *, tree, void *,
					   translate_flags *),
			tree (*valueize)(tree),
			unsigned &limit, void *data)
{
  bitmap visited = NULL;
  void *res;
  /* Whether TRANSLATE adjusted REF at least once during the walk.  */
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple *def_stmt;

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      if (valueize)
	{
	  vuse = valueize (vuse);
	  if (!vuse)
	    {
	      res = NULL;
	      break;
	    }
	}
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	/* TRANSLATED is passed as abort-on-visited: after a translation
	   revisiting an already-seen PHI must not count as success.  */
	vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
					 &visited, translated, translate, data);
      else
	{
	  /* LIMIT is unsigned; the cast to int also treats values that
	     wrapped past zero as exhausted.  */
	  if ((int)limit <= 0)
	    {
	      res = NULL;
	      break;
	    }
	  --limit;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
	    {
	      if (!translate)
		break;
	      translate_flags disambiguate_only = TR_TRANSLATE;
	      res = (*translate) (ref, vuse, data, &disambiguate_only);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = translated || disambiguate_only == TR_TRANSLATE;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
3573 | ||
604eef2c | 3574 | |
/* Based on the memory reference REF call WALKER for each vdef which
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
   The pointer may be NULL and then we do not track this information.

   At PHI nodes walk_aliased_vdefs forks into one walk for reach
   PHI argument (but only one walk continues on merge points), the
   return value is true if any of the walks was successful.

   The function returns the number of statements walked or -1 if
   LIMIT stmts were walked and the walk was aborted at this point.
   If LIMIT is zero the walk is not aborted.  */

/* Worker for walk_aliased_vdefs.  CNT is the number of statements
   walked so far; the updated count (or -1 on abort) is returned.  */

static int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
		      bool (*walker)(ao_ref *, tree, void *), void *data,
		      bitmap *visited, unsigned int cnt,
		      bool *function_entry_reached, unsigned limit)
{
  do
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

      /* Stop when another fork of the walk already handled this vdef.
	 *VISITED is only allocated once a PHI forks the walk.  */
      if (*visited
	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
	return cnt;

      if (gimple_nop_p (def_stmt))
	{
	  if (function_entry_reached)
	    *function_entry_reached = true;
	  return cnt;
	}
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* Fork the walk, once per PHI argument; propagate -1 aborts.  */
	  unsigned i;
	  if (!*visited)
	    *visited = BITMAP_ALLOC (NULL);
	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
	    {
	      int res = walk_aliased_vdefs_1 (ref,
					      gimple_phi_arg_def (def_stmt, i),
					      walker, data, visited, cnt,
					      function_entry_reached, limit);
	      if (res == -1)
		return -1;
	      cnt = res;
	    }
	  return cnt;
	}

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      /* With LIMIT == 0 this equality never triggers (unbounded walk).  */
      if (cnt == limit)
	return -1;
      if ((!ref
	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
	  && (*walker) (ref, vdef, data))
	return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}
3644 | ||
4d2b9d1e | 3645 | int |
1daead67 | 3646 | walk_aliased_vdefs (ao_ref *ref, tree vdef, |
3647 | bool (*walker)(ao_ref *, tree, void *), void *data, | |
f91737f9 | 3648 | bitmap *visited, |
4d2b9d1e | 3649 | bool *function_entry_reached, unsigned int limit) |
df3f0669 | 3650 | { |
dd277d48 | 3651 | bitmap local_visited = NULL; |
4d2b9d1e | 3652 | int ret; |
dd277d48 | 3653 | |
02c74b9b | 3654 | timevar_push (TV_ALIAS_STMT_WALK); |
3655 | ||
325396bf | 3656 | if (function_entry_reached) |
3657 | *function_entry_reached = false; | |
3658 | ||
dd277d48 | 3659 | ret = walk_aliased_vdefs_1 (ref, vdef, walker, data, |
f91737f9 | 3660 | visited ? visited : &local_visited, 0, |
4d2b9d1e | 3661 | function_entry_reached, limit); |
dd277d48 | 3662 | if (local_visited) |
3663 | BITMAP_FREE (local_visited); | |
3664 | ||
02c74b9b | 3665 | timevar_pop (TV_ALIAS_STMT_WALK); |
3666 | ||
dd277d48 | 3667 | return ret; |
3668 | } | |
3669 |