/* Alias analysis for GNU C
   Copyright (C) 1997-2017 Free Software Foundation, Inc.
   Contributed by John Carr (jfc@mit.edu).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple-ssa.h"
#include "emit-rtl.h"
#include "alias.h"
#include "fold-const.h"
#include "varasm.h"
#include "cselib.h"
#include "langhooks.h"
#include "cfganal.h"
#include "rtl-iter.h"
#include "cgraph.h"

/* The aliasing API provided here solves related but different problems:

   Say there exists (in C)

   struct X {
     struct Y y1;
     struct Z z2;
   } x1, *px1, *px2;

   struct Y y2, *py;
   struct Z z2, *pz;

   py = &x1.y1;
   px2 = &x1;

   Consider the four questions:

   Can a store to x1 interfere with px2->y1?
   Can a store to x1 interfere with px2->z2?
   Can a store to x1 change the value pointed to by py?
   Can a store to x1 change the value pointed to by pz?

   The answer to these questions can be yes, yes, yes, and maybe.

   The first two questions can be answered with a simple examination
   of the type system.  If structure X contains a field of type Y then
   a store through a pointer to an X can overwrite any field that is
   contained (recursively) in an X (unless we know that px1 != px2).

   The last two questions can be solved in the same way as the first
   two questions but this is too conservative.  The observation is
   that in some cases we can know which (if any) fields are addressed
   and if those addresses are used in bad ways.  This analysis may be
   language specific.  In C, arbitrary operations may be applied to
   pointers.  However, there is some indication that this may be too
   conservative for some C++ types.

   The pass ipa-type-escape does this analysis for the types whose
   instances do not escape across the compilation boundary.

   Historically in GCC, these two problems were combined and a single
   data structure was used to represent the solution to these
   problems.  We now have two similar but different data structures.
   The data structure to solve the last two questions is similar to
   the first, but does not contain the fields whose addresses are never
   taken.  For types that do escape the compilation unit, the data
   structures will have identical information.  */

/* The alias sets assigned to MEMs assist the back-end in determining
   which MEMs can alias which other MEMs.  In general, two MEMs in
   different alias sets cannot alias each other, with one important
   exception.  Consider something like:

     struct S { int i; double d; };

   a store to an `S' can alias something of either type `int' or type
   `double'.  (However, a store to an `int' cannot alias a `double'
   and vice versa.)  We indicate this via a tree structure that looks
   like:

	       struct S
		/   \
	       /     \
	     |/_     _\|
	     int    double

   (The arrows are directed and point downwards.)
   In this situation we say the alias set for `struct S' is the
   `superset' and that those for `int' and `double' are `subsets'.

   To see whether two alias sets can point to the same memory, we must
   see if either alias set is a subset of the other.  We need not trace
   past immediate descendants, however, since we propagate all
   grandchildren up one level.

   Alias set zero is implicitly a superset of all other alias sets.
   However, there is no actual entry for alias set zero.  It is an
   error to attempt to explicitly construct a subset of zero.  */
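
/* For illustration only (an assumption, not code from this file): the
   subset tree for `struct S' above would be built through
   record_alias_subset, roughly as

     alias_set_type s = get_alias_set (struct_S_type);
     record_alias_subset (s, get_alias_set (integer_type_node));
     record_alias_subset (s, get_alias_set (double_type_node));

   where struct_S_type stands for the tree node of `struct S'.  In
   practice record_component_aliases below performs this walk over the
   fields automatically.  */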

struct alias_set_hash : int_hash <int, INT_MIN, INT_MIN + 1> {};

struct GTY(()) alias_set_entry {
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  alias_set_type alias_set;

  /* Nonzero if this set would have a child of zero: this effectively makes
     this alias set the same as alias set zero.  */
  bool has_zero_child;
  /* Nonzero if the alias set corresponds to a pointer type itself (i.e. not
     to an aggregate containing a pointer).
     This is used for a special case where we need a universal pointer type
     compatible with all other pointer types.  */
  bool is_pointer;
  /* Nonzero if is_pointer or if one of the children has has_pointer set.  */
  bool has_pointer;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all descendants.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  hash_map<alias_set_hash, int> *children;
};

static int rtx_equal_for_memref_p (const_rtx, const_rtx);
static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
static void record_set (rtx, const_rtx, void *);
static int base_alias_check (rtx, rtx, rtx, rtx, machine_mode,
			     machine_mode);
static rtx find_base_value (rtx);
static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx);
static alias_set_entry *get_alias_set_entry (alias_set_type);
static tree decl_for_component_ref (tree);
static int write_dependence_p (const_rtx,
			       const_rtx, machine_mode, rtx,
			       bool, bool, bool);
static int compare_base_symbol_refs (const_rtx, const_rtx);

static void memory_modified_1 (rtx, const_rtx, void *);

/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned long long num_alias_zero;
  unsigned long long num_same_alias_set;
  unsigned long long num_same_objects;
  unsigned long long num_volatile;
  unsigned long long num_dag;
  unsigned long long num_universal;
  unsigned long long num_disambiguated;
} alias_stats;

/* Set up all info needed to perform alias analysis on memory references.  */

/* Returns the size in bytes of the mode of X.  */
#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))

/* Cap the number of passes we make over the insns propagating alias
   information through set chains.
   ??? 10 is a completely arbitrary choice.  This should be based on the
   maximum loop depth in the CFG, but we do not have this information
   available (even if current_loops _is_ available).  */
#define MAX_ALIAS_LOOP_PASSES 10

/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent three types of base:

     1. incoming arguments.  There is just one ADDRESS to represent all
	arguments, since we do not know at this level whether accesses
	based on different arguments can alias.  The ADDRESS has id 0.

     2. stack_pointer_rtx, frame_pointer_rtx, hard_frame_pointer_rtx
	(if distinct from frame_pointer_rtx) and arg_pointer_rtx.
	Each of these rtxes has a separate ADDRESS associated with it,
	each with a negative id.

	GCC is (and is required to be) precise in which register it
	chooses to access a particular region of stack.  We can therefore
	assume that accesses based on one of these rtxes do not alias
	accesses based on another of these rtxes.

     3. bases that are derived from malloc()ed memory (REG_NOALIAS).
	Each such piece of memory has a separate ADDRESS associated
	with it, each with an id greater than 0.

   Accesses based on one ADDRESS do not alias accesses based on other
   ADDRESSes.  Accesses based on ADDRESSes in groups (2) and (3) do not
   alias globals either; the ADDRESSes have Pmode to indicate this.
   The ADDRESS in group (1) _may_ alias globals; it has VOIDmode to
   indicate this.  */

static GTY(()) vec<rtx, va_gc> *reg_base_value;
static rtx *new_reg_base_value;

/* The single VOIDmode ADDRESS that represents all argument bases.
   It has id 0.  */
static GTY(()) rtx arg_base_value;

/* Used to allocate unique ids to each REG_NOALIAS ADDRESS.  */
static int unique_id;

/* We keep a copy of the old array around to reduce the amount of garbage
   produced.  About 8% of the garbage produced was attributed to this
   array.  */
static GTY((deletable)) vec<rtx, va_gc> *old_reg_base_value;

/* Values of XINT (address, 0) of Pmode ADDRESS rtxes for special
   registers.  */
#define UNIQUE_BASE_VALUE_SP	-1
#define UNIQUE_BASE_VALUE_ARGP	-2
#define UNIQUE_BASE_VALUE_FP	-3
#define UNIQUE_BASE_VALUE_HFP	-4

#define static_reg_base_value \
  (this_target_rtl->x_static_reg_base_value)

#define REG_BASE_VALUE(X)				\
  (REGNO (X) < vec_safe_length (reg_base_value)	\
   ? (*reg_base_value)[REGNO (X)] : 0)

/* Vector indexed by N giving the initial (unchanging) value known for
   pseudo-register N.  This vector is initialized in init_alias_analysis,
   and does not change until end_alias_analysis is called.  */
static GTY(()) vec<rtx, va_gc> *reg_known_value;

/* Vector recording for each reg_known_value whether it is due to a
   REG_EQUIV note.  Future passes (viz., reload) may replace the
   pseudo with the equivalent expression and so we account for the
   dependences that would be introduced if that happens.

   The REG_EQUIV notes created in assign_parms may mention the arg
   pointer, and there are explicit insns in the RTL that modify the
   arg pointer.  Thus we must ensure that such insns don't get
   scheduled across each other because that would invalidate the
   REG_EQUIV notes.  One could argue that the REG_EQUIV notes are
   wrong, but solving the problem in the scheduler will likely give
   better code, so we do it here.  */
static sbitmap reg_known_equiv_p;

/* True when scanning insns from the start of the rtl to the
   NOTE_INSN_FUNCTION_BEG note.  */
static bool copying_arguments;


/* The vector used to store the various alias set entries.  */
static GTY (()) vec<alias_set_entry *, va_gc> *alias_sets;

/* Build a decomposed reference object for querying the alias-oracle
   from the MEM rtx and store it in *REF.
   Returns false if MEM is not suitable for the alias-oracle.  */

static bool
ao_ref_from_mem (ao_ref *ref, const_rtx mem)
{
  tree expr = MEM_EXPR (mem);
  tree base;

  if (!expr)
    return false;

  ao_ref_init (ref, expr);

  /* Get the base of the reference and see if we have to reject or
     adjust it.  */
  base = ao_ref_base (ref);
  if (base == NULL_TREE)
    return false;

  /* The tree oracle doesn't like bases that are neither decls
     nor indirect references of SSA names.  */
  if (!(DECL_P (base)
	|| (TREE_CODE (base) == MEM_REF
	    && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	|| (TREE_CODE (base) == TARGET_MEM_REF
	    && TREE_CODE (TMR_BASE (base)) == SSA_NAME)))
    return false;

  /* If this is a reference based on a partitioned decl replace the
     base with a MEM_REF of the pointer representative we
     created during stack slot partitioning.  */
  if (VAR_P (base)
      && ! is_global_var (base)
      && cfun->gimple_df->decls_to_pointers != NULL)
    {
      tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
      if (namep)
	ref->base = build_simple_mem_ref (*namep);
    }

  ref->ref_alias_set = MEM_ALIAS_SET (mem);

  /* If MEM_OFFSET or MEM_SIZE are unknown what we got from MEM_EXPR
     is conservative, so trust it.  */
  if (!MEM_OFFSET_KNOWN_P (mem)
      || !MEM_SIZE_KNOWN_P (mem))
    return true;

  /* If MEM_OFFSET/MEM_SIZE get us outside of ref->offset/ref->max_size
     drop ref->ref.  */
  if (MEM_OFFSET (mem) < 0
      || (ref->max_size != -1
	  && ((MEM_OFFSET (mem) + MEM_SIZE (mem)) * BITS_PER_UNIT
	      > ref->max_size)))
    ref->ref = NULL_TREE;

  /* Refine size and offset we got from analyzing MEM_EXPR by using
     MEM_SIZE and MEM_OFFSET.  */

  ref->offset += MEM_OFFSET (mem) * BITS_PER_UNIT;
  ref->size = MEM_SIZE (mem) * BITS_PER_UNIT;

  /* The MEM may extend into adjacent fields, so adjust max_size if
     necessary.  */
  if (ref->max_size != -1
      && ref->size > ref->max_size)
    ref->max_size = ref->size;

  /* If MEM_OFFSET and MEM_SIZE get us outside of the base object of
     the MEM_EXPR punt.  This happens for STRICT_ALIGNMENT targets a lot.  */
  if (MEM_EXPR (mem) != get_spill_slot_decl (false)
      && (ref->offset < 0
	  || (DECL_P (ref->base)
	      && (DECL_SIZE (ref->base) == NULL_TREE
		  || TREE_CODE (DECL_SIZE (ref->base)) != INTEGER_CST
		  || wi::ltu_p (wi::to_offset (DECL_SIZE (ref->base)),
				ref->offset + ref->size)))))
    return false;

  return true;
}

/* Query the alias-oracle on whether the two memory rtx X and MEM may
   alias.  If TBAA_P is set also apply TBAA.  Returns true if the
   two rtxen may alias, false otherwise.  */

static bool
rtx_refs_may_alias_p (const_rtx x, const_rtx mem, bool tbaa_p)
{
  ao_ref ref1, ref2;

  if (!ao_ref_from_mem (&ref1, x)
      || !ao_ref_from_mem (&ref2, mem))
    return true;

  return refs_may_alias_p_1 (&ref1, &ref2,
			     tbaa_p
			     && MEM_ALIAS_SET (x) != 0
			     && MEM_ALIAS_SET (mem) != 0);
}

/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
   such an entry, or NULL otherwise.  */

static inline alias_set_entry *
get_alias_set_entry (alias_set_type alias_set)
{
  return (*alias_sets)[alias_set];
}

/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
   the two MEMs cannot alias each other.  */

static inline int
mems_in_disjoint_alias_sets_p (const_rtx mem1, const_rtx mem2)
{
  return (flag_strict_aliasing
	  && ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1),
				      MEM_ALIAS_SET (mem2)));
}

/* Return true if the first alias set is a subset of the second.  */

bool
alias_set_subset_of (alias_set_type set1, alias_set_type set2)
{
  alias_set_entry *ase2;

  /* Disable TBAA oracle with !flag_strict_aliasing.  */
  if (!flag_strict_aliasing)
    return true;

  /* Everything is a subset of the "aliases everything" set.  */
  if (set2 == 0)
    return true;

  /* Check if set1 is a subset of set2.  */
  ase2 = get_alias_set_entry (set2);
  if (ase2 != 0
      && (ase2->has_zero_child
	  || (ase2->children && ase2->children->get (set1))))
    return true;

  /* As a special case we consider the alias set of "void *" to be both a
     subset and a superset of every alias set of a pointer.  This extra
     symmetry does not matter for alias_sets_conflict_p but it makes
     aliasing_component_refs_p return true on the following testcase:

     void *ptr;
     char **ptr2=(char **)&ptr;
     *ptr2 = ...

     Additionally if a set contains the universal pointer, we consider every
     pointer to be a subset of it, but we do not represent this explicitly -
     doing so would require us to update the transitive closure each time we
     introduce a new pointer type.  This makes aliasing_component_refs_p
     return true on the following testcase:

     struct a {void *ptr;}
     char **ptr = (char **)&a.ptr;
     ptr = ...

     This makes void * a truly universal pointer type.  See pointer handling
     in get_alias_set for more details.  */
  if (ase2 && ase2->has_pointer)
    {
      alias_set_entry *ase1 = get_alias_set_entry (set1);

      if (ase1 && ase1->is_pointer)
	{
	  alias_set_type voidptr_set = TYPE_ALIAS_SET (ptr_type_node);
	  /* If one is ptr_type_node and the other is a pointer, then we
	     consider them subsets of each other.  */
	  if (set1 == voidptr_set || set2 == voidptr_set)
	    return true;
	  /* If SET2 contains the universal pointer's alias set, then we
	     consider every (non-universal) pointer to be a subset of SET2.  */
	  if (ase2->children && set1 != voidptr_set
	      && ase2->children->get (voidptr_set))
	    return true;
	}
    }
  return false;
}

/* Return 1 if the two specified alias sets may conflict.  */

int
alias_sets_conflict_p (alias_set_type set1, alias_set_type set2)
{
  alias_set_entry *ase1;
  alias_set_entry *ase2;

  /* The easy case.  */
  if (alias_sets_must_conflict_p (set1, set2))
    return 1;

  /* See if the first alias set is a subset of the second.  */
  ase1 = get_alias_set_entry (set1);
  if (ase1 != 0
      && ase1->children && ase1->children->get (set2))
    {
      ++alias_stats.num_dag;
      return 1;
    }

  /* Now do the same, but with the alias sets reversed.  */
  ase2 = get_alias_set_entry (set2);
  if (ase2 != 0
      && ase2->children && ase2->children->get (set1))
    {
      ++alias_stats.num_dag;
      return 1;
    }

  /* We want void * to be compatible with any other pointer without
     really dropping it to alias set 0.  Doing so would make it
     compatible with all non-pointer types too.

     This is not strictly necessary by the C/C++ language
     standards, but avoids common type punning mistakes.  In
     addition to that, we need the existence of such a universal
     pointer to implement Fortran's C_PTR type (which is defined as
     a type compatible with all C pointers).  */
  if (ase1 && ase2 && ase1->has_pointer && ase2->has_pointer)
    {
      alias_set_type voidptr_set = TYPE_ALIAS_SET (ptr_type_node);

      /* If one of the sets corresponds to the universal pointer,
	 we consider it to conflict with anything that is
	 or contains a pointer.  */
      if (set1 == voidptr_set || set2 == voidptr_set)
	{
	  ++alias_stats.num_universal;
	  return true;
	}
      /* If one of the sets is a (non-universal) pointer and the other
	 contains the universal pointer, we also get a conflict.  */
      if (ase1->is_pointer && set2 != voidptr_set
	  && ase2->children && ase2->children->get (voidptr_set))
	{
	  ++alias_stats.num_universal;
	  return true;
	}
      if (ase2->is_pointer && set1 != voidptr_set
	  && ase1->children && ase1->children->get (voidptr_set))
	{
	  ++alias_stats.num_universal;
	  return true;
	}
    }

  ++alias_stats.num_disambiguated;

  /* The two alias sets are distinct and neither one is the
     child of the other.  Therefore, they cannot conflict.  */
  return 0;
}
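
/* Sketch of a typical client query (illustrative only, not part of the
   original file): combine get_alias_set with the predicate above.

     alias_set_type s1 = get_alias_set (type1);
     alias_set_type s2 = get_alias_set (type2);
     if (!alias_sets_conflict_p (s1, s2))
       ... the two typed accesses cannot overlap ...

   type1 and type2 are assumed to be the tree type nodes of the two
   accesses.  */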

/* Return 1 if the two specified alias sets will always conflict.  */

int
alias_sets_must_conflict_p (alias_set_type set1, alias_set_type set2)
{
  /* Disable TBAA oracle with !flag_strict_aliasing.  */
  if (!flag_strict_aliasing)
    return 1;
  if (set1 == 0 || set2 == 0)
    {
      ++alias_stats.num_alias_zero;
      return 1;
    }
  if (set1 == set2)
    {
      ++alias_stats.num_same_alias_set;
      return 1;
    }

  return 0;
}

/* Return 1 if any MEM object of type T1 will always conflict (using the
   dependency routines in this file) with any MEM object of type T2.
   This is used when allocating temporary storage.  If T1 and/or T2 are
   NULL_TREE, it means we know nothing about the storage.  */

int
objects_must_conflict_p (tree t1, tree t2)
{
  alias_set_type set1, set2;

  /* If neither has a type specified, we don't know if they'll conflict
     because we may be using them to store objects of various types, for
     example the argument and local variables areas of inlined functions.  */
  if (t1 == 0 && t2 == 0)
    return 0;

  /* If they are the same type, they must conflict.  */
  if (t1 == t2)
    {
      ++alias_stats.num_same_objects;
      return 1;
    }
  /* Likewise if both are volatile.  */
  if (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2))
    {
      ++alias_stats.num_volatile;
      return 1;
    }

  set1 = t1 ? get_alias_set (t1) : 0;
  set2 = t2 ? get_alias_set (t2) : 0;

  /* We can't use alias_sets_conflict_p because we must make sure
     that every subtype of t1 will conflict with every subtype of
     t2 for which a pair of subobjects of these respective subtypes
     overlaps on the stack.  */
  return alias_sets_must_conflict_p (set1, set2);
}

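#if 0
/* Sketch (illustrative only; can_share_temp_slot_p is a hypothetical
   helper, not part of this file): how a stack-slot sharing heuristic
   could use objects_must_conflict_p.  A slot used for type T1 may be
   reused for type T2 only when accesses to the two types always
   conflict, so the scheduler keeps them ordered.  */
static bool
can_share_temp_slot_p (tree t1, tree t2)
{
  return objects_must_conflict_p (t1, t2) != 0;
}
#endif
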
/* Return the outermost parent of the component present in the chain of
   component references handled by get_inner_reference in T with the
   following property:
     - the component is non-addressable, or
     - the parent has alias set zero,
   or NULL_TREE if no such parent exists.  In the former cases, the alias
   set of this parent is the alias set that must be used for T itself.  */

tree
component_uses_parent_alias_set_from (const_tree t)
{
  const_tree found = NULL_TREE;

  if (AGGREGATE_TYPE_P (TREE_TYPE (t))
      && TYPE_TYPELESS_STORAGE (TREE_TYPE (t)))
    return const_cast <tree> (t);

  while (handled_component_p (t))
    {
      switch (TREE_CODE (t))
	{
	case COMPONENT_REF:
	  if (DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
	    found = t;
	  /* Permit type-punning when accessing a union, provided the access
	     is directly through the union.  For example, this code does not
	     permit taking the address of a union member and then storing
	     through it.  Even the type-punning allowed here is a GCC
	     extension, albeit a common and useful one; the C standard says
	     that such accesses have implementation-defined behavior.  */
	  else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) == UNION_TYPE)
	    found = t;
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0))))
	    found = t;
	  break;

	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  break;

	case BIT_FIELD_REF:
	case VIEW_CONVERT_EXPR:
	  /* Bitfields and casts are never addressable.  */
	  found = t;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (get_alias_set (TREE_TYPE (TREE_OPERAND (t, 0))) == 0)
	found = t;

      t = TREE_OPERAND (t, 0);
    }

  if (found)
    return TREE_OPERAND (found, 0);

  return NULL_TREE;
}

/* Return whether the pointer-type T effective for aliasing may
   access everything and thus the reference has to be assigned
   alias-set zero.  */

static bool
ref_all_alias_ptr_type_p (const_tree t)
{
  return (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE
	  || TYPE_REF_CAN_ALIAS_ALL (t));
}

/* Return the alias set for the memory pointed to by T, which may be
   either a type or an expression.  Return -1 if there is nothing
   special about dereferencing T.  */

static alias_set_type
get_deref_alias_set_1 (tree t)
{
  /* All we care about is the type.  */
  if (! TYPE_P (t))
    t = TREE_TYPE (t);

  /* If we have an INDIRECT_REF via a void pointer, we don't
     know anything about what that might alias.  Likewise if the
     pointer is marked that way.  */
  if (ref_all_alias_ptr_type_p (t))
    return 0;

  return -1;
}

/* Return the alias set for the memory pointed to by T, which may be
   either a type or an expression.  */

alias_set_type
get_deref_alias_set (tree t)
{
  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  */
  if (!flag_strict_aliasing)
    return 0;

  alias_set_type set = get_deref_alias_set_1 (t);

  /* Fall back to the alias-set of the pointed-to type.  */
  if (set == -1)
    {
      if (! TYPE_P (t))
	t = TREE_TYPE (t);
      set = get_alias_set (TREE_TYPE (t));
    }

  return set;
}

/* Return the pointer-type relevant for TBAA purposes from the
   memory reference tree *T or NULL_TREE in which case *T is
   adjusted to point to the outermost component reference that
   can be used for assigning an alias set.  */

static tree
reference_alias_ptr_type_1 (tree *t)
{
  tree inner;

  /* Get the base object of the reference.  */
  inner = *t;
  while (handled_component_p (inner))
    {
      /* If there is a VIEW_CONVERT_EXPR in the chain we cannot use
	 the type of any component references that wrap it to
	 determine the alias-set.  */
      if (TREE_CODE (inner) == VIEW_CONVERT_EXPR)
	*t = TREE_OPERAND (inner, 0);
      inner = TREE_OPERAND (inner, 0);
    }

  /* Handle pointer dereferences here, they can override the
     alias-set.  */
  if (INDIRECT_REF_P (inner)
      && ref_all_alias_ptr_type_p (TREE_TYPE (TREE_OPERAND (inner, 0))))
    return TREE_TYPE (TREE_OPERAND (inner, 0));
  else if (TREE_CODE (inner) == TARGET_MEM_REF)
    return TREE_TYPE (TMR_OFFSET (inner));
  else if (TREE_CODE (inner) == MEM_REF
	   && ref_all_alias_ptr_type_p (TREE_TYPE (TREE_OPERAND (inner, 1))))
    return TREE_TYPE (TREE_OPERAND (inner, 1));

  /* If the innermost reference is a MEM_REF that has a
     conversion embedded treat it like a VIEW_CONVERT_EXPR above,
     using the memory access type for determining the alias-set.  */
  if (TREE_CODE (inner) == MEM_REF
      && (TYPE_MAIN_VARIANT (TREE_TYPE (inner))
	  != TYPE_MAIN_VARIANT
	       (TREE_TYPE (TREE_TYPE (TREE_OPERAND (inner, 1))))))
    return TREE_TYPE (TREE_OPERAND (inner, 1));

  /* Otherwise, pick up the outermost object that we could have
     a pointer to.  */
  tree tem = component_uses_parent_alias_set_from (*t);
  if (tem)
    *t = tem;

  return NULL_TREE;
}

/* Return the pointer-type relevant for TBAA purposes from the
   gimple memory reference tree T.  This is the type to be used for
   the offset operand of MEM_REF or TARGET_MEM_REF replacements of T
   and guarantees that get_alias_set will return the same alias
   set for T and the replacement.  */

tree
reference_alias_ptr_type (tree t)
{
  /* If the frontend assigns this alias-set zero, preserve that.  */
  if (lang_hooks.get_alias_set (t) == 0)
    return ptr_type_node;

  tree ptype = reference_alias_ptr_type_1 (&t);
  /* If there is a given pointer type for aliasing purposes, return it.  */
  if (ptype != NULL_TREE)
    return ptype;

  /* Otherwise build one from the outermost component reference we
     may use.  */
  if (TREE_CODE (t) == MEM_REF
      || TREE_CODE (t) == TARGET_MEM_REF)
    return TREE_TYPE (TREE_OPERAND (t, 1));
  else
    return build_pointer_type (TYPE_MAIN_VARIANT (TREE_TYPE (t)));
}

/* Return whether the pointer-types T1 and T2 used to determine
   two alias sets of two references will yield the same answer
   from get_deref_alias_set.  */

bool
alias_ptr_types_compatible_p (tree t1, tree t2)
{
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return true;

  if (ref_all_alias_ptr_type_p (t1)
      || ref_all_alias_ptr_type_p (t2))
    return false;

  return (TYPE_MAIN_VARIANT (TREE_TYPE (t1))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (t2)));
}

/* Create an empty alias set entry.  */

alias_set_entry *
init_alias_set_entry (alias_set_type set)
{
  alias_set_entry *ase = ggc_alloc<alias_set_entry> ();
  ase->alias_set = set;
  ase->children = NULL;
  ase->has_zero_child = false;
  ase->is_pointer = false;
  ase->has_pointer = false;
  gcc_checking_assert (!get_alias_set_entry (set));
  (*alias_sets)[set] = ase;
  return ase;
}

/* Return the alias set for T, which may be either a type or an
   expression.  Call language-specific routine for help, if needed.  */

alias_set_type
get_alias_set (tree t)
{
  alias_set_type set;

  /* We cannot give up with -fno-strict-aliasing because we need to build
     proper type representations for possible functions which are built with
     -fstrict-aliasing.  */

  /* Return 0 if this or its type is an error.  */
  if (t == error_mark_node
      || (! TYPE_P (t)
	  && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
    return 0;

  /* We can be passed either an expression or a type.  This and the
     language-specific routine may make mutually-recursive calls to each other
     to figure out what to do.  At each juncture, we see if this is a tree
     that the language may need to handle specially.  First handle things that
     aren't types.  */
  if (! TYPE_P (t))
    {
      /* Give the language a chance to do something with this tree
	 before we look at it.  */
      STRIP_NOPS (t);
      set = lang_hooks.get_alias_set (t);
      if (set != -1)
	return set;

      /* Get the alias pointer-type to use or the outermost object
	 that we could have a pointer to.  */
      tree ptype = reference_alias_ptr_type_1 (&t);
      if (ptype != NULL)
	return get_deref_alias_set (ptype);

      /* If we've already determined the alias set for a decl, just return
	 it.  This is necessary for C++ anonymous unions, whose component
	 variables don't look like union members (boo!).  */
      if (VAR_P (t)
	  && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
	return MEM_ALIAS_SET (DECL_RTL (t));

      /* Now all we care about is the type.  */
      t = TREE_TYPE (t);
    }

  /* Variant qualifiers don't affect the alias set, so get the main
     variant.  */
  t = TYPE_MAIN_VARIANT (t);

  if (AGGREGATE_TYPE_P (t)
      && TYPE_TYPELESS_STORAGE (t))
    return 0;

  /* Always use the canonical type as well.  If this is a type that
     requires structural comparisons to identify compatible types
     use alias set zero.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (t))
    {
      /* Allow the language to specify another alias set for this
	 type.  */
      set = lang_hooks.get_alias_set (t);
      if (set != -1)
	return set;
      /* Handle structural type equality for pointer types, arrays and
	 vectors.  This is easy to do, because the code below ignores
	 canonical types on these anyway.  This is important for LTO, where
	 TYPE_CANONICAL for pointers cannot be meaningfully computed by the
	 frontend.  */
      if (canonical_type_used_p (t))
	{
	  /* In LTO we set canonical types for all types where it makes
	     sense to do so.  Double check we did not miss some type.  */
	  gcc_checking_assert (!in_lto_p || !type_with_alias_set_p (t));
	  return 0;
	}
    }
  else
    {
      t = TYPE_CANONICAL (t);
      gcc_checking_assert (!TYPE_STRUCTURAL_EQUALITY_P (t));
    }

  /* If this is a type with a known alias set, return it.  */
  gcc_checking_assert (t == TYPE_MAIN_VARIANT (t));
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    return TYPE_ALIAS_SET (t);

  /* We don't want to set TYPE_ALIAS_SET for incomplete types.  */
  if (!COMPLETE_TYPE_P (t))
    {
      /* For arrays with unknown size the conservative answer is the
	 alias set of the element type.  */
      if (TREE_CODE (t) == ARRAY_TYPE)
	return get_alias_set (TREE_TYPE (t));

      /* But return zero as a conservative answer for incomplete types.  */
      return 0;
    }

  /* See if the language has special handling for this type.  */
  set = lang_hooks.get_alias_set (t);
  if (set != -1)
    return set;

  /* There are no objects of FUNCTION_TYPE, so there's no point in
     using up an alias set for them.  (There are, of course, pointers
     and references to functions, but that's different.)  */
  else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
    set = 0;

  /* Unless the language specifies otherwise, let vector types alias
     their components.  This avoids some nasty type punning issues in
     normal usage.  And indeed lets vectors be treated more like an
     array slice.  */
  else if (TREE_CODE (t) == VECTOR_TYPE)
    set = get_alias_set (TREE_TYPE (t));

  /* Unless the language specifies otherwise, treat array types the
     same as their components.  This avoids the asymmetry we get
     through recording the components.  Consider accessing a
     character(kind=1) through a reference to a character(kind=1)[1:1].
     Or consider if we want to assign integer(kind=4)[0:D.1387] and
     integer(kind=4)[4] the same alias set or not.
     Just be pragmatic here and make sure the array and its element
     type get the same alias set assigned.  */
  else if (TREE_CODE (t) == ARRAY_TYPE
	   && (!TYPE_NONALIASED_COMPONENT (t)
	       || TYPE_STRUCTURAL_EQUALITY_P (t)))
    set = get_alias_set (TREE_TYPE (t));

  /* From the former common C and C++ langhook implementation:

     Unfortunately, there is no canonical form of a pointer type.
     In particular, if we have `typedef int I', then `int *', and
     `I *' are different types.  So, we have to pick a canonical
     representative.  We do this below.

     Technically, this approach is actually more conservative than
     it needs to be.  In particular, `const int *' and `int *'
     should be in different alias sets, according to the C and C++
     standard, since their types are not the same, and so,
     technically, an `int **' and `const int **' cannot point at
     the same thing.

     But, the standard is wrong.  In particular, this code is
     legal C++:

     int *ip;
     int **ipp = &ip;
     const int* const* cipp = ipp;
     And, it doesn't make sense for that to be legal unless you
     can dereference IPP and CIPP.  So, we ignore cv-qualifiers on
     the pointed-to types.  This issue has been reported to the
     C++ committee.

     For this reason go to the canonical type of the unqualified pointer
     type.  Until GCC 6 this code set all pointer sets to have the alias set
     of ptr_type_node, but that is a bad idea, because it prevents
     disambiguations in between pointers.  For Firefox this accounts for
     about 20% of all disambiguations in the program.  */
  else if (POINTER_TYPE_P (t) && t != ptr_type_node)
    {
      tree p;
      auto_vec <bool, 8> reference;

      /* Unnest all pointers and references.
	 We also want to make a pointer to an array/vector equivalent to a
	 pointer to its element (see the reasoning above).  Skip all those
	 types, too.  */
      for (p = t; POINTER_TYPE_P (p)
	   || (TREE_CODE (p) == ARRAY_TYPE
	       && (!TYPE_NONALIASED_COMPONENT (p)
		   || !COMPLETE_TYPE_P (p)
		   || TYPE_STRUCTURAL_EQUALITY_P (p)))
	   || TREE_CODE (p) == VECTOR_TYPE;
	   p = TREE_TYPE (p))
	{
	  /* Ada supports recursive pointers.  Instead of doing a recursion
	     check just give up once the preallocated space of 8 elements
	     is up.  In this case just punt to the void * alias set.  */
	  if (reference.length () == 8)
	    {
	      p = ptr_type_node;
	      break;
	    }
	  if (TREE_CODE (p) == REFERENCE_TYPE)
	    /* In LTO we want languages that use references to be compatible
	       with languages that use pointers.  */
	    reference.safe_push (true && !in_lto_p);
	  if (TREE_CODE (p) == POINTER_TYPE)
	    reference.safe_push (false);
	}
      p = TYPE_MAIN_VARIANT (p);

      /* Make void * compatible with char * and also void **.
	 Programs are commonly violating TBAA by this.

	 We also make void * conflict with every pointer
	 (see record_component_aliases) and thus it is safe to use it for
	 pointers to types with TYPE_STRUCTURAL_EQUALITY_P.  */
      if (TREE_CODE (p) == VOID_TYPE || TYPE_STRUCTURAL_EQUALITY_P (p))
	set = get_alias_set (ptr_type_node);
      else
	{
	  /* Rebuild the pointer type starting from canonical types using
	     unqualified pointers and references only.  This way all such
	     pointers will have the same alias set and will conflict with
	     each other.

	     Most of the time we already have pointers or references of a
	     given type.  If not we build a new one just to be sure that if
	     someone later (probably only the middle-end can, as we should
	     assign all alias classes only after finishing the translation
	     unit) builds the pointer type, the canonical type will match.  */
	  p = TYPE_CANONICAL (p);
	  while (!reference.is_empty ())
	    {
	      if (reference.pop ())
		p = build_reference_type (p);
	      else
		p = build_pointer_type (p);
	      gcc_checking_assert (p == TYPE_MAIN_VARIANT (p));
	      /* build_pointer_type should always return the canonical type.
		 For LTO TYPE_CANONICAL may be NULL, because we do not
		 compute them.  Be sure that frontends do not glob canonical
		 types of pointers in unexpected ways and that
		 p == TYPE_CANONICAL (p) in all other cases.  */
	      gcc_checking_assert (!TYPE_CANONICAL (p)
				   || p == TYPE_CANONICAL (p));
	    }

	  /* Assign the alias set to both p and t.
	     We cannot call get_alias_set (p) here as that would trigger
	     infinite recursion when p == t.  In other cases it would just
	     trigger unnecessary legwork of rebuilding the pointer again.  */
	  gcc_checking_assert (p == TYPE_MAIN_VARIANT (p));
	  if (TYPE_ALIAS_SET_KNOWN_P (p))
	    set = TYPE_ALIAS_SET (p);
	  else
	    {
	      set = new_alias_set ();
	      TYPE_ALIAS_SET (p) = set;
	    }
	}
    }
  /* The alias set of ptr_type_node is special and serves as a universal
     pointer which is TBAA compatible with every other pointer type.  Be
     sure we have the alias set built even for LTO which otherwise keeps
     all TYPE_CANONICAL of pointer types NULL.  */
  else if (t == ptr_type_node)
    set = new_alias_set ();

  /* Otherwise make a new alias set for this type.  */
  else
    {
      /* Each canonical type gets its own alias set, so canonical types
	 shouldn't form a tree.  It doesn't really matter for types
	 we handle specially above, so only check it where it possibly
	 would result in a bogus alias set.  */
      gcc_checking_assert (TYPE_CANONICAL (t) == t);

      set = new_alias_set ();
    }

  TYPE_ALIAS_SET (t) = set;

  /* If this is an aggregate type or a complex type, we must record any
     component aliasing information.  */
  if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    record_component_aliases (t);

  /* We treat pointer types specially in alias_set_subset_of.  */
  if (POINTER_TYPE_P (t) && set)
    {
      alias_set_entry *ase = get_alias_set_entry (set);
      if (!ase)
	ase = init_alias_set_entry (set);
      ase->is_pointer = true;
      ase->has_pointer = true;
    }

  return set;
}

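#if 0
/* Illustrative check (an assumption, not part of the original file): after
   the pointer canonicalization in get_alias_set, different spellings of a
   pointer type share one alias set.  Here `int **' and `const int **'
   compare equal because cv-qualifiers on the pointed-to type are
   ignored.  */
static void
example_pointer_alias_sets (void)
{
  tree ipp = build_pointer_type (build_pointer_type (integer_type_node));
  tree cipp
    = build_pointer_type
	(build_pointer_type
	   (build_qualified_type (integer_type_node, TYPE_QUAL_CONST)));
  /* Both queries return the same alias set number.  */
  gcc_checking_assert (get_alias_set (ipp) == get_alias_set (cipp));
}
#endif
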
/* Return a brand-new alias set.  */

alias_set_type
new_alias_set (void)
{
  if (alias_sets == 0)
    vec_safe_push (alias_sets, (alias_set_entry *) NULL);
  vec_safe_push (alias_sets, (alias_set_entry *) NULL);
  return alias_sets->length () - 1;
}

/* Indicate that things in SUBSET can alias things in SUPERSET, but that
   not everything that aliases SUPERSET also aliases SUBSET.  For example,
   in C, a store to an `int' can alias a load of a structure containing an
   `int', and vice versa.  But it can't alias a load of a 'double' member
   of the same structure.  Here, the structure would be the SUPERSET and
   `int' the SUBSET.  This relationship is also described in the comment at
   the beginning of this file.

   This function should be called only once per SUPERSET/SUBSET pair.

   It is illegal for SUPERSET to be zero; everything is implicitly a
   subset of alias set zero.  */

void
record_alias_subset (alias_set_type superset, alias_set_type subset)
{
  alias_set_entry *superset_entry;
  alias_set_entry *subset_entry;

  /* It is possible in complex type situations for both sets to be the same,
     in which case we can ignore this operation.  */
  if (superset == subset)
    return;

  gcc_assert (superset);

  superset_entry = get_alias_set_entry (superset);
  if (superset_entry == 0)
    {
      /* Create an entry for the SUPERSET, so that we have a place to
	 attach the SUBSET.  */
      superset_entry = init_alias_set_entry (superset);
    }

  if (subset == 0)
    superset_entry->has_zero_child = 1;
  else
    {
      subset_entry = get_alias_set_entry (subset);
      if (!superset_entry->children)
	superset_entry->children
	  = hash_map<alias_set_hash, int>::create_ggc (64);
      /* If there is an entry for the subset, enter all of its children
	 (if they are not already present) as children of the SUPERSET.  */
      if (subset_entry)
	{
	  if (subset_entry->has_zero_child)
	    superset_entry->has_zero_child = true;
	  if (subset_entry->has_pointer)
	    superset_entry->has_pointer = true;

	  if (subset_entry->children)
	    {
	      hash_map<alias_set_hash, int>::iterator iter
		= subset_entry->children->begin ();
	      for (; iter != subset_entry->children->end (); ++iter)
		superset_entry->children->put ((*iter).first, (*iter).second);
	    }
	}

      /* Enter the SUBSET itself as a child of the SUPERSET.  */
      superset_entry->children->put (subset, 0);
    }
}

/* Record that component types of TYPE, if any, are part of that type for
   aliasing purposes.  For record types, we only record component types
   for fields that are not marked non-addressable.  For array types, we
   only record the component type if it is not marked non-aliased.  */

void
record_component_aliases (tree type)
{
  alias_set_type superset = get_alias_set (type);
  tree field;

  if (superset == 0)
    return;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (field = TYPE_FIELDS (type); field != 0; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_NONADDRESSABLE_P (field))
	  {
	    /* LTO type merging does not make any difference between
	       component pointer types.  We may have

	       struct foo {int *a;};

	       as TYPE_CANONICAL of

	       struct bar {float *a;};

	       Because accesses to int * and float * do not alias, we would
	       get false negatives when accessing the same memory location
	       by float ** and bar *.  We thus record the canonical type as:

	       struct {void *a;};

	       void * is special cased and works as a universal pointer type.
	       Accesses to it conflict with accesses to any other pointer
	       type.  */
	    tree t = TREE_TYPE (field);
	    if (in_lto_p)
	      {
		/* VECTOR_TYPE and ARRAY_TYPE share the alias set with their
		   element type and that type has to be normalized to void *,
		   too, in the case it is a pointer.  */
		while (!canonical_type_used_p (t) && !POINTER_TYPE_P (t))
		  {
		    gcc_checking_assert (TYPE_STRUCTURAL_EQUALITY_P (t));
		    t = TREE_TYPE (t);
		  }
		if (POINTER_TYPE_P (t))
		  t = ptr_type_node;
		else if (flag_checking)
		  gcc_checking_assert (get_alias_set (t)
				       == get_alias_set (TREE_TYPE (field)));
	      }

	    record_alias_subset (superset, get_alias_set (t));
	  }
      break;

    case COMPLEX_TYPE:
      record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    /* VECTOR_TYPE and ARRAY_TYPE share the alias set with their
       element type.  */

    default:
      break;
    }
}

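/* For illustration only: for  struct T { struct S s; float f; }  the walk
   above effectively performs

     record_alias_subset (get_alias_set (T), get_alias_set (S));
     record_alias_subset (get_alias_set (T), get_alias_set (float));

   and, because record_alias_subset copies a subset's children into the
   superset, T's children also include `int' and `double' from S.  */
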
/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  */

static GTY(()) alias_set_type varargs_set = -1;

alias_set_type
get_varargs_alias_set (void)
{
#if 1
  /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
     varargs alias set to an INDIRECT_REF (FIXME!), so we can't
     consistently use the varargs alias set for loads from the varargs
     area.  So don't use it anywhere.  */
  return 0;
#else
  if (varargs_set == -1)
    varargs_set = new_alias_set ();

  return varargs_set;
#endif
}

/* Likewise, but used for the fixed portions of the frame, e.g., register
   save areas.  */

static GTY(()) alias_set_type frame_set = -1;

alias_set_type
get_frame_alias_set (void)
{
  if (frame_set == -1)
    frame_set = new_alias_set ();

  return frame_set;
}

/* Create a new, unique base with id ID.  */

static rtx
unique_base_value (HOST_WIDE_INT id)
{
  return gen_rtx_ADDRESS (Pmode, id);
}

/* Return true if accesses based on any other base value cannot alias
   those based on X.  */

static bool
unique_base_value_p (rtx x)
{
  return GET_CODE (x) == ADDRESS && GET_MODE (x) == Pmode;
}

/* Return true if X is known to be a base value.  */

static bool
known_base_value_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case LABEL_REF:
    case SYMBOL_REF:
      return true;

    case ADDRESS:
      /* Arguments may or may not be bases; we don't know for sure.  */
      return GET_MODE (x) != VOIDmode;

    default:
      return false;
    }
}

/* Inside SRC, the source of a SET, find a base address.  */

static rtx
find_base_value (rtx src)
{
  unsigned int regno;
  scalar_int_mode int_mode;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  src = FIND_BASE_TERM (src);
#endif

  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return src;

    case REG:
      regno = REGNO (src);
      /* At the start of a function, argument registers have known base
	 values which may be lost later.  Returning an ADDRESS
	 expression here allows optimization based on argument values
	 even when the argument registers are used for other purposes.  */
      if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
	return new_reg_base_value[regno];

      /* If a pseudo has a known base value, return it.  Do not do this
	 for non-fixed hard regs since it can result in a circular
	 dependency chain for registers which have values at function entry.

	 The test above is not sufficient because the scheduler may move
	 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN.  */
      if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
	  && regno < vec_safe_length (reg_base_value))
	{
	  /* If we're inside init_alias_analysis, use new_reg_base_value
	     to reduce the number of relaxation iterations.  */
	  if (new_reg_base_value && new_reg_base_value[regno]
	      && DF_REG_DEF_COUNT (regno) == 1)
	    return new_reg_base_value[regno];

	  if ((*reg_base_value)[regno])
	    return (*reg_base_value)[regno];
	}

      return 0;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
	 copying-arguments block; it is too hard to track changes
	 otherwise.  */
      if (copying_arguments
	  && (XEXP (src, 0) == arg_pointer_rtx
	      || (GET_CODE (XEXP (src, 0)) == PLUS
		  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
	return arg_base_value;
      return 0;

    case CONST:
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
	break;

      /* fall through */

    case PLUS:
    case MINUS:
      {
	rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

	/* If either operand is a REG that is a known pointer, then it
	   is the base.  */
	if (REG_P (src_0) && REG_POINTER (src_0))
	  return find_base_value (src_0);
	if (REG_P (src_1) && REG_POINTER (src_1))
	  return find_base_value (src_1);

	/* If either operand is a REG, then see if we already have
	   a known value for it.  */
	if (REG_P (src_0))
	  {
	    temp = find_base_value (src_0);
	    if (temp != 0)
	      src_0 = temp;
	  }

	if (REG_P (src_1))
	  {
	    temp = find_base_value (src_1);
	    if (temp != 0)
	      src_1 = temp;
	  }

	/* If either base is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (src_0 != 0 && known_base_value_p (src_0))
	  return src_0;

	if (src_1 != 0 && known_base_value_p (src_1))
	  return src_1;

	/* Guess which operand is the base address:
	   If either operand is a symbol, then it is the base.  If
	   either operand is a CONST_INT, then the other is the base.  */
	if (CONST_INT_P (src_1) || CONSTANT_P (src_0))
	  return find_base_value (src_0);
	else if (CONST_INT_P (src_0) || CONSTANT_P (src_1))
	  return find_base_value (src_1);

	return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* If the second operand is constant set the base
	 address to the first operand.  */
      if (CONST_INT_P (XEXP (src, 1)) && INTVAL (XEXP (src, 1)) != 0)
	return find_base_value (XEXP (src, 0));
      return 0;

    case TRUNCATE:
      /* As we do not know which address space the pointer is referring to,
	 we can handle this only if the target does not support different
	 pointer or address modes depending on the address space.  */
      if (!target_default_pointer_address_modes_p ())
	break;
      if (!is_a <scalar_int_mode> (GET_MODE (src), &int_mode)
	  || GET_MODE_PRECISION (int_mode) < GET_MODE_PRECISION (Pmode))
	break;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_value (XEXP (src, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* used for NT/Alpha pointers */
      /* As we do not know which address space the pointer is referring to,
	 we can handle this only if the target does not support different
	 pointer or address modes depending on the address space.  */
      if (!target_default_pointer_address_modes_p ())
	break;

      {
	rtx temp = find_base_value (XEXP (src, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    default:
      break;
    }

  return 0;
}

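/* Example (illustrative only, not from the original sources): given the
   SET source

     (plus (reg 100) (const_int 8))

   find_base_value looks up reg 100; if it has a recorded base value such
   as (symbol_ref "x"), that base is returned, since a CONST_INT operand
   can never be the base.  */
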
9fc37b2b
RS
1516/* Called from init_alias_analysis indirectly through note_stores,
1517 or directly if DEST is a register with a REG_NOALIAS note attached.
1518 SET is null in the latter case. */
9ae8ffe7 1519
d4b60170 1520/* While scanning insns to find base values, reg_seen[N] is nonzero if
9ae8ffe7 1521 register N has been set in this function. */
d630245f 1522static sbitmap reg_seen;
9ae8ffe7 1523
2a2c8203 1524static void
7bc980e1 1525record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
9ae8ffe7 1526{
b3694847 1527 unsigned regno;
9ae8ffe7 1528 rtx src;
c28b4e40 1529 int n;
9ae8ffe7 1530
f8cfc6aa 1531 if (!REG_P (dest))
9ae8ffe7
JL
1532 return;
1533
fb6754f0 1534 regno = REGNO (dest);
9ae8ffe7 1535
9771b263 1536 gcc_checking_assert (regno < reg_base_value->length ());
ac606739 1537
dc8afb70 1538 n = REG_NREGS (dest);
c28b4e40
JW
1539 if (n != 1)
1540 {
1541 while (--n >= 0)
1542 {
d7c028c0 1543 bitmap_set_bit (reg_seen, regno + n);
c28b4e40
JW
1544 new_reg_base_value[regno + n] = 0;
1545 }
1546 return;
1547 }
1548
9ae8ffe7
JL
1549 if (set)
1550 {
1551 /* A CLOBBER wipes out any old value but does not prevent a previously
1552 unset register from acquiring a base address (i.e. reg_seen is not
1553 set). */
1554 if (GET_CODE (set) == CLOBBER)
1555 {
ec907dd8 1556 new_reg_base_value[regno] = 0;
9ae8ffe7
JL
1557 return;
1558 }
1559 src = SET_SRC (set);
1560 }
1561 else
1562 {
9fc37b2b 1563 /* There's a REG_NOALIAS note against DEST. */
d7c028c0 1564 if (bitmap_bit_p (reg_seen, regno))
9ae8ffe7 1565 {
ec907dd8 1566 new_reg_base_value[regno] = 0;
9ae8ffe7
JL
1567 return;
1568 }
d7c028c0 1569 bitmap_set_bit (reg_seen, regno);
9fc37b2b 1570 new_reg_base_value[regno] = unique_base_value (unique_id++);
9ae8ffe7
JL
1571 return;
1572 }
1573
5da6f168
RS
1574 /* If this is not the first set of REGNO, see whether the new value
1575 is related to the old one. There are two cases of interest:
1576
1577 (1) The register might be assigned an entirely new value
1578 that has the same base term as the original set.
1579
1580 (2) The set might be a simple self-modification that
1581 cannot change REGNO's base value.
1582
1583 If neither case holds, reject the original base value as invalid.
1584 Note that the following situation is not detected:
1585
c22cacf3 1586 extern int x, y; int *p = &x; p += (&y-&x);
5da6f168 1587
9ae8ffe7
JL
1588 ANSI C does not allow computing the difference of addresses
1589 of distinct top level objects. */
5da6f168
RS
1590 if (new_reg_base_value[regno] != 0
1591 && find_base_value (src) != new_reg_base_value[regno])
9ae8ffe7
JL
1592 switch (GET_CODE (src))
1593 {
2a2c8203 1594 case LO_SUM:
9ae8ffe7
JL
1595 case MINUS:
1596 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
ec907dd8 1597 new_reg_base_value[regno] = 0;
9ae8ffe7 1598 break;
61f0131c
R
1599 case PLUS:
1600 /* If the value we add in the PLUS is also a valid base value,
1601 this might be the actual base value, and the original value
1602 an index. */
1603 {
1604 rtx other = NULL_RTX;
1605
1606 if (XEXP (src, 0) == dest)
1607 other = XEXP (src, 1);
1608 else if (XEXP (src, 1) == dest)
1609 other = XEXP (src, 0);
1610
1611 if (! other || find_base_value (other))
1612 new_reg_base_value[regno] = 0;
1613 break;
1614 }
9ae8ffe7 1615 case AND:
481683e1 1616 if (XEXP (src, 0) != dest || !CONST_INT_P (XEXP (src, 1)))
ec907dd8 1617 new_reg_base_value[regno] = 0;
9ae8ffe7 1618 break;
9ae8ffe7 1619 default:
ec907dd8 1620 new_reg_base_value[regno] = 0;
9ae8ffe7
JL
1621 break;
1622 }
1623 /* If this is the first set of a register, record the value. */
1624 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
d7c028c0 1625 && ! bitmap_bit_p (reg_seen, regno) && new_reg_base_value[regno] == 0)
ec907dd8 1626 new_reg_base_value[regno] = find_base_value (src);
9ae8ffe7 1627
d7c028c0 1628 bitmap_set_bit (reg_seen, regno);
9ae8ffe7
JL
1629}
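To make the two cases above concrete, a hypothetical insn sequence (a sketch, not taken from a real dump):

/* Illustration:

     (set (reg 100) (symbol_ref ("a")))              <- base of reg 100 is "a"
     (set (reg 100) (plus (reg 100) (const_int 4)))  <- case (2): base kept

   A later (set (reg 100) (symbol_ref ("b"))) has a different base
   term, falls into the default case of the switch above, and resets
   new_reg_base_value[100] to 0.  */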
1630
8fd0a474
AM
1631/* Return REG_BASE_VALUE for REGNO. The selective scheduler uses this to avoid
1632 using hard registers with non-null REG_BASE_VALUE for renaming. */
1633rtx
1634get_reg_base_value (unsigned int regno)
1635{
9771b263 1636 return (*reg_base_value)[regno];
8fd0a474
AM
1637}
1638
bb1acb3e
RH
1639/* If a value is known for REGNO, return it. */
1640
c22cacf3 1641rtx
bb1acb3e
RH
1642get_reg_known_value (unsigned int regno)
1643{
1644 if (regno >= FIRST_PSEUDO_REGISTER)
1645 {
1646 regno -= FIRST_PSEUDO_REGISTER;
9771b263
DN
1647 if (regno < vec_safe_length (reg_known_value))
1648 return (*reg_known_value)[regno];
bb1acb3e
RH
1649 }
1650 return NULL;
43fe47ca
JW
1651}
1652
bb1acb3e
RH
1653/* Set it. */
1654
1655static void
1656set_reg_known_value (unsigned int regno, rtx val)
1657{
1658 if (regno >= FIRST_PSEUDO_REGISTER)
1659 {
1660 regno -= FIRST_PSEUDO_REGISTER;
9771b263
DN
1661 if (regno < vec_safe_length (reg_known_value))
1662 (*reg_known_value)[regno] = val;
bb1acb3e
RH
1663 }
1664}
1665
1666/* Similarly for reg_known_equiv_p. */
1667
1668bool
1669get_reg_known_equiv_p (unsigned int regno)
1670{
1671 if (regno >= FIRST_PSEUDO_REGISTER)
1672 {
1673 regno -= FIRST_PSEUDO_REGISTER;
9771b263 1674 if (regno < vec_safe_length (reg_known_value))
d7c028c0 1675 return bitmap_bit_p (reg_known_equiv_p, regno);
bb1acb3e
RH
1676 }
1677 return false;
1678}
1679
1680static void
1681set_reg_known_equiv_p (unsigned int regno, bool val)
1682{
1683 if (regno >= FIRST_PSEUDO_REGISTER)
1684 {
1685 regno -= FIRST_PSEUDO_REGISTER;
9771b263 1686 if (regno < vec_safe_length (reg_known_value))
9ff3c7ca
SB
1687 {
1688 if (val)
d7c028c0 1689 bitmap_set_bit (reg_known_equiv_p, regno);
9ff3c7ca 1690 else
d7c028c0 1691 bitmap_clear_bit (reg_known_equiv_p, regno);
9ff3c7ca 1692 }
bb1acb3e
RH
1693 }
1694}
1695
1696
db048faf
MM
1697/* Returns a canonical version of X, from the point of view of alias
1698 analysis. (For example, if X is a MEM whose address is a register,
1699 and the register has a known value (say a SYMBOL_REF), then a MEM
1700 whose address is the SYMBOL_REF is returned.) */
1701
1702rtx
4682ae04 1703canon_rtx (rtx x)
9ae8ffe7
JL
1704{
1705 /* Recursively look for equivalences. */
f8cfc6aa 1706 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
bb1acb3e
RH
1707 {
1708 rtx t = get_reg_known_value (REGNO (x));
1709 if (t == x)
1710 return x;
1711 if (t)
1712 return canon_rtx (t);
1713 }
1714
1715 if (GET_CODE (x) == PLUS)
9ae8ffe7
JL
1716 {
1717 rtx x0 = canon_rtx (XEXP (x, 0));
1718 rtx x1 = canon_rtx (XEXP (x, 1));
1719
1720 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
231314e3 1721 return simplify_gen_binary (PLUS, GET_MODE (x), x0, x1);
9ae8ffe7 1722 }
d4b60170 1723
9ae8ffe7
JL
1724 /* This gives us much better alias analysis when called from
1725 the loop optimizer. Note we want to leave the original
1726 MEM alone, but need to return the canonicalized MEM with
1727 all the flags with their original values. */
3c0cb5de 1728 else if (MEM_P (x))
f1ec5147 1729 x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));
d4b60170 1730
9ae8ffe7
JL
1731 return x;
1732}
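A usage sketch (R and its known value are hypothetical; gen_rtx_PLUS and GEN_INT are the usual RTL constructors):

/* Illustration: if get_reg_known_value (REGNO (R)) is
   (symbol_ref ("x")), then

     canon_rtx (gen_rtx_PLUS (Pmode, R, GEN_INT (4)))

   folds R to the SYMBOL_REF and returns the symbol plus a constant
   offset, which later lets memrefs_conflict_p reason about offsets
   from a single symbol.  */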
1733
1734/* Return 1 if X and Y are identical-looking rtx's.
45183e03 1735 Expect that X and Y have already been canonicalized.
9ae8ffe7
JL
1736
1737 We use the data in reg_known_value above to see if two registers with
1738 different numbers are, in fact, equivalent. */
1739
1740static int
ed7a4b4b 1741rtx_equal_for_memref_p (const_rtx x, const_rtx y)
9ae8ffe7 1742{
b3694847
SS
1743 int i;
1744 int j;
1745 enum rtx_code code;
1746 const char *fmt;
9ae8ffe7
JL
1747
1748 if (x == 0 && y == 0)
1749 return 1;
1750 if (x == 0 || y == 0)
1751 return 0;
d4b60170 1752
9ae8ffe7
JL
1753 if (x == y)
1754 return 1;
1755
1756 code = GET_CODE (x);
1757 /* Rtx's of different codes cannot be equal. */
1758 if (code != GET_CODE (y))
1759 return 0;
1760
1761 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
1762 (REG:SI x) and (REG:HI x) are NOT equivalent. */
1763
1764 if (GET_MODE (x) != GET_MODE (y))
1765 return 0;
1766
db048faf
MM
1767 /* Some RTL can be compared without a recursive examination. */
1768 switch (code)
1769 {
1770 case REG:
1771 return REGNO (x) == REGNO (y);
1772
1773 case LABEL_REF:
04a121a7 1774 return label_ref_label (x) == label_ref_label (y);
ca7fd9cd 1775
db048faf 1776 case SYMBOL_REF:
73e48cb3 1777 return compare_base_symbol_refs (x, y) == 1;
db048faf 1778
af6236c1
AO
1779 case ENTRY_VALUE:
1780 /* This is magic, don't go through canonicalization et al. */
1781 return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
1782
40e02b4a 1783 case VALUE:
d8116890 1784 CASE_CONST_UNIQUE:
807e902e 1785 /* Pointer equality guarantees equality for these nodes. */
db048faf
MM
1786 return 0;
1787
db048faf
MM
1788 default:
1789 break;
1790 }
9ae8ffe7 1791
45183e03
JH
1792 /* canon_rtx knows how to handle plus. No need to canonicalize. */
1793 if (code == PLUS)
9ae8ffe7
JL
1794 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
1795 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
1796 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
1797 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
45183e03
JH
1798 /* For commutative operations, the RTXs match if the operands match in any
1799 order. Also handle the simple binary and unary cases without a loop. */
ec8e098d 1800 if (COMMUTATIVE_P (x))
45183e03
JH
1801 {
1802 rtx xop0 = canon_rtx (XEXP (x, 0));
1803 rtx yop0 = canon_rtx (XEXP (y, 0));
1804 rtx yop1 = canon_rtx (XEXP (y, 1));
1805
1806 return ((rtx_equal_for_memref_p (xop0, yop0)
1807 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
1808 || (rtx_equal_for_memref_p (xop0, yop1)
1809 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
1810 }
ec8e098d 1811 else if (NON_COMMUTATIVE_P (x))
45183e03
JH
1812 {
1813 return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
4682ae04 1814 canon_rtx (XEXP (y, 0)))
45183e03
JH
1815 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
1816 canon_rtx (XEXP (y, 1))));
1817 }
ec8e098d 1818 else if (UNARY_P (x))
45183e03 1819 return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
4682ae04 1820 canon_rtx (XEXP (y, 0)));
9ae8ffe7
JL
1821
1822 /* Compare the elements. If any pair of corresponding elements
de12be17
JC
1823 fail to match, return 0 for the whole thing.
1824
1825 Limit cases to types which actually appear in addresses. */
9ae8ffe7
JL
1826
1827 fmt = GET_RTX_FORMAT (code);
1828 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1829 {
1830 switch (fmt[i])
1831 {
9ae8ffe7
JL
1832 case 'i':
1833 if (XINT (x, i) != XINT (y, i))
1834 return 0;
1835 break;
1836
9ae8ffe7
JL
1837 case 'E':
1838 /* Two vectors must have the same length. */
1839 if (XVECLEN (x, i) != XVECLEN (y, i))
1840 return 0;
1841
1842 /* And the corresponding elements must match. */
1843 for (j = 0; j < XVECLEN (x, i); j++)
45183e03
JH
1844 if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
1845 canon_rtx (XVECEXP (y, i, j))) == 0)
9ae8ffe7
JL
1846 return 0;
1847 break;
1848
1849 case 'e':
45183e03
JH
1850 if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
1851 canon_rtx (XEXP (y, i))) == 0)
9ae8ffe7
JL
1852 return 0;
1853 break;
1854
3237ac18
AH
1855 /* This can happen for asm operands. */
1856 case 's':
1857 if (strcmp (XSTR (x, i), XSTR (y, i)))
1858 return 0;
1859 break;
1860
aee21ba9
JL
1861 /* This can happen for an asm which clobbers memory. */
1862 case '0':
1863 break;
1864
9ae8ffe7
JL
1865 /* It is believed that rtx's at this level will never
1866 contain anything but integers and other rtx's,
1867 except within LABEL_REFs and SYMBOL_REFs. */
1868 default:
298e6adc 1869 gcc_unreachable ();
9ae8ffe7
JL
1870 }
1871 }
1872 return 1;
1873}
1874
9e412ca3 1875static rtx
4682ae04 1876find_base_term (rtx x)
9ae8ffe7 1877{
eab5c70a 1878 cselib_val *val;
6f2ffb4b
AO
1879 struct elt_loc_list *l, *f;
1880 rtx ret;
6645d841 1881 scalar_int_mode int_mode;
eab5c70a 1882
b949ea8b
JW
1883#if defined (FIND_BASE_TERM)
1884 /* Try machine-dependent ways to find the base term. */
1885 x = FIND_BASE_TERM (x);
1886#endif
1887
9ae8ffe7
JL
1888 switch (GET_CODE (x))
1889 {
1890 case REG:
1891 return REG_BASE_VALUE (x);
1892
d288e53d 1893 case TRUNCATE:
5932a4d4 1894 /* As we do not know which address space the pointer is referring to, we can
d4ebfa65
BE
1895 handle this only if the target does not support different pointer or
1896 address modes depending on the address space. */
1897 if (!target_default_pointer_address_modes_p ())
1898 return 0;
6645d841
RS
1899 if (!is_a <scalar_int_mode> (GET_MODE (x), &int_mode)
1900 || GET_MODE_PRECISION (int_mode) < GET_MODE_PRECISION (Pmode))
ca7fd9cd 1901 return 0;
d288e53d 1902 /* Fall through. */
9ae8ffe7 1903 case HIGH:
6d849a2a
JL
1904 case PRE_INC:
1905 case PRE_DEC:
1906 case POST_INC:
1907 case POST_DEC:
d288e53d
DE
1908 case PRE_MODIFY:
1909 case POST_MODIFY:
6d849a2a
JL
1910 return find_base_term (XEXP (x, 0));
1911
1abade85
RK
1912 case ZERO_EXTEND:
1913 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
5932a4d4 1914 /* As we do not know which address space the pointer is referring to, we can
d4ebfa65
BE
1915 handle this only if the target does not support different pointer or
1916 address modes depending on the address space. */
1917 if (!target_default_pointer_address_modes_p ())
1918 return 0;
1919
1abade85
RK
1920 {
1921 rtx temp = find_base_term (XEXP (x, 0));
1922
5ae6cd0d 1923 if (temp != 0 && CONSTANT_P (temp))
1abade85 1924 temp = convert_memory_address (Pmode, temp);
1abade85
RK
1925
1926 return temp;
1927 }
1928
eab5c70a
BS
1929 case VALUE:
1930 val = CSELIB_VAL_PTR (x);
6f2ffb4b
AO
1931 ret = NULL_RTX;
1932
40e02b4a 1933 if (!val)
6f2ffb4b
AO
1934 return ret;
1935
0fe03ac3
JJ
1936 if (cselib_sp_based_value_p (val))
1937 return static_reg_base_value[STACK_POINTER_REGNUM];
1938
6f2ffb4b
AO
1939 f = val->locs;
1940 /* Temporarily reset val->locs to avoid infinite recursion. */
1941 val->locs = NULL;
1942
1943 for (l = f; l; l = l->next)
1944 if (GET_CODE (l->loc) == VALUE
1945 && CSELIB_VAL_PTR (l->loc)->locs
1946 && !CSELIB_VAL_PTR (l->loc)->locs->next
1947 && CSELIB_VAL_PTR (l->loc)->locs->loc == x)
1948 continue;
1949 else if ((ret = find_base_term (l->loc)) != 0)
1950 break;
1951
1952 val->locs = f;
1953 return ret;
eab5c70a 1954
023f059b
JJ
1955 case LO_SUM:
1956 /* The standard form is (lo_sum reg sym) so look only at the
1957 second operand. */
1958 return find_base_term (XEXP (x, 1));
1959
9ae8ffe7
JL
1960 case CONST:
1961 x = XEXP (x, 0);
1962 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
1963 return 0;
938d968e 1964 /* Fall through. */
9ae8ffe7
JL
1965 case PLUS:
1966 case MINUS:
1967 {
3c567fae
JL
1968 rtx tmp1 = XEXP (x, 0);
1969 rtx tmp2 = XEXP (x, 1);
1970
f5143c46 1971 /* This is a little bit tricky since we have to determine which of
3c567fae
JL
1972 the two operands represents the real base address. Otherwise this
1973 routine may return the index register instead of the base register.
1974
1975 That may cause us to believe no aliasing was possible, when in
1976 fact aliasing is possible.
1977
1978 We use a few simple tests to guess the base register. Additional
1979 tests can certainly be added. For example, if one of the operands
1980 is a shift or multiply, then it must be the index register and the
1981 other operand is the base register. */
ca7fd9cd 1982
b949ea8b
JW
1983 if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
1984 return find_base_term (tmp2);
1985
31b0a960 1986 /* If either operand is known to be a pointer, then prefer it
3c567fae 1987 to determine the base term. */
3502dc9c 1988 if (REG_P (tmp1) && REG_POINTER (tmp1))
31b0a960
RB
1989 ;
1990 else if (REG_P (tmp2) && REG_POINTER (tmp2))
a7c75343
JJ
1991 std::swap (tmp1, tmp2);
1992 /* If the second argument is a constant which has a base term, prefer it
1993 over the variable tmp1. See PR64025. */
1994 else if (CONSTANT_P (tmp2) && !CONST_INT_P (tmp2))
1995 std::swap (tmp1, tmp2);
3c567fae 1996
31b0a960
RB
1997 /* Go ahead and find the base term for both operands. If either base
1998 term is from a pointer or is a named object or a special address
3c567fae
JL
1999 (like an argument or stack reference), then use it for the
2000 base term. */
481be1c4
RB
2001 rtx base = find_base_term (tmp1);
2002 if (base != NULL_RTX
31b0a960 2003 && ((REG_P (tmp1) && REG_POINTER (tmp1))
481be1c4
RB
2004 || known_base_value_p (base)))
2005 return base;
2006 base = find_base_term (tmp2);
2007 if (base != NULL_RTX
31b0a960 2008 && ((REG_P (tmp2) && REG_POINTER (tmp2))
481be1c4
RB
2009 || known_base_value_p (base)))
2010 return base;
3c567fae
JL
2011
2012 /* We could not determine which of the two operands was the
2013 base register and which was the index. So we can determine
2014 nothing from the base alias check. */
2015 return 0;
9ae8ffe7
JL
2016 }
2017
2018 case AND:
481683e1 2019 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
d288e53d 2020 return find_base_term (XEXP (x, 0));
9ae8ffe7
JL
2021 return 0;
2022
2023 case SYMBOL_REF:
2024 case LABEL_REF:
2025 return x;
2026
2027 default:
2028 return 0;
2029 }
2030}
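A concrete instance of the operand swapping above (hypothetical RTL):

/* Illustration: for

     (plus:DI (mult:DI (reg:DI 101) (const_int 4)) (reg/f:DI 102))

   reg 102 carries REG_POINTER, so tmp1/tmp2 are swapped and the base
   term is taken from reg 102 rather than from the scaled index.  */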
2031
9e412ca3
RS
2032/* Return true if accesses to address X may alias accesses based
2033 on the stack pointer. */
2034
2035bool
2036may_be_sp_based_p (rtx x)
2037{
2038 rtx base = find_base_term (x);
2039 return !base || base == static_reg_base_value[STACK_POINTER_REGNUM];
2040}
2041
54363f8a
JH
2042/* BASE1 and BASE2 are decls. Return 1 if they refer to the same object, 0
2043 if they refer to different objects and -1 if we cannot decide. */
2044
2045int
2046compare_base_decls (tree base1, tree base2)
2047{
2048 int ret;
2049 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
2050 if (base1 == base2)
2051 return 1;
2052
bed3fd46 2053 /* If we have two register decls with a register specification, we
816c4ba2 2054 cannot decide unless their assembler names are the same. */
bed3fd46
RB
2055 if (DECL_REGISTER (base1)
2056 && DECL_REGISTER (base2)
816c4ba2
NS
2057 && HAS_DECL_ASSEMBLER_NAME_P (base1)
2058 && HAS_DECL_ASSEMBLER_NAME_P (base2)
bed3fd46
RB
2059 && DECL_ASSEMBLER_NAME_SET_P (base1)
2060 && DECL_ASSEMBLER_NAME_SET_P (base2))
2061 {
816c4ba2 2062 if (DECL_ASSEMBLER_NAME_RAW (base1) == DECL_ASSEMBLER_NAME_RAW (base2))
bed3fd46
RB
2063 return 1;
2064 return -1;
2065 }
2066
54363f8a
JH
2067 /* Declarations of non-automatic variables may have aliases. All other
2068 decls are unique. */
7ec4f343
NS
2069 if (!decl_in_symtab_p (base1)
2070 || !decl_in_symtab_p (base2))
54363f8a 2071 return 0;
7ec4f343 2072
929710d9
NS
2073 /* Don't cause symbols to be inserted by the act of checking. */
2074 symtab_node *node1 = symtab_node::get (base1);
2075 if (!node1)
2076 return 0;
2077 symtab_node *node2 = symtab_node::get (base2);
2078 if (!node2)
2079 return 0;
2080
2081 ret = node1->equal_address_to (node2, true);
54363f8a
JH
2082 return ret;
2083}
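A sketch of how a caller can consume the tri-state result (bases_may_overlap_p is a hypothetical helper, not part of alias.c):

static bool
bases_may_overlap_p (tree base1, tree base2)
{
  /* 1 means same object, 0 means provably distinct, -1 means we
     cannot decide; only a hard 0 licenses disambiguation.  */
  return compare_base_decls (base1, base2) != 0;
}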
2084
73e48cb3
JH
2085/* Same as compare_base_decls but for SYMBOL_REF. */
2086
2087static int
2088compare_base_symbol_refs (const_rtx x_base, const_rtx y_base)
2089{
2090 tree x_decl = SYMBOL_REF_DECL (x_base);
2091 tree y_decl = SYMBOL_REF_DECL (y_base);
2092 bool binds_def = true;
2093
2094 if (XSTR (x_base, 0) == XSTR (y_base, 0))
2095 return 1;
2096 if (x_decl && y_decl)
2097 return compare_base_decls (x_decl, y_decl);
2098 if (x_decl || y_decl)
2099 {
2100 if (!x_decl)
2101 {
2102 std::swap (x_decl, y_decl);
2103 std::swap (x_base, y_base);
2104 }
2105 /* We handle specially only section anchors and assume that other
2106 labels may overlap with user variables in an arbitrary way. */
2107 if (!SYMBOL_REF_HAS_BLOCK_INFO_P (y_base))
2108 return -1;
2109 /* Anchors contain static VAR_DECLs and CONST_DECLs. We are safe
2110 to ignore CONST_DECLs because they are readonly. */
8813a647 2111 if (!VAR_P (x_decl)
73e48cb3
JH
2112 || (!TREE_STATIC (x_decl) && !TREE_PUBLIC (x_decl)))
2113 return 0;
2114
2115 symtab_node *x_node = symtab_node::get_create (x_decl)
2116 ->ultimate_alias_target ();
2117 /* An external variable cannot be in a section anchor. */
2118 if (!x_node->definition)
2119 return 0;
2120 x_base = XEXP (DECL_RTL (x_node->decl), 0);
2121 /* If not in anchor, we can disambiguate. */
2122 if (!SYMBOL_REF_HAS_BLOCK_INFO_P (x_base))
2123 return 0;
2124
2125 /* We have an alias of an anchored variable. If it can be interposed,
2126 we must assume it may or may not alias its anchor. */
2127 binds_def = decl_binds_to_current_def_p (x_decl);
2128 }
2129 /* If we have a variable in a section anchor, we can compare by offset. */
2130 if (SYMBOL_REF_HAS_BLOCK_INFO_P (x_base)
2131 && SYMBOL_REF_HAS_BLOCK_INFO_P (y_base))
2132 {
2133 if (SYMBOL_REF_BLOCK (x_base) != SYMBOL_REF_BLOCK (y_base))
2134 return 0;
2135 if (SYMBOL_REF_BLOCK_OFFSET (x_base) == SYMBOL_REF_BLOCK_OFFSET (y_base))
2136 return binds_def ? 1 : -1;
2137 if (SYMBOL_REF_ANCHOR_P (x_base) != SYMBOL_REF_ANCHOR_P (y_base))
2138 return -1;
2139 return 0;
2140 }
2141 /* In general we assume that memory locations pointed to by different labels
2142 may overlap in undefined ways. */
2143 return -1;
2144}
2145
9ae8ffe7
JL
2146/* Return 0 if the addresses X and Y are known to point to different
2147 objects, 1 if they might be pointers to the same object. */
2148
2149static int
31b0a960 2150base_alias_check (rtx x, rtx x_base, rtx y, rtx y_base,
ef4bddc2 2151 machine_mode x_mode, machine_mode y_mode)
9ae8ffe7 2152{
1c72c7f6
JC
2153 /* If the address itself has no known base see if a known equivalent
2154 value has one. If either address still has no known base, nothing
2155 is known about aliasing. */
2156 if (x_base == 0)
2157 {
2158 rtx x_c;
d4b60170 2159
1c72c7f6
JC
2160 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
2161 return 1;
d4b60170 2162
1c72c7f6
JC
2163 x_base = find_base_term (x_c);
2164 if (x_base == 0)
2165 return 1;
2166 }
9ae8ffe7 2167
1c72c7f6
JC
2168 if (y_base == 0)
2169 {
2170 rtx y_c;
2171 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
2172 return 1;
d4b60170 2173
1c72c7f6
JC
2174 y_base = find_base_term (y_c);
2175 if (y_base == 0)
2176 return 1;
2177 }
2178
2179 /* If the base addresses are equal nothing is known about aliasing. */
2180 if (rtx_equal_p (x_base, y_base))
9ae8ffe7
JL
2181 return 1;
2182
435da628
UB
2183 /* The base addresses are different expressions. If they are not accessed
2184 via AND, there is no conflict. We can bring knowledge of object
2185 alignment into play here. For example, on alpha, "char a, b;" can
5764ee3c 2186 alias one another, though "char a; long b;" cannot. AND addresses may
435da628
UB
2187 implicitly alias surrounding objects; i.e. unaligned access in DImode
2188 via AND address can alias all surrounding object types except those
2189 with alignment 8 or higher. */
2190 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
2191 return 1;
2192 if (GET_CODE (x) == AND
481683e1 2193 && (!CONST_INT_P (XEXP (x, 1))
435da628
UB
2194 || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
2195 return 1;
2196 if (GET_CODE (y) == AND
481683e1 2197 && (!CONST_INT_P (XEXP (y, 1))
435da628
UB
2198 || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
2199 return 1;
2200
73e48cb3 2201 /* Differing symbols not accessed via AND never alias. */
3a28db46 2202 if (GET_CODE (x_base) == SYMBOL_REF && GET_CODE (y_base) == SYMBOL_REF)
73e48cb3 2203 return compare_base_symbol_refs (x_base, y_base) != 0;
3a28db46 2204
9ae8ffe7 2205 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
435da628 2206 return 0;
9ae8ffe7 2207
9fc37b2b 2208 if (unique_base_value_p (x_base) || unique_base_value_p (y_base))
9ae8ffe7
JL
2209 return 0;
2210
0d3c82d6 2211 return 1;
9ae8ffe7
JL
2212}
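Putting numbers on the AND rule above (a hypothetical Alpha-style aligned access):

/* Illustration: for x = (and:DI (reg:DI 100) (const_int -8)) and
   y_mode = QImode, GET_MODE_UNIT_SIZE (QImode) == 1 is less than
   -INTVAL (XEXP (x, 1)) == 8, so the check above returns 1: the
   8-byte-aligned DImode access may touch a neighbouring byte
   object.  */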
2213
a5628378 2214/* Return TRUE if EXPR refers to a VALUE whose uid is greater than
c779924e 2215 (or equal to) that of V. */
a5628378
AO
2216
2217static bool
403837b4 2218refs_newer_value_p (const_rtx expr, rtx v)
a5628378
AO
2219{
2220 int minuid = CSELIB_VAL_PTR (v)->uid;
403837b4
RS
2221 subrtx_iterator::array_type array;
2222 FOR_EACH_SUBRTX (iter, array, expr, NONCONST)
c779924e 2223 if (GET_CODE (*iter) == VALUE && CSELIB_VAL_PTR (*iter)->uid >= minuid)
403837b4
RS
2224 return true;
2225 return false;
a5628378
AO
2226}
2227
eab5c70a 2228/* Convert the address X into something we can use. This is done by returning
569efc34
JJ
2229 it unchanged unless it is a VALUE or VALUE +/- constant; for VALUE
2230 we call cselib to get a more useful rtx. */
3bdf5ad1 2231
a13d4ebf 2232rtx
4682ae04 2233get_addr (rtx x)
eab5c70a
BS
2234{
2235 cselib_val *v;
2236 struct elt_loc_list *l;
2237
2238 if (GET_CODE (x) != VALUE)
569efc34
JJ
2239 {
2240 if ((GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
2241 && GET_CODE (XEXP (x, 0)) == VALUE
2242 && CONST_SCALAR_INT_P (XEXP (x, 1)))
2243 {
2244 rtx op0 = get_addr (XEXP (x, 0));
2245 if (op0 != XEXP (x, 0))
2246 {
2247 if (GET_CODE (x) == PLUS
2248 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2249 return plus_constant (GET_MODE (x), op0, INTVAL (XEXP (x, 1)));
2250 return simplify_gen_binary (GET_CODE (x), GET_MODE (x),
2251 op0, XEXP (x, 1));
2252 }
2253 }
2254 return x;
2255 }
eab5c70a 2256 v = CSELIB_VAL_PTR (x);
40e02b4a
JH
2257 if (v)
2258 {
0f68ba3e
AO
2259 bool have_equivs = cselib_have_permanent_equivalences ();
2260 if (have_equivs)
2261 v = canonical_cselib_val (v);
40e02b4a
JH
2262 for (l = v->locs; l; l = l->next)
2263 if (CONSTANT_P (l->loc))
2264 return l->loc;
2265 for (l = v->locs; l; l = l->next)
0f68ba3e
AO
2266 if (!REG_P (l->loc) && !MEM_P (l->loc)
2267 /* Avoid infinite recursion when potentially dealing with
2268 var-tracking artificial equivalences, by skipping the
2269 equivalences themselves, and not choosing expressions
2270 that refer to newer VALUEs. */
2271 && (!have_equivs
2272 || (GET_CODE (l->loc) != VALUE
2273 && !refs_newer_value_p (l->loc, x))))
a5628378 2274 return l->loc;
0f68ba3e
AO
2275 if (have_equivs)
2276 {
2277 for (l = v->locs; l; l = l->next)
2278 if (REG_P (l->loc)
2279 || (GET_CODE (l->loc) != VALUE
2280 && !refs_newer_value_p (l->loc, x)))
2281 return l->loc;
2282 /* Return the canonical value. */
2283 return v->val_rtx;
2284 }
2285 if (v->locs)
2286 return v->locs->loc;
40e02b4a 2287 }
eab5c70a
BS
2288 return x;
2289}
2290
39cec1ac
MH
2291/* Return the address of the (N_REFS + 1)th memory reference to ADDR
2292 where SIZE is the size in bytes of the memory reference. If ADDR
2293 is not modified by the memory reference then ADDR is returned. */
2294
04e2b4d3 2295static rtx
4682ae04 2296addr_side_effect_eval (rtx addr, int size, int n_refs)
39cec1ac
MH
2297{
2298 int offset = 0;
ca7fd9cd 2299
39cec1ac
MH
2300 switch (GET_CODE (addr))
2301 {
2302 case PRE_INC:
2303 offset = (n_refs + 1) * size;
2304 break;
2305 case PRE_DEC:
2306 offset = -(n_refs + 1) * size;
2307 break;
2308 case POST_INC:
2309 offset = n_refs * size;
2310 break;
2311 case POST_DEC:
2312 offset = -n_refs * size;
2313 break;
2314
2315 default:
2316 return addr;
2317 }
ca7fd9cd 2318
39cec1ac 2319 if (offset)
45183e03 2320 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
4789c0ce 2321 gen_int_mode (offset, GET_MODE (addr)));
39cec1ac
MH
2322 else
2323 addr = XEXP (addr, 0);
45183e03 2324 addr = canon_rtx (addr);
39cec1ac
MH
2325
2326 return addr;
2327}
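Because the switch above is plain offset arithmetic, it can be spot-checked outside the compiler; a standalone sketch (the enum and names are stand-ins for the rtx codes, not GCC identifiers):

#include <assert.h>

enum side_effect { PRE_INC_, PRE_DEC_, POST_INC_, POST_DEC_ };

/* Mirror of the offset computation in addr_side_effect_eval.  */
static int
side_effect_offset (enum side_effect code, int size, int n_refs)
{
  switch (code)
    {
    case PRE_INC_:  return (n_refs + 1) * size;
    case PRE_DEC_:  return -(n_refs + 1) * size;
    case POST_INC_: return n_refs * size;
    case POST_DEC_: return -n_refs * size;
    }
  return 0;
}

int
main (void)
{
  assert (side_effect_offset (PRE_INC_, 4, 0) == 4);   /* First access.  */
  assert (side_effect_offset (POST_INC_, 4, 0) == 0);  /* Address used as-is.  */
  assert (side_effect_offset (POST_DEC_, 4, 1) == -4); /* Second access.  */
  return 0;
}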
2328
3aa03517
AO
2329/* Return TRUE if an object X sized at XSIZE bytes and another object
2330 Y sized at YSIZE bytes, starting C bytes after X, may overlap. If
2331 any of the sizes is zero, assume an overlap, otherwise use the
2332 absolute value of the sizes as the actual sizes. */
2333
2334static inline bool
2335offset_overlap_p (HOST_WIDE_INT c, int xsize, int ysize)
2336{
2337 return (xsize == 0 || ysize == 0
2338 || (c >= 0
2339 ? (abs (xsize) > c)
2340 : (abs (ysize) > -c)));
2341}
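offset_overlap_p is likewise self-contained; a standalone sketch of its contract (offset_overlap_p_ is a local copy, with long standing in for HOST_WIDE_INT):

#include <assert.h>
#include <stdbool.h>
#include <stdlib.h>

/* Local copy of the predicate above: zero sizes mean "unknown, assume
   overlap"; C is the offset of Y relative to X.  */
static bool
offset_overlap_p_ (long c, int xsize, int ysize)
{
  return (xsize == 0 || ysize == 0
          || (c >= 0 ? (abs (xsize) > c) : (abs (ysize) > -c)));
}

int
main (void)
{
  assert (offset_overlap_p_ (4, 8, 8));   /* [0,8) vs [4,12): overlap.  */
  assert (!offset_overlap_p_ (8, 8, 8));  /* [0,8) vs [8,16): disjoint.  */
  assert (offset_overlap_p_ (0, 0, 8));   /* Unknown X size: assume overlap.  */
  assert (!offset_overlap_p_ (-4, 8, 4)); /* [-4,0) ends where X begins.  */
  return 0;
}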
2342
f47e08d9
RG
2343/* Return one if X and Y (memory addresses) reference the
2344 same location in memory or if the references overlap.
2345 Return zero if they do not overlap; return minus one if
2346 they might still reference the same location.
2347
2348 C is an offset accumulator. When
9ae8ffe7
JL
2349 C is nonzero, we are testing aliases between X and Y + C.
2350 XSIZE is the size in bytes of the X reference,
2351 similarly YSIZE is the size in bytes for Y.
45183e03 2352 Expect that canon_rtx has already been called for X and Y.
9ae8ffe7
JL
2353
2354 If XSIZE or YSIZE is zero, we do not know the amount of memory being
2355 referenced (the reference was BLKmode), so make the most pessimistic
2356 assumptions.
2357
c02f035f
RH
2358 If XSIZE or YSIZE is negative, we may access memory outside the object
2359 being referenced as a side effect. This can happen when using AND to
2360 align memory references, as is done on the Alpha.
2361
9ae8ffe7 2362 It would be nice to notice that varying addresses cannot conflict with fp if no
f47e08d9
RG
2363 local variables had their addresses taken, but that's too hard now.
2364
2365 ??? Contrary to the tree alias oracle this does not return
2366 one for X + non-constant and Y + non-constant when X and Y are equal.
2367 If that is fixed the TBAA hack for union type-punning can be removed. */
9ae8ffe7 2368
9ae8ffe7 2369static int
4682ae04 2370memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
9ae8ffe7 2371{
eab5c70a 2372 if (GET_CODE (x) == VALUE)
5312b066
JJ
2373 {
2374 if (REG_P (y))
2375 {
24f8d71e
JJ
2376 struct elt_loc_list *l = NULL;
2377 if (CSELIB_VAL_PTR (x))
a5628378
AO
2378 for (l = canonical_cselib_val (CSELIB_VAL_PTR (x))->locs;
2379 l; l = l->next)
24f8d71e
JJ
2380 if (REG_P (l->loc) && rtx_equal_for_memref_p (l->loc, y))
2381 break;
5312b066
JJ
2382 if (l)
2383 x = y;
2384 else
2385 x = get_addr (x);
2386 }
2387 /* Don't call get_addr if y is the same VALUE. */
2388 else if (x != y)
2389 x = get_addr (x);
2390 }
eab5c70a 2391 if (GET_CODE (y) == VALUE)
5312b066
JJ
2392 {
2393 if (REG_P (x))
2394 {
24f8d71e
JJ
2395 struct elt_loc_list *l = NULL;
2396 if (CSELIB_VAL_PTR (y))
a5628378
AO
2397 for (l = canonical_cselib_val (CSELIB_VAL_PTR (y))->locs;
2398 l; l = l->next)
24f8d71e
JJ
2399 if (REG_P (l->loc) && rtx_equal_for_memref_p (l->loc, x))
2400 break;
5312b066
JJ
2401 if (l)
2402 y = x;
2403 else
2404 y = get_addr (y);
2405 }
2406 /* Don't call get_addr if x is the same VALUE. */
2407 else if (y != x)
2408 y = get_addr (y);
2409 }
9ae8ffe7
JL
2410 if (GET_CODE (x) == HIGH)
2411 x = XEXP (x, 0);
2412 else if (GET_CODE (x) == LO_SUM)
2413 x = XEXP (x, 1);
2414 else
3aa03517 2415 x = addr_side_effect_eval (x, abs (xsize), 0);
9ae8ffe7
JL
2416 if (GET_CODE (y) == HIGH)
2417 y = XEXP (y, 0);
2418 else if (GET_CODE (y) == LO_SUM)
2419 y = XEXP (y, 1);
2420 else
3aa03517 2421 y = addr_side_effect_eval (y, abs (ysize), 0);
9ae8ffe7 2422
54363f8a
JH
2423 if (GET_CODE (x) == SYMBOL_REF && GET_CODE (y) == SYMBOL_REF)
2424 {
73e48cb3 2425 int cmp = compare_base_symbol_refs (x,y);
54363f8a
JH
2426
2427 /* If both decls are the same, decide by offsets. */
2428 if (cmp == 1)
2429 return offset_overlap_p (c, xsize, ysize);
3a28db46
UB
2430 /* Assume a potential overlap for symbolic addresses that went
2431 through alignment adjustments (i.e., that have negative
2432 sizes), because we can't know how far they are from each
2433 other. */
2434 if (xsize < 0 || ysize < 0)
2435 return -1;
54363f8a
JH
2436 /* If decls are different or we know by offsets that there is no overlap,
2437 we win. */
2438 if (!cmp || !offset_overlap_p (c, xsize, ysize))
2439 return 0;
2441 /* Decls may or may not be different and the offsets may overlap. */
2441 return -1;
2442 }
2443 else if (rtx_equal_for_memref_p (x, y))
9ae8ffe7 2444 {
3aa03517 2445 return offset_overlap_p (c, xsize, ysize);
9ae8ffe7
JL
2446 }
2447
6e73e666
JC
2448 /* This code used to check for conflicts involving stack references and
2449 globals but the base address alias code now handles these cases. */
9ae8ffe7
JL
2450
2451 if (GET_CODE (x) == PLUS)
2452 {
2453 /* The fact that X is canonicalized means that this
2454 PLUS rtx is canonicalized. */
2455 rtx x0 = XEXP (x, 0);
2456 rtx x1 = XEXP (x, 1);
2457
2d88904a
AO
2458 /* However, VALUEs might end up in different positions even in
2459 canonical PLUSes. Comparing their addresses is enough. */
2460 if (x0 == y)
2461 return memrefs_conflict_p (xsize, x1, ysize, const0_rtx, c);
2462 else if (x1 == y)
2463 return memrefs_conflict_p (xsize, x0, ysize, const0_rtx, c);
2464
9ae8ffe7
JL
2465 if (GET_CODE (y) == PLUS)
2466 {
2467 /* The fact that Y is canonicalized means that this
2468 PLUS rtx is canonicalized. */
2469 rtx y0 = XEXP (y, 0);
2470 rtx y1 = XEXP (y, 1);
2471
2d88904a
AO
2472 if (x0 == y1)
2473 return memrefs_conflict_p (xsize, x1, ysize, y0, c);
2474 if (x1 == y0)
2475 return memrefs_conflict_p (xsize, x0, ysize, y1, c);
2476
9ae8ffe7
JL
2477 if (rtx_equal_for_memref_p (x1, y1))
2478 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
2479 if (rtx_equal_for_memref_p (x0, y0))
2480 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
481683e1 2481 if (CONST_INT_P (x1))
63be02db 2482 {
481683e1 2483 if (CONST_INT_P (y1))
63be02db
JM
2484 return memrefs_conflict_p (xsize, x0, ysize, y0,
2485 c - INTVAL (x1) + INTVAL (y1));
2486 else
2487 return memrefs_conflict_p (xsize, x0, ysize, y,
2488 c - INTVAL (x1));
2489 }
481683e1 2490 else if (CONST_INT_P (y1))
9ae8ffe7
JL
2491 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
2492
f47e08d9 2493 return -1;
9ae8ffe7 2494 }
481683e1 2495 else if (CONST_INT_P (x1))
9ae8ffe7
JL
2496 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
2497 }
2498 else if (GET_CODE (y) == PLUS)
2499 {
2500 /* The fact that Y is canonicalized means that this
2501 PLUS rtx is canonicalized. */
2502 rtx y0 = XEXP (y, 0);
2503 rtx y1 = XEXP (y, 1);
2504
2d88904a
AO
2505 if (x == y0)
2506 return memrefs_conflict_p (xsize, const0_rtx, ysize, y1, c);
2507 if (x == y1)
2508 return memrefs_conflict_p (xsize, const0_rtx, ysize, y0, c);
2509
481683e1 2510 if (CONST_INT_P (y1))
9ae8ffe7
JL
2511 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
2512 else
f47e08d9 2513 return -1;
9ae8ffe7
JL
2514 }
2515
2516 if (GET_CODE (x) == GET_CODE (y))
2517 switch (GET_CODE (x))
2518 {
2519 case MULT:
2520 {
2521 /* Handle cases where we expect the second operands to be the
2522 same, and check only whether the first operand would conflict
2523 or not. */
2524 rtx x0, y0;
2525 rtx x1 = canon_rtx (XEXP (x, 1));
2526 rtx y1 = canon_rtx (XEXP (y, 1));
2527 if (! rtx_equal_for_memref_p (x1, y1))
f47e08d9 2528 return -1;
9ae8ffe7
JL
2529 x0 = canon_rtx (XEXP (x, 0));
2530 y0 = canon_rtx (XEXP (y, 0));
2531 if (rtx_equal_for_memref_p (x0, y0))
3aa03517 2532 return offset_overlap_p (c, xsize, ysize);
9ae8ffe7
JL
2533
2534 /* Can't properly adjust our sizes. */
481683e1 2535 if (!CONST_INT_P (x1))
f47e08d9 2536 return -1;
9ae8ffe7
JL
2537 xsize /= INTVAL (x1);
2538 ysize /= INTVAL (x1);
2539 c /= INTVAL (x1);
2540 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
2541 }
1d300e19
KG
2542
2543 default:
2544 break;
9ae8ffe7
JL
2545 }
2546
a9bf4fe2
AO
2547 /* Deal with alignment ANDs by adjusting offset and size so as to
2548 cover the maximum range, without taking any previously known
5147bf6a
AO
2549 alignment into account. Make a size negative after such an
2550 adjustment, so that, if we end up with e.g. two SYMBOL_REFs, we
2551 assume a potential overlap, because they may end up in contiguous
2552 memory locations and the stricter-alignment access may span over
2553 part of both. */
481683e1 2554 if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1)))
56ee9281 2555 {
a9bf4fe2
AO
2556 HOST_WIDE_INT sc = INTVAL (XEXP (x, 1));
2557 unsigned HOST_WIDE_INT uc = sc;
146ec50f 2558 if (sc < 0 && pow2_or_zerop (-uc))
a9bf4fe2 2559 {
5147bf6a
AO
2560 if (xsize > 0)
2561 xsize = -xsize;
3aa03517
AO
2562 if (xsize)
2563 xsize += sc + 1;
fe8fb1c4 2564 c -= sc + 1;
a9bf4fe2
AO
2565 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
2566 ysize, y, c);
2567 }
56ee9281 2568 }
481683e1 2569 if (GET_CODE (y) == AND && CONST_INT_P (XEXP (y, 1)))
c02f035f 2570 {
a9bf4fe2
AO
2571 HOST_WIDE_INT sc = INTVAL (XEXP (y, 1));
2572 unsigned HOST_WIDE_INT uc = sc;
146ec50f 2573 if (sc < 0 && pow2_or_zerop (-uc))
a9bf4fe2 2574 {
5147bf6a
AO
2575 if (ysize > 0)
2576 ysize = -ysize;
3aa03517
AO
2577 if (ysize)
2578 ysize += sc + 1;
fe8fb1c4 2579 c += sc + 1;
a9bf4fe2
AO
2580 return memrefs_conflict_p (xsize, x,
2581 ysize, canon_rtx (XEXP (y, 0)), c);
2582 }
c02f035f 2583 }
9ae8ffe7
JL
2584
2585 if (CONSTANT_P (x))
2586 {
481683e1 2587 if (CONST_INT_P (x) && CONST_INT_P (y))
9ae8ffe7
JL
2588 {
2589 c += (INTVAL (y) - INTVAL (x));
3aa03517 2590 return offset_overlap_p (c, xsize, ysize);
9ae8ffe7
JL
2591 }
2592
2593 if (GET_CODE (x) == CONST)
2594 {
2595 if (GET_CODE (y) == CONST)
2596 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
2597 ysize, canon_rtx (XEXP (y, 0)), c);
2598 else
2599 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
2600 ysize, y, c);
2601 }
2602 if (GET_CODE (y) == CONST)
2603 return memrefs_conflict_p (xsize, x, ysize,
2604 canon_rtx (XEXP (y, 0)), c);
2605
3aa03517
AO
2606 /* Assume a potential overlap for symbolic addresses that went
2607 through alignment adjustments (i.e., that have negative
2608 sizes), because we can't know how far they are from each
2609 other. */
9ae8ffe7 2610 if (CONSTANT_P (y))
3aa03517 2611 return (xsize < 0 || ysize < 0 || offset_overlap_p (c, xsize, ysize));
9ae8ffe7 2612
f47e08d9 2613 return -1;
9ae8ffe7 2614 }
f47e08d9
RG
2615
2616 return -1;
9ae8ffe7
JL
2617}
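A worked instance of the constant case above (hypothetical addresses):

/* Illustration: x = (const_int 16), xsize = 4, y = (const_int 12),
   ysize = 4, c = 0.  The CONST_INT branch computes
   c += 12 - 16 = -4, and offset_overlap_p (-4, 4, 4) is false: the
   4-byte accesses at 12 and 16 are adjacent, not overlapping, so
   memrefs_conflict_p returns 0.  */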
2618
2619/* Functions to compute memory dependencies.
2620
2621 Since we process the insns in execution order, we can build tables
2622 to keep track of what registers are fixed (and not aliased), what registers
2623 are varying in known ways, and what registers are varying in unknown
2624 ways.
2625
2626 If both memory references are volatile, then there must always be a
2627 dependence between the two references, since their order cannot be
2628 changed. A volatile and non-volatile reference can be interchanged
ca7fd9cd 2629 though.
9ae8ffe7 2630
53d9622b
RS
2631 We also must allow AND addresses, because they may generate accesses
2632 outside the object being referenced. This is used to generate aligned
2633 addresses from unaligned addresses, for instance, the alpha
dc1618bc 2634 storeqi_unaligned pattern. */
9ae8ffe7
JL
2635
2636/* Read dependence: X is read after read in MEM takes place. There can
96672a3e
RH
2637 only be a dependence here if both reads are volatile, or if either is
2638 an explicit barrier. */
9ae8ffe7
JL
2639
2640int
4f588890 2641read_dependence (const_rtx mem, const_rtx x)
9ae8ffe7 2642{
96672a3e
RH
2643 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2644 return true;
2645 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2646 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2647 return true;
2648 return false;
9ae8ffe7
JL
2649}
2650
998d7deb
RH
2651/* Look at the bottom of the COMPONENT_REF list for a DECL, and return it. */
2652
2653static tree
4682ae04 2654decl_for_component_ref (tree x)
998d7deb
RH
2655{
2656 do
2657 {
2658 x = TREE_OPERAND (x, 0);
2659 }
2660 while (x && TREE_CODE (x) == COMPONENT_REF);
2661
2662 return x && DECL_P (x) ? x : NULL_TREE;
2663}
2664
527210c4
RS
2665/* Walk up the COMPONENT_REF list in X and adjust *OFFSET to compensate
2666 for the offset of the field reference. *KNOWN_P says whether the
2667 offset is known. */
998d7deb 2668
527210c4
RS
2669static void
2670adjust_offset_for_component_ref (tree x, bool *known_p,
2671 HOST_WIDE_INT *offset)
998d7deb 2672{
527210c4
RS
2673 if (!*known_p)
2674 return;
ca7fd9cd 2675 do
998d7deb 2676 {
527210c4 2677 tree xoffset = component_ref_field_offset (x);
998d7deb 2678 tree field = TREE_OPERAND (x, 1);
807e902e
KZ
2679 if (TREE_CODE (xoffset) != INTEGER_CST)
2680 {
2681 *known_p = false;
2682 return;
2683 }
998d7deb 2684
807e902e
KZ
2685 offset_int woffset
2686 = (wi::to_offset (xoffset)
8de73453
RS
2687 + (wi::to_offset (DECL_FIELD_BIT_OFFSET (field))
2688 >> LOG2_BITS_PER_UNIT));
807e902e 2689 if (!wi::fits_uhwi_p (woffset))
527210c4
RS
2690 {
2691 *known_p = false;
2692 return;
2693 }
807e902e 2694 *offset += woffset.to_uhwi ();
998d7deb
RH
2695
2696 x = TREE_OPERAND (x, 0);
2697 }
2698 while (x && TREE_CODE (x) == COMPONENT_REF);
998d7deb
RH
2699}
2700
95bd1dd7 2701/* Return nonzero if we can determine the exprs corresponding to memrefs
c6ea834c
BM
2702 X and Y and they do not overlap.
2703 If LOOP_INVARIANT is set, skip offset-based disambiguation. */
a4311dfe 2704
2e4e39f6 2705int
c6ea834c 2706nonoverlapping_memrefs_p (const_rtx x, const_rtx y, bool loop_invariant)
a4311dfe 2707{
998d7deb 2708 tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
a4311dfe
RK
2709 rtx rtlx, rtly;
2710 rtx basex, basey;
527210c4
RS
2711 bool moffsetx_known_p, moffsety_known_p;
2712 HOST_WIDE_INT moffsetx = 0, moffsety = 0;
4e1952ab 2713 HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey;
a4311dfe 2714
998d7deb
RH
2715 /* Unless both have exprs, we can't tell anything. */
2716 if (exprx == 0 || expry == 0)
2717 return 0;
2b22e382
RG
2718
2719 /* For spill-slot accesses make sure we have valid offsets. */
2720 if ((exprx == get_spill_slot_decl (false)
527210c4 2721 && ! MEM_OFFSET_KNOWN_P (x))
2b22e382 2722 || (expry == get_spill_slot_decl (false)
527210c4 2723 && ! MEM_OFFSET_KNOWN_P (y)))
2b22e382 2724 return 0;
c22cacf3 2725
998d7deb 2726 /* If the field reference test failed, look at the DECLs involved. */
527210c4
RS
2727 moffsetx_known_p = MEM_OFFSET_KNOWN_P (x);
2728 if (moffsetx_known_p)
2729 moffsetx = MEM_OFFSET (x);
998d7deb
RH
2730 if (TREE_CODE (exprx) == COMPONENT_REF)
2731 {
2e0c984c
RG
2732 tree t = decl_for_component_ref (exprx);
2733 if (! t)
2734 return 0;
527210c4 2735 adjust_offset_for_component_ref (exprx, &moffsetx_known_p, &moffsetx);
2e0c984c 2736 exprx = t;
998d7deb 2737 }
c67a1cf6 2738
527210c4
RS
2739 moffsety_known_p = MEM_OFFSET_KNOWN_P (y);
2740 if (moffsety_known_p)
2741 moffsety = MEM_OFFSET (y);
998d7deb
RH
2742 if (TREE_CODE (expry) == COMPONENT_REF)
2743 {
2e0c984c
RG
2744 tree t = decl_for_component_ref (expry);
2745 if (! t)
2746 return 0;
527210c4 2747 adjust_offset_for_component_ref (expry, &moffsety_known_p, &moffsety);
2e0c984c 2748 expry = t;
998d7deb
RH
2749 }
2750
2751 if (! DECL_P (exprx) || ! DECL_P (expry))
a4311dfe
RK
2752 return 0;
2753
1f9ceff1
AO
2754 /* If we refer to different gimple registers, or one gimple register
2755 and one non-gimple-register, we know they can't overlap. First,
2756 gimple registers don't have their addresses taken. Now, there
2757 could be more than one stack slot for (different versions of) the
2758 same gimple register, but we can presumably tell they don't
2759 overlap based on offsets from stack base addresses elsewhere.
2760 It's important that we don't proceed to DECL_RTL, because gimple
2761 registers may not pass DECL_RTL_SET_P, and make_decl_rtl won't be
2762 able to do anything about them since no SSA information will have
2763 remained to guide it. */
2764 if (is_gimple_reg (exprx) || is_gimple_reg (expry))
2d88904a
AO
2765 return exprx != expry
2766 || (moffsetx_known_p && moffsety_known_p
2767 && MEM_SIZE_KNOWN_P (x) && MEM_SIZE_KNOWN_P (y)
2768 && !offset_overlap_p (moffsety - moffsetx,
2769 MEM_SIZE (x), MEM_SIZE (y)));
1f9ceff1 2770
1307c758
RG
2771 /* With invalid code we can end up storing into the constant pool.
2772 Bail out to avoid ICEing when creating RTL for this.
2773 See gfortran.dg/lto/20091028-2_0.f90. */
2774 if (TREE_CODE (exprx) == CONST_DECL
2775 || TREE_CODE (expry) == CONST_DECL)
2776 return 1;
2777
dca16798
JJ
2778 /* If one decl is known to be a function or label in a function and
2779 the other is some kind of data, they can't overlap. */
2780 if ((TREE_CODE (exprx) == FUNCTION_DECL
2781 || TREE_CODE (exprx) == LABEL_DECL)
2782 != (TREE_CODE (expry) == FUNCTION_DECL
2783 || TREE_CODE (expry) == LABEL_DECL))
2784 return 1;
2785
5f4cebba
JJ
2786 /* If either of the decls doesn't have DECL_RTL set (e.g. marked as
2787 living in multiple places), we can't tell anything. Exception
2788 are FUNCTION_DECLs for which we can create DECL_RTL on demand. */
2789 if ((!DECL_RTL_SET_P (exprx) && TREE_CODE (exprx) != FUNCTION_DECL)
2790 || (!DECL_RTL_SET_P (expry) && TREE_CODE (expry) != FUNCTION_DECL))
2791 return 0;
2792
998d7deb
RH
2793 rtlx = DECL_RTL (exprx);
2794 rtly = DECL_RTL (expry);
a4311dfe 2795
1edcd60b
RK
2796 /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
2797 can't overlap unless they are the same because we never reuse that part
2798 of the stack frame used for locals for spilled pseudos. */
3c0cb5de 2799 if ((!MEM_P (rtlx) || !MEM_P (rtly))
1edcd60b 2800 && ! rtx_equal_p (rtlx, rtly))
a4311dfe
RK
2801 return 1;
2802
5932a4d4 2803 /* If we have MEMs referring to different address spaces (which can
09e881c9
BE
2804 potentially overlap), we cannot easily tell from the addresses
2805 whether the references overlap. */
2806 if (MEM_P (rtlx) && MEM_P (rtly)
2807 && MEM_ADDR_SPACE (rtlx) != MEM_ADDR_SPACE (rtly))
2808 return 0;
2809
a4311dfe
RK
2810 /* Get the base and offsets of both decls. If either is a register, we
2811 know both are and are the same, so use that as the base. The only way
2812 we can avoid overlap is if we can deduce that they are nonoverlapping
2813 pieces of that decl, which is very rare. */
3c0cb5de 2814 basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
481683e1 2815 if (GET_CODE (basex) == PLUS && CONST_INT_P (XEXP (basex, 1)))
a4311dfe
RK
2816 offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);
2817
3c0cb5de 2818 basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
481683e1 2819 if (GET_CODE (basey) == PLUS && CONST_INT_P (XEXP (basey, 1)))
a4311dfe
RK
2820 offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);
2821
d746694a 2822 /* If the bases are different, we know they do not overlap if both
ca7fd9cd 2823 are constants or if one is a constant and the other a pointer into the
d746694a
RK
2824 stack frame. Otherwise a different base means we can't tell if they
2825 overlap or not. */
54363f8a 2826 if (compare_base_decls (exprx, expry) == 0)
ca7fd9cd
KH
2827 return ((CONSTANT_P (basex) && CONSTANT_P (basey))
2828 || (CONSTANT_P (basex) && REG_P (basey)
2829 && REGNO_PTR_FRAME_P (REGNO (basey)))
2830 || (CONSTANT_P (basey) && REG_P (basex)
2831 && REGNO_PTR_FRAME_P (REGNO (basex))));
a4311dfe 2832
c6ea834c
BM
2833 /* Offset-based disambiguation is not appropriate for loop invariants. */
2834 if (loop_invariant)
dca16798 2835 return 0;
c6ea834c 2836
54363f8a
JH
2837 /* Offset based disambiguation is OK even if we do not know that the
2838 declarations are necessarily different
2839 (i.e. compare_base_decls (exprx, expry) == -1) */
2840
3c0cb5de 2841 sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
f5541398 2842 : MEM_SIZE_KNOWN_P (rtlx) ? MEM_SIZE (rtlx)
a4311dfe 2843 : -1);
3c0cb5de 2844 sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
f5541398
RS
2845 : MEM_SIZE_KNOWN_P (rtly) ? MEM_SIZE (rtly)
2846 : -1);
a4311dfe 2847
0af5bc3e
RK
2848 /* If we have an offset for either memref, it can update the values computed
2849 above. */
527210c4
RS
2850 if (moffsetx_known_p)
2851 offsetx += moffsetx, sizex -= moffsetx;
2852 if (moffsety_known_p)
2853 offsety += moffsety, sizey -= moffsety;
a4311dfe 2854
0af5bc3e 2855 /* If a memref has both a size and an offset, we can use the smaller size.
efc981bb 2856 We can't do this if the offset isn't known because we must view this
0af5bc3e 2857 memref as being anywhere inside the DECL's MEM. */
527210c4 2858 if (MEM_SIZE_KNOWN_P (x) && moffsetx_known_p)
f5541398 2859 sizex = MEM_SIZE (x);
527210c4 2860 if (MEM_SIZE_KNOWN_P (y) && moffsety_known_p)
f5541398 2861 sizey = MEM_SIZE (y);
a4311dfe
RK
2862
2863 /* Put the values of the memref with the lower offset in X's values. */
2864 if (offsetx > offsety)
2865 {
4e1952ab
KT
2866 std::swap (offsetx, offsety);
2867 std::swap (sizex, sizey);
a4311dfe
RK
2868 }
2869
2870 /* If we don't know the size of the lower-offset value, we can't tell
2871 if they conflict. Otherwise, we do the test. */
a6f7c915 2872 return sizex >= 0 && offsety >= offsetx + sizex;
a4311dfe
RK
2873}
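A worked instance of the final offset test (hypothetical layout):

/* Illustration: two references into one decl with offsetx = 0,
   sizex = 4, offsety = 8, sizey = 4.  No swap is needed, and
   sizex >= 0 && offsety >= offsetx + sizex (8 >= 4) holds, so the
   function returns 1: provably nonoverlapping pieces.  */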
2874
9362286d
SB
2875/* Helper for true_dependence and canon_true_dependence.
2876 Checks for true dependence: X is read after store in MEM takes place.
9ae8ffe7 2877
9362286d
SB
2878 If MEM_CANONICALIZED is FALSE, then X_ADDR and MEM_ADDR should be
2879 NULL_RTX, and the canonical addresses of MEM and X are both computed
2880 here. If MEM_CANONICALIZED, then MEM must be already canonicalized.
2881
2882 If X_ADDR is non-NULL, it is used in preference of XEXP (x, 0).
2883
2884 Returns 1 if there is a true dependence, 0 otherwise. */
2885
2886static int
ef4bddc2 2887true_dependence_1 (const_rtx mem, machine_mode mem_mode, rtx mem_addr,
53d9622b 2888 const_rtx x, rtx x_addr, bool mem_canonicalized)
9ae8ffe7 2889{
0777fc02 2890 rtx true_mem_addr;
49982682 2891 rtx base;
f47e08d9 2892 int ret;
9ae8ffe7 2893
9362286d
SB
2894 gcc_checking_assert (mem_canonicalized ? (mem_addr != NULL_RTX)
2895 : (mem_addr == NULL_RTX && x_addr == NULL_RTX));
2896
9ae8ffe7
JL
2897 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2898 return 1;
2899
c4484b8f 2900 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
ac3768f6 2901 This is used in epilogue deallocation functions, and in cselib. */
c4484b8f
RH
2902 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2903 return 1;
2904 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2905 return 1;
9cd9e512
RH
2906 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2907 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2908 return 1;
c4484b8f 2909
0777fc02
UB
2910 if (! x_addr)
2911 x_addr = XEXP (x, 0);
2912 x_addr = get_addr (x_addr);
2913
9362286d
SB
2914 if (! mem_addr)
2915 {
2916 mem_addr = XEXP (mem, 0);
2917 if (mem_mode == VOIDmode)
2918 mem_mode = GET_MODE (mem);
2919 }
0777fc02 2920 true_mem_addr = get_addr (mem_addr);
eab5c70a 2921
878f5596
UB
2922 /* Read-only memory is by definition never modified, and therefore can't
2923 conflict with anything. However, don't assume anything when AND
2924 addresses are involved and leave to the code below to determine
2925 dependence. We don't expect to find read-only set on MEM, but
2926 stupid user tricks can produce them, so don't die. */
2927 if (MEM_READONLY_P (x)
2928 && GET_CODE (x_addr) != AND
0777fc02 2929 && GET_CODE (true_mem_addr) != AND)
878f5596
UB
2930 return 0;
2931
2932 /* If we have MEMs referring to different address spaces (which can
2933 potentially overlap), we cannot easily tell from the addresses
2934 whether the references overlap. */
2935 if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
2936 return 1;
2937
55efb413
JW
2938 base = find_base_term (x_addr);
2939 if (base && (GET_CODE (base) == LABEL_REF
2940 || (GET_CODE (base) == SYMBOL_REF
2941 && CONSTANT_POOL_ADDRESS_P (base))))
2942 return 0;
2943
0777fc02
UB
2944 rtx mem_base = find_base_term (true_mem_addr);
2945 if (! base_alias_check (x_addr, base, true_mem_addr, mem_base,
31b0a960 2946 GET_MODE (x), mem_mode))
1c72c7f6
JC
2947 return 0;
2948
eab5c70a 2949 x_addr = canon_rtx (x_addr);
9362286d 2950 if (!mem_canonicalized)
0777fc02 2951 mem_addr = canon_rtx (true_mem_addr);
6e73e666 2952
f47e08d9
RG
2953 if ((ret = memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2954 SIZE_FOR_MODE (x), x_addr, 0)) != -1)
2955 return ret;
2956
a95b3cc7 2957 if (mems_in_disjoint_alias_sets_p (x, mem))
f47e08d9
RG
2958 return 0;
2959
c6ea834c 2960 if (nonoverlapping_memrefs_p (mem, x, false))
0211b6ab 2961 return 0;
175a7536 2962
55b34b5f 2963 return rtx_refs_may_alias_p (x, mem, true);
a13d4ebf
AM
2964}
2965
9362286d
SB
2966/* True dependence: X is read after store in MEM takes place. */
2967
2968int
ef4bddc2 2969true_dependence (const_rtx mem, machine_mode mem_mode, const_rtx x)
9362286d
SB
2970{
2971 return true_dependence_1 (mem, mem_mode, NULL_RTX,
53d9622b 2972 x, NULL_RTX, /*mem_canonicalized=*/false);
9362286d
SB
2973}
2974
a13d4ebf 2975/* Canonical true dependence: X is read after store in MEM takes place.
ca7fd9cd
KH
2976 Variant of true_dependence which assumes MEM has already been
2977 canonicalized (hence we no longer do that here).
9362286d
SB
2978 The mem_addr argument has been added, since true_dependence_1 computed
2979 this value prior to canonicalizing. */
a13d4ebf
AM
2980
2981int
ef4bddc2 2982canon_true_dependence (const_rtx mem, machine_mode mem_mode, rtx mem_addr,
53d9622b 2983 const_rtx x, rtx x_addr)
a13d4ebf 2984{
9362286d 2985 return true_dependence_1 (mem, mem_mode, mem_addr,
53d9622b 2986 x, x_addr, /*mem_canonicalized=*/true);
9ae8ffe7
JL
2987}
2988
da7d8304 2989/* Returns nonzero if a write to X might alias a previous read from
393f9fed 2990 (or, if WRITEP is true, a write to) MEM.
bd280792
JR
2991 If X_CANONICALIZED is true, then X_ADDR is the canonicalized address of X,
2992 and X_MODE the mode for that access.
2993 If MEM_CANONICALIZED is true, MEM is canonicalized. */
9ae8ffe7 2994
2c72b78f 2995static int
bd280792 2996write_dependence_p (const_rtx mem,
ef4bddc2 2997 const_rtx x, machine_mode x_mode, rtx x_addr,
bd280792 2998 bool mem_canonicalized, bool x_canonicalized, bool writep)
9ae8ffe7 2999{
bd280792 3000 rtx mem_addr;
0777fc02 3001 rtx true_mem_addr, true_x_addr;
49982682 3002 rtx base;
f47e08d9 3003 int ret;
6e73e666 3004
bd280792
JR
3005 gcc_checking_assert (x_canonicalized
3006 ? (x_addr != NULL_RTX && x_mode != VOIDmode)
3007 : (x_addr == NULL_RTX && x_mode == VOIDmode));
393f9fed 3008
9ae8ffe7
JL
3009 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
3010 return 1;
3011
c4484b8f
RH
3012 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
3013 This is used in epilogue deallocation functions. */
3014 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
3015 return 1;
3016 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
3017 return 1;
9cd9e512
RH
3018 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
3019 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
3020 return 1;
c4484b8f 3021
bd280792 3022 if (!x_addr)
0777fc02
UB
3023 x_addr = XEXP (x, 0);
3024 true_x_addr = get_addr (x_addr);
3025
3026 mem_addr = XEXP (mem, 0);
3027 true_mem_addr = get_addr (mem_addr);
55efb413 3028
878f5596
UB
3029 /* A read from read-only memory can't conflict with read-write memory.
3030 Don't assume anything when AND addresses are involved and leave to
3031 the code below to determine dependence. */
3032 if (!writep
3033 && MEM_READONLY_P (mem)
0777fc02
UB
3034 && GET_CODE (true_x_addr) != AND
3035 && GET_CODE (true_mem_addr) != AND)
878f5596
UB
3036 return 0;
3037
3038 /* If we have MEMs referring to different address spaces (which can
3039 potentially overlap), we cannot easily tell from the addresses
3040 whether the references overlap. */
3041 if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
3042 return 1;
3043
0777fc02 3044 base = find_base_term (true_mem_addr);
31b0a960
RB
3045 if (! writep
3046 && base
3047 && (GET_CODE (base) == LABEL_REF
3048 || (GET_CODE (base) == SYMBOL_REF
3049 && CONSTANT_POOL_ADDRESS_P (base))))
3050 return 0;
49982682 3051
0777fc02
UB
3052 rtx x_base = find_base_term (true_x_addr);
3053 if (! base_alias_check (true_x_addr, x_base, true_mem_addr, base,
3054 GET_MODE (x), GET_MODE (mem)))
41472af8
MM
3055 return 0;
3056
bd280792 3057 if (!x_canonicalized)
393f9fed 3058 {
0777fc02 3059 x_addr = canon_rtx (true_x_addr);
bd280792 3060 x_mode = GET_MODE (x);
393f9fed 3061 }
bd280792 3062 if (!mem_canonicalized)
0777fc02 3063 mem_addr = canon_rtx (true_mem_addr);
6e73e666 3064
bd280792
JR
3065 if ((ret = memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
3066 GET_MODE_SIZE (x_mode), x_addr, 0)) != -1)
f47e08d9
RG
3067 return ret;
3068
c6ea834c 3069 if (nonoverlapping_memrefs_p (x, mem, false))
c6df88cb
MM
3070 return 0;
3071
55b34b5f 3072 return rtx_refs_may_alias_p (x, mem, false);
c6df88cb
MM
3073}
3074
3075/* Anti dependence: X is written after read in MEM takes place. */
3076
3077int
4f588890 3078anti_dependence (const_rtx mem, const_rtx x)
c6df88cb 3079{
bd280792
JR
3080 return write_dependence_p (mem, x, VOIDmode, NULL_RTX,
3081 /*mem_canonicalized=*/false,
3082 /*x_canonicalized*/false, /*writep=*/false);
393f9fed
JR
3083}
3084
bd280792
JR
3085/* Likewise, but we already have a canonicalized MEM, and X_ADDR for X.
3086 Also, consider X in X_MODE (which might be from an enclosing
3087 STRICT_LOW_PART / ZERO_EXTRACT).
3088 If MEM_CANONICALIZED is true, MEM is canonicalized. */
393f9fed
JR
3089
3090int
bd280792 3091canon_anti_dependence (const_rtx mem, bool mem_canonicalized,
ef4bddc2 3092 const_rtx x, machine_mode x_mode, rtx x_addr)
393f9fed 3093{
bd280792
JR
3094 return write_dependence_p (mem, x, x_mode, x_addr,
3095 mem_canonicalized, /*x_canonicalized=*/true,
3096 /*writep=*/false);
9ae8ffe7
JL
3097}
3098
3099/* Output dependence: X is written after store in MEM takes place. */
3100
3101int
4f588890 3102output_dependence (const_rtx mem, const_rtx x)
9ae8ffe7 3103{
bd280792
JR
3104 return write_dependence_p (mem, x, VOIDmode, NULL_RTX,
3105 /*mem_canonicalized=*/false,
3106 /*x_canonicalized*/false, /*writep=*/true);
9ae8ffe7 3107}
43b9f499
RB
3108
3109/* Likewise, but we already have a canonicalized MEM, and X_ADDR for X.
3110 Also, consider X in X_MODE (which might be from an enclosing
3111 STRICT_LOW_PART / ZERO_EXTRACT).
3112 If MEM_CANONICALIZED is true, MEM is canonicalized. */
3113
3114int
3115canon_output_dependence (const_rtx mem, bool mem_canonicalized,
3116 const_rtx x, machine_mode x_mode, rtx x_addr)
3117{
3118 return write_dependence_p (mem, x, x_mode, x_addr,
3119 mem_canonicalized, /*x_canonicalized=*/true,
3120 /*writep=*/true);
3121}
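
When one side's address has already been canonicalized (as in cselib-based clients), the canon_* entry points above avoid redundant canonicalization. A hedged sketch with a hypothetical helper name; STORE_ADDR is assumed to be the canonical address of STORE_MEM.

/* Hypothetical sketch: anti-dependence query with a pre-canonicalized
   store address.  READ_MEM has not been canonicalized here, hence
   mem_canonicalized=false.  */
static bool
read_conflicts_with_canon_store_p (const_rtx read_mem, const_rtx store_mem,
				   rtx store_addr)
{
  return canon_anti_dependence (read_mem, /*mem_canonicalized=*/false,
				store_mem, GET_MODE (store_mem),
				store_addr) != 0;
}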
c14b9960 3122\f
6e73e666 3123
c6ea834c
BM
3124
3125/* Check whether X may be aliased with MEM. Don't do offset-based
3126 memory disambiguation or TBAA. */
3127int
3128may_alias_p (const_rtx mem, const_rtx x)
3129{
3130 rtx x_addr, mem_addr;
c6ea834c
BM
3131
3132 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
3133 return 1;
3134
a95b3cc7
RG
3135 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
3136 This is used in epilogue deallocation functions. */
3137 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
3138 return 1;
3139 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
c6ea834c 3140 return 1;
c6ea834c
BM
3141 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
3142 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
3143 return 1;
3144
c6ea834c 3145 x_addr = XEXP (x, 0);
0777fc02
UB
3146 x_addr = get_addr (x_addr);
3147
c6ea834c 3148 mem_addr = XEXP (mem, 0);
0777fc02 3149 mem_addr = get_addr (mem_addr);
c6ea834c 3150
878f5596
UB
3151 /* Read-only memory is by definition never modified, and therefore can't
3152 conflict with anything. However, don't assume anything when AND
3153 addresses are involved; leave it to the code below to determine
3154 dependence. We don't expect to find the read-only flag set here, but
3155 stupid user tricks can produce it, so don't die. */
3156 if (MEM_READONLY_P (x)
3157 && GET_CODE (x_addr) != AND
3158 && GET_CODE (mem_addr) != AND)
3159 return 0;
3160
3161 /* If we have MEMs referring to different address spaces (which can
3162 potentially overlap), we cannot easily tell from the addresses
3163 whether the references overlap. */
3164 if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
3165 return 1;
3166
31b0a960
RB
3167 rtx x_base = find_base_term (x_addr);
3168 rtx mem_base = find_base_term (mem_addr);
3169 if (! base_alias_check (x_addr, x_base, mem_addr, mem_base,
3170 GET_MODE (x), GET_MODE (mem_addr)))
c6ea834c
BM
3171 return 0;
3172
c6ea834c
BM
3173 if (nonoverlapping_memrefs_p (mem, x, true))
3174 return 0;
3175
c6ea834c
BM
3176 /* TBAA is not valid for loop-invariant motion queries. */
3177 return rtx_refs_may_alias_p (x, mem, false);
3178}
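
A sketch (not part of alias.c) of the kind of client this function serves: because may_alias_p ignores TBAA and offset-based disambiguation, a negative answer stays safe even for loop-invariant addresses, so a hoisting pass could use it as below; the helper name is hypothetical.

/* Hypothetical sketch: is it safe to hoist a load of INV_MEM across a
   store to CLOBBER_MEM?  */
static bool
safe_to_hoist_load_p (const_rtx clobber_mem, const_rtx inv_mem)
{
  return !may_alias_p (clobber_mem, inv_mem);
}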
3179
6e73e666 3180void
b5deb7b6 3181init_alias_target (void)
6e73e666 3182{
b3694847 3183 int i;
6e73e666 3184
9fc37b2b
RS
3185 if (!arg_base_value)
3186 arg_base_value = gen_rtx_ADDRESS (VOIDmode, 0);
3187
b5deb7b6
SL
3188 memset (static_reg_base_value, 0, sizeof static_reg_base_value);
3189
6e73e666
JC
3190 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3191 /* Check whether this register can hold an incoming pointer
3192 argument. FUNCTION_ARG_REGNO_P tests outgoing register
ec5c56db 3193 numbers, so translate if necessary due to register windows. */
6e73e666 3194 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
f939c3e6 3195 && targetm.hard_regno_mode_ok (i, Pmode))
9fc37b2b
RS
3196 static_reg_base_value[i] = arg_base_value;
3197
757e8ba2
JJ
3198 static_reg_base_value[STACK_POINTER_REGNUM]
3199 = unique_base_value (UNIQUE_BASE_VALUE_SP);
3200 static_reg_base_value[ARG_POINTER_REGNUM]
3201 = unique_base_value (UNIQUE_BASE_VALUE_ARGP);
3202 static_reg_base_value[FRAME_POINTER_REGNUM]
3203 = unique_base_value (UNIQUE_BASE_VALUE_FP);
c3e08036
TS
3204 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
3205 static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
3206 = unique_base_value (UNIQUE_BASE_VALUE_HFP);
bf1660a6
JL
3207}
3208
7b52eede
JH
3209/* Set MEMORY_MODIFIED when X modifies DATA (which is assumed
3210 to be a memory reference). */
3211static bool memory_modified;
3212static void
aa317c97 3213memory_modified_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7b52eede 3214{
3c0cb5de 3215 if (MEM_P (x))
7b52eede 3216 {
9678086d 3217 if (anti_dependence (x, (const_rtx)data) || output_dependence (x, (const_rtx)data))
7b52eede
JH
3218 memory_modified = true;
3219 }
3220}
3221
3222
3223/* Return true when INSN may modify the memory contents of MEM
3224 (i.e. the memory at its address may be written). */
7b52eede 3225bool
9678086d 3226memory_modified_in_insn_p (const_rtx mem, const_rtx insn)
7b52eede
JH
3227{
3228 if (!INSN_P (insn))
3229 return false;
bc36c711
JJ
3230 /* Conservatively assume all non-readonly MEMs might be modified in
3231 calls. */
3232 if (CALL_P (insn))
3233 return true;
7b52eede 3234 memory_modified = false;
aa317c97 3235 note_stores (PATTERN (insn), memory_modified_1, CONST_CAST_RTX(mem));
7b52eede
JH
3236 return memory_modified;
3237}
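
memory_modified_in_insn_p answers for a single insn; a caller scanning a region of the insn stream might wrap it as in this hypothetical sketch (NEXT_INSN is the usual rtl.h walker).

/* Hypothetical sketch: may MEM be modified by any insn in [FROM, TO)?  */
static bool
mem_modified_in_range_p (const_rtx mem, rtx_insn *from, rtx_insn *to)
{
  for (rtx_insn *insn = from; insn != to; insn = NEXT_INSN (insn))
    if (memory_modified_in_insn_p (mem, insn))
      return true;
  return false;
}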
3238
a7b159a4
AH
3239/* Return TRUE if the destination of SET is an rtx identical to
3240 ITEM. */
3241static inline bool
3242set_dest_equal_p (const_rtx set, const_rtx item)
3243{
3244 rtx dest = SET_DEST (set);
3245 return rtx_equal_p (dest, item);
3246}
3247
c13e8210
MM
3248/* Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE
3249 array. */
3250
9ae8ffe7 3251void
4682ae04 3252init_alias_analysis (void)
9ae8ffe7 3253{
c582d54a 3254 unsigned int maxreg = max_reg_num ();
ea64ef27 3255 int changed, pass;
b3694847
SS
3256 int i;
3257 unsigned int ui;
d36a28b8
DM
3258 rtx_insn *insn;
3259 rtx val;
131db6b8
SB
3260 int rpo_cnt;
3261 int *rpo;
9ae8ffe7 3262
0d446150
JH
3263 timevar_push (TV_ALIAS_ANALYSIS);
3264
92390dd1 3265 vec_safe_grow_cleared (reg_known_value, maxreg - FIRST_PSEUDO_REGISTER);
9ff3c7ca 3266 reg_known_equiv_p = sbitmap_alloc (maxreg - FIRST_PSEUDO_REGISTER);
dd3d1ec0 3267 bitmap_clear (reg_known_equiv_p);
9ae8ffe7 3268
08c79682 3269 /* If we have memory allocated from the previous run, use it. */
c582d54a 3270 if (old_reg_base_value)
08c79682
KH
3271 reg_base_value = old_reg_base_value;
3272
3273 if (reg_base_value)
9771b263 3274 reg_base_value->truncate (0);
08c79682 3275
9771b263 3276 vec_safe_grow_cleared (reg_base_value, maxreg);
ac606739 3277
5ed6ace5 3278 new_reg_base_value = XNEWVEC (rtx, maxreg);
d630245f 3279 reg_seen = sbitmap_alloc (maxreg);
ec907dd8
JL
3280
3281 /* The basic idea is that each pass through this loop will use the
3282 "constant" information from the previous pass to propagate alias
3283 information through another level of assignments.
3284
131db6b8
SB
3285 The propagation is done on the CFG in reverse post-order, to propagate
3286 things forward as far as possible in each iteration.
3287
ec907dd8
JL
3288 This could get expensive if the assignment chains are long. Maybe
3289 we should throttle the number of iterations, possibly based on
6e73e666 3290 the optimization level or flag_expensive_optimizations.
ec907dd8
JL
3291
3292 We could propagate more information in the first pass by making use
6fb5fa3c 3293 of DF_REG_DEF_COUNT to determine immediately that the alias information
ea64ef27
JL
3294 for a pseudo is "constant".
3295
3296 A program with an uninitialized variable can cause an infinite loop
3297 here. Instead of doing a full dataflow analysis to detect such problems
3298 we just cap the number of iterations for the loop.
3299
3300 The state of the arrays for the set chain in question does not matter
3301 since the program has undefined behavior. */
6e73e666 3302
0cae8d31 3303 rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
131db6b8
SB
3304 rpo_cnt = pre_and_rev_post_order_compute (NULL, rpo, false);
3305
e86a9946
RS
3306 /* The prologue/epilogue insns are not threaded onto the
3307 insn chain until after reload has completed. Thus,
3308 there is no sense wasting time checking if INSN is in
3309 the prologue/epilogue until after reload has completed. */
3310 bool could_be_prologue_epilogue = ((targetm.have_prologue ()
3311 || targetm.have_epilogue ())
3312 && reload_completed);
3313
ea64ef27 3314 pass = 0;
6e73e666 3315 do
ec907dd8
JL
3316 {
3317 /* Assume nothing will change this iteration of the loop. */
3318 changed = 0;
3319
ec907dd8 3320 /* We want to assign the same IDs each iteration of this loop, so
9fc37b2b
RS
3321 start counting from one each iteration of the loop. */
3322 unique_id = 1;
ec907dd8 3323
f5143c46 3324 /* We're at the start of the function each iteration through the
ec907dd8 3325 loop, so we're copying arguments. */
83bbd9b6 3326 copying_arguments = true;
9ae8ffe7 3327
6e73e666 3328 /* Wipe the potential alias information clean for this pass. */
c582d54a 3329 memset (new_reg_base_value, 0, maxreg * sizeof (rtx));
8072f69c 3330
6e73e666 3331 /* Wipe the reg_seen array clean. */
f61e445a 3332 bitmap_clear (reg_seen);
9ae8ffe7 3333
356610cb
EB
3334 /* Initialize the alias information for this pass. */
3335 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3336 if (static_reg_base_value[i])
3337 {
3338 new_reg_base_value[i] = static_reg_base_value[i];
3339 bitmap_set_bit (reg_seen, i);
3340 }
6e73e666 3341
ec907dd8 3342 /* Walk the insns adding values to the new_reg_base_value array. */
131db6b8 3343 for (i = 0; i < rpo_cnt; i++)
9ae8ffe7 3344 {
06e28de2 3345 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
131db6b8 3346 FOR_BB_INSNS (bb, insn)
ec907dd8 3347 {
131db6b8
SB
3348 if (NONDEBUG_INSN_P (insn))
3349 {
3350 rtx note, set;
efc9bd41 3351
e86a9946 3352 if (could_be_prologue_epilogue
131db6b8
SB
3353 && prologue_epilogue_contains (insn))
3354 continue;
efc9bd41 3355
131db6b8
SB
3356 /* If this insn has a noalias note, process it.  Otherwise,
3357 scan for sets.  A simple set will have no side effects
3358 that could change the base value of any other register. */
6e73e666 3359
131db6b8
SB
3360 if (GET_CODE (PATTERN (insn)) == SET
3361 && REG_NOTES (insn) != 0
3362 && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
3363 record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
3364 else
3365 note_stores (PATTERN (insn), record_set, NULL);
6e73e666 3366
131db6b8 3367 set = single_set (insn);
6e73e666 3368
131db6b8
SB
3369 if (set != 0
3370 && REG_P (SET_DEST (set))
3371 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
713f41f9 3372 {
131db6b8
SB
3373 unsigned int regno = REGNO (SET_DEST (set));
3374 rtx src = SET_SRC (set);
3375 rtx t;
3376
3377 note = find_reg_equal_equiv_note (insn);
3378 if (note && REG_NOTE_KIND (note) == REG_EQUAL
3379 && DF_REG_DEF_COUNT (regno) != 1)
3380 note = NULL_RTX;
3381
3382 if (note != NULL_RTX
3383 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
3384 && ! rtx_varies_p (XEXP (note, 0), 1)
3385 && ! reg_overlap_mentioned_p (SET_DEST (set),
3386 XEXP (note, 0)))
3387 {
3388 set_reg_known_value (regno, XEXP (note, 0));
3389 set_reg_known_equiv_p (regno,
3390 REG_NOTE_KIND (note) == REG_EQUIV);
3391 }
3392 else if (DF_REG_DEF_COUNT (regno) == 1
3393 && GET_CODE (src) == PLUS
3394 && REG_P (XEXP (src, 0))
3395 && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
3396 && CONST_INT_P (XEXP (src, 1)))
3397 {
3398 t = plus_constant (GET_MODE (src), t,
3399 INTVAL (XEXP (src, 1)));
3400 set_reg_known_value (regno, t);
3401 set_reg_known_equiv_p (regno, false);
3402 }
3403 else if (DF_REG_DEF_COUNT (regno) == 1
3404 && ! rtx_varies_p (src, 1))
3405 {
3406 set_reg_known_value (regno, src);
3407 set_reg_known_equiv_p (regno, false);
3408 }
713f41f9 3409 }
6e73e666 3410 }
131db6b8
SB
3411 else if (NOTE_P (insn)
3412 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
3413 copying_arguments = false;
ec907dd8 3414 }
6e73e666 3415 }
ec907dd8 3416
6e73e666 3417 /* Now propagate values from new_reg_base_value to reg_base_value. */
62e5bf5d 3418 gcc_assert (maxreg == (unsigned int) max_reg_num ());
c22cacf3 3419
c582d54a 3420 for (ui = 0; ui < maxreg; ui++)
6e73e666 3421 {
e51712db 3422 if (new_reg_base_value[ui]
9771b263
DN
3423 && new_reg_base_value[ui] != (*reg_base_value)[ui]
3424 && ! rtx_equal_p (new_reg_base_value[ui], (*reg_base_value)[ui]))
ec907dd8 3425 {
9771b263 3426 (*reg_base_value)[ui] = new_reg_base_value[ui];
6e73e666 3427 changed = 1;
ec907dd8 3428 }
9ae8ffe7 3429 }
9ae8ffe7 3430 }
6e73e666 3431 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
131db6b8 3432 XDELETEVEC (rpo);
9ae8ffe7
JL
3433
3434 /* Fill in the remaining entries. */
9771b263 3435 FOR_EACH_VEC_ELT (*reg_known_value, i, val)
9ff3c7ca
SB
3436 {
3437 int regno = i + FIRST_PSEUDO_REGISTER;
3438 if (! val)
3439 set_reg_known_value (regno, regno_reg_rtx[regno]);
3440 }
9ae8ffe7 3441
e05e2395
MM
3442 /* Clean up. */
3443 free (new_reg_base_value);
ec907dd8 3444 new_reg_base_value = 0;
d630245f 3445 sbitmap_free (reg_seen);
9ae8ffe7 3446 reg_seen = 0;
0d446150 3447 timevar_pop (TV_ALIAS_ANALYSIS);
9ae8ffe7
JL
3448}
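
For context (not part of alias.c), the expected lifecycle for an RTL pass that wants this register-based alias information is init, query, end; a minimal hedged sketch:

/* Hypothetical sketch of a pass body using the alias machinery.  */
static unsigned int
example_execute (void)
{
  init_alias_analysis ();  /* build reg_base_value / reg_known_value */
  /* ... true_dependence / anti_dependence queries over the insns ... */
  end_alias_analysis ();   /* cache reg_base_value for the next function */
  return 0;
}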
3449
61630b27
JJ
3450/* Equate REG_BASE_VALUE (reg1) to REG_BASE_VALUE (reg2).
3451 Special API for the var-tracking pass. */
3452
3453void
3454vt_equate_reg_base_value (const_rtx reg1, const_rtx reg2)
3455{
9771b263 3456 (*reg_base_value)[REGNO (reg1)] = REG_BASE_VALUE (reg2);
61630b27
JJ
3457}
3458
9ae8ffe7 3459void
4682ae04 3460end_alias_analysis (void)
9ae8ffe7 3461{
c582d54a 3462 old_reg_base_value = reg_base_value;
9771b263 3463 vec_free (reg_known_value);
9ff3c7ca 3464 sbitmap_free (reg_known_equiv_p);
9ae8ffe7 3465}
e2500fed 3466
3ecf9d13
JH
3467void
3468dump_alias_stats_in_alias_c (FILE *s)
3469{
3470 fprintf (s, " TBAA oracle: %llu disambiguations %llu queries\n"
3471 " %llu are in alias set 0\n"
3472 " %llu queries asked about the same object\n"
3473 " %llu queries asked about the same alias set\n"
3474 " %llu access volatile\n"
6e042ef4
JH
3475 " %llu are dependent in the DAG\n"
3476 " %llu are aritificially in conflict with void *\n",
3ecf9d13
JH
3477 alias_stats.num_disambiguated,
3478 alias_stats.num_alias_zero + alias_stats.num_same_alias_set
3479 + alias_stats.num_same_objects + alias_stats.num_volatile
6e042ef4
JH
3480 + alias_stats.num_dag + alias_stats.num_disambiguated
3481 + alias_stats.num_universal,
3ecf9d13 3482 alias_stats.num_alias_zero, alias_stats.num_same_alias_set,
6e042ef4
JH
3483 alias_stats.num_same_objects, alias_stats.num_volatile,
3484 alias_stats.num_dag, alias_stats.num_universal);
3ecf9d13 3485}
e2500fed 3486#include "gt-alias.h"