/* Alias analysis for GNU C
   Copyright (C) 1997-2018 Free Software Foundation, Inc.
   Contributed by John Carr (jfc@mit.edu).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple-ssa.h"
#include "emit-rtl.h"
#include "alias.h"
#include "fold-const.h"
#include "varasm.h"
#include "cselib.h"
#include "langhooks.h"
#include "cfganal.h"
#include "rtl-iter.h"
#include "cgraph.h"

/* The aliasing API provided here solves related but different problems:

   Say there exists (in C)

   struct X {
     struct Y y1;
     struct Z z2;
   } x1, *px1, *px2;

   struct Y y2, *py;
   struct Z z2, *pz;


   py = &x1.y1;
   px2 = &x1;

   Consider the four questions:

   Can a store to x1 interfere with px2->y1?
   Can a store to x1 interfere with px2->z2?
   Can a store to x1 change the value pointed to by py?
   Can a store to x1 change the value pointed to by pz?

   The answer to these questions can be yes, yes, yes, and maybe.

   The first two questions can be answered with a simple examination
   of the type system.  If structure X contains a field of type Y then
   a store through a pointer to an X can overwrite any field that is
   contained (recursively) in an X (unless we know that px1 != px2).

   The last two questions can be solved in the same way as the first
   two questions but this is too conservative.  The observation is
   that in some cases we can know which (if any) fields are addressed
   and if those addresses are used in bad ways.  This analysis may be
   language specific.  In C, arbitrary operations may be applied to
   pointers.  However, there is some indication that this may be too
   conservative for some C++ types.

   The pass ipa-type-escape does this analysis for the types whose
   instances do not escape across the compilation boundary.

   Historically in GCC, these two problems were combined and a single
   data structure was used to represent the solution to both
   problems.  We now have two similar but different data structures.
   The data structure to solve the last two questions is similar to
   the first, but does not contain the fields whose addresses are never
   taken.  For types that do escape the compilation unit, the data
   structures will have identical information.  */

/* The alias sets assigned to MEMs assist the back-end in determining
   which MEMs can alias which other MEMs.  In general, two MEMs in
   different alias sets cannot alias each other, with one important
   exception.  Consider something like:

     struct S { int i; double d; };

   a store to an `S' can alias something of either type `int' or type
   `double'.  (However, a store to an `int' cannot alias a `double'
   and vice versa.)  We indicate this via a tree structure that looks
   like:

	    struct S
	     /   \
	    /     \
	  |/_     _\|
	  int    double

   (The arrows are directed and point downwards.)
   In this situation we say the alias set for `struct S' is the
   `superset' and that those for `int' and `double' are `subsets'.

   To see whether two alias sets can point to the same memory, we must
   see if either alias set is a subset of the other.  We need not trace
   past immediate descendants, however, since we propagate all
   grandchildren up one level.

   Alias set zero is implicitly a superset of all other alias sets.
   However, there is no actual entry for alias set zero.  It is an
   error to attempt to explicitly construct a subset of zero.  */
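
/* For illustration (an editorial sketch, not part of the original
   sources; `S_type' stands for the tree node of `struct S' above):
   given that tree, the query routines defined below behave like

     alias_set_type s = get_alias_set (S_type);
     alias_set_type i = get_alias_set (integer_type_node);
     alias_set_type d = get_alias_set (double_type_node);

     alias_sets_conflict_p (s, i)  => nonzero; `int' is a subset of `S'
     alias_sets_conflict_p (s, d)  => nonzero; `double' is a subset of `S'
     alias_sets_conflict_p (i, d)  => 0; distinct sets, no subset relation.  */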

struct alias_set_hash : int_hash <int, INT_MIN, INT_MIN + 1> {};

struct GTY(()) alias_set_entry {
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  alias_set_type alias_set;

  /* Nonzero if the set would have a child of zero: this effectively makes
     this alias set the same as alias set zero.  */
  bool has_zero_child;
  /* Nonzero if the alias set corresponds to a pointer type itself (i.e. not
     to an aggregate containing a pointer).
     This is used for a special case where we need a universal pointer type
     compatible with all other pointer types.  */
  bool is_pointer;
  /* Nonzero if is_pointer or if one of the children has has_pointer set.  */
  bool has_pointer;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all descendants.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  hash_map<alias_set_hash, int> *children;
};

static int rtx_equal_for_memref_p (const_rtx, const_rtx);
static void record_set (rtx, const_rtx, void *);
static int base_alias_check (rtx, rtx, rtx, rtx, machine_mode,
			     machine_mode);
static rtx find_base_value (rtx);
static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx);
static alias_set_entry *get_alias_set_entry (alias_set_type);
static tree decl_for_component_ref (tree);
static int write_dependence_p (const_rtx,
			       const_rtx, machine_mode, rtx,
			       bool, bool, bool);
static int compare_base_symbol_refs (const_rtx, const_rtx);

static void memory_modified_1 (rtx, const_rtx, void *);

/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned long long num_alias_zero;
  unsigned long long num_same_alias_set;
  unsigned long long num_same_objects;
  unsigned long long num_volatile;
  unsigned long long num_dag;
  unsigned long long num_universal;
  unsigned long long num_disambiguated;
} alias_stats;


/* Set up all info needed to perform alias analysis on memory references.  */

/* Returns the size in bytes of the mode of X.  */
#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))

/* Cap the number of passes we make over the insns propagating alias
   information through set chains.
   ??? 10 is a completely arbitrary choice.  This should be based on the
   maximum loop depth in the CFG, but we do not have this information
   available (even if current_loops _is_ available).  */
#define MAX_ALIAS_LOOP_PASSES 10

/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent three types of base:

   1. incoming arguments.  There is just one ADDRESS to represent all
      arguments, since we do not know at this level whether accesses
      based on different arguments can alias.  The ADDRESS has id 0.

   2. stack_pointer_rtx, frame_pointer_rtx, hard_frame_pointer_rtx
      (if distinct from frame_pointer_rtx) and arg_pointer_rtx.
      Each of these rtxes has a separate ADDRESS associated with it,
      each with a negative id.

      GCC is (and is required to be) precise in which register it
      chooses to access a particular region of stack.  We can therefore
      assume that accesses based on one of these rtxes do not alias
      accesses based on another of these rtxes.

   3. bases that are derived from malloc()ed memory (REG_NOALIAS).
      Each such piece of memory has a separate ADDRESS associated
      with it, each with an id greater than 0.

   Accesses based on one ADDRESS do not alias accesses based on other
   ADDRESSes.  Accesses based on ADDRESSes in groups (2) and (3) do not
   alias globals either; the ADDRESSes have Pmode to indicate this.
   The ADDRESS in group (1) _may_ alias globals; it has VOIDmode to
   indicate this.  */

static GTY(()) vec<rtx, va_gc> *reg_base_value;
static rtx *new_reg_base_value;

/* The single VOIDmode ADDRESS that represents all argument bases.
   It has id 0.  */
static GTY(()) rtx arg_base_value;

/* Used to allocate unique ids to each REG_NOALIAS ADDRESS.  */
static int unique_id;

/* We preserve a copy of the old array around to reduce the amount of
   garbage produced; about 8% of the garbage produced was attributed to
   this array.  */
static GTY((deletable)) vec<rtx, va_gc> *old_reg_base_value;

/* Values of XINT (address, 0) of Pmode ADDRESS rtxes for special
   registers.  */
#define UNIQUE_BASE_VALUE_SP	-1
#define UNIQUE_BASE_VALUE_ARGP	-2
#define UNIQUE_BASE_VALUE_FP	-3
#define UNIQUE_BASE_VALUE_HFP	-4

#define static_reg_base_value \
  (this_target_rtl->x_static_reg_base_value)

#define REG_BASE_VALUE(X)				\
  (REGNO (X) < vec_safe_length (reg_base_value)		\
   ? (*reg_base_value)[REGNO (X)] : 0)

/* Vector indexed by N giving the initial (unchanging) value known for
   pseudo-register N.  This vector is initialized in init_alias_analysis,
   and does not change until end_alias_analysis is called.  */
static GTY(()) vec<rtx, va_gc> *reg_known_value;

/* Vector recording for each reg_known_value whether it is due to a
   REG_EQUIV note.  Future passes (viz., reload) may replace the
   pseudo with the equivalent expression and so we account for the
   dependences that would be introduced if that happens.

   The REG_EQUIV notes created in assign_parms may mention the arg
   pointer, and there are explicit insns in the RTL that modify the
   arg pointer.  Thus we must ensure that such insns don't get
   scheduled across each other because that would invalidate the
   REG_EQUIV notes.  One could argue that the REG_EQUIV notes are
   wrong, but solving the problem in the scheduler will likely give
   better code, so we do it here.  */
static sbitmap reg_known_equiv_p;

/* True when scanning insns from the start of the rtl to the
   NOTE_INSN_FUNCTION_BEG note.  */
static bool copying_arguments;


/* The vector used to store the various alias set entries.  */
static GTY (()) vec<alias_set_entry *, va_gc> *alias_sets;

/* Build a decomposed reference object for querying the alias-oracle
   from the MEM rtx and store it in *REF.
   Returns false if MEM is not suitable for the alias-oracle.  */

static bool
ao_ref_from_mem (ao_ref *ref, const_rtx mem)
{
  tree expr = MEM_EXPR (mem);
  tree base;

  if (!expr)
    return false;

  ao_ref_init (ref, expr);

  /* Get the base of the reference and see if we have to reject or
     adjust it.  */
  base = ao_ref_base (ref);
  if (base == NULL_TREE)
    return false;

  /* The tree oracle doesn't like bases that are neither decls
     nor indirect references of SSA names.  */
  if (!(DECL_P (base)
	|| (TREE_CODE (base) == MEM_REF
	    && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	|| (TREE_CODE (base) == TARGET_MEM_REF
	    && TREE_CODE (TMR_BASE (base)) == SSA_NAME)))
    return false;

  /* If this is a reference based on a partitioned decl replace the
     base with a MEM_REF of the pointer representative we
     created during stack slot partitioning.  */
  if (VAR_P (base)
      && ! is_global_var (base)
      && cfun->gimple_df->decls_to_pointers != NULL)
    {
      tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
      if (namep)
	ref->base = build_simple_mem_ref (*namep);
    }

  ref->ref_alias_set = MEM_ALIAS_SET (mem);

  /* If MEM_OFFSET or MEM_SIZE are unknown what we got from MEM_EXPR
     is conservative, so trust it.  */
  if (!MEM_OFFSET_KNOWN_P (mem)
      || !MEM_SIZE_KNOWN_P (mem))
    return true;

  /* If MEM_OFFSET/MEM_SIZE get us outside of ref->offset/ref->max_size
     drop ref->ref.  */
  if (maybe_lt (MEM_OFFSET (mem), 0)
      || (ref->max_size_known_p ()
	  && maybe_gt ((MEM_OFFSET (mem) + MEM_SIZE (mem)) * BITS_PER_UNIT,
		       ref->max_size)))
    ref->ref = NULL_TREE;

  /* Refine size and offset we got from analyzing MEM_EXPR by using
     MEM_SIZE and MEM_OFFSET.  */

  ref->offset += MEM_OFFSET (mem) * BITS_PER_UNIT;
  ref->size = MEM_SIZE (mem) * BITS_PER_UNIT;

  /* The MEM may extend into adjacent fields, so adjust max_size if
     necessary.  */
  if (ref->max_size_known_p ())
    ref->max_size = upper_bound (ref->max_size, ref->size);

  /* If MEM_OFFSET and MEM_SIZE might get us outside of the base object of
     the MEM_EXPR punt.  This happens for STRICT_ALIGNMENT targets a lot.  */
  if (MEM_EXPR (mem) != get_spill_slot_decl (false)
      && (maybe_lt (ref->offset, 0)
	  || (DECL_P (ref->base)
	      && (DECL_SIZE (ref->base) == NULL_TREE
		  || !poly_int_tree_p (DECL_SIZE (ref->base))
		  || maybe_lt (wi::to_poly_offset (DECL_SIZE (ref->base)),
			       ref->offset + ref->size)))))
    return false;

  return true;
}

/* Query the alias-oracle on whether the two memory rtx X and MEM may
   alias.  If TBAA_P is set also apply TBAA.  Returns true if the
   two rtxen may alias, false otherwise.  */

static bool
rtx_refs_may_alias_p (const_rtx x, const_rtx mem, bool tbaa_p)
{
  ao_ref ref1, ref2;

  if (!ao_ref_from_mem (&ref1, x)
      || !ao_ref_from_mem (&ref2, mem))
    return true;

  return refs_may_alias_p_1 (&ref1, &ref2,
			     tbaa_p
			     && MEM_ALIAS_SET (x) != 0
			     && MEM_ALIAS_SET (mem) != 0);
}

/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
   such an entry, or NULL otherwise.  */

static inline alias_set_entry *
get_alias_set_entry (alias_set_type alias_set)
{
  return (*alias_sets)[alias_set];
}

/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
   the two MEMs cannot alias each other.  */

static inline int
mems_in_disjoint_alias_sets_p (const_rtx mem1, const_rtx mem2)
{
  return (flag_strict_aliasing
	  && ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1),
				      MEM_ALIAS_SET (mem2)));
}

/* Return true if the first alias set is a subset of the second.  */

bool
alias_set_subset_of (alias_set_type set1, alias_set_type set2)
{
  alias_set_entry *ase2;

  /* Disable TBAA oracle with !flag_strict_aliasing.  */
  if (!flag_strict_aliasing)
    return true;

  /* Everything is a subset of the "aliases everything" set.  */
  if (set2 == 0)
    return true;

  /* Check if set1 is a subset of set2.  */
  ase2 = get_alias_set_entry (set2);
  if (ase2 != 0
      && (ase2->has_zero_child
	  || (ase2->children && ase2->children->get (set1))))
    return true;

  /* As a special case we consider the alias set of "void *" to be both a
     subset and a superset of every alias set of a pointer.  This extra
     symmetry does not matter for alias_sets_conflict_p but it makes
     aliasing_component_refs_p return true on the following testcase:

     void *ptr;
     char **ptr2=(char **)&ptr;
     *ptr2 = ...

     Additionally if a set contains the universal pointer, we consider every
     pointer to be a subset of it, but we do not represent this explicitly -
     doing so would require us to update the transitive closure each time we
     introduce a new pointer type.  This makes aliasing_component_refs_p
     return true on the following testcase:

     struct a {void *ptr;}
     char **ptr = (char **)&a.ptr;
     ptr = ...

     This makes void * a truly universal pointer type.  See pointer handling
     in get_alias_set for more details.  */
  if (ase2 && ase2->has_pointer)
    {
      alias_set_entry *ase1 = get_alias_set_entry (set1);

      if (ase1 && ase1->is_pointer)
	{
	  alias_set_type voidptr_set = TYPE_ALIAS_SET (ptr_type_node);
	  /* If one is ptr_type_node and the other is a pointer, then we
	     consider them subsets of each other.  */
	  if (set1 == voidptr_set || set2 == voidptr_set)
	    return true;
	  /* If SET2 contains the universal pointer's alias set, then we
	     consider every (non-universal) pointer to be a subset of it.  */
	  if (ase2->children && set1 != voidptr_set
	      && ase2->children->get (voidptr_set))
	    return true;
	}
    }
  return false;
}

/* Return 1 if the two specified alias sets may conflict.  */

int
alias_sets_conflict_p (alias_set_type set1, alias_set_type set2)
{
  alias_set_entry *ase1;
  alias_set_entry *ase2;

  /* The easy case.  */
  if (alias_sets_must_conflict_p (set1, set2))
    return 1;

  /* See if the first alias set is a subset of the second.  */
  ase1 = get_alias_set_entry (set1);
  if (ase1 != 0
      && ase1->children && ase1->children->get (set2))
    {
      ++alias_stats.num_dag;
      return 1;
    }

  /* Now do the same, but with the alias sets reversed.  */
  ase2 = get_alias_set_entry (set2);
  if (ase2 != 0
      && ase2->children && ase2->children->get (set1))
    {
      ++alias_stats.num_dag;
      return 1;
    }

  /* We want void * to be compatible with any other pointer without
     really dropping it to alias set 0.  Doing so would make it
     compatible with all non-pointer types too.

     This is not strictly necessary by the C/C++ language
     standards, but avoids common type punning mistakes.  In
     addition to that, we need the existence of such a universal
     pointer to implement Fortran's C_PTR type (which is defined as a
     type compatible with all C pointers).  */
  if (ase1 && ase2 && ase1->has_pointer && ase2->has_pointer)
    {
      alias_set_type voidptr_set = TYPE_ALIAS_SET (ptr_type_node);

      /* If one of the sets corresponds to the universal pointer,
	 we consider it to conflict with anything that is
	 or contains a pointer.  */
      if (set1 == voidptr_set || set2 == voidptr_set)
	{
	  ++alias_stats.num_universal;
	  return true;
	}
      /* If one of the sets is a (non-universal) pointer and the other
	 contains the universal pointer, we also get a conflict.  */
      if (ase1->is_pointer && set2 != voidptr_set
	  && ase2->children && ase2->children->get (voidptr_set))
	{
	  ++alias_stats.num_universal;
	  return true;
	}
      if (ase2->is_pointer && set1 != voidptr_set
	  && ase1->children && ase1->children->get (voidptr_set))
	{
	  ++alias_stats.num_universal;
	  return true;
	}
    }

  ++alias_stats.num_disambiguated;

  /* The two alias sets are distinct and neither one is the
     child of the other.  Therefore, they cannot conflict.  */
  return 0;
}

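/* A sketch of the universal-pointer rule above (an editorial example,
   not from the original sources): because the alias set of
   ptr_type_node conflicts with every set that is or contains a
   pointer,

     void **vpp;
     int **ipp;

   accesses through *vpp and *ipp are considered to conflict even
   though `void *' and `int *' have distinct alias sets.  */
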
/* Return 1 if the two specified alias sets will always conflict.  */

int
alias_sets_must_conflict_p (alias_set_type set1, alias_set_type set2)
{
  /* Disable TBAA oracle with !flag_strict_aliasing.  */
  if (!flag_strict_aliasing)
    return 1;
  if (set1 == 0 || set2 == 0)
    {
      ++alias_stats.num_alias_zero;
      return 1;
    }
  if (set1 == set2)
    {
      ++alias_stats.num_same_alias_set;
      return 1;
    }

  return 0;
}

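/* Editorial note with a small example (not from the original sources):
   this predicate only catches the two trivial cases,

     alias_sets_must_conflict_p (0, s)  => 1 for any set S
     alias_sets_must_conflict_p (s, s)  => 1 for any set S

   while two distinct nonzero sets yield 0 here even when they conflict
   through the subset relation; alias_sets_conflict_p above handles
   that case.  */
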
/* Return 1 if any MEM object of type T1 will always conflict (using the
   dependency routines in this file) with any MEM object of type T2.
   This is used when allocating temporary storage.  If T1 and/or T2 are
   NULL_TREE, it means we know nothing about the storage.  */

int
objects_must_conflict_p (tree t1, tree t2)
{
  alias_set_type set1, set2;

  /* If neither has a type specified, we don't know if they'll conflict
     because we may be using them to store objects of various types, for
     example the argument and local variables areas of inlined functions.  */
  if (t1 == 0 && t2 == 0)
    return 0;

  /* If they are the same type, they must conflict.  */
  if (t1 == t2)
    {
      ++alias_stats.num_same_objects;
      return 1;
    }
  /* Likewise if both are volatile.  */
  if (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2))
    {
      ++alias_stats.num_volatile;
      return 1;
    }

  set1 = t1 ? get_alias_set (t1) : 0;
  set2 = t2 ? get_alias_set (t2) : 0;

  /* We can't use alias_sets_conflict_p because we must make sure
     that every subtype of t1 will conflict with every subtype of
     t2 for which a pair of subobjects of these respective subtypes
     overlaps on the stack.  */
  return alias_sets_must_conflict_p (set1, set2);
}

/* Return the outermost parent of component present in the chain of
   component references handled by get_inner_reference in T with the
   following property:
     - the component is non-addressable, or
     - the parent has alias set zero,
   or NULL_TREE if no such parent exists.  In the former cases, the alias
   set of this parent is the alias set that must be used for T itself.  */

tree
component_uses_parent_alias_set_from (const_tree t)
{
  const_tree found = NULL_TREE;

  if (AGGREGATE_TYPE_P (TREE_TYPE (t))
      && TYPE_TYPELESS_STORAGE (TREE_TYPE (t)))
    return const_cast <tree> (t);

  while (handled_component_p (t))
    {
      switch (TREE_CODE (t))
	{
	case COMPONENT_REF:
	  if (DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
	    found = t;
	  /* Permit type-punning when accessing a union, provided the access
	     is directly through the union.  For example, this code does not
	     permit taking the address of a union member and then storing
	     through it.  Even the type-punning allowed here is a GCC
	     extension, albeit a common and useful one; the C standard says
	     that such accesses have implementation-defined behavior.  */
	  else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) == UNION_TYPE)
	    found = t;
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0))))
	    found = t;
	  break;

	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  break;

	case BIT_FIELD_REF:
	case VIEW_CONVERT_EXPR:
	  /* Bitfields and casts are never addressable.  */
	  found = t;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (get_alias_set (TREE_TYPE (TREE_OPERAND (t, 0))) == 0)
	found = t;

      t = TREE_OPERAND (t, 0);
    }

  if (found)
    return TREE_OPERAND (found, 0);

  return NULL_TREE;
}

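/* An illustrative example of the union rule above (an editorial
   sketch, not from the original sources):

     union U { int i; float f; } u;

     u.i = 42;
     float x = u.f;	// direct union access: parent's alias set applies

     float *p = &u.f;
     float y = *p;	// not covered: the access is through `float *'

   For the direct access this function reports the union as the parent
   whose alias set must be used for the reference.  */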

/* Return whether the pointer-type T effective for aliasing may
   access everything and thus the reference has to be assigned
   alias-set zero.  */

static bool
ref_all_alias_ptr_type_p (const_tree t)
{
  return (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE
	  || TYPE_REF_CAN_ALIAS_ALL (t));
}

/* Return the alias set for the memory pointed to by T, which may be
   either a type or an expression.  Return -1 if there is nothing
   special about dereferencing T.  */

static alias_set_type
get_deref_alias_set_1 (tree t)
{
  /* All we care about is the type.  */
  if (! TYPE_P (t))
    t = TREE_TYPE (t);

  /* If we have an INDIRECT_REF via a void pointer, we don't
     know anything about what that might alias.  Likewise if the
     pointer is marked that way.  */
  if (ref_all_alias_ptr_type_p (t))
    return 0;

  return -1;
}

/* Return the alias set for the memory pointed to by T, which may be
   either a type or an expression.  */

alias_set_type
get_deref_alias_set (tree t)
{
  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  */
  if (!flag_strict_aliasing)
    return 0;

  alias_set_type set = get_deref_alias_set_1 (t);

  /* Fall back to the alias-set of the pointed-to type.  */
  if (set == -1)
    {
      if (! TYPE_P (t))
	t = TREE_TYPE (t);
      set = get_alias_set (TREE_TYPE (t));
    }

  return set;
}

/* Return the pointer-type relevant for TBAA purposes from the
   memory reference tree *T or NULL_TREE in which case *T is
   adjusted to point to the outermost component reference that
   can be used for assigning an alias set.  */

static tree
reference_alias_ptr_type_1 (tree *t)
{
  tree inner;

  /* Get the base object of the reference.  */
  inner = *t;
  while (handled_component_p (inner))
    {
      /* If there is a VIEW_CONVERT_EXPR in the chain we cannot use
	 the type of any component references that wrap it to
	 determine the alias-set.  */
      if (TREE_CODE (inner) == VIEW_CONVERT_EXPR)
	*t = TREE_OPERAND (inner, 0);
      inner = TREE_OPERAND (inner, 0);
    }

  /* Handle pointer dereferences here, they can override the
     alias-set.  */
  if (INDIRECT_REF_P (inner)
      && ref_all_alias_ptr_type_p (TREE_TYPE (TREE_OPERAND (inner, 0))))
    return TREE_TYPE (TREE_OPERAND (inner, 0));
  else if (TREE_CODE (inner) == TARGET_MEM_REF)
    return TREE_TYPE (TMR_OFFSET (inner));
  else if (TREE_CODE (inner) == MEM_REF
	   && ref_all_alias_ptr_type_p (TREE_TYPE (TREE_OPERAND (inner, 1))))
    return TREE_TYPE (TREE_OPERAND (inner, 1));

  /* If the innermost reference is a MEM_REF that has a
     conversion embedded treat it like a VIEW_CONVERT_EXPR above,
     using the memory access type for determining the alias-set.  */
  if (TREE_CODE (inner) == MEM_REF
      && (TYPE_MAIN_VARIANT (TREE_TYPE (inner))
	  != TYPE_MAIN_VARIANT
	       (TREE_TYPE (TREE_TYPE (TREE_OPERAND (inner, 1))))))
    return TREE_TYPE (TREE_OPERAND (inner, 1));

  /* Otherwise, pick up the outermost object that we could have
     a pointer to.  */
  tree tem = component_uses_parent_alias_set_from (*t);
  if (tem)
    *t = tem;

  return NULL_TREE;
}

/* Return the pointer-type relevant for TBAA purposes from the
   gimple memory reference tree T.  This is the type to be used for
   the offset operand of MEM_REF or TARGET_MEM_REF replacements of T
   and guarantees that get_alias_set will return the same alias
   set for T and the replacement.  */

tree
reference_alias_ptr_type (tree t)
{
  /* If the frontend assigns this alias-set zero, preserve that.  */
  if (lang_hooks.get_alias_set (t) == 0)
    return ptr_type_node;

  tree ptype = reference_alias_ptr_type_1 (&t);
  /* If there is a given pointer type for aliasing purposes, return it.  */
  if (ptype != NULL_TREE)
    return ptype;

  /* Otherwise build one from the outermost component reference we
     may use.  */
  if (TREE_CODE (t) == MEM_REF
      || TREE_CODE (t) == TARGET_MEM_REF)
    return TREE_TYPE (TREE_OPERAND (t, 1));
  else
    return build_pointer_type (TYPE_MAIN_VARIANT (TREE_TYPE (t)));
}

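/* Editorial sketch (hypothetical trees, not from the original
   sources): for a reference `s.i' with `struct S { int i; } s', no
   ref-all, union or VIEW_CONVERT case applies, so the type returned
   here is a pointer to the main variant of `int'.  A replacement

     MEM_REF (&s, (int *) 0)

   using that type for its offset operand therefore receives the same
   alias set as the original COMPONENT_REF.  */
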
/* Return whether the pointer-types T1 and T2 used to determine
   two alias sets of two references will yield the same answer
   from get_deref_alias_set.  */

bool
alias_ptr_types_compatible_p (tree t1, tree t2)
{
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return true;

  if (ref_all_alias_ptr_type_p (t1)
      || ref_all_alias_ptr_type_p (t2))
    return false;

  return (TYPE_MAIN_VARIANT (TREE_TYPE (t1))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (t2)));
}

/* Create an empty alias set entry.  */

alias_set_entry *
init_alias_set_entry (alias_set_type set)
{
  alias_set_entry *ase = ggc_alloc<alias_set_entry> ();
  ase->alias_set = set;
  ase->children = NULL;
  ase->has_zero_child = false;
  ase->is_pointer = false;
  ase->has_pointer = false;
  gcc_checking_assert (!get_alias_set_entry (set));
  (*alias_sets)[set] = ase;
  return ase;
}

/* Return the alias set for T, which may be either a type or an
   expression.  Call language-specific routine for help, if needed.  */

alias_set_type
get_alias_set (tree t)
{
  alias_set_type set;

  /* We cannot give up with -fno-strict-aliasing because we need to build
     proper type representation for possible functions which are built with
     -fstrict-aliasing.  */

  /* Return 0 if this or its type is an error.  */
  if (t == error_mark_node
      || (! TYPE_P (t)
	  && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
    return 0;

  /* We can be passed either an expression or a type.  This and the
     language-specific routine may make mutually-recursive calls to each other
     to figure out what to do.  At each juncture, we see if this is a tree
     that the language may need to handle specially.  First handle things that
     aren't types.  */
  if (! TYPE_P (t))
    {
      /* Give the language a chance to do something with this tree
	 before we look at it.  */
      STRIP_NOPS (t);
      set = lang_hooks.get_alias_set (t);
      if (set != -1)
	return set;

      /* Get the alias pointer-type to use or the outermost object
	 that we could have a pointer to.  */
      tree ptype = reference_alias_ptr_type_1 (&t);
      if (ptype != NULL)
	return get_deref_alias_set (ptype);

      /* If we've already determined the alias set for a decl, just return
	 it.  This is necessary for C++ anonymous unions, whose component
	 variables don't look like union members (boo!).  */
      if (VAR_P (t)
	  && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
	return MEM_ALIAS_SET (DECL_RTL (t));

      /* Now all we care about is the type.  */
      t = TREE_TYPE (t);
    }

  /* Variant qualifiers don't affect the alias set, so get the main
     variant.  */
  t = TYPE_MAIN_VARIANT (t);

  if (AGGREGATE_TYPE_P (t)
      && TYPE_TYPELESS_STORAGE (t))
    return 0;

  /* Always use the canonical type as well.  If this is a type that
     requires structural comparisons to identify compatible types
     use alias set zero.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (t))
    {
      /* Allow the language to specify another alias set for this
	 type.  */
      set = lang_hooks.get_alias_set (t);
      if (set != -1)
	return set;
      /* Handle structural type equality for pointer types, arrays and
	 vectors.  This is easy to do, because the code below ignores
	 canonical types on these anyway.  This is important for LTO, where
	 TYPE_CANONICAL for pointers cannot be meaningfully computed by the
	 frontend.  */
      if (canonical_type_used_p (t))
	{
	  /* In LTO we set canonical types for all types where it makes
	     sense to do so.  Double check we did not miss some type.  */
	  gcc_checking_assert (!in_lto_p || !type_with_alias_set_p (t));
	  return 0;
	}
    }
  else
    {
      t = TYPE_CANONICAL (t);
      gcc_checking_assert (!TYPE_STRUCTURAL_EQUALITY_P (t));
    }

  /* If this is a type with a known alias set, return it.  */
  gcc_checking_assert (t == TYPE_MAIN_VARIANT (t));
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    return TYPE_ALIAS_SET (t);

  /* We don't want to set TYPE_ALIAS_SET for incomplete types.  */
  if (!COMPLETE_TYPE_P (t))
    {
      /* For arrays with unknown size the conservative answer is the
	 alias set of the element type.  */
      if (TREE_CODE (t) == ARRAY_TYPE)
	return get_alias_set (TREE_TYPE (t));

      /* But return zero as a conservative answer for incomplete types.  */
      return 0;
    }

  /* See if the language has special handling for this type.  */
  set = lang_hooks.get_alias_set (t);
  if (set != -1)
    return set;

  /* There are no objects of FUNCTION_TYPE, so there's no point in
     using up an alias set for them.  (There are, of course, pointers
     and references to functions, but that's different.)  */
  else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
    set = 0;

  /* Unless the language specifies otherwise, let vector types alias
     their components.  This avoids some nasty type punning issues in
     normal usage.  And indeed lets vectors be treated more like an
     array slice.  */
  else if (TREE_CODE (t) == VECTOR_TYPE)
    set = get_alias_set (TREE_TYPE (t));

  /* Unless the language specifies otherwise, treat array types the
     same as their components.  This avoids the asymmetry we get
     through recording the components.  Consider accessing a
     character(kind=1) through a reference to a character(kind=1)[1:1].
     Or consider if we want to assign integer(kind=4)[0:D.1387] and
     integer(kind=4)[4] the same alias set or not.
     Just be pragmatic here and make sure the array and its element
     type get the same alias set assigned.  */
  else if (TREE_CODE (t) == ARRAY_TYPE
	   && (!TYPE_NONALIASED_COMPONENT (t)
	       || TYPE_STRUCTURAL_EQUALITY_P (t)))
    set = get_alias_set (TREE_TYPE (t));

  /* From the former common C and C++ langhook implementation:

     Unfortunately, there is no canonical form of a pointer type.
     In particular, if we have `typedef int I', then `int *', and
     `I *' are different types.  So, we have to pick a canonical
     representative.  We do this below.

     Technically, this approach is actually more conservative than
     it needs to be.  In particular, `const int *' and `int *'
     should be in different alias sets, according to the C and C++
     standard, since their types are not the same, and so,
     technically, an `int **' and `const int **' cannot point at
     the same thing.

     But, the standard is wrong.  In particular, this code is
     legal C++:

     int *ip;
     int **ipp = &ip;
     const int* const* cipp = ipp;
     And, it doesn't make sense for that to be legal unless you
     can dereference IPP and CIPP.  So, we ignore cv-qualifiers on
     the pointed-to types.  This issue has been reported to the
     C++ committee.

     For this reason go to the canonical type of the unqualified pointer
     type.  Until GCC 6 this code set all pointer sets to have the alias set
     of ptr_type_node but that is a bad idea, because it prevents
     disambiguations between pointers.  For Firefox this accounts for about
     20% of all disambiguations in the program.  */
  else if (POINTER_TYPE_P (t) && t != ptr_type_node)
    {
      tree p;
      auto_vec <bool, 8> reference;

      /* Unnest all pointers and references.
	 We also want to make pointer to array/vector equivalent to pointer to
	 its element (see the reasoning above).  Skip all those types, too.  */
      for (p = t; POINTER_TYPE_P (p)
	   || (TREE_CODE (p) == ARRAY_TYPE
	       && (!TYPE_NONALIASED_COMPONENT (p)
		   || !COMPLETE_TYPE_P (p)
		   || TYPE_STRUCTURAL_EQUALITY_P (p)))
	   || TREE_CODE (p) == VECTOR_TYPE;
	   p = TREE_TYPE (p))
	{
	  /* Ada supports recursive pointers.  Instead of doing a recursion
	     check just give up once the preallocated space of 8 elements
	     is up.  In this case just punt to the void * alias set.  */
	  if (reference.length () == 8)
	    {
	      p = ptr_type_node;
	      break;
	    }
	  if (TREE_CODE (p) == REFERENCE_TYPE)
	    /* In LTO we want languages that use references to be compatible
	       with languages that use pointers.  */
	    reference.safe_push (true && !in_lto_p);
	  if (TREE_CODE (p) == POINTER_TYPE)
	    reference.safe_push (false);
	}
      p = TYPE_MAIN_VARIANT (p);

      /* Make void * compatible with char * and also void **.
	 Programs are commonly violating TBAA by this.

	 We also make void * conflict with every pointer
	 (see record_component_aliases) and thus it is safe to use it for
	 pointers to types with TYPE_STRUCTURAL_EQUALITY_P.  */
      if (TREE_CODE (p) == VOID_TYPE || TYPE_STRUCTURAL_EQUALITY_P (p))
	set = get_alias_set (ptr_type_node);
      else
	{
	  /* Rebuild the pointer type starting from canonical types using
	     unqualified pointers and references only.  This way all such
	     pointers will have the same alias set and will conflict with
	     each other.

	     Most of the time we already have pointers or references of a
	     given type.  If not we build a new one just to be sure that if
	     someone later (probably only the middle-end can, as we should
	     assign all alias classes only after finishing the translation
	     unit) builds the pointer type, the canonical type will match.  */
	  p = TYPE_CANONICAL (p);
	  while (!reference.is_empty ())
	    {
	      if (reference.pop ())
		p = build_reference_type (p);
	      else
		p = build_pointer_type (p);
	      gcc_checking_assert (p == TYPE_MAIN_VARIANT (p));
	      /* build_pointer_type should always return the canonical type.
		 For LTO TYPE_CANONICAL may be NULL, because we do not compute
		 them.  Be sure that frontends do not glob canonical types of
		 pointers in unexpected ways and that p == TYPE_CANONICAL (p)
		 in all other cases.  */
	      gcc_checking_assert (!TYPE_CANONICAL (p)
				   || p == TYPE_CANONICAL (p));
	    }

	  /* Assign the alias set to both p and t.
	     We cannot call get_alias_set (p) here as that would trigger
	     infinite recursion when p == t.  In other cases it would just
	     trigger unnecessary legwork of rebuilding the pointer again.  */
	  gcc_checking_assert (p == TYPE_MAIN_VARIANT (p));
	  if (TYPE_ALIAS_SET_KNOWN_P (p))
	    set = TYPE_ALIAS_SET (p);
	  else
	    {
	      set = new_alias_set ();
	      TYPE_ALIAS_SET (p) = set;
	    }
	}
    }
  /* The alias set of ptr_type_node is special and serves as a universal
     pointer which is TBAA compatible with every other pointer type.  Be sure
     we have the alias set built even for LTO which otherwise keeps all
     TYPE_CANONICAL of pointer types NULL.  */
  else if (t == ptr_type_node)
    set = new_alias_set ();

  /* Otherwise make a new alias set for this type.  */
  else
    {
      /* Each canonical type gets its own alias set, so canonical types
	 shouldn't form a tree.  It doesn't really matter for types
	 we handle specially above, so only check it where it possibly
	 would result in a bogus alias set.  */
      gcc_checking_assert (TYPE_CANONICAL (t) == t);

      set = new_alias_set ();
    }

  TYPE_ALIAS_SET (t) = set;

  /* If this is an aggregate type or a complex type, we must record any
     component aliasing information.  */
  if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    record_component_aliases (t);

  /* We treat pointer types specially in alias_set_subset_of.  */
  if (POINTER_TYPE_P (t) && set)
    {
      alias_set_entry *ase = get_alias_set_entry (set);
      if (!ase)
	ase = init_alias_set_entry (set);
      ase->is_pointer = true;
      ase->has_pointer = true;
    }

  return set;
}

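/* An illustrative example of the pointer handling above (editorial,
   not from the original sources): with

     typedef int I;
     int *ip;
     I *qp;

   both pointer types are unnested down to `int', rebuilt from
   TYPE_CANONICAL of `int', and thus share one alias set, so stores
   through `ip' and `qp' conflict; `int *' and `float *' still receive
   distinct sets.  */
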
/* Return a brand-new alias set.  */

alias_set_type
new_alias_set (void)
{
  if (alias_sets == 0)
    vec_safe_push (alias_sets, (alias_set_entry *) NULL);
  vec_safe_push (alias_sets, (alias_set_entry *) NULL);
  return alias_sets->length () - 1;
}

/* Indicate that things in SUBSET can alias things in SUPERSET, but that
   not everything that aliases SUPERSET also aliases SUBSET.  For example,
   in C, a store to an `int' can alias a load of a structure containing an
   `int', and vice versa.  But it can't alias a load of a 'double' member
   of the same structure.  Here, the structure would be the SUPERSET and
   `int' the SUBSET.  This relationship is also described in the comment at
   the beginning of this file.

   This function should be called only once per SUPERSET/SUBSET pair.

   It is illegal for SUPERSET to be zero; everything is implicitly a
   subset of alias set zero.  */

void
record_alias_subset (alias_set_type superset, alias_set_type subset)
{
  alias_set_entry *superset_entry;
  alias_set_entry *subset_entry;

  /* It is possible in complex type situations for both sets to be the same,
     in which case we can ignore this operation.  */
  if (superset == subset)
    return;

  gcc_assert (superset);

  superset_entry = get_alias_set_entry (superset);
  if (superset_entry == 0)
    {
      /* Create an entry for the SUPERSET, so that we have a place to
	 attach the SUBSET.  */
      superset_entry = init_alias_set_entry (superset);
    }

  if (subset == 0)
    superset_entry->has_zero_child = 1;
  else
    {
      subset_entry = get_alias_set_entry (subset);
      if (!superset_entry->children)
	superset_entry->children
	  = hash_map<alias_set_hash, int>::create_ggc (64);
      /* If there is an entry for the subset, enter all of its children
	 (if they are not already present) as children of the SUPERSET.  */
      if (subset_entry)
	{
	  if (subset_entry->has_zero_child)
	    superset_entry->has_zero_child = true;
	  if (subset_entry->has_pointer)
	    superset_entry->has_pointer = true;

	  if (subset_entry->children)
	    {
	      hash_map<alias_set_hash, int>::iterator iter
		= subset_entry->children->begin ();
	      for (; iter != subset_entry->children->end (); ++iter)
		superset_entry->children->put ((*iter).first, (*iter).second);
	    }
	}

      /* Enter the SUBSET itself as a child of the SUPERSET.  */
      superset_entry->children->put (subset, 0);
    }
}

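/* Usage sketch (editorial, not from the original sources): for

     struct S { int i; double d; };

   record_component_aliases below effectively performs

     record_alias_subset (get_alias_set (S), get_alias_set (int));
     record_alias_subset (get_alias_set (S), get_alias_set (double));

   after which `struct S' conflicts with both `int' and `double',
   while `int' and `double' remain disjoint.  */
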
/* Record that component types of TYPE, if any, are part of that type for
   aliasing purposes.  For record types, we only record component types
   for fields that are not marked non-addressable.  For array types, we
   only record the component type if it is not marked non-aliased.  */

void
record_component_aliases (tree type)
{
  alias_set_type superset = get_alias_set (type);
  tree field;

  if (superset == 0)
    return;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (field = TYPE_FIELDS (type); field != 0; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_NONADDRESSABLE_P (field))
	  {
	    /* LTO type merging does not make any difference between
	       component pointer types.  We may have

	       struct foo {int *a;};

	       as TYPE_CANONICAL of

	       struct bar {float *a;};

	       Because accesses to int * and float * do not alias, we would
	       get a false negative when accessing the same memory location
	       by float ** and bar *.  We thus record the canonical type as:

	       struct {void *a;};

	       void * is special cased and works as a universal pointer type.
	       Accesses to it conflict with accesses to any other pointer
	       type.  */
	    tree t = TREE_TYPE (field);
	    if (in_lto_p)
	      {
		/* VECTOR_TYPE and ARRAY_TYPE share the alias set with their
		   element type and that type has to be normalized to void *,
		   too, in the case it is a pointer.  */
		while (!canonical_type_used_p (t) && !POINTER_TYPE_P (t))
		  {
		    gcc_checking_assert (TYPE_STRUCTURAL_EQUALITY_P (t));
		    t = TREE_TYPE (t);
		  }
		if (POINTER_TYPE_P (t))
		  t = ptr_type_node;
		else if (flag_checking)
		  gcc_checking_assert (get_alias_set (t)
				       == get_alias_set (TREE_TYPE (field)));
	      }

	    record_alias_subset (superset, get_alias_set (t));
	  }
      break;

    case COMPLEX_TYPE:
      record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    /* VECTOR_TYPE and ARRAY_TYPE share the alias set with their
       element type.  */

    default:
      break;
    }
}

/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  */

static GTY(()) alias_set_type varargs_set = -1;

alias_set_type
get_varargs_alias_set (void)
{
#if 1
  /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
     varargs alias set to an INDIRECT_REF (FIXME!), so we can't
     consistently use the varargs alias set for loads from the varargs
     area.  So don't use it anywhere.  */
  return 0;
#else
  if (varargs_set == -1)
    varargs_set = new_alias_set ();

  return varargs_set;
#endif
}

/* Likewise, but used for the fixed portions of the frame, e.g., register
   save areas.  */

static GTY(()) alias_set_type frame_set = -1;

alias_set_type
get_frame_alias_set (void)
{
  if (frame_set == -1)
    frame_set = new_alias_set ();

  return frame_set;
}

/* Create a new, unique base with id ID.  */

static rtx
unique_base_value (HOST_WIDE_INT id)
{
  return gen_rtx_ADDRESS (Pmode, id);
}

/* Return true if accesses based on any other base value cannot alias
   those based on X.  */

static bool
unique_base_value_p (rtx x)
{
  return GET_CODE (x) == ADDRESS && GET_MODE (x) == Pmode;
}

/* Return true if X is known to be a base value.  */

static bool
known_base_value_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case LABEL_REF:
    case SYMBOL_REF:
      return true;

    case ADDRESS:
      /* Arguments may or may not be bases; we don't know for sure.  */
      return GET_MODE (x) != VOIDmode;

    default:
      return false;
    }
}

/* Inside SRC, the source of a SET, find a base address.  */

static rtx
find_base_value (rtx src)
{
  unsigned int regno;
  scalar_int_mode int_mode;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  src = FIND_BASE_TERM (src);
#endif

  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return src;

    case REG:
      regno = REGNO (src);
      /* At the start of a function, argument registers have known base
	 values which may be lost later.  Returning an ADDRESS
	 expression here allows optimization based on argument values
	 even when the argument registers are used for other purposes.  */
      if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
	return new_reg_base_value[regno];

      /* If a pseudo has a known base value, return it.  Do not do this
	 for non-fixed hard regs since it can result in a circular
	 dependency chain for registers which have values at function entry.

	 The test above is not sufficient because the scheduler may move
	 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN.  */
      if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
	  && regno < vec_safe_length (reg_base_value))
	{
	  /* If we're inside init_alias_analysis, use new_reg_base_value
	     to reduce the number of relaxation iterations.  */
	  if (new_reg_base_value && new_reg_base_value[regno]
	      && DF_REG_DEF_COUNT (regno) == 1)
	    return new_reg_base_value[regno];

	  if ((*reg_base_value)[regno])
	    return (*reg_base_value)[regno];
	}

      return 0;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
	 copying-arguments block; it is too hard to track changes
	 otherwise.  */
      if (copying_arguments
	  && (XEXP (src, 0) == arg_pointer_rtx
	      || (GET_CODE (XEXP (src, 0)) == PLUS
		  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
	return arg_base_value;
      return 0;

    case CONST:
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
	break;

      /* fall through */

    case PLUS:
    case MINUS:
      {
	rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

	/* If either operand is a REG that is a known pointer, then it
	   is the base.  */
	if (REG_P (src_0) && REG_POINTER (src_0))
	  return find_base_value (src_0);
	if (REG_P (src_1) && REG_POINTER (src_1))
	  return find_base_value (src_1);

	/* If either operand is a REG, then see if we already have
	   a known value for it.  */
	if (REG_P (src_0))
	  {
	    temp = find_base_value (src_0);
	    if (temp != 0)
	      src_0 = temp;
	  }

	if (REG_P (src_1))
	  {
	    temp = find_base_value (src_1);
	    if (temp != 0)
	      src_1 = temp;
	  }

	/* If either base is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (src_0 != 0 && known_base_value_p (src_0))
	  return src_0;

	if (src_1 != 0 && known_base_value_p (src_1))
	  return src_1;

	/* Guess which operand is the base address:
	   If either operand is a symbol, then it is the base.  If
	   either operand is a CONST_INT, then the other is the base.  */
	if (CONST_INT_P (src_1) || CONSTANT_P (src_0))
	  return find_base_value (src_0);
	else if (CONST_INT_P (src_0) || CONSTANT_P (src_1))
	  return find_base_value (src_1);

	return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* If the second operand is constant, set the base
	 address to the first operand.  */
      if (CONST_INT_P (XEXP (src, 1)) && INTVAL (XEXP (src, 1)) != 0)
	return find_base_value (XEXP (src, 0));
      return 0;

    case TRUNCATE:
      /* As we do not know which address space the pointer is referring to, we
	 can handle this only if the target does not support different pointer
	 or address modes depending on the address space.  */
      if (!target_default_pointer_address_modes_p ())
	break;
      if (!is_a <scalar_int_mode> (GET_MODE (src), &int_mode)
	  || GET_MODE_PRECISION (int_mode) < GET_MODE_PRECISION (Pmode))
	break;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_value (XEXP (src, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* used for NT/Alpha pointers */
      /* As we do not know which address space the pointer is referring to, we
	 can handle this only if the target does not support different pointer
	 or address modes depending on the address space.  */
      if (!target_default_pointer_address_modes_p ())
	break;

      {
	rtx temp = find_base_value (XEXP (src, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    default:
      break;
    }

  return 0;
}

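/* Illustrative sketch (hypothetical RTL, editorial only): when scanning

     (set (reg 100) (symbol_ref ("x")))
     (set (reg 101) (plus (reg 100) (const_int 4)))

   find_base_value gives both registers the base (symbol_ref ("x")),
   so accesses based on them may alias each other; a register loaded
   from the argument area while copying_arguments is true instead gets
   arg_base_value, the single id-0 ADDRESS.  */
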
9fc37b2b
RS
1514/* Called from init_alias_analysis indirectly through note_stores,
1515 or directly if DEST is a register with a REG_NOALIAS note attached.
1516 SET is null in the latter case. */
9ae8ffe7 1517
d4b60170 1518/* While scanning insns to find base values, reg_seen[N] is nonzero if
9ae8ffe7 1519 register N has been set in this function. */
d630245f 1520static sbitmap reg_seen;
9ae8ffe7 1521
2a2c8203 1522static void
7bc980e1 1523record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
9ae8ffe7 1524{
b3694847 1525 unsigned regno;
9ae8ffe7 1526 rtx src;
c28b4e40 1527 int n;
9ae8ffe7 1528
f8cfc6aa 1529 if (!REG_P (dest))
9ae8ffe7
JL
1530 return;
1531
fb6754f0 1532 regno = REGNO (dest);
9ae8ffe7 1533
9771b263 1534 gcc_checking_assert (regno < reg_base_value->length ());
ac606739 1535
dc8afb70 1536 n = REG_NREGS (dest);
c28b4e40
JW
1537 if (n != 1)
1538 {
1539 while (--n >= 0)
1540 {
d7c028c0 1541 bitmap_set_bit (reg_seen, regno + n);
c28b4e40
JW
1542 new_reg_base_value[regno + n] = 0;
1543 }
1544 return;
1545 }
1546
9ae8ffe7
JL
1547 if (set)
1548 {
1549 /* A CLOBBER wipes out any old value but does not prevent a previously
1550 unset register from acquiring a base address (i.e. reg_seen is not
1551 set). */
1552 if (GET_CODE (set) == CLOBBER)
1553 {
ec907dd8 1554 new_reg_base_value[regno] = 0;
9ae8ffe7
JL
1555 return;
1556 }
1557 src = SET_SRC (set);
1558 }
1559 else
1560 {
9fc37b2b 1561 /* There's a REG_NOALIAS note against DEST. */
d7c028c0 1562 if (bitmap_bit_p (reg_seen, regno))
9ae8ffe7 1563 {
ec907dd8 1564 new_reg_base_value[regno] = 0;
9ae8ffe7
JL
1565 return;
1566 }
d7c028c0 1567 bitmap_set_bit (reg_seen, regno);
9fc37b2b 1568 new_reg_base_value[regno] = unique_base_value (unique_id++);
9ae8ffe7
JL
1569 return;
1570 }
1571
5da6f168
RS
1572 /* If this is not the first set of REGNO, see whether the new value
1573 is related to the old one. There are two cases of interest:
1574
1575 (1) The register might be assigned an entirely new value
1576 that has the same base term as the original set.
1577
1578 (2) The set might be a simple self-modification that
1579 cannot change REGNO's base value.
1580
1581 If neither case holds, reject the original base value as invalid.
1582 Note that the following situation is not detected:
1583
c22cacf3 1584 extern int x, y; int *p = &x; p += (&y-&x);
5da6f168 1585
9ae8ffe7
JL
1586 ANSI C does not allow computing the difference of addresses
1587 of distinct top level objects. */
5da6f168
RS
1588 if (new_reg_base_value[regno] != 0
1589 && find_base_value (src) != new_reg_base_value[regno])
9ae8ffe7
JL
1590 switch (GET_CODE (src))
1591 {
2a2c8203 1592 case LO_SUM:
9ae8ffe7
JL
1593 case MINUS:
1594 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
ec907dd8 1595 new_reg_base_value[regno] = 0;
9ae8ffe7 1596 break;
61f0131c
R
1597 case PLUS:
1598 /* If the value we add in the PLUS is also a valid base value,
1599 this might be the actual base value, and the original value
1600 an index. */
1601 {
1602 rtx other = NULL_RTX;
1603
1604 if (XEXP (src, 0) == dest)
1605 other = XEXP (src, 1);
1606 else if (XEXP (src, 1) == dest)
1607 other = XEXP (src, 0);
1608
1609 if (! other || find_base_value (other))
1610 new_reg_base_value[regno] = 0;
1611 break;
1612 }
9ae8ffe7 1613 case AND:
481683e1 1614 if (XEXP (src, 0) != dest || !CONST_INT_P (XEXP (src, 1)))
ec907dd8 1615 new_reg_base_value[regno] = 0;
9ae8ffe7 1616 break;
9ae8ffe7 1617 default:
ec907dd8 1618 new_reg_base_value[regno] = 0;
9ae8ffe7
JL
1619 break;
1620 }
1621 /* If this is the first set of a register, record the value. */
1622 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
d7c028c0 1623 && ! bitmap_bit_p (reg_seen, regno) && new_reg_base_value[regno] == 0)
ec907dd8 1624 new_reg_base_value[regno] = find_base_value (src);
9ae8ffe7 1625
d7c028c0 1626 bitmap_set_bit (reg_seen, regno);
9ae8ffe7
JL
1627}
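
/* For illustration (hypothetical RTL, not from the original sources):
   once "p" has a base value from "p = &x", the self-increment
   (set (reg p) (plus (reg p) (const_int 4))) reaches the PLUS case
   above; the other operand is a CONST_INT with no base value of its
   own, so the recorded base survives.  Adding another pointer instead,
   e.g. (plus (reg p) (reg q)), would invalidate it.  */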

/* Return REG_BASE_VALUE for REGNO.  Selective scheduler uses this to avoid
   using hard registers with non-null REG_BASE_VALUE for renaming.  */
rtx
get_reg_base_value (unsigned int regno)
{
  return (*reg_base_value)[regno];
}

/* If a value is known for REGNO, return it.  */

rtx
get_reg_known_value (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < vec_safe_length (reg_known_value))
	return (*reg_known_value)[regno];
    }
  return NULL;
}

/* Set it.  */

static void
set_reg_known_value (unsigned int regno, rtx val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < vec_safe_length (reg_known_value))
	(*reg_known_value)[regno] = val;
    }
}

/* Similarly for reg_known_equiv_p.  */

bool
get_reg_known_equiv_p (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < vec_safe_length (reg_known_value))
	return bitmap_bit_p (reg_known_equiv_p, regno);
    }
  return false;
}

static void
set_reg_known_equiv_p (unsigned int regno, bool val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < vec_safe_length (reg_known_value))
	{
	  if (val)
	    bitmap_set_bit (reg_known_equiv_p, regno);
	  else
	    bitmap_clear_bit (reg_known_equiv_p, regno);
	}
    }
}


/* Returns a canonical version of X, from the point of view of alias
   analysis.  (For example, if X is a MEM whose address is a register,
   and the register has a known value (say a SYMBOL_REF), then a MEM
   whose address is the SYMBOL_REF is returned.)  */

rtx
canon_rtx (rtx x)
{
  /* Recursively look for equivalences.  */
  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    {
      rtx t = get_reg_known_value (REGNO (x));
      if (t == x)
	return x;
      if (t)
	return canon_rtx (t);
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx x0 = canon_rtx (XEXP (x, 0));
      rtx x1 = canon_rtx (XEXP (x, 1));

      if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
	return simplify_gen_binary (PLUS, GET_MODE (x), x0, x1);
    }

  /* This gives us much better alias analysis when called from
     the loop optimizer.  Note we want to leave the original
     MEM alone, but need to return the canonicalized MEM with
     all the flags with their original values.  */
  else if (MEM_P (x))
    x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));

  return x;
}
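
/* For illustration (hypothetical pseudo-RTL, not from the original
   sources): if (reg 100) has the known value (symbol_ref "x"), then
   canon_rtx on (plus (reg 100) (const_int 4)) yields
   (plus (symbol_ref "x") (const_int 4)), which the offset-based checks
   below can compare directly against other canonicalized addresses.  */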

/* Return 1 if X and Y are identical-looking rtx's.
   Expect that X and Y have already been canonicalized.

   We use the data in reg_known_value above to see if two registers with
   different numbers are, in fact, equivalent.  */

static int
rtx_equal_for_memref_p (const_rtx x, const_rtx y)
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0 && y == 0)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  if (x == y)
    return 1;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Some RTL can be compared without a recursive examination.  */
  switch (code)
    {
    case REG:
      return REGNO (x) == REGNO (y);

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);

    case SYMBOL_REF:
      return compare_base_symbol_refs (x, y) == 1;

    case ENTRY_VALUE:
      /* This is magic, don't go through canonicalization et al.  */
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    case VALUE:
    CASE_CONST_UNIQUE:
      /* Pointer equality guarantees equality for these nodes.  */
      return 0;

    default:
      break;
    }

  /* canon_rtx knows how to handle plus.  No need to canonicalize.  */
  if (code == PLUS)
    return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (COMMUTATIVE_P (x))
    {
      rtx xop0 = canon_rtx (XEXP (x, 0));
      rtx yop0 = canon_rtx (XEXP (y, 0));
      rtx yop1 = canon_rtx (XEXP (y, 1));

      return ((rtx_equal_for_memref_p (xop0, yop0)
	       && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
	      || (rtx_equal_for_memref_p (xop0, yop1)
		  && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
    }
  else if (NON_COMMUTATIVE_P (x))
    {
      return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				      canon_rtx (XEXP (y, 0)))
	      && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
					 canon_rtx (XEXP (y, 1))));
    }
  else if (UNARY_P (x))
    return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				   canon_rtx (XEXP (y, 0)));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.

     Limit cases to types which actually appear in addresses.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'p':
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return 0;
	  break;

	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
					canon_rtx (XVECEXP (y, i, j))) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
				      canon_rtx (XEXP (y, i))) == 0)
	    return 0;
	  break;

	/* This can happen for asm operands.  */
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	/* This can happen for an asm which clobbers memory.  */
	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}
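
/* For illustration (hypothetical operands, not from the original
   sources): (plus:SI (reg 100) (const_int 4)) and
   (plus:SI (const_int 4) (reg 100)) compare equal through the PLUS case
   above, which tries both operand orders; (plus:SI ...) never matches
   (plus:DI ...), however, because the modes differ.  */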

static rtx
find_base_term (rtx x)
{
  cselib_val *val;
  struct elt_loc_list *l, *f;
  rtx ret;
  scalar_int_mode int_mode;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  x = FIND_BASE_TERM (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
      return REG_BASE_VALUE (x);

    case TRUNCATE:
      /* As we do not know which address space the pointer is referring
	 to, we can handle this only if the target does not support
	 different pointer or address modes depending on the address
	 space.  */
      if (!target_default_pointer_address_modes_p ())
	return 0;
      if (!is_a <scalar_int_mode> (GET_MODE (x), &int_mode)
	  || GET_MODE_PRECISION (int_mode) < GET_MODE_PRECISION (Pmode))
	return 0;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_term (XEXP (x, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* Used for Alpha/NT pointers.  */
      /* As we do not know which address space the pointer is referring
	 to, we can handle this only if the target does not support
	 different pointer or address modes depending on the address
	 space.  */
      if (!target_default_pointer_address_modes_p ())
	return 0;

      {
	rtx temp = find_base_term (XEXP (x, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    case VALUE:
      val = CSELIB_VAL_PTR (x);
      ret = NULL_RTX;

      if (!val)
	return ret;

      if (cselib_sp_based_value_p (val))
	return static_reg_base_value[STACK_POINTER_REGNUM];

      f = val->locs;
      /* Temporarily reset val->locs to avoid infinite recursion.  */
      val->locs = NULL;

      for (l = f; l; l = l->next)
	if (GET_CODE (l->loc) == VALUE
	    && CSELIB_VAL_PTR (l->loc)->locs
	    && !CSELIB_VAL_PTR (l->loc)->locs->next
	    && CSELIB_VAL_PTR (l->loc)->locs->loc == x)
	  continue;
	else if ((ret = find_base_term (l->loc)) != 0)
	  break;

      val->locs = f;
      return ret;

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_term (XEXP (x, 1));

    case CONST:
      x = XEXP (x, 0);
      if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
	return 0;
      /* Fall through.  */
    case PLUS:
    case MINUS:
      {
	rtx tmp1 = XEXP (x, 0);
	rtx tmp2 = XEXP (x, 1);

	/* This is a little bit tricky since we have to determine which of
	   the two operands represents the real base address.  Otherwise this
	   routine may return the index register instead of the base register.

	   That may cause us to believe no aliasing was possible, when in
	   fact aliasing is possible.

	   We use a few simple tests to guess the base register.  Additional
	   tests can certainly be added.  For example, if one of the operands
	   is a shift or multiply, then it must be the index register and the
	   other operand is the base register.  */

	if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
	  return find_base_term (tmp2);

	/* If either operand is known to be a pointer, then prefer it
	   to determine the base term.  */
	if (REG_P (tmp1) && REG_POINTER (tmp1))
	  ;
	else if (REG_P (tmp2) && REG_POINTER (tmp2))
	  std::swap (tmp1, tmp2);
	/* If the second argument is a constant that has a base term,
	   prefer it over the variable tmp1.  See PR64025.  */
	else if (CONSTANT_P (tmp2) && !CONST_INT_P (tmp2))
	  std::swap (tmp1, tmp2);

	/* Go ahead and find the base term for both operands.  If either base
	   term is from a pointer or is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	rtx base = find_base_term (tmp1);
	if (base != NULL_RTX
	    && ((REG_P (tmp1) && REG_POINTER (tmp1))
		|| known_base_value_p (base)))
	  return base;
	base = find_base_term (tmp2);
	if (base != NULL_RTX
	    && ((REG_P (tmp2) && REG_POINTER (tmp2))
		|| known_base_value_p (base)))
	  return base;

	/* We could not determine which of the two operands was the
	   base register and which was the index.  So we can determine
	   nothing from the base alias check.  */
	return 0;
      }

    case AND:
      if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
	return find_base_term (XEXP (x, 0));
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    default:
      return 0;
    }
}
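
/* For illustration (hypothetical RTL, not from the original sources):
   for (plus (reg 100) (reg 101)) where only (reg 101) is marked
   REG_POINTER, the PLUS case above swaps the operands and returns the
   base term of the pointer register, avoiding the trap of picking the
   index register as the base.  */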

/* Return true if accesses to address X may alias accesses based
   on the stack pointer.  */

bool
may_be_sp_based_p (rtx x)
{
  rtx base = find_base_term (x);
  return !base || base == static_reg_base_value[STACK_POINTER_REGNUM];
}

/* BASE1 and BASE2 are decls.  Return 1 if they refer to the same object,
   0 if they refer to different objects and -1 if we cannot decide.  */

int
compare_base_decls (tree base1, tree base2)
{
  int ret;
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
  if (base1 == base2)
    return 1;

  /* If we have two register decls with register specification we
     cannot decide unless their assembler names are the same.  */
  if (DECL_REGISTER (base1)
      && DECL_REGISTER (base2)
      && HAS_DECL_ASSEMBLER_NAME_P (base1)
      && HAS_DECL_ASSEMBLER_NAME_P (base2)
      && DECL_ASSEMBLER_NAME_SET_P (base1)
      && DECL_ASSEMBLER_NAME_SET_P (base2))
    {
      if (DECL_ASSEMBLER_NAME_RAW (base1) == DECL_ASSEMBLER_NAME_RAW (base2))
	return 1;
      return -1;
    }

  /* Declarations of non-automatic variables may have aliases.  All other
     decls are unique.  */
  if (!decl_in_symtab_p (base1)
      || !decl_in_symtab_p (base2))
    return 0;

  /* Don't cause symbols to be inserted by the act of checking.  */
  symtab_node *node1 = symtab_node::get (base1);
  if (!node1)
    return 0;
  symtab_node *node2 = symtab_node::get (base2);
  if (!node2)
    return 0;

  ret = node1->equal_address_to (node2, true);
  return ret;
}

/* Same as compare_base_decls but for SYMBOL_REF.  */

static int
compare_base_symbol_refs (const_rtx x_base, const_rtx y_base)
{
  tree x_decl = SYMBOL_REF_DECL (x_base);
  tree y_decl = SYMBOL_REF_DECL (y_base);
  bool binds_def = true;

  if (XSTR (x_base, 0) == XSTR (y_base, 0))
    return 1;
  if (x_decl && y_decl)
    return compare_base_decls (x_decl, y_decl);
  if (x_decl || y_decl)
    {
      if (!x_decl)
	{
	  std::swap (x_decl, y_decl);
	  std::swap (x_base, y_base);
	}
      /* We handle specially only section anchors.  Other labels may
	 overlap with user variables in an arbitrary way.  */
      if (!SYMBOL_REF_HAS_BLOCK_INFO_P (y_base))
	return -1;
      /* Anchors contain static VAR_DECLs and CONST_DECLs.  We are safe
	 to ignore CONST_DECLs because they are readonly.  */
      if (!VAR_P (x_decl)
	  || (!TREE_STATIC (x_decl) && !TREE_PUBLIC (x_decl)))
	return 0;

      symtab_node *x_node = symtab_node::get_create (x_decl)
			    ->ultimate_alias_target ();
      /* An external variable cannot be in a section anchor.  */
      if (!x_node->definition)
	return 0;
      x_base = XEXP (DECL_RTL (x_node->decl), 0);
      /* If not in an anchor, we can disambiguate.  */
      if (!SYMBOL_REF_HAS_BLOCK_INFO_P (x_base))
	return 0;

      /* We have an alias of an anchored variable.  If it can be
	 interposed, we must assume it may or may not alias its anchor.  */
      binds_def = decl_binds_to_current_def_p (x_decl);
    }
  /* If we have a variable in a section anchor, we can compare by offset.  */
  if (SYMBOL_REF_HAS_BLOCK_INFO_P (x_base)
      && SYMBOL_REF_HAS_BLOCK_INFO_P (y_base))
    {
      if (SYMBOL_REF_BLOCK (x_base) != SYMBOL_REF_BLOCK (y_base))
	return 0;
      if (SYMBOL_REF_BLOCK_OFFSET (x_base) == SYMBOL_REF_BLOCK_OFFSET (y_base))
	return binds_def ? 1 : -1;
      if (SYMBOL_REF_ANCHOR_P (x_base) != SYMBOL_REF_ANCHOR_P (y_base))
	return -1;
      return 0;
    }
  /* In general we assume that memory locations pointed to by different
     labels may overlap in undefined ways.  */
  return -1;
}

/* Return 0 if the addresses X and Y are known to point to different
   objects, 1 if they might be pointers to the same object.  */

static int
base_alias_check (rtx x, rtx x_base, rtx y, rtx y_base,
		  machine_mode x_mode, machine_mode y_mode)
{
  /* If the address itself has no known base, see if a known equivalent
     value has one.  If either address still has no known base, nothing
     is known about aliasing.  */
  if (x_base == 0)
    {
      rtx x_c;

      if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
	return 1;

      x_base = find_base_term (x_c);
      if (x_base == 0)
	return 1;
    }

  if (y_base == 0)
    {
      rtx y_c;
      if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
	return 1;

      y_base = find_base_term (y_c);
      if (y_base == 0)
	return 1;
    }

  /* If the base addresses are equal nothing is known about aliasing.  */
  if (rtx_equal_p (x_base, y_base))
    return 1;

  /* The base addresses are different expressions.  If they are not accessed
     via AND, there is no conflict.  We can bring knowledge of object
     alignment into play here.  For example, on alpha, "char a, b;" can
     alias one another, though "char a; long b;" cannot.  AND addresses may
     implicitly alias surrounding objects; i.e. unaligned access in DImode
     via AND address can alias all surrounding object types except those
     with alignment 8 or higher.  */
  if (GET_CODE (x) == AND && GET_CODE (y) == AND)
    return 1;
  if (GET_CODE (x) == AND
      && (!CONST_INT_P (XEXP (x, 1))
	  || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
    return 1;
  if (GET_CODE (y) == AND
      && (!CONST_INT_P (XEXP (y, 1))
	  || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
    return 1;

  /* Differing symbols not accessed via AND never alias.  */
  if (GET_CODE (x_base) == SYMBOL_REF && GET_CODE (y_base) == SYMBOL_REF)
    return compare_base_symbol_refs (x_base, y_base) != 0;

  if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
    return 0;

  if (unique_base_value_p (x_base) || unique_base_value_p (y_base))
    return 0;

  return 1;
}
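
/* For illustration (hypothetical bases, not from the original sources):
   if x_base is (symbol_ref "a") and y_base is (symbol_ref "b"), neither
   accessed via AND, and compare_base_symbol_refs can prove the symbols
   distinct, the SYMBOL_REF check above returns 0 (no aliasing).
   Likewise two distinct unique base values, such as the results of two
   malloc-like calls carrying REG_NOALIAS notes, cannot alias.  */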

/* Return TRUE if EXPR refers to a VALUE whose uid is greater than
   (or equal to) that of V.  */

static bool
refs_newer_value_p (const_rtx expr, rtx v)
{
  int minuid = CSELIB_VAL_PTR (v)->uid;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, expr, NONCONST)
    if (GET_CODE (*iter) == VALUE && CSELIB_VAL_PTR (*iter)->uid >= minuid)
      return true;
  return false;
}

/* Convert the address X into something we can use.  This is done by returning
   it unchanged unless it is a VALUE or VALUE +/- constant; for VALUE
   we call cselib to get a more useful rtx.  */

rtx
get_addr (rtx x)
{
  cselib_val *v;
  struct elt_loc_list *l;

  if (GET_CODE (x) != VALUE)
    {
      if ((GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
	  && GET_CODE (XEXP (x, 0)) == VALUE
	  && CONST_SCALAR_INT_P (XEXP (x, 1)))
	{
	  rtx op0 = get_addr (XEXP (x, 0));
	  if (op0 != XEXP (x, 0))
	    {
	      if (GET_CODE (x) == PLUS
		  && GET_CODE (XEXP (x, 1)) == CONST_INT)
		return plus_constant (GET_MODE (x), op0, INTVAL (XEXP (x, 1)));
	      return simplify_gen_binary (GET_CODE (x), GET_MODE (x),
					  op0, XEXP (x, 1));
	    }
	}
      return x;
    }
  v = CSELIB_VAL_PTR (x);
  if (v)
    {
      bool have_equivs = cselib_have_permanent_equivalences ();
      if (have_equivs)
	v = canonical_cselib_val (v);
      for (l = v->locs; l; l = l->next)
	if (CONSTANT_P (l->loc))
	  return l->loc;
      for (l = v->locs; l; l = l->next)
	if (!REG_P (l->loc) && !MEM_P (l->loc)
	    /* Avoid infinite recursion when potentially dealing with
	       var-tracking artificial equivalences, by skipping the
	       equivalences themselves, and not choosing expressions
	       that refer to newer VALUEs.  */
	    && (!have_equivs
		|| (GET_CODE (l->loc) != VALUE
		    && !refs_newer_value_p (l->loc, x))))
	  return l->loc;
      if (have_equivs)
	{
	  for (l = v->locs; l; l = l->next)
	    if (REG_P (l->loc)
		|| (GET_CODE (l->loc) != VALUE
		    && !refs_newer_value_p (l->loc, x)))
	      return l->loc;
	  /* Return the canonical value.  */
	  return v->val_rtx;
	}
      if (v->locs)
	return v->locs->loc;
    }
  return x;
}

/* Return the address of the (N_REFS + 1)th memory reference to ADDR
   where SIZE is the size in bytes of the memory reference.  If ADDR
   is not modified by the memory reference then ADDR is returned.  */

static rtx
addr_side_effect_eval (rtx addr, poly_int64 size, int n_refs)
{
  poly_int64 offset = 0;

  switch (GET_CODE (addr))
    {
    case PRE_INC:
      offset = (n_refs + 1) * size;
      break;
    case PRE_DEC:
      offset = -(n_refs + 1) * size;
      break;
    case POST_INC:
      offset = n_refs * size;
      break;
    case POST_DEC:
      offset = -n_refs * size;
      break;

    default:
      return addr;
    }

  addr = plus_constant (GET_MODE (addr), XEXP (addr, 0), offset);
  addr = canon_rtx (addr);

  return addr;
}

/* Return TRUE if an object X sized at XSIZE bytes and another object
   Y sized at YSIZE bytes, starting C bytes after X, may overlap.  If
   any of the sizes is zero, assume an overlap, otherwise use the
   absolute value of the sizes as the actual sizes.  */

static inline bool
offset_overlap_p (poly_int64 c, poly_int64 xsize, poly_int64 ysize)
{
  if (known_eq (xsize, 0) || known_eq (ysize, 0))
    return true;

  if (maybe_ge (c, 0))
    return maybe_gt (maybe_lt (xsize, 0) ? -xsize : xsize, c);
  else
    return maybe_gt (maybe_lt (ysize, 0) ? -ysize : ysize, -c);
}

/* Return one if X and Y (memory addresses) reference the
   same location in memory or if the references overlap.
   Return zero if they do not overlap, else return
   minus one in which case they still might reference the same location.

   C is an offset accumulator.  When
   C is nonzero, we are testing aliases between X and Y + C.
   XSIZE is the size in bytes of the X reference,
   similarly YSIZE is the size in bytes for Y.
   Expect that canon_rtx has already been called for X and Y.

   If XSIZE or YSIZE is zero, we do not know the amount of memory being
   referenced (the reference was BLKmode), so make the most pessimistic
   assumptions.

   If XSIZE or YSIZE is negative, we may access memory outside the object
   being referenced as a side effect.  This can happen when using AND to
   align memory references, as is done on the Alpha.

   Nice to notice that varying addresses cannot conflict with fp if no
   local variables had their addresses taken, but that's too hard now.

   ??? Contrary to the tree alias oracle this does not return
   one for X + non-constant and Y + non-constant when X and Y are equal.
   If that is fixed the TBAA hack for union type-punning can be removed.  */
2373 poly_int64 c)
9ae8ffe7 2374{
eab5c70a 2375 if (GET_CODE (x) == VALUE)
5312b066
JJ
2376 {
2377 if (REG_P (y))
2378 {
24f8d71e
JJ
2379 struct elt_loc_list *l = NULL;
2380 if (CSELIB_VAL_PTR (x))
a5628378
AO
2381 for (l = canonical_cselib_val (CSELIB_VAL_PTR (x))->locs;
2382 l; l = l->next)
24f8d71e
JJ
2383 if (REG_P (l->loc) && rtx_equal_for_memref_p (l->loc, y))
2384 break;
5312b066
JJ
2385 if (l)
2386 x = y;
2387 else
2388 x = get_addr (x);
2389 }
2390 /* Don't call get_addr if y is the same VALUE. */
2391 else if (x != y)
2392 x = get_addr (x);
2393 }
eab5c70a 2394 if (GET_CODE (y) == VALUE)
5312b066
JJ
2395 {
2396 if (REG_P (x))
2397 {
24f8d71e
JJ
2398 struct elt_loc_list *l = NULL;
2399 if (CSELIB_VAL_PTR (y))
a5628378
AO
2400 for (l = canonical_cselib_val (CSELIB_VAL_PTR (y))->locs;
2401 l; l = l->next)
24f8d71e
JJ
2402 if (REG_P (l->loc) && rtx_equal_for_memref_p (l->loc, x))
2403 break;
5312b066
JJ
2404 if (l)
2405 y = x;
2406 else
2407 y = get_addr (y);
2408 }
2409 /* Don't call get_addr if x is the same VALUE. */
2410 else if (y != x)
2411 y = get_addr (y);
2412 }
9ae8ffe7
JL
2413 if (GET_CODE (x) == HIGH)
2414 x = XEXP (x, 0);
2415 else if (GET_CODE (x) == LO_SUM)
2416 x = XEXP (x, 1);
2417 else
9f61be58 2418 x = addr_side_effect_eval (x, maybe_lt (xsize, 0) ? -xsize : xsize, 0);
9ae8ffe7
JL
2419 if (GET_CODE (y) == HIGH)
2420 y = XEXP (y, 0);
2421 else if (GET_CODE (y) == LO_SUM)
2422 y = XEXP (y, 1);
2423 else
9f61be58 2424 y = addr_side_effect_eval (y, maybe_lt (ysize, 0) ? -ysize : ysize, 0);
9ae8ffe7 2425
54363f8a
JH
2426 if (GET_CODE (x) == SYMBOL_REF && GET_CODE (y) == SYMBOL_REF)
2427 {
73e48cb3 2428 int cmp = compare_base_symbol_refs (x,y);
54363f8a
JH
2429
2430 /* If both decls are the same, decide by offsets. */
2431 if (cmp == 1)
2432 return offset_overlap_p (c, xsize, ysize);
3a28db46
UB
2433 /* Assume a potential overlap for symbolic addresses that went
2434 through alignment adjustments (i.e., that have negative
2435 sizes), because we can't know how far they are from each
2436 other. */
9f61be58 2437 if (maybe_lt (xsize, 0) || maybe_lt (ysize, 0))
3a28db46 2438 return -1;
54363f8a
JH
2439 /* If decls are different or we know by offsets that there is no overlap,
2440 we win. */
2441 if (!cmp || !offset_overlap_p (c, xsize, ysize))
2442 return 0;
2443 /* Decls may or may not be different and offsets overlap....*/
2444 return -1;
2445 }
2446 else if (rtx_equal_for_memref_p (x, y))
9ae8ffe7 2447 {
3aa03517 2448 return offset_overlap_p (c, xsize, ysize);
9ae8ffe7
JL
2449 }
2450
6e73e666
JC
2451 /* This code used to check for conflicts involving stack references and
2452 globals but the base address alias code now handles these cases. */
9ae8ffe7
JL
2453
2454 if (GET_CODE (x) == PLUS)
2455 {
2456 /* The fact that X is canonicalized means that this
2457 PLUS rtx is canonicalized. */
2458 rtx x0 = XEXP (x, 0);
2459 rtx x1 = XEXP (x, 1);
2460
2d88904a
AO
2461 /* However, VALUEs might end up in different positions even in
2462 canonical PLUSes. Comparing their addresses is enough. */
2463 if (x0 == y)
2464 return memrefs_conflict_p (xsize, x1, ysize, const0_rtx, c);
2465 else if (x1 == y)
2466 return memrefs_conflict_p (xsize, x0, ysize, const0_rtx, c);
2467
9f61be58 2468 poly_int64 cx1, cy1;
9ae8ffe7
JL
2469 if (GET_CODE (y) == PLUS)
2470 {
2471 /* The fact that Y is canonicalized means that this
2472 PLUS rtx is canonicalized. */
2473 rtx y0 = XEXP (y, 0);
2474 rtx y1 = XEXP (y, 1);
2475
2d88904a
AO
2476 if (x0 == y1)
2477 return memrefs_conflict_p (xsize, x1, ysize, y0, c);
2478 if (x1 == y0)
2479 return memrefs_conflict_p (xsize, x0, ysize, y1, c);
2480
9ae8ffe7
JL
2481 if (rtx_equal_for_memref_p (x1, y1))
2482 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
2483 if (rtx_equal_for_memref_p (x0, y0))
2484 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
9f61be58 2485 if (poly_int_rtx_p (x1, &cx1))
63be02db 2486 {
9f61be58 2487 if (poly_int_rtx_p (y1, &cy1))
63be02db 2488 return memrefs_conflict_p (xsize, x0, ysize, y0,
9f61be58 2489 c - cx1 + cy1);
63be02db 2490 else
9f61be58 2491 return memrefs_conflict_p (xsize, x0, ysize, y, c - cx1);
63be02db 2492 }
9f61be58
RS
2493 else if (poly_int_rtx_p (y1, &cy1))
2494 return memrefs_conflict_p (xsize, x, ysize, y0, c + cy1);
9ae8ffe7 2495
f47e08d9 2496 return -1;
9ae8ffe7 2497 }
9f61be58
RS
2498 else if (poly_int_rtx_p (x1, &cx1))
2499 return memrefs_conflict_p (xsize, x0, ysize, y, c - cx1);
9ae8ffe7
JL
2500 }
2501 else if (GET_CODE (y) == PLUS)
2502 {
2503 /* The fact that Y is canonicalized means that this
2504 PLUS rtx is canonicalized. */
2505 rtx y0 = XEXP (y, 0);
2506 rtx y1 = XEXP (y, 1);
2507
2d88904a
AO
2508 if (x == y0)
2509 return memrefs_conflict_p (xsize, const0_rtx, ysize, y1, c);
2510 if (x == y1)
2511 return memrefs_conflict_p (xsize, const0_rtx, ysize, y0, c);
2512
9f61be58
RS
2513 poly_int64 cy1;
2514 if (poly_int_rtx_p (y1, &cy1))
2515 return memrefs_conflict_p (xsize, x, ysize, y0, c + cy1);
9ae8ffe7 2516 else
f47e08d9 2517 return -1;
9ae8ffe7
JL
2518 }
2519
2520 if (GET_CODE (x) == GET_CODE (y))
2521 switch (GET_CODE (x))
2522 {
2523 case MULT:
2524 {
2525 /* Handle cases where we expect the second operands to be the
2526 same, and check only whether the first operand would conflict
2527 or not. */
2528 rtx x0, y0;
2529 rtx x1 = canon_rtx (XEXP (x, 1));
2530 rtx y1 = canon_rtx (XEXP (y, 1));
2531 if (! rtx_equal_for_memref_p (x1, y1))
f47e08d9 2532 return -1;
9ae8ffe7
JL
2533 x0 = canon_rtx (XEXP (x, 0));
2534 y0 = canon_rtx (XEXP (y, 0));
2535 if (rtx_equal_for_memref_p (x0, y0))
3aa03517 2536 return offset_overlap_p (c, xsize, ysize);
9ae8ffe7
JL
2537
2538 /* Can't properly adjust our sizes. */
9f61be58
RS
2539 if (!CONST_INT_P (x1)
2540 || !can_div_trunc_p (xsize, INTVAL (x1), &xsize)
2541 || !can_div_trunc_p (ysize, INTVAL (x1), &ysize)
2542 || !can_div_trunc_p (c, INTVAL (x1), &c))
f47e08d9 2543 return -1;
9ae8ffe7
JL
2544 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
2545 }
1d300e19
KG
2546
2547 default:
2548 break;
9ae8ffe7
JL
2549 }
2550
a9bf4fe2
AO
2551 /* Deal with alignment ANDs by adjusting offset and size so as to
2552 cover the maximum range, without taking any previously known
5147bf6a
AO
2553 alignment into account. Make a size negative after such an
2554 adjustments, so that, if we end up with e.g. two SYMBOL_REFs, we
2555 assume a potential overlap, because they may end up in contiguous
2556 memory locations and the stricter-alignment access may span over
2557 part of both. */
481683e1 2558 if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1)))
56ee9281 2559 {
a9bf4fe2
AO
2560 HOST_WIDE_INT sc = INTVAL (XEXP (x, 1));
2561 unsigned HOST_WIDE_INT uc = sc;
146ec50f 2562 if (sc < 0 && pow2_or_zerop (-uc))
a9bf4fe2 2563 {
9f61be58 2564 if (maybe_gt (xsize, 0))
5147bf6a 2565 xsize = -xsize;
9f61be58 2566 if (maybe_ne (xsize, 0))
3aa03517 2567 xsize += sc + 1;
fe8fb1c4 2568 c -= sc + 1;
a9bf4fe2
AO
2569 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
2570 ysize, y, c);
2571 }
56ee9281 2572 }
481683e1 2573 if (GET_CODE (y) == AND && CONST_INT_P (XEXP (y, 1)))
c02f035f 2574 {
a9bf4fe2
AO
2575 HOST_WIDE_INT sc = INTVAL (XEXP (y, 1));
2576 unsigned HOST_WIDE_INT uc = sc;
146ec50f 2577 if (sc < 0 && pow2_or_zerop (-uc))
a9bf4fe2 2578 {
9f61be58 2579 if (maybe_gt (ysize, 0))
5147bf6a 2580 ysize = -ysize;
9f61be58 2581 if (maybe_ne (ysize, 0))
3aa03517 2582 ysize += sc + 1;
fe8fb1c4 2583 c += sc + 1;
a9bf4fe2
AO
2584 return memrefs_conflict_p (xsize, x,
2585 ysize, canon_rtx (XEXP (y, 0)), c);
2586 }
c02f035f 2587 }
9ae8ffe7
JL
2588
2589 if (CONSTANT_P (x))
2590 {
9f61be58
RS
2591 poly_int64 cx, cy;
2592 if (poly_int_rtx_p (x, &cx) && poly_int_rtx_p (y, &cy))
9ae8ffe7 2593 {
9f61be58 2594 c += cy - cx;
3aa03517 2595 return offset_overlap_p (c, xsize, ysize);
9ae8ffe7
JL
2596 }
2597
2598 if (GET_CODE (x) == CONST)
2599 {
2600 if (GET_CODE (y) == CONST)
2601 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
2602 ysize, canon_rtx (XEXP (y, 0)), c);
2603 else
2604 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
2605 ysize, y, c);
2606 }
2607 if (GET_CODE (y) == CONST)
2608 return memrefs_conflict_p (xsize, x, ysize,
2609 canon_rtx (XEXP (y, 0)), c);
2610
3aa03517
AO
2611 /* Assume a potential overlap for symbolic addresses that went
2612 through alignment adjustments (i.e., that have negative
2613 sizes), because we can't know how far they are from each
2614 other. */
9ae8ffe7 2615 if (CONSTANT_P (y))
9f61be58
RS
2616 return (maybe_lt (xsize, 0)
2617 || maybe_lt (ysize, 0)
2618 || offset_overlap_p (c, xsize, ysize));
9ae8ffe7 2619
f47e08d9 2620 return -1;
9ae8ffe7 2621 }
f47e08d9
RG
2622
2623 return -1;
9ae8ffe7
JL
2624}

/* Functions to compute memory dependencies.

   Since we process the insns in execution order, we can build tables
   to keep track of what registers are fixed (and not aliased), what registers
   are varying in known ways, and what registers are varying in unknown
   ways.

   If both memory references are volatile, then there must always be a
   dependence between the two references, since their order cannot be
   changed.  A volatile and non-volatile reference can be interchanged
   though.

   We also must allow AND addresses, because they may generate accesses
   outside the object being referenced.  This is used to generate aligned
   addresses from unaligned addresses, for instance, the alpha
   storeqi_unaligned pattern.  */

/* Read dependence: X is read after read in MEM takes place.  There can
   only be a dependence here if both reads are volatile, or if either is
   an explicit barrier.  */

int
read_dependence (const_rtx mem, const_rtx x)
{
  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return true;
  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return true;
  return false;
}

/* Look at the bottom of the COMPONENT_REF list for a DECL, and return it.  */

static tree
decl_for_component_ref (tree x)
{
  do
    {
      x = TREE_OPERAND (x, 0);
    }
  while (x && TREE_CODE (x) == COMPONENT_REF);

  return x && DECL_P (x) ? x : NULL_TREE;
}

/* Walk up the COMPONENT_REF list in X and adjust *OFFSET to compensate
   for the offset of the field reference.  *KNOWN_P says whether the
   offset is known.  */

static void
adjust_offset_for_component_ref (tree x, bool *known_p,
				 poly_int64 *offset)
{
  if (!*known_p)
    return;
  do
    {
      tree xoffset = component_ref_field_offset (x);
      tree field = TREE_OPERAND (x, 1);
      if (TREE_CODE (xoffset) != INTEGER_CST)
	{
	  *known_p = false;
	  return;
	}

      offset_int woffset
	= (wi::to_offset (xoffset)
	   + (wi::to_offset (DECL_FIELD_BIT_OFFSET (field))
	      >> LOG2_BITS_PER_UNIT));
      if (!wi::fits_uhwi_p (woffset))
	{
	  *known_p = false;
	  return;
	}
      *offset += woffset.to_uhwi ();

      x = TREE_OPERAND (x, 0);
    }
  while (x && TREE_CODE (x) == COMPONENT_REF);
}
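
/* For illustration (hypothetical types, not from the original sources):
   for "struct S { int a; int b; } s;" and a COMPONENT_REF for s.b,
   component_ref_field_offset yields 0 and DECL_FIELD_BIT_OFFSET of "b"
   is 32 bits, so *OFFSET grows by 32 >> LOG2_BITS_PER_UNIT == 4 bytes
   on a target with 8-bit units and 32-bit int.  */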

/* Return nonzero if we can determine the exprs corresponding to memrefs
   X and Y and they do not overlap.
   If LOOP_INVARIANT is set, skip offset-based disambiguation.  */

int
nonoverlapping_memrefs_p (const_rtx x, const_rtx y, bool loop_invariant)
{
  tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
  rtx rtlx, rtly;
  rtx basex, basey;
  bool moffsetx_known_p, moffsety_known_p;
  poly_int64 moffsetx = 0, moffsety = 0;
  poly_int64 offsetx = 0, offsety = 0, sizex, sizey;

  /* Unless both have exprs, we can't tell anything.  */
  if (exprx == 0 || expry == 0)
    return 0;

  /* For spill-slot accesses make sure we have valid offsets.  */
  if ((exprx == get_spill_slot_decl (false)
       && ! MEM_OFFSET_KNOWN_P (x))
      || (expry == get_spill_slot_decl (false)
	  && ! MEM_OFFSET_KNOWN_P (y)))
    return 0;

  /* If the field reference test failed, look at the DECLs involved.  */
  moffsetx_known_p = MEM_OFFSET_KNOWN_P (x);
  if (moffsetx_known_p)
    moffsetx = MEM_OFFSET (x);
  if (TREE_CODE (exprx) == COMPONENT_REF)
    {
      tree t = decl_for_component_ref (exprx);
      if (! t)
	return 0;
      adjust_offset_for_component_ref (exprx, &moffsetx_known_p, &moffsetx);
      exprx = t;
    }

  moffsety_known_p = MEM_OFFSET_KNOWN_P (y);
  if (moffsety_known_p)
    moffsety = MEM_OFFSET (y);
  if (TREE_CODE (expry) == COMPONENT_REF)
    {
      tree t = decl_for_component_ref (expry);
      if (! t)
	return 0;
      adjust_offset_for_component_ref (expry, &moffsety_known_p, &moffsety);
      expry = t;
    }

  if (! DECL_P (exprx) || ! DECL_P (expry))
    return 0;

  /* If we refer to different gimple registers, or one gimple register
     and one non-gimple-register, we know they can't overlap.  First,
     gimple registers don't have their addresses taken.  Now, there
     could be more than one stack slot for (different versions of) the
     same gimple register, but we can presumably tell they don't
     overlap based on offsets from stack base addresses elsewhere.
     It's important that we don't proceed to DECL_RTL, because gimple
     registers may not pass DECL_RTL_SET_P, and make_decl_rtl won't be
     able to do anything about them since no SSA information will have
     remained to guide it.  */
  if (is_gimple_reg (exprx) || is_gimple_reg (expry))
    return exprx != expry
	   || (moffsetx_known_p && moffsety_known_p
	       && MEM_SIZE_KNOWN_P (x) && MEM_SIZE_KNOWN_P (y)
	       && !offset_overlap_p (moffsety - moffsetx,
				     MEM_SIZE (x), MEM_SIZE (y)));

  /* With invalid code we can end up storing into the constant pool.
     Bail out to avoid ICEing when creating RTL for this.
     See gfortran.dg/lto/20091028-2_0.f90.  */
  if (TREE_CODE (exprx) == CONST_DECL
      || TREE_CODE (expry) == CONST_DECL)
    return 1;

  /* If one decl is known to be a function or label in a function and
     the other is some kind of data, they can't overlap.  */
  if ((TREE_CODE (exprx) == FUNCTION_DECL
       || TREE_CODE (exprx) == LABEL_DECL)
      != (TREE_CODE (expry) == FUNCTION_DECL
	  || TREE_CODE (expry) == LABEL_DECL))
    return 1;

  /* If either of the decls doesn't have DECL_RTL set (e.g. marked as
     living in multiple places), we can't tell anything.  Exceptions
     are FUNCTION_DECLs for which we can create DECL_RTL on demand.  */
  if ((!DECL_RTL_SET_P (exprx) && TREE_CODE (exprx) != FUNCTION_DECL)
      || (!DECL_RTL_SET_P (expry) && TREE_CODE (expry) != FUNCTION_DECL))
    return 0;

  rtlx = DECL_RTL (exprx);
  rtly = DECL_RTL (expry);

  /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
     can't overlap unless they are the same because we never reuse that part
     of the stack frame used for locals for spilled pseudos.  */
  if ((!MEM_P (rtlx) || !MEM_P (rtly))
      && ! rtx_equal_p (rtlx, rtly))
    return 1;

  /* If we have MEMs referring to different address spaces (which can
     potentially overlap), we cannot easily tell from the addresses
     whether the references overlap.  */
  if (MEM_P (rtlx) && MEM_P (rtly)
      && MEM_ADDR_SPACE (rtlx) != MEM_ADDR_SPACE (rtly))
    return 0;

  /* Get the base and offsets of both decls.  If either is a register, we
     know both are and are the same, so use that as the base.  The only way
     we can avoid overlap is if we can deduce that they are nonoverlapping
     pieces of that decl, which is very rare.  */
  basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
  basex = strip_offset_and_add (basex, &offsetx);

  basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
  basey = strip_offset_and_add (basey, &offsety);

  /* If the bases are different, we know they do not overlap if both
     are constants or if one is a constant and the other a pointer into the
     stack frame.  Otherwise a different base means we can't tell if they
     overlap or not.  */
  if (compare_base_decls (exprx, expry) == 0)
    return ((CONSTANT_P (basex) && CONSTANT_P (basey))
	    || (CONSTANT_P (basex) && REG_P (basey)
		&& REGNO_PTR_FRAME_P (REGNO (basey)))
	    || (CONSTANT_P (basey) && REG_P (basex)
		&& REGNO_PTR_FRAME_P (REGNO (basex))));

  /* Offset-based disambiguation is not appropriate for loop-invariant
     references.  */
  if (loop_invariant)
    return 0;

  /* Offset-based disambiguation is OK even if we do not know that the
     declarations are necessarily different
     (i.e. compare_base_decls (exprx, expry) == -1).  */

  sizex = (!MEM_P (rtlx) ? poly_int64 (GET_MODE_SIZE (GET_MODE (rtlx)))
	   : MEM_SIZE_KNOWN_P (rtlx) ? MEM_SIZE (rtlx)
	   : -1);
  sizey = (!MEM_P (rtly) ? poly_int64 (GET_MODE_SIZE (GET_MODE (rtly)))
	   : MEM_SIZE_KNOWN_P (rtly) ? MEM_SIZE (rtly)
	   : -1);

  /* If we have an offset for either memref, it can update the values computed
     above.  */
  if (moffsetx_known_p)
    offsetx += moffsetx, sizex -= moffsetx;
  if (moffsety_known_p)
    offsety += moffsety, sizey -= moffsety;

  /* If a memref has both a size and an offset, we can use the smaller size.
     We can't do this if the offset isn't known because we must view this
     memref as being anywhere inside the DECL's MEM.  */
  if (MEM_SIZE_KNOWN_P (x) && moffsetx_known_p)
    sizex = MEM_SIZE (x);
  if (MEM_SIZE_KNOWN_P (y) && moffsety_known_p)
    sizey = MEM_SIZE (y);

  return !ranges_maybe_overlap_p (offsetx, sizex, offsety, sizey);
}
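
/* For illustration (hypothetical code, not from the original sources):
   for "int a, b;" a store to "a" and a load from "b" have distinct
   DECLs, so compare_base_decls returns 0; both bases are SYMBOL_REFs
   (hence CONSTANT_P) and the predicate returns 1.  Two references into
   the same decl instead fall through to the offset/size range check at
   the end.  */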

/* Helper for true_dependence and canon_true_dependence.
   Checks for true dependence: X is read after store in MEM takes place.

   If MEM_CANONICALIZED is FALSE, then X_ADDR and MEM_ADDR should be
   NULL_RTX, and the canonical addresses of MEM and X are both computed
   here.  If MEM_CANONICALIZED, then MEM must already be canonicalized.

   If X_ADDR is non-NULL, it is used in preference to XEXP (x, 0).

   Returns 1 if there is a true dependence, 0 otherwise.  */

static int
true_dependence_1 (const_rtx mem, machine_mode mem_mode, rtx mem_addr,
		   const_rtx x, rtx x_addr, bool mem_canonicalized)
{
  rtx true_mem_addr;
  rtx base;
  int ret;

  gcc_checking_assert (mem_canonicalized ? (mem_addr != NULL_RTX)
		       : (mem_addr == NULL_RTX && x_addr == NULL_RTX));

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions, and in cselib.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;
  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return 1;

  if (! x_addr)
    x_addr = XEXP (x, 0);
  x_addr = get_addr (x_addr);

  if (! mem_addr)
    {
      mem_addr = XEXP (mem, 0);
      if (mem_mode == VOIDmode)
	mem_mode = GET_MODE (mem);
    }
  true_mem_addr = get_addr (mem_addr);

  /* Read-only memory is by definition never modified, and therefore can't
     conflict with anything.  However, don't assume anything when AND
     addresses are involved and leave to the code below to determine
     dependence.  We don't expect to find read-only set on MEM, but
     stupid user tricks can produce them, so don't die.  */
  if (MEM_READONLY_P (x)
      && GET_CODE (x_addr) != AND
      && GET_CODE (true_mem_addr) != AND)
    return 0;

  /* If we have MEMs referring to different address spaces (which can
     potentially overlap), we cannot easily tell from the addresses
     whether the references overlap.  */
  if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
    return 1;

  base = find_base_term (x_addr);
  if (base && (GET_CODE (base) == LABEL_REF
	       || (GET_CODE (base) == SYMBOL_REF
		   && CONSTANT_POOL_ADDRESS_P (base))))
    return 0;

  rtx mem_base = find_base_term (true_mem_addr);
  if (! base_alias_check (x_addr, base, true_mem_addr, mem_base,
			  GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (x_addr);
  if (!mem_canonicalized)
    mem_addr = canon_rtx (true_mem_addr);

  if ((ret = memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
				 SIZE_FOR_MODE (x), x_addr, 0)) != -1)
    return ret;

  if (mems_in_disjoint_alias_sets_p (x, mem))
    return 0;

  if (nonoverlapping_memrefs_p (mem, x, false))
    return 0;

  return rtx_refs_may_alias_p (x, mem, true);
}

/* True dependence: X is read after store in MEM takes place.  */

int
true_dependence (const_rtx mem, machine_mode mem_mode, const_rtx x)
{
  return true_dependence_1 (mem, mem_mode, NULL_RTX,
			    x, NULL_RTX, /*mem_canonicalized=*/false);
}

/* Canonical true dependence: X is read after store in MEM takes place.
   Variant of true_dependence which assumes MEM has already been
   canonicalized (hence we no longer do that here).
   The mem_addr argument has been added, since true_dependence_1 computed
   this value prior to canonicalizing.  */

int
canon_true_dependence (const_rtx mem, machine_mode mem_mode, rtx mem_addr,
		       const_rtx x, rtx x_addr)
{
  return true_dependence_1 (mem, mem_mode, mem_addr,
			    x, x_addr, /*mem_canonicalized=*/true);
}
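
/* For illustration (hypothetical usage, not from the original sources):
   a scheduler asking whether the load (mem:SI (reg A)) may be moved
   above the store (mem:SI (reg B)) would call
   true_dependence (store_mem, SImode, load_mem); a nonzero result
   means the load might read the stored location, so the two must keep
   their original order.  */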
2984
da7d8304 2985/* Returns nonzero if a write to X might alias a previous read from
393f9fed 2986 (or, if WRITEP is true, a write to) MEM.
bd280792
JR
2987 If X_CANONCALIZED is true, then X_ADDR is the canonicalized address of X,
2988 and X_MODE the mode for that access.
2989 If MEM_CANONICALIZED is true, MEM is canonicalized. */
9ae8ffe7 2990
2c72b78f 2991static int
bd280792 2992write_dependence_p (const_rtx mem,
ef4bddc2 2993 const_rtx x, machine_mode x_mode, rtx x_addr,
bd280792 2994 bool mem_canonicalized, bool x_canonicalized, bool writep)
9ae8ffe7 2995{
bd280792 2996 rtx mem_addr;
0777fc02 2997 rtx true_mem_addr, true_x_addr;
49982682 2998 rtx base;
f47e08d9 2999 int ret;
6e73e666 3000
bd280792 3001 gcc_checking_assert (x_canonicalized
6f5799be
JJ
3002 ? (x_addr != NULL_RTX
3003 && (x_mode != VOIDmode || GET_MODE (x) == VOIDmode))
bd280792 3004 : (x_addr == NULL_RTX && x_mode == VOIDmode));
393f9fed 3005
9ae8ffe7
JL
3006 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
3007 return 1;
3008
c4484b8f
RH
3009 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
3010 This is used in epilogue deallocation functions. */
3011 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
3012 return 1;
3013 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
3014 return 1;
9cd9e512
RH
3015 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
3016 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
3017 return 1;
c4484b8f 3018
bd280792 3019 if (!x_addr)
0777fc02
UB
3020 x_addr = XEXP (x, 0);
3021 true_x_addr = get_addr (x_addr);
3022
3023 mem_addr = XEXP (mem, 0);
3024 true_mem_addr = get_addr (mem_addr);
55efb413 3025
878f5596
UB
3026 /* A read from read-only memory can't conflict with read-write memory.
3027 Don't assume anything when AND addresses are involved and leave to
3028 the code below to determine dependence. */
3029 if (!writep
3030 && MEM_READONLY_P (mem)
0777fc02
UB
3031 && GET_CODE (true_x_addr) != AND
3032 && GET_CODE (true_mem_addr) != AND)
878f5596
UB
3033 return 0;
3034
3035 /* If we have MEMs referring to different address spaces (which can
3036 potentially overlap), we cannot easily tell from the addresses
3037 whether the references overlap. */
3038 if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
3039 return 1;
3040
0777fc02 3041 base = find_base_term (true_mem_addr);
31b0a960
RB
3042 if (! writep
3043 && base
3044 && (GET_CODE (base) == LABEL_REF
3045 || (GET_CODE (base) == SYMBOL_REF
3046 && CONSTANT_POOL_ADDRESS_P (base))))
3047 return 0;
49982682 3048
0777fc02
UB
3049 rtx x_base = find_base_term (true_x_addr);
3050 if (! base_alias_check (true_x_addr, x_base, true_mem_addr, base,
3051 GET_MODE (x), GET_MODE (mem)))
41472af8
MM
3052 return 0;
3053
bd280792 3054 if (!x_canonicalized)
393f9fed 3055 {
0777fc02 3056 x_addr = canon_rtx (true_x_addr);
bd280792 3057 x_mode = GET_MODE (x);
393f9fed 3058 }
bd280792 3059 if (!mem_canonicalized)
0777fc02 3060 mem_addr = canon_rtx (true_mem_addr);
6e73e666 3061
bd280792
JR
3062 if ((ret = memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
3063 GET_MODE_SIZE (x_mode), x_addr, 0)) != -1)
f47e08d9
RG
3064 return ret;
3065
c6ea834c 3066 if (nonoverlapping_memrefs_p (x, mem, false))
c6df88cb
MM
3067 return 0;
3068
55b34b5f 3069 return rtx_refs_may_alias_p (x, mem, false);
c6df88cb
MM
3070}
3071
3072/* Anti dependence: X is written after read in MEM takes place. */
3073
3074int
4f588890 3075anti_dependence (const_rtx mem, const_rtx x)
c6df88cb 3076{
bd280792
JR
3077 return write_dependence_p (mem, x, VOIDmode, NULL_RTX,
3078 /*mem_canonicalized=*/false,
3079 /*x_canonicalized*/false, /*writep=*/false);
393f9fed
JR
3080}
3081
bd280792
JR
3082/* Likewise, but we already have a canonicalized MEM, and X_ADDR for X.
3083 Also, consider X in X_MODE (which might be from an enclosing
3084 STRICT_LOW_PART / ZERO_EXTRACT).
3085 If MEM_CANONICALIZED is true, MEM is canonicalized. */
393f9fed
JR
3086
3087int
bd280792 3088canon_anti_dependence (const_rtx mem, bool mem_canonicalized,
ef4bddc2 3089 const_rtx x, machine_mode x_mode, rtx x_addr)
393f9fed 3090{
bd280792
JR
3091 return write_dependence_p (mem, x, x_mode, x_addr,
3092 mem_canonicalized, /*x_canonicalized=*/true,
3093 /*writep=*/false);
9ae8ffe7
JL
3094}
3095
3096/* Output dependence: X is written after store in MEM takes place. */
3097
3098int
4f588890 3099output_dependence (const_rtx mem, const_rtx x)
9ae8ffe7 3100{
bd280792
JR
3101 return write_dependence_p (mem, x, VOIDmode, NULL_RTX,
3102 /*mem_canonicalized=*/false,
3103 /*x_canonicalized*/false, /*writep=*/true);
9ae8ffe7 3104}

/* Likewise, but we already have a canonicalized MEM, and X_ADDR for X.
   Also, consider X in X_MODE (which might be from an enclosing
   STRICT_LOW_PART / ZERO_EXTRACT).
   If MEM_CANONICALIZED is true, MEM is canonicalized.  */

int
canon_output_dependence (const_rtx mem, bool mem_canonicalized,
			 const_rtx x, machine_mode x_mode, rtx x_addr)
{
  return write_dependence_p (mem, x, x_mode, x_addr,
			     mem_canonicalized, /*x_canonicalized=*/true,
			     /*writep=*/true);
}
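
/* Editor's illustrative sketch, not part of the original file: when a
   single X is tested against many MEMs, the canonicalized entry points
   let the caller canonicalize X's address once rather than once per
   query.  canon_rtx comes from rtl.h; a production caller would also
   resolve VALUEs in the address first (see get_addr above).  */
static bool ATTRIBUTE_UNUSED
sketch_write_conflicts_with_any (const_rtx x, rtx *mems, unsigned int n)
{
  rtx x_addr = canon_rtx (XEXP (x, 0));
  machine_mode x_mode = GET_MODE (x);
  for (unsigned int i = 0; i < n; i++)
    if (canon_output_dependence (mems[i], /*mem_canonicalized=*/false,
				 x, x_mode, x_addr))
      return true;
  return false;
}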
\f

/* Check whether X may be aliased with MEM.  Don't do offset-based
   memory disambiguation & TBAA.  */
int
may_alias_p (const_rtx mem, const_rtx x)
{
  rtx x_addr, mem_addr;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;
  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return 1;

  x_addr = XEXP (x, 0);
  x_addr = get_addr (x_addr);

  mem_addr = XEXP (mem, 0);
  mem_addr = get_addr (mem_addr);

  /* Read-only memory is by definition never modified, and therefore can't
     conflict with anything.  However, don't assume anything when AND
     addresses are involved and leave to the code below to determine
     dependence.  We don't expect to find read-only set on MEM, but
     stupid user tricks can produce them, so don't die.  */
  if (MEM_READONLY_P (x)
      && GET_CODE (x_addr) != AND
      && GET_CODE (mem_addr) != AND)
    return 0;

  /* If we have MEMs referring to different address spaces (which can
     potentially overlap), we cannot easily tell from the addresses
     whether the references overlap.  */
  if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
    return 1;

  rtx x_base = find_base_term (x_addr);
  rtx mem_base = find_base_term (mem_addr);
  if (! base_alias_check (x_addr, x_base, mem_addr, mem_base,
			  GET_MODE (x), GET_MODE (mem_addr)))
    return 0;

  if (nonoverlapping_memrefs_p (mem, x, true))
    return 0;

  /* TBAA is not valid for loop-invariant queries.  */
  return rtx_refs_may_alias_p (x, mem, false);
}
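
/* Editor's illustrative sketch, not part of the original file: a
   loop-invariant-motion style check that a candidate load cannot be
   affected by any store in a list.  Offset-based disambiguation and
   TBAA are deliberately unavailable here, matching may_alias_p.  */
static bool ATTRIBUTE_UNUSED
sketch_load_independent_of_stores (const_rtx load_mem, rtx *store_mems,
				   unsigned int n)
{
  for (unsigned int i = 0; i < n; i++)
    if (may_alias_p (store_mems[i], load_mem))
      return false;
  return true;
}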

void
init_alias_target (void)
{
  int i;

  if (!arg_base_value)
    arg_base_value = gen_rtx_ADDRESS (VOIDmode, 0);

  memset (static_reg_base_value, 0, sizeof static_reg_base_value);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
	&& targetm.hard_regno_mode_ok (i, Pmode))
      static_reg_base_value[i] = arg_base_value;

  /* RTL code is required to be consistent about whether it uses the
     stack pointer, the frame pointer or the argument pointer to
     access a given area of the frame.  We can therefore use the
     base address to distinguish between the different areas.  */
  static_reg_base_value[STACK_POINTER_REGNUM]
    = unique_base_value (UNIQUE_BASE_VALUE_SP);
  static_reg_base_value[ARG_POINTER_REGNUM]
    = unique_base_value (UNIQUE_BASE_VALUE_ARGP);
  static_reg_base_value[FRAME_POINTER_REGNUM]
    = unique_base_value (UNIQUE_BASE_VALUE_FP);

  /* The above rules extend post-reload, with eliminations applying
     consistently to each of the three pointers.  Cope with cases in
     which the frame pointer is eliminated to the hard frame pointer
     rather than the stack pointer.  */
  if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
    static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
      = unique_base_value (UNIQUE_BASE_VALUE_HFP);
}

/* Set MEMORY_MODIFIED when X modifies DATA (that is assumed
   to be a memory reference).  */
static bool memory_modified;
static void
memory_modified_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  if (MEM_P (x))
    {
      if (anti_dependence (x, (const_rtx) data)
	  || output_dependence (x, (const_rtx) data))
	memory_modified = true;
    }
}


/* Return true when INSN possibly modifies the memory contents of MEM
   (i.e. the contents at its address can be modified).  */
bool
memory_modified_in_insn_p (const_rtx mem, const_rtx insn)
{
  if (!INSN_P (insn))
    return false;
  /* Conservatively assume all non-readonly MEMs might be modified in
     calls.  */
  if (CALL_P (insn))
    return true;
  memory_modified = false;
  note_stores (PATTERN (insn), memory_modified_1, CONST_CAST_RTX (mem));
  return memory_modified;
}
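
/* Editor's illustrative sketch, not part of the original file: scan a
   half-open range of insns [FROM, TO) and report whether any of them
   may modify MEM.  NEXT_INSN is the usual rtl.h accessor; the range is
   assumed to lie on a single insn chain.  */
static bool ATTRIBUTE_UNUSED
sketch_mem_modified_in_range_p (const_rtx mem, rtx_insn *from, rtx_insn *to)
{
  for (rtx_insn *insn = from; insn != to; insn = NEXT_INSN (insn))
    if (memory_modified_in_insn_p (mem, insn))
      return true;
  return false;
}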

/* Return TRUE if the destination of a set is rtx identical to
   ITEM.  */
static inline bool
set_dest_equal_p (const_rtx set, const_rtx item)
{
  rtx dest = SET_DEST (set);
  return rtx_equal_p (dest, item);
}
3253
c13e8210
MM
3254/* Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE
3255 array. */
3256
9ae8ffe7 3257void
4682ae04 3258init_alias_analysis (void)
9ae8ffe7 3259{
c582d54a 3260 unsigned int maxreg = max_reg_num ();
ea64ef27 3261 int changed, pass;
b3694847
SS
3262 int i;
3263 unsigned int ui;
d36a28b8
DM
3264 rtx_insn *insn;
3265 rtx val;
131db6b8
SB
3266 int rpo_cnt;
3267 int *rpo;
9ae8ffe7 3268
0d446150
JH
3269 timevar_push (TV_ALIAS_ANALYSIS);
3270
92390dd1 3271 vec_safe_grow_cleared (reg_known_value, maxreg - FIRST_PSEUDO_REGISTER);
9ff3c7ca 3272 reg_known_equiv_p = sbitmap_alloc (maxreg - FIRST_PSEUDO_REGISTER);
dd3d1ec0 3273 bitmap_clear (reg_known_equiv_p);
9ae8ffe7 3274
08c79682 3275 /* If we have memory allocated from the previous run, use it. */
c582d54a 3276 if (old_reg_base_value)
08c79682
KH
3277 reg_base_value = old_reg_base_value;
3278
3279 if (reg_base_value)
9771b263 3280 reg_base_value->truncate (0);
08c79682 3281
9771b263 3282 vec_safe_grow_cleared (reg_base_value, maxreg);
ac606739 3283
5ed6ace5 3284 new_reg_base_value = XNEWVEC (rtx, maxreg);
d630245f 3285 reg_seen = sbitmap_alloc (maxreg);
ec907dd8
JL
3286
3287 /* The basic idea is that each pass through this loop will use the
3288 "constant" information from the previous pass to propagate alias
3289 information through another level of assignments.
3290
131db6b8
SB
3291 The propagation is done on the CFG in reverse post-order, to propagate
3292 things forward as far as possible in each iteration.
3293
ec907dd8
JL
3294 This could get expensive if the assignment chains are long. Maybe
3295 we should throttle the number of iterations, possibly based on
6e73e666 3296 the optimization level or flag_expensive_optimizations.
ec907dd8
JL
3297
3298 We could propagate more information in the first pass by making use
6fb5fa3c 3299 of DF_REG_DEF_COUNT to determine immediately that the alias information
ea64ef27
JL
3300 for a pseudo is "constant".
3301
3302 A program with an uninitialized variable can cause an infinite loop
3303 here. Instead of doing a full dataflow analysis to detect such problems
3304 we just cap the number of iterations for the loop.
3305
3306 The state of the arrays for the set chain in question does not matter
3307 since the program has undefined behavior. */
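
  /* Editor's illustrative example, not from the original sources: given
     the chain

	 (set (reg 101) (symbol_ref "x"))
	 (set (reg 102) (plus (reg 101) (const_int 4)))

     visiting insns in reverse post-order lets the first pass record
     base (reg 101) = "x" before reg 102 is seen, so reg 102's base also
     resolves to "x" without a second iteration of the loop below.  */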

  rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  rpo_cnt = pre_and_rev_post_order_compute (NULL, rpo, false);

  /* The prologue/epilogue insns are not threaded onto the
     insn chain until after reload has completed.  Thus,
     there is no sense wasting time checking if INSN is in
     the prologue/epilogue until after reload has completed.  */
  bool could_be_prologue_epilogue = ((targetm.have_prologue ()
				      || targetm.have_epilogue ())
				     && reload_completed);

  pass = 0;
  do
    {
      /* Assume nothing will change this iteration of the loop.  */
      changed = 0;

      /* We want to assign the same IDs each iteration of this loop, so
	 start counting from one each iteration of the loop.  */
      unique_id = 1;

      /* We're at the start of the function each iteration through the
	 loop, so we're copying arguments.  */
      copying_arguments = true;

      /* Wipe the potential alias information clean for this pass.  */
      memset (new_reg_base_value, 0, maxreg * sizeof (rtx));

      /* Wipe the reg_seen array clean.  */
      bitmap_clear (reg_seen);

      /* Initialize the alias information for this pass.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (static_reg_base_value[i]
	    /* Don't treat the hard frame pointer as special if we
	       eliminated the frame pointer to the stack pointer instead.  */
	    && !(i == HARD_FRAME_POINTER_REGNUM
		 && reload_completed
		 && !frame_pointer_needed
		 && targetm.can_eliminate (FRAME_POINTER_REGNUM,
					   STACK_POINTER_REGNUM)))
	  {
	    new_reg_base_value[i] = static_reg_base_value[i];
	    bitmap_set_bit (reg_seen, i);
	  }

      /* Walk the insns adding values to the new_reg_base_value array.  */
      for (i = 0; i < rpo_cnt; i++)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
	  FOR_BB_INSNS (bb, insn)
	    {
	      if (NONDEBUG_INSN_P (insn))
		{
		  rtx note, set;

		  if (could_be_prologue_epilogue
		      && prologue_epilogue_contains (insn))
		    continue;

		  /* If this insn has a noalias note, process it.  Otherwise,
		     scan for sets.  A simple set will have no side effects
		     which could change the base value of any other register.  */

		  if (GET_CODE (PATTERN (insn)) == SET
		      && REG_NOTES (insn) != 0
		      && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
		    record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
		  else
		    note_stores (PATTERN (insn), record_set, NULL);

		  set = single_set (insn);

		  if (set != 0
		      && REG_P (SET_DEST (set))
		      && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
		    {
		      unsigned int regno = REGNO (SET_DEST (set));
		      rtx src = SET_SRC (set);
		      rtx t;

		      note = find_reg_equal_equiv_note (insn);
		      if (note && REG_NOTE_KIND (note) == REG_EQUAL
			  && DF_REG_DEF_COUNT (regno) != 1)
			note = NULL_RTX;

		      if (note != NULL_RTX
			  && GET_CODE (XEXP (note, 0)) != EXPR_LIST
			  && ! rtx_varies_p (XEXP (note, 0), 1)
			  && ! reg_overlap_mentioned_p (SET_DEST (set),
							XEXP (note, 0)))
			{
			  set_reg_known_value (regno, XEXP (note, 0));
			  set_reg_known_equiv_p (regno,
						 REG_NOTE_KIND (note) == REG_EQUIV);
			}
		      else if (DF_REG_DEF_COUNT (regno) == 1
			       && GET_CODE (src) == PLUS
			       && REG_P (XEXP (src, 0))
			       && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
			       && CONST_INT_P (XEXP (src, 1)))
			{
			  t = plus_constant (GET_MODE (src), t,
					     INTVAL (XEXP (src, 1)));
			  set_reg_known_value (regno, t);
			  set_reg_known_equiv_p (regno, false);
			}
		      else if (DF_REG_DEF_COUNT (regno) == 1
			       && ! rtx_varies_p (src, 1))
			{
			  set_reg_known_value (regno, src);
			  set_reg_known_equiv_p (regno, false);
			}
		    }
		}
	      else if (NOTE_P (insn)
		       && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
		copying_arguments = false;
	    }
	}

      /* Now propagate values from new_reg_base_value to reg_base_value.  */
      gcc_assert (maxreg == (unsigned int) max_reg_num ());

      for (ui = 0; ui < maxreg; ui++)
	{
	  if (new_reg_base_value[ui]
	      && new_reg_base_value[ui] != (*reg_base_value)[ui]
	      && ! rtx_equal_p (new_reg_base_value[ui], (*reg_base_value)[ui]))
	    {
	      (*reg_base_value)[ui] = new_reg_base_value[ui];
	      changed = 1;
	    }
	}
    }
  while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
  XDELETEVEC (rpo);

  /* Fill in the remaining entries.  */
  FOR_EACH_VEC_ELT (*reg_known_value, i, val)
    {
      int regno = i + FIRST_PSEUDO_REGISTER;
      if (! val)
	set_reg_known_value (regno, regno_reg_rtx[regno]);
    }

  /* Clean up.  */
  free (new_reg_base_value);
  new_reg_base_value = 0;
  sbitmap_free (reg_seen);
  reg_seen = 0;
  timevar_pop (TV_ALIAS_ANALYSIS);
}

/* Equate REG_BASE_VALUE (reg1) to REG_BASE_VALUE (reg2).
   Special API for var-tracking pass purposes.  */

void
vt_equate_reg_base_value (const_rtx reg1, const_rtx reg2)
{
  (*reg_base_value)[REGNO (reg1)] = REG_BASE_VALUE (reg2);
}

void
end_alias_analysis (void)
{
  old_reg_base_value = reg_base_value;
  vec_free (reg_known_value);
  sbitmap_free (reg_known_equiv_p);
}
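
/* Editor's illustrative sketch, not part of the original file: the
   expected life cycle of this module inside an RTL pass.  Queries such
   as anti_dependence and memory_modified_in_insn_p rely on the state
   built by init_alias_analysis and are only meaningful before
   end_alias_analysis releases it.  */
static void ATTRIBUTE_UNUSED
sketch_pass_using_alias_analysis (void)
{
  init_alias_analysis ();
  /* ... run the pass, issuing dependence queries ...  */
  end_alias_analysis ();
}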

void
dump_alias_stats_in_alias_c (FILE *s)
{
  fprintf (s, "  TBAA oracle: %llu disambiguations %llu queries\n"
	   "               %llu are in alias set 0\n"
	   "               %llu queries asked about the same object\n"
	   "               %llu queries asked about the same alias set\n"
	   "               %llu access volatile\n"
	   "               %llu are dependent in the DAG\n"
	   "               %llu are artificially in conflict with void *\n",
	   alias_stats.num_disambiguated,
	   alias_stats.num_alias_zero + alias_stats.num_same_alias_set
	   + alias_stats.num_same_objects + alias_stats.num_volatile
	   + alias_stats.num_dag + alias_stats.num_disambiguated
	   + alias_stats.num_universal,
	   alias_stats.num_alias_zero, alias_stats.num_same_alias_set,
	   alias_stats.num_same_objects, alias_stats.num_volatile,
	   alias_stats.num_dag, alias_stats.num_universal);
}
#include "gt-alias.h"