gcc/alias.c
9ae8ffe7 1/* Alias analysis for GNU C
62e5bf5d 2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
c75c517d 3 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
9ae8ffe7
JL
4 Contributed by John Carr (jfc@mit.edu).
5
1322177d 6This file is part of GCC.
9ae8ffe7 7
1322177d
LB
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
9dcd6f09 10Software Foundation; either version 3, or (at your option) any later
1322177d 11version.
9ae8ffe7 12
1322177d
LB
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
9ae8ffe7
JL
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
9ae8ffe7
JL
21
22#include "config.h"
670ee920 23#include "system.h"
4977bab6
ZW
24#include "coretypes.h"
25#include "tm.h"
9ae8ffe7 26#include "rtl.h"
7790df19 27#include "tree.h"
6baf1cc8 28#include "tm_p.h"
49ad7cfa 29#include "function.h"
78528714
JQ
30#include "alias.h"
31#include "emit-rtl.h"
9ae8ffe7
JL
32#include "regs.h"
33#include "hard-reg-set.h"
e004f2f7 34#include "basic-block.h"
9ae8ffe7 35#include "flags.h"
264fac34 36#include "output.h"
718f9c0f 37#include "diagnostic-core.h"
2e107e9e 38#include "toplev.h"
eab5c70a 39#include "cselib.h"
3932261a 40#include "splay-tree.h"
ac606739 41#include "ggc.h"
d23c55c2 42#include "langhooks.h"
0d446150 43#include "timevar.h"
ab780373 44#include "target.h"
b255a036 45#include "cgraph.h"
ef330312 46#include "tree-pass.h"
ea900239 47#include "ipa-type-escape.h"
6fb5fa3c 48#include "df.h"
55b34b5f
RG
49#include "tree-ssa-alias.h"
50#include "pointer-set.h"
51#include "tree-flow.h"
ea900239
DB
52
53/* The aliasing API provided here solves related but different problems:
54
c22cacf3 55 Say there exists (in C)
ea900239
DB
56
57 struct X {
58 struct Y y1;
59 struct Z z2;
60 } x1, *px1, *px2;
61
62 struct Y y2, *py;
63 struct Z z2, *pz;
64
65
 66 py = &px1->y1;
67 px2 = &x1;
68
69 Consider the four questions:
70
71 Can a store to x1 interfere with px2->y1?
 72 Can a store to x1 interfere with px2->z2
 73 (that is, with (*px2).z2)?
 74 Can a store to x1 change the value pointed to by py?
 75 Can a store to x1 change the value pointed to by pz?
76
77 The answer to these questions can be yes, yes, yes, and maybe.
78
79 The first two questions can be answered with a simple examination
80 of the type system. If structure X contains a field of type Y then
 81 a store through a pointer to an X can overwrite any field that is
 82 contained (recursively) in an X (unless we know that px1 != px2).
 83
 84 The last two questions can be solved in the same way as the
 85 first two, but this is too conservative. The observation
 86 is that in some cases we can know which (if any) fields
 87 are addressed and whether those addresses are used in bad ways. This
88 analysis may be language specific. In C, arbitrary operations may
89 be applied to pointers. However, there is some indication that
90 this may be too conservative for some C++ types.
91
92 The pass ipa-type-escape does this analysis for the types whose
c22cacf3 93 instances do not escape across the compilation boundary.
ea900239
DB
94
95 Historically in GCC, these two problems were combined and a single
96 data structure was used to represent the solution to these
 97 problems. We now have two similar but different data structures.
 98 The data structure to solve the last two questions is similar to the
 99 first, but does not have the fields in it whose addresses are
 100 never taken. For types that do escape the compilation unit, the
101 data structures will have identical information.
102*/
3932261a
MM
103
104/* The alias sets assigned to MEMs assist the back-end in determining
105 which MEMs can alias which other MEMs. In general, two MEMs in
ac3d9668
RK
106 different alias sets cannot alias each other, with one important
107 exception. Consider something like:
3932261a 108
01d28c3f 109 struct S { int i; double d; };
3932261a
MM
110
111 a store to an `S' can alias something of either type `int' or type
112 `double'. (However, a store to an `int' cannot alias a `double'
113 and vice versa.) We indicate this via a tree structure that looks
114 like:
c22cacf3
MS
 115             struct S
 116               /  \
3932261a 117      /    \
c22cacf3
MS
 118           |/_      _\|
 119           int     double
3932261a 120
ac3d9668
RK
121 (The arrows are directed and point downwards.)
122 In this situation we say the alias set for `struct S' is the
123 `superset' and that those for `int' and `double' are `subsets'.
124
3bdf5ad1
RK
125 To see whether two alias sets can point to the same memory, we must
126 see if either alias set is a subset of the other. We need not trace
95bd1dd7 127 past immediate descendants, however, since we propagate all
3bdf5ad1 128 grandchildren up one level.
3932261a
MM
129
130 Alias set zero is implicitly a superset of all other alias sets.
 131 However, there is no actual entry for alias set zero. It is an
132 error to attempt to explicitly construct a subset of zero. */
133
7e5487a2 134struct GTY(()) alias_set_entry_d {
3932261a 135 /* The alias set number, as stored in MEM_ALIAS_SET. */
4862826d 136 alias_set_type alias_set;
3932261a 137
4c067742
RG
 138 /* Nonzero if this alias set would have a child of zero: this effectively
 139 makes this alias set the same as alias set zero. */
140 int has_zero_child;
141
3932261a 142 /* The children of the alias set. These are not just the immediate
95bd1dd7 143 children, but, in fact, all descendants. So, if we have:
3932261a 144
ca7fd9cd 145 struct T { struct S s; float f; }
3932261a
MM
146
147 continuing our example above, the children here will be all of
148 `int', `double', `float', and `struct S'. */
b604074c 149 splay_tree GTY((param1_is (int), param2_is (int))) children;
b604074c 150};
7e5487a2 151typedef struct alias_set_entry_d *alias_set_entry;
9ae8ffe7 152
ed7a4b4b 153static int rtx_equal_for_memref_p (const_rtx, const_rtx);
4682ae04 154static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
7bc980e1 155static void record_set (rtx, const_rtx, void *);
4682ae04
AJ
156static int base_alias_check (rtx, rtx, enum machine_mode,
157 enum machine_mode);
158static rtx find_base_value (rtx);
4f588890 159static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx);
4682ae04 160static int insert_subset_children (splay_tree_node, void*);
4862826d 161static alias_set_entry get_alias_set_entry (alias_set_type);
4f588890
KG
162static const_rtx fixed_scalar_and_varying_struct_p (const_rtx, const_rtx, rtx, rtx,
163 bool (*) (const_rtx, bool));
164static int aliases_everything_p (const_rtx);
165static bool nonoverlapping_component_refs_p (const_tree, const_tree);
4682ae04
AJ
166static tree decl_for_component_ref (tree);
167static rtx adjust_offset_for_component_ref (tree, rtx);
4f588890 168static int write_dependence_p (const_rtx, const_rtx, int);
4682ae04 169
aa317c97 170static void memory_modified_1 (rtx, const_rtx, void *);
9ae8ffe7
JL
171
172/* Set up all info needed to perform alias analysis on memory references. */
173
d4b60170 174/* Returns the size in bytes of the mode of X. */
9ae8ffe7
JL
175#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
176
41472af8 177/* Returns nonzero if MEM1 and MEM2 do not alias because they are in
264fac34
MM
178 different alias sets. We ignore alias sets in functions making use
179 of variable arguments because the va_arg macros on some systems are
180 not legal ANSI C. */
181#define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
3932261a 182 mems_in_disjoint_alias_sets_p (MEM1, MEM2)
41472af8 183
ea64ef27 184/* Cap the number of passes we make over the insns propagating alias
ac3d9668 185 information through set chains. 10 is a completely arbitrary choice. */
ea64ef27 186#define MAX_ALIAS_LOOP_PASSES 10
ca7fd9cd 187
9ae8ffe7
JL
188/* reg_base_value[N] gives an address to which register N is related.
189 If all sets after the first add or subtract to the current value
190 or otherwise modify it so it does not point to a different top level
191 object, reg_base_value[N] is equal to the address part of the source
2a2c8203
JC
192 of the first set.
193
194 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
195 expressions represent certain special values: function arguments and
ca7fd9cd 196 the stack, frame, and argument pointers.
b3b5ad95
JL
197
 198 The contents of an ADDRESS are not normally used; the mode of the
199 ADDRESS determines whether the ADDRESS is a function argument or some
200 other special value. Pointer equality, not rtx_equal_p, determines whether
201 two ADDRESS expressions refer to the same base address.
202
203 The only use of the contents of an ADDRESS is for determining if the
 204 current function performs nonlocal memory references for the
205 purposes of marking the function as a constant function. */
2a2c8203 206
08c79682 207static GTY(()) VEC(rtx,gc) *reg_base_value;
ac606739 208static rtx *new_reg_base_value;
c582d54a
JH
209
 210/* We preserve a copy of the old array around to reduce the amount of
 211 garbage produced. About 8% of the garbage produced was attributed to this
212 array. */
08c79682 213static GTY((deletable)) VEC(rtx,gc) *old_reg_base_value;
d4b60170 214
7bf84454
RS
215#define static_reg_base_value \
216 (this_target_rtl->x_static_reg_base_value)
bf1660a6 217
08c79682
KH
218#define REG_BASE_VALUE(X) \
219 (REGNO (X) < VEC_length (rtx, reg_base_value) \
220 ? VEC_index (rtx, reg_base_value, REGNO (X)) : 0)
9ae8ffe7 221
c13e8210 222/* Vector indexed by N giving the initial (unchanging) value known for
bb1acb3e
RH
223 pseudo-register N. This array is initialized in init_alias_analysis,
224 and does not change until end_alias_analysis is called. */
225static GTY((length("reg_known_value_size"))) rtx *reg_known_value;
9ae8ffe7
JL
226
227/* Indicates number of valid entries in reg_known_value. */
bb1acb3e 228static GTY(()) unsigned int reg_known_value_size;
9ae8ffe7
JL
229
230/* Vector recording for each reg_known_value whether it is due to a
231 REG_EQUIV note. Future passes (viz., reload) may replace the
232 pseudo with the equivalent expression and so we account for the
ac3d9668
RK
233 dependences that would be introduced if that happens.
234
235 The REG_EQUIV notes created in assign_parms may mention the arg
236 pointer, and there are explicit insns in the RTL that modify the
237 arg pointer. Thus we must ensure that such insns don't get
238 scheduled across each other because that would invalidate the
239 REG_EQUIV notes. One could argue that the REG_EQUIV notes are
240 wrong, but solving the problem in the scheduler will likely give
241 better code, so we do it here. */
bb1acb3e 242static bool *reg_known_equiv_p;
9ae8ffe7 243
2a2c8203
JC
244/* True when scanning insns from the start of the rtl to the
245 NOTE_INSN_FUNCTION_BEG note. */
83bbd9b6 246static bool copying_arguments;
9ae8ffe7 247
1a5640b4
KH
248DEF_VEC_P(alias_set_entry);
249DEF_VEC_ALLOC_P(alias_set_entry,gc);
250
3932261a 251/* The splay-tree used to store the various alias set entries. */
1a5640b4 252static GTY (()) VEC(alias_set_entry,gc) *alias_sets;
ac3d9668 253\f
55b34b5f
RG
254/* Build a decomposed reference object for querying the alias-oracle
255 from the MEM rtx and store it in *REF.
256 Returns false if MEM is not suitable for the alias-oracle. */
257
258static bool
259ao_ref_from_mem (ao_ref *ref, const_rtx mem)
260{
261 tree expr = MEM_EXPR (mem);
262 tree base;
263
264 if (!expr)
265 return false;
266
267 ao_ref_init (ref, expr);
268
269 /* Get the base of the reference and see if we have to reject or
270 adjust it. */
271 base = ao_ref_base (ref);
272 if (base == NULL_TREE)
273 return false;
274
366f945f
RG
275 /* The tree oracle doesn't like to have these. */
276 if (TREE_CODE (base) == FUNCTION_DECL
277 || TREE_CODE (base) == LABEL_DECL)
278 return false;
279
55b34b5f
RG
280 /* If this is a pointer dereference of a non-SSA_NAME punt.
281 ??? We could replace it with a pointer to anything. */
70f34814
RG
282 if ((INDIRECT_REF_P (base)
283 || TREE_CODE (base) == MEM_REF)
55b34b5f
RG
284 && TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME)
285 return false;
d15adbeb
RG
286 if (TREE_CODE (base) == TARGET_MEM_REF
287 && TMR_BASE (base)
288 && TREE_CODE (TMR_BASE (base)) != SSA_NAME)
289 return false;
55b34b5f
RG
290
291 /* If this is a reference based on a partitioned decl replace the
292 base with an INDIRECT_REF of the pointer representative we
293 created during stack slot partitioning. */
294 if (TREE_CODE (base) == VAR_DECL
295 && ! TREE_STATIC (base)
296 && cfun->gimple_df->decls_to_pointers != NULL)
297 {
298 void *namep;
299 namep = pointer_map_contains (cfun->gimple_df->decls_to_pointers, base);
300 if (namep)
70f34814 301 ref->base = build_simple_mem_ref (*(tree *)namep);
55b34b5f 302 }
d15adbeb
RG
303 else if (TREE_CODE (base) == TARGET_MEM_REF
304 && TMR_SYMBOL (base)
23a534a1
RG
305 && TREE_CODE (TREE_OPERAND (TMR_SYMBOL (base), 0)) == VAR_DECL
306 && ! TREE_STATIC (TREE_OPERAND (TMR_SYMBOL (base), 0))
d15adbeb
RG
307 && cfun->gimple_df->decls_to_pointers != NULL)
308 {
309 void *namep;
310 namep = pointer_map_contains (cfun->gimple_df->decls_to_pointers,
23a534a1 311 TREE_OPERAND (TMR_SYMBOL (base), 0));
d15adbeb
RG
312 if (namep)
313 ref->base = build_simple_mem_ref (*(tree *)namep);
314 }
55b34b5f
RG
315
316 ref->ref_alias_set = MEM_ALIAS_SET (mem);
317
366f945f
RG
318 /* If MEM_OFFSET or MEM_SIZE are NULL we have to punt.
319 Keep points-to related information though. */
320 if (!MEM_OFFSET (mem)
321 || !MEM_SIZE (mem))
322 {
323 ref->ref = NULL_TREE;
324 ref->offset = 0;
325 ref->size = -1;
326 ref->max_size = -1;
327 return true;
328 }
329
b0e96404
RG
330 /* If the base decl is a parameter we can have negative MEM_OFFSET in
331 case of promoted subregs on bigendian targets. Trust the MEM_EXPR
332 here. */
333 if (INTVAL (MEM_OFFSET (mem)) < 0
334 && ((INTVAL (MEM_SIZE (mem)) + INTVAL (MEM_OFFSET (mem)))
335 * BITS_PER_UNIT) == ref->size)
336 return true;
337
338 ref->offset += INTVAL (MEM_OFFSET (mem)) * BITS_PER_UNIT;
339 ref->size = INTVAL (MEM_SIZE (mem)) * BITS_PER_UNIT;
340
341 /* The MEM may extend into adjacent fields, so adjust max_size if
342 necessary. */
343 if (ref->max_size != -1
344 && ref->size > ref->max_size)
345 ref->max_size = ref->size;
346
347 /* If MEM_OFFSET and MEM_SIZE get us outside of the base object of
348 the MEM_EXPR punt. This happens for STRICT_ALIGNMENT targets a lot. */
349 if (MEM_EXPR (mem) != get_spill_slot_decl (false)
350 && (ref->offset < 0
351 || (DECL_P (ref->base)
352 && (!host_integerp (DECL_SIZE (ref->base), 1)
353 || (TREE_INT_CST_LOW (DECL_SIZE ((ref->base)))
354 < (unsigned HOST_WIDE_INT)(ref->offset + ref->size))))))
355 return false;
55b34b5f
RG
356
357 return true;
358}
359
360/* Query the alias-oracle on whether the two memory rtx X and MEM may
361 alias. If TBAA_P is set also apply TBAA. Returns true if the
362 two rtxen may alias, false otherwise. */
363
364static bool
365rtx_refs_may_alias_p (const_rtx x, const_rtx mem, bool tbaa_p)
366{
367 ao_ref ref1, ref2;
368
369 if (!ao_ref_from_mem (&ref1, x)
370 || !ao_ref_from_mem (&ref2, mem))
371 return true;
372
55e3bc4c
RG
373 return refs_may_alias_p_1 (&ref1, &ref2,
374 tbaa_p
375 && MEM_ALIAS_SET (x) != 0
376 && MEM_ALIAS_SET (mem) != 0);
55b34b5f
RG
377}
378
3932261a
MM
379/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
380 such an entry, or NULL otherwise. */
381
9ddb66ca 382static inline alias_set_entry
4862826d 383get_alias_set_entry (alias_set_type alias_set)
3932261a 384{
1a5640b4 385 return VEC_index (alias_set_entry, alias_sets, alias_set);
3932261a
MM
386}
387
ac3d9668
RK
388/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
389 the two MEMs cannot alias each other. */
3932261a 390
9ddb66ca 391static inline int
4f588890 392mems_in_disjoint_alias_sets_p (const_rtx mem1, const_rtx mem2)
3932261a 393{
3932261a
MM
394/* Perform a basic sanity check. Namely, that there are no alias sets
395 if we're not using strict aliasing. This helps to catch bugs
396 whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
397 where a MEM is allocated in some way other than by the use of
398 gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared. If we begin to
399 use alias sets to indicate that spilled registers cannot alias each
400 other, we might need to remove this check. */
298e6adc
NS
401 gcc_assert (flag_strict_aliasing
402 || (!MEM_ALIAS_SET (mem1) && !MEM_ALIAS_SET (mem2)));
3932261a 403
1da68f56
RK
404 return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
405}
3932261a 406
1da68f56
RK
407/* Insert the NODE into the splay tree given by DATA. Used by
408 record_alias_subset via splay_tree_foreach. */
409
410static int
4682ae04 411insert_subset_children (splay_tree_node node, void *data)
1da68f56
RK
412{
413 splay_tree_insert ((splay_tree) data, node->key, node->value);
414
415 return 0;
416}
417
c58936b6
DB
418/* Return true if the first alias set is a subset of the second. */
419
420bool
4862826d 421alias_set_subset_of (alias_set_type set1, alias_set_type set2)
c58936b6
DB
422{
423 alias_set_entry ase;
424
425 /* Everything is a subset of the "aliases everything" set. */
426 if (set2 == 0)
427 return true;
428
429 /* Otherwise, check if set1 is a subset of set2. */
430 ase = get_alias_set_entry (set2);
431 if (ase != 0
038a39d1 432 && (ase->has_zero_child
a7a512be
RG
433 || splay_tree_lookup (ase->children,
434 (splay_tree_key) set1)))
c58936b6
DB
435 return true;
436 return false;
437}
438
1da68f56
RK
439/* Return 1 if the two specified alias sets may conflict. */
440
441int
4862826d 442alias_sets_conflict_p (alias_set_type set1, alias_set_type set2)
1da68f56
RK
443{
444 alias_set_entry ase;
445
836f7794
EB
446 /* The easy case. */
447 if (alias_sets_must_conflict_p (set1, set2))
1da68f56 448 return 1;
3932261a 449
3bdf5ad1 450 /* See if the first alias set is a subset of the second. */
1da68f56 451 ase = get_alias_set_entry (set1);
2bf105ab
RK
452 if (ase != 0
453 && (ase->has_zero_child
454 || splay_tree_lookup (ase->children,
1da68f56
RK
455 (splay_tree_key) set2)))
456 return 1;
3932261a
MM
457
458 /* Now do the same, but with the alias sets reversed. */
1da68f56 459 ase = get_alias_set_entry (set2);
2bf105ab
RK
460 if (ase != 0
461 && (ase->has_zero_child
462 || splay_tree_lookup (ase->children,
1da68f56
RK
463 (splay_tree_key) set1)))
464 return 1;
3932261a 465
1da68f56 466 /* The two alias sets are distinct and neither one is the
836f7794 467 child of the other. Therefore, they cannot conflict. */
1da68f56 468 return 0;
3932261a 469}
5399d643 470
836f7794 471/* Return 1 if the two specified alias sets will always conflict. */
5399d643
JW
472
473int
4862826d 474alias_sets_must_conflict_p (alias_set_type set1, alias_set_type set2)
5399d643
JW
475{
476 if (set1 == 0 || set2 == 0 || set1 == set2)
477 return 1;
478
479 return 0;
480}
481
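/* Illustrative sketch only (kept under #if 0, not built): how the
   `struct S { int i; double d; }' example from the comment near the top
   of this file maps onto the two predicates above, assuming
   -fstrict-aliasing and a hypothetical S_TYPE tree built by a front end.  */
#if 0
static void
alias_set_lattice_example (tree s_type)
{
  alias_set_type s_set = get_alias_set (s_type);
  alias_set_type i_set = get_alias_set (integer_type_node);
  alias_set_type d_set = get_alias_set (double_type_node);

  /* A store to a `struct S' may alias either of its components...  */
  gcc_assert (alias_sets_conflict_p (s_set, i_set));
  gcc_assert (alias_sets_conflict_p (s_set, d_set));

  /* ...but `int' and `double' accesses never alias each other.  */
  gcc_assert (!alias_sets_conflict_p (i_set, d_set));

  /* Only alias set zero and identical sets are guaranteed to conflict.  */
  gcc_assert (alias_sets_must_conflict_p (0, d_set));
  gcc_assert (!alias_sets_must_conflict_p (i_set, d_set));
}
#endif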
1da68f56
RK
482/* Return 1 if any MEM object of type T1 will always conflict (using the
483 dependency routines in this file) with any MEM object of type T2.
484 This is used when allocating temporary storage. If T1 and/or T2 are
485 NULL_TREE, it means we know nothing about the storage. */
486
487int
4682ae04 488objects_must_conflict_p (tree t1, tree t2)
1da68f56 489{
4862826d 490 alias_set_type set1, set2;
82d610ec 491
e8ea2809
RK
492 /* If neither has a type specified, we don't know if they'll conflict
493 because we may be using them to store objects of various types, for
494 example the argument and local variables areas of inlined functions. */
981a4c34 495 if (t1 == 0 && t2 == 0)
e8ea2809
RK
496 return 0;
497
1da68f56
RK
498 /* If they are the same type, they must conflict. */
499 if (t1 == t2
500 /* Likewise if both are volatile. */
501 || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
502 return 1;
503
82d610ec
RK
504 set1 = t1 ? get_alias_set (t1) : 0;
505 set2 = t2 ? get_alias_set (t2) : 0;
1da68f56 506
836f7794
EB
507 /* We can't use alias_sets_conflict_p because we must make sure
508 that every subtype of t1 will conflict with every subtype of
82d610ec
RK
509 t2 for which a pair of subobjects of these respective subtypes
510 overlaps on the stack. */
836f7794 511 return alias_sets_must_conflict_p (set1, set2);
1da68f56
RK
512}
513\f
2039d7aa
RH
514/* Return true if all nested component references handled by
515 get_inner_reference in T are such that we should use the alias set
516 provided by the object at the heart of T.
517
518 This is true for non-addressable components (which don't have their
519 own alias set), as well as components of objects in alias set zero.
 520 This latter point is a special case wherein we wish to override the
521 alias set used by the component, but we don't have per-FIELD_DECL
522 assignable alias sets. */
523
524bool
22ea9ec0 525component_uses_parent_alias_set (const_tree t)
6e24b709 526{
afe84921
RH
527 while (1)
528 {
2039d7aa 529 /* If we're at the end, it vacuously uses its own alias set. */
afe84921 530 if (!handled_component_p (t))
2039d7aa 531 return false;
afe84921
RH
532
533 switch (TREE_CODE (t))
534 {
535 case COMPONENT_REF:
536 if (DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
2039d7aa 537 return true;
afe84921
RH
538 break;
539
540 case ARRAY_REF:
541 case ARRAY_RANGE_REF:
542 if (TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0))))
2039d7aa 543 return true;
afe84921
RH
544 break;
545
546 case REALPART_EXPR:
547 case IMAGPART_EXPR:
548 break;
549
550 default:
551 /* Bitfields and casts are never addressable. */
2039d7aa 552 return true;
afe84921
RH
553 }
554
555 t = TREE_OPERAND (t, 0);
2039d7aa
RH
556 if (get_alias_set (TREE_TYPE (t)) == 0)
557 return true;
afe84921 558 }
6e24b709
RK
559}
560
5006671f
RG
561/* Return the alias set for the memory pointed to by T, which may be
562 either a type or an expression. Return -1 if there is nothing
563 special about dereferencing T. */
564
565static alias_set_type
566get_deref_alias_set_1 (tree t)
567{
568 /* If we're not doing any alias analysis, just assume everything
569 aliases everything else. */
570 if (!flag_strict_aliasing)
571 return 0;
572
5b21f0f3 573 /* All we care about is the type. */
5006671f 574 if (! TYPE_P (t))
5b21f0f3 575 t = TREE_TYPE (t);
5006671f
RG
576
577 /* If we have an INDIRECT_REF via a void pointer, we don't
578 know anything about what that might alias. Likewise if the
579 pointer is marked that way. */
580 if (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE
581 || TYPE_REF_CAN_ALIAS_ALL (t))
582 return 0;
583
584 return -1;
585}
586
587/* Return the alias set for the memory pointed to by T, which may be
588 either a type or an expression. */
589
590alias_set_type
591get_deref_alias_set (tree t)
592{
593 alias_set_type set = get_deref_alias_set_1 (t);
594
595 /* Fall back to the alias-set of the pointed-to type. */
596 if (set == -1)
597 {
598 if (! TYPE_P (t))
599 t = TREE_TYPE (t);
600 set = get_alias_set (TREE_TYPE (t));
601 }
602
603 return set;
604}
605
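/* Illustrative sketch only (kept under #if 0, not built): with
   -fstrict-aliasing, dereferencing a `void *' yields the "can alias
   anything" set zero, while any other pointer falls back to the
   alias set of its pointed-to type.  */
#if 0
static void
get_deref_alias_set_example (void)
{
  tree int_ptr = build_pointer_type (integer_type_node);

  gcc_assert (get_deref_alias_set (ptr_type_node) == 0);
  gcc_assert (get_deref_alias_set (int_ptr)
	      == get_alias_set (integer_type_node));
}
#endif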
3bdf5ad1
RK
606/* Return the alias set for T, which may be either a type or an
607 expression. Call language-specific routine for help, if needed. */
608
4862826d 609alias_set_type
4682ae04 610get_alias_set (tree t)
3bdf5ad1 611{
4862826d 612 alias_set_type set;
3bdf5ad1
RK
613
614 /* If we're not doing any alias analysis, just assume everything
615 aliases everything else. Also return 0 if this or its type is
616 an error. */
617 if (! flag_strict_aliasing || t == error_mark_node
618 || (! TYPE_P (t)
619 && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
620 return 0;
621
622 /* We can be passed either an expression or a type. This and the
f47e9b4e
RK
623 language-specific routine may make mutually-recursive calls to each other
624 to figure out what to do. At each juncture, we see if this is a tree
625 that the language may need to handle specially. First handle things that
738cc472 626 aren't types. */
f824e5c3 627 if (! TYPE_P (t))
3bdf5ad1 628 {
ebcc3d93 629 tree inner;
738cc472 630
70f34814
RG
631 /* Give the language a chance to do something with this tree
632 before we look at it. */
8ac61af7 633 STRIP_NOPS (t);
ae2bcd98 634 set = lang_hooks.get_alias_set (t);
8ac61af7
RK
635 if (set != -1)
636 return set;
637
70f34814 638 /* Get the base object of the reference. */
ebcc3d93 639 inner = t;
6fce44af 640 while (handled_component_p (inner))
738cc472 641 {
70f34814
RG
642 /* If there is a VIEW_CONVERT_EXPR in the chain we cannot use
643 the type of any component references that wrap it to
644 determine the alias-set. */
645 if (TREE_CODE (inner) == VIEW_CONVERT_EXPR)
646 t = TREE_OPERAND (inner, 0);
6fce44af 647 inner = TREE_OPERAND (inner, 0);
738cc472
RK
648 }
649
70f34814
RG
650 /* Handle pointer dereferences here, they can override the
651 alias-set. */
1b096a0a 652 if (INDIRECT_REF_P (inner))
738cc472 653 {
5006671f
RG
654 set = get_deref_alias_set_1 (TREE_OPERAND (inner, 0));
655 if (set != -1)
656 return set;
738cc472 657 }
4b228e61
RG
658 else if (TREE_CODE (inner) == TARGET_MEM_REF)
659 return get_deref_alias_set (TMR_OFFSET (inner));
70f34814
RG
660 else if (TREE_CODE (inner) == MEM_REF)
661 {
662 set = get_deref_alias_set_1 (TREE_OPERAND (inner, 1));
663 if (set != -1)
664 return set;
665 }
666
667 /* If the innermost reference is a MEM_REF that has a
668 conversion embedded treat it like a VIEW_CONVERT_EXPR above,
669 using the memory access type for determining the alias-set. */
670 if (TREE_CODE (inner) == MEM_REF
7be7d292
EB
671 && TYPE_MAIN_VARIANT (TREE_TYPE (inner))
672 != TYPE_MAIN_VARIANT
673 (TREE_TYPE (TREE_TYPE (TREE_OPERAND (inner, 1)))))
70f34814 674 return get_deref_alias_set (TREE_OPERAND (inner, 1));
738cc472
RK
675
676 /* Otherwise, pick up the outermost object that we could have a pointer
6fce44af 677 to, processing conversions as above. */
2039d7aa 678 while (component_uses_parent_alias_set (t))
f47e9b4e 679 {
6fce44af 680 t = TREE_OPERAND (t, 0);
8ac61af7
RK
681 STRIP_NOPS (t);
682 }
f824e5c3 683
738cc472
RK
684 /* If we've already determined the alias set for a decl, just return
685 it. This is necessary for C++ anonymous unions, whose component
686 variables don't look like union members (boo!). */
5755cd38 687 if (TREE_CODE (t) == VAR_DECL
3c0cb5de 688 && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
5755cd38
JM
689 return MEM_ALIAS_SET (DECL_RTL (t));
690
f824e5c3
RK
691 /* Now all we care about is the type. */
692 t = TREE_TYPE (t);
3bdf5ad1
RK
693 }
694
3bdf5ad1 695 /* Variant qualifiers don't affect the alias set, so get the main
daad0278 696 variant. */
3bdf5ad1 697 t = TYPE_MAIN_VARIANT (t);
daad0278
RG
698
699 /* Always use the canonical type as well. If this is a type that
700 requires structural comparisons to identify compatible types
701 use alias set zero. */
702 if (TYPE_STRUCTURAL_EQUALITY_P (t))
cb9c2485
JM
703 {
704 /* Allow the language to specify another alias set for this
705 type. */
706 set = lang_hooks.get_alias_set (t);
707 if (set != -1)
708 return set;
709 return 0;
710 }
7be7d292 711
daad0278 712 t = TYPE_CANONICAL (t);
7be7d292 713
daad0278
RG
714 /* Canonical types shouldn't form a tree nor should the canonical
715 type require structural equality checks. */
7be7d292
EB
716 gcc_checking_assert (TYPE_CANONICAL (t) == t
717 && !TYPE_STRUCTURAL_EQUALITY_P (t));
daad0278
RG
718
719 /* If this is a type with a known alias set, return it. */
738cc472 720 if (TYPE_ALIAS_SET_KNOWN_P (t))
3bdf5ad1
RK
721 return TYPE_ALIAS_SET (t);
722
36784d0e
RG
723 /* We don't want to set TYPE_ALIAS_SET for incomplete types. */
724 if (!COMPLETE_TYPE_P (t))
725 {
726 /* For arrays with unknown size the conservative answer is the
727 alias set of the element type. */
728 if (TREE_CODE (t) == ARRAY_TYPE)
729 return get_alias_set (TREE_TYPE (t));
730
731 /* But return zero as a conservative answer for incomplete types. */
732 return 0;
733 }
734
3bdf5ad1 735 /* See if the language has special handling for this type. */
ae2bcd98 736 set = lang_hooks.get_alias_set (t);
8ac61af7 737 if (set != -1)
738cc472 738 return set;
2bf105ab 739
3bdf5ad1
RK
740 /* There are no objects of FUNCTION_TYPE, so there's no point in
741 using up an alias set for them. (There are, of course, pointers
742 and references to functions, but that's different.) */
7be7d292 743 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
3bdf5ad1 744 set = 0;
74d86f4f
RH
745
746 /* Unless the language specifies otherwise, let vector types alias
747 their components. This avoids some nasty type punning issues in
748 normal usage. And indeed lets vectors be treated more like an
749 array slice. */
750 else if (TREE_CODE (t) == VECTOR_TYPE)
751 set = get_alias_set (TREE_TYPE (t));
752
4653cae5
RG
753 /* Unless the language specifies otherwise, treat array types the
754 same as their components. This avoids the asymmetry we get
755 through recording the components. Consider accessing a
756 character(kind=1) through a reference to a character(kind=1)[1:1].
757 Or consider if we want to assign integer(kind=4)[0:D.1387] and
758 integer(kind=4)[4] the same alias set or not.
759 Just be pragmatic here and make sure the array and its element
760 type get the same alias set assigned. */
7be7d292 761 else if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NONALIASED_COMPONENT (t))
4653cae5
RG
762 set = get_alias_set (TREE_TYPE (t));
763
0ceb0201
RG
764 /* From the former common C and C++ langhook implementation:
765
766 Unfortunately, there is no canonical form of a pointer type.
767 In particular, if we have `typedef int I', then `int *', and
768 `I *' are different types. So, we have to pick a canonical
769 representative. We do this below.
770
 771 Technically, this approach is actually more conservative than
772 it needs to be. In particular, `const int *' and `int *'
773 should be in different alias sets, according to the C and C++
774 standard, since their types are not the same, and so,
775 technically, an `int **' and `const int **' cannot point at
776 the same thing.
777
778 But, the standard is wrong. In particular, this code is
779 legal C++:
780
781 int *ip;
782 int **ipp = &ip;
783 const int* const* cipp = ipp;
784 And, it doesn't make sense for that to be legal unless you
785 can dereference IPP and CIPP. So, we ignore cv-qualifiers on
786 the pointed-to types. This issue has been reported to the
787 C++ committee.
788
789 In addition to the above canonicalization issue, with LTO
790 we should also canonicalize `T (*)[]' to `T *' avoiding
791 alias issues with pointer-to element types and pointer-to
792 array types.
793
794 Likewise we need to deal with the situation of incomplete
795 pointed-to types and make `*(struct X **)&a' and
796 `*(struct X {} **)&a' alias. Otherwise we will have to
797 guarantee that all pointer-to incomplete type variants
798 will be replaced by pointer-to complete type variants if
799 they are available.
800
801 With LTO the convenient situation of using `void *' to
802 access and store any pointer type will also become
803 more apparent (and `void *' is just another pointer-to
804 incomplete type). Assigning alias-set zero to `void *'
 805 and all pointer-to incomplete types is not an appealing
806 solution. Assigning an effective alias-set zero only
807 affecting pointers might be - by recording proper subset
808 relationships of all pointer alias-sets.
809
810 Pointer-to function types are another grey area which
811 needs caution. Globbing them all into one alias-set
812 or the above effective zero set would work.
813
814 For now just assign the same alias-set to all pointers.
815 That's simple and avoids all the above problems. */
816 else if (POINTER_TYPE_P (t)
817 && t != ptr_type_node)
818 return get_alias_set (ptr_type_node);
819
7be7d292 820 /* Otherwise make a new alias set for this type. */
3bdf5ad1 821 else
3bdf5ad1
RK
822 set = new_alias_set ();
823
824 TYPE_ALIAS_SET (t) = set;
2bf105ab 825
7be7d292
EB
826 /* If this is an aggregate type or a complex type, we must record any
827 component aliasing information. */
1d79fd2c 828 if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
2bf105ab
RK
829 record_component_aliases (t);
830
3bdf5ad1
RK
831 return set;
832}
833
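/* Illustrative sketch only (kept under #if 0, not built): two
   consequences of the rules above, assuming -fstrict-aliasing and that
   the language hook does not override these types.  */
#if 0
static void
get_alias_set_example (void)
{
  tree const_int_type = build_qualified_type (integer_type_node,
					      TYPE_QUAL_CONST);
  tree int_ptr = build_pointer_type (integer_type_node);

  /* Variant qualifiers do not matter; `const int' shares `int's set.  */
  gcc_assert (get_alias_set (const_int_type)
	      == get_alias_set (integer_type_node));

  /* For now all pointer types are globbed into the alias set of
     `void *' (see the long comment above).  */
  gcc_assert (get_alias_set (int_ptr) == get_alias_set (ptr_type_node));
}
#endif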
834/* Return a brand-new alias set. */
835
4862826d 836alias_set_type
4682ae04 837new_alias_set (void)
3bdf5ad1 838{
3bdf5ad1 839 if (flag_strict_aliasing)
9ddb66ca 840 {
1a5640b4
KH
841 if (alias_sets == 0)
842 VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
843 VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
844 return VEC_length (alias_set_entry, alias_sets) - 1;
9ddb66ca 845 }
3bdf5ad1
RK
846 else
847 return 0;
848}
3932261a 849
01d28c3f
JM
850/* Indicate that things in SUBSET can alias things in SUPERSET, but that
851 not everything that aliases SUPERSET also aliases SUBSET. For example,
852 in C, a store to an `int' can alias a load of a structure containing an
853 `int', and vice versa. But it can't alias a load of a 'double' member
854 of the same structure. Here, the structure would be the SUPERSET and
855 `int' the SUBSET. This relationship is also described in the comment at
856 the beginning of this file.
857
858 This function should be called only once per SUPERSET/SUBSET pair.
3932261a
MM
859
860 It is illegal for SUPERSET to be zero; everything is implicitly a
861 subset of alias set zero. */
862
794511d2 863void
4862826d 864record_alias_subset (alias_set_type superset, alias_set_type subset)
3932261a
MM
865{
866 alias_set_entry superset_entry;
867 alias_set_entry subset_entry;
868
f47e9b4e
RK
869 /* It is possible in complex type situations for both sets to be the same,
870 in which case we can ignore this operation. */
871 if (superset == subset)
872 return;
873
298e6adc 874 gcc_assert (superset);
3932261a
MM
875
876 superset_entry = get_alias_set_entry (superset);
ca7fd9cd 877 if (superset_entry == 0)
3932261a
MM
878 {
879 /* Create an entry for the SUPERSET, so that we have a place to
880 attach the SUBSET. */
a9429e29 881 superset_entry = ggc_alloc_cleared_alias_set_entry_d ();
3932261a 882 superset_entry->alias_set = superset;
ca7fd9cd 883 superset_entry->children
a9429e29
LB
884 = splay_tree_new_ggc (splay_tree_compare_ints,
885 ggc_alloc_splay_tree_scalar_scalar_splay_tree_s,
886 ggc_alloc_splay_tree_scalar_scalar_splay_tree_node_s);
570eb5c8 887 superset_entry->has_zero_child = 0;
1a5640b4 888 VEC_replace (alias_set_entry, alias_sets, superset, superset_entry);
3932261a
MM
889 }
890
2bf105ab
RK
891 if (subset == 0)
892 superset_entry->has_zero_child = 1;
893 else
894 {
895 subset_entry = get_alias_set_entry (subset);
896 /* If there is an entry for the subset, enter all of its children
897 (if they are not already present) as children of the SUPERSET. */
ca7fd9cd 898 if (subset_entry)
2bf105ab
RK
899 {
900 if (subset_entry->has_zero_child)
901 superset_entry->has_zero_child = 1;
d4b60170 902
2bf105ab
RK
903 splay_tree_foreach (subset_entry->children, insert_subset_children,
904 superset_entry->children);
905 }
3932261a 906
2bf105ab 907 /* Enter the SUBSET itself as a child of the SUPERSET. */
ca7fd9cd 908 splay_tree_insert (superset_entry->children,
2bf105ab
RK
909 (splay_tree_key) subset, 0);
910 }
3932261a
MM
911}
912
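/* Illustrative sketch only (kept under #if 0, not built; the type
   arguments are hypothetical): this is in essence what
   record_component_aliases below does for the
   `struct T { struct S s; float f; }' example from the comment near the
   top of this file.  Since `struct S' already has `int' and `double'
   recorded as children, they are propagated into T's children too,
   assuming -fstrict-aliasing.  */
#if 0
static void
record_alias_subset_example (tree t_type, tree s_type)
{
  alias_set_type t_set = get_alias_set (t_type);

  record_alias_subset (t_set, get_alias_set (s_type));
  record_alias_subset (t_set, get_alias_set (float_type_node));

  /* T's children are now `int', `double', `float' and `struct S'.  */
  gcc_assert (alias_set_subset_of (get_alias_set (integer_type_node), t_set));
  gcc_assert (alias_set_subset_of (get_alias_set (float_type_node), t_set));
}
#endif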
a0c33338
RK
913/* Record that component types of TYPE, if any, are part of that type for
914 aliasing purposes. For record types, we only record component types
b5487346
EB
915 for fields that are not marked non-addressable. For array types, we
916 only record the component type if it is not marked non-aliased. */
a0c33338
RK
917
918void
4682ae04 919record_component_aliases (tree type)
a0c33338 920{
4862826d 921 alias_set_type superset = get_alias_set (type);
a0c33338
RK
922 tree field;
923
924 if (superset == 0)
925 return;
926
927 switch (TREE_CODE (type))
928 {
a0c33338
RK
929 case RECORD_TYPE:
930 case UNION_TYPE:
931 case QUAL_UNION_TYPE:
6614fd40 932 /* Recursively record aliases for the base classes, if there are any. */
fa743e8c 933 if (TYPE_BINFO (type))
ca7fd9cd
KH
934 {
935 int i;
fa743e8c 936 tree binfo, base_binfo;
c22cacf3 937
fa743e8c
NS
938 for (binfo = TYPE_BINFO (type), i = 0;
939 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
940 record_alias_subset (superset,
941 get_alias_set (BINFO_TYPE (base_binfo)));
ca7fd9cd 942 }
910ad8de 943 for (field = TYPE_FIELDS (type); field != 0; field = DECL_CHAIN (field))
b5487346 944 if (TREE_CODE (field) == FIELD_DECL && !DECL_NONADDRESSABLE_P (field))
2bf105ab 945 record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
a0c33338
RK
946 break;
947
1d79fd2c
JW
948 case COMPLEX_TYPE:
949 record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
950 break;
951
4653cae5
RG
952 /* VECTOR_TYPE and ARRAY_TYPE share the alias set with their
953 element type. */
954
a0c33338
RK
955 default:
956 break;
957 }
958}
959
3bdf5ad1
RK
960/* Allocate an alias set for use in storing and reading from the varargs
961 spill area. */
962
4862826d 963static GTY(()) alias_set_type varargs_set = -1;
f103e34d 964
4862826d 965alias_set_type
4682ae04 966get_varargs_alias_set (void)
3bdf5ad1 967{
cd3ce9b4
JM
968#if 1
969 /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
970 varargs alias set to an INDIRECT_REF (FIXME!), so we can't
971 consistently use the varargs alias set for loads from the varargs
972 area. So don't use it anywhere. */
973 return 0;
974#else
f103e34d
GK
975 if (varargs_set == -1)
976 varargs_set = new_alias_set ();
3bdf5ad1 977
f103e34d 978 return varargs_set;
cd3ce9b4 979#endif
3bdf5ad1
RK
980}
981
982/* Likewise, but used for the fixed portions of the frame, e.g., register
983 save areas. */
984
4862826d 985static GTY(()) alias_set_type frame_set = -1;
f103e34d 986
4862826d 987alias_set_type
4682ae04 988get_frame_alias_set (void)
3bdf5ad1 989{
f103e34d
GK
990 if (frame_set == -1)
991 frame_set = new_alias_set ();
3bdf5ad1 992
f103e34d 993 return frame_set;
3bdf5ad1
RK
994}
995
2a2c8203
JC
996/* Inside SRC, the source of a SET, find a base address. */
997
9ae8ffe7 998static rtx
4682ae04 999find_base_value (rtx src)
9ae8ffe7 1000{
713f41f9 1001 unsigned int regno;
0aacc8ed 1002
53451050
RS
1003#if defined (FIND_BASE_TERM)
1004 /* Try machine-dependent ways to find the base term. */
1005 src = FIND_BASE_TERM (src);
1006#endif
1007
9ae8ffe7
JL
1008 switch (GET_CODE (src))
1009 {
1010 case SYMBOL_REF:
1011 case LABEL_REF:
1012 return src;
1013
1014 case REG:
fb6754f0 1015 regno = REGNO (src);
d4b60170 1016 /* At the start of a function, argument registers have known base
2a2c8203
JC
1017 values which may be lost later. Returning an ADDRESS
1018 expression here allows optimization based on argument values
1019 even when the argument registers are used for other purposes. */
713f41f9
BS
1020 if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
1021 return new_reg_base_value[regno];
73774bc7 1022
eaf407a5 1023 /* If a pseudo has a known base value, return it. Do not do this
9b462c42
RH
1024 for non-fixed hard regs since it can result in a circular
1025 dependency chain for registers which have values at function entry.
eaf407a5
JL
1026
1027 The test above is not sufficient because the scheduler may move
1028 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
9b462c42 1029 if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
08c79682 1030 && regno < VEC_length (rtx, reg_base_value))
83bbd9b6
RH
1031 {
1032 /* If we're inside init_alias_analysis, use new_reg_base_value
1033 to reduce the number of relaxation iterations. */
1afdf91c 1034 if (new_reg_base_value && new_reg_base_value[regno]
6fb5fa3c 1035 && DF_REG_DEF_COUNT (regno) == 1)
83bbd9b6
RH
1036 return new_reg_base_value[regno];
1037
08c79682
KH
1038 if (VEC_index (rtx, reg_base_value, regno))
1039 return VEC_index (rtx, reg_base_value, regno);
83bbd9b6 1040 }
73774bc7 1041
e3f049a8 1042 return 0;
9ae8ffe7
JL
1043
1044 case MEM:
1045 /* Check for an argument passed in memory. Only record in the
1046 copying-arguments block; it is too hard to track changes
1047 otherwise. */
1048 if (copying_arguments
1049 && (XEXP (src, 0) == arg_pointer_rtx
1050 || (GET_CODE (XEXP (src, 0)) == PLUS
1051 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
38a448ca 1052 return gen_rtx_ADDRESS (VOIDmode, src);
9ae8ffe7
JL
1053 return 0;
1054
1055 case CONST:
1056 src = XEXP (src, 0);
1057 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
1058 break;
d4b60170 1059
ec5c56db 1060 /* ... fall through ... */
2a2c8203 1061
9ae8ffe7
JL
1062 case PLUS:
1063 case MINUS:
2a2c8203 1064 {
ec907dd8
JL
1065 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
1066
0134bf2d
DE
1067 /* If either operand is a REG that is a known pointer, then it
1068 is the base. */
1069 if (REG_P (src_0) && REG_POINTER (src_0))
1070 return find_base_value (src_0);
1071 if (REG_P (src_1) && REG_POINTER (src_1))
1072 return find_base_value (src_1);
1073
ec907dd8
JL
1074 /* If either operand is a REG, then see if we already have
1075 a known value for it. */
0134bf2d 1076 if (REG_P (src_0))
ec907dd8
JL
1077 {
1078 temp = find_base_value (src_0);
d4b60170 1079 if (temp != 0)
ec907dd8
JL
1080 src_0 = temp;
1081 }
1082
0134bf2d 1083 if (REG_P (src_1))
ec907dd8
JL
1084 {
1085 temp = find_base_value (src_1);
d4b60170 1086 if (temp!= 0)
ec907dd8
JL
1087 src_1 = temp;
1088 }
2a2c8203 1089
0134bf2d
DE
 1090 /* If either base is a named object or a special address
1091 (like an argument or stack reference), then use it for the
1092 base term. */
1093 if (src_0 != 0
1094 && (GET_CODE (src_0) == SYMBOL_REF
1095 || GET_CODE (src_0) == LABEL_REF
1096 || (GET_CODE (src_0) == ADDRESS
1097 && GET_MODE (src_0) != VOIDmode)))
1098 return src_0;
1099
1100 if (src_1 != 0
1101 && (GET_CODE (src_1) == SYMBOL_REF
1102 || GET_CODE (src_1) == LABEL_REF
1103 || (GET_CODE (src_1) == ADDRESS
1104 && GET_MODE (src_1) != VOIDmode)))
1105 return src_1;
1106
d4b60170 1107 /* Guess which operand is the base address:
ec907dd8
JL
1108 If either operand is a symbol, then it is the base. If
1109 either operand is a CONST_INT, then the other is the base. */
481683e1 1110 if (CONST_INT_P (src_1) || CONSTANT_P (src_0))
2a2c8203 1111 return find_base_value (src_0);
481683e1 1112 else if (CONST_INT_P (src_0) || CONSTANT_P (src_1))
ec907dd8
JL
1113 return find_base_value (src_1);
1114
9ae8ffe7 1115 return 0;
2a2c8203
JC
1116 }
1117
1118 case LO_SUM:
1119 /* The standard form is (lo_sum reg sym) so look only at the
1120 second operand. */
1121 return find_base_value (XEXP (src, 1));
9ae8ffe7
JL
1122
1123 case AND:
 1124 /* If the second operand is constant, set the base
ec5c56db 1125 address to the first operand. */
481683e1 1126 if (CONST_INT_P (XEXP (src, 1)) && INTVAL (XEXP (src, 1)) != 0)
2a2c8203 1127 return find_base_value (XEXP (src, 0));
9ae8ffe7
JL
1128 return 0;
1129
61f0131c 1130 case TRUNCATE:
d4ebfa65
BE
 1131 /* As we do not know which address space the pointer is referring to, we can
1132 handle this only if the target does not support different pointer or
1133 address modes depending on the address space. */
1134 if (!target_default_pointer_address_modes_p ())
1135 break;
61f0131c
R
1136 if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
1137 break;
1138 /* Fall through. */
9ae8ffe7 1139 case HIGH:
d288e53d
DE
1140 case PRE_INC:
1141 case PRE_DEC:
1142 case POST_INC:
1143 case POST_DEC:
1144 case PRE_MODIFY:
1145 case POST_MODIFY:
2a2c8203 1146 return find_base_value (XEXP (src, 0));
1d300e19 1147
0aacc8ed
RK
1148 case ZERO_EXTEND:
1149 case SIGN_EXTEND: /* used for NT/Alpha pointers */
d4ebfa65
BE
 1150 /* As we do not know which address space the pointer is referring to, we can
1151 handle this only if the target does not support different pointer or
1152 address modes depending on the address space. */
1153 if (!target_default_pointer_address_modes_p ())
1154 break;
1155
0aacc8ed
RK
1156 {
1157 rtx temp = find_base_value (XEXP (src, 0));
1158
5ae6cd0d 1159 if (temp != 0 && CONSTANT_P (temp))
0aacc8ed 1160 temp = convert_memory_address (Pmode, temp);
0aacc8ed
RK
1161
1162 return temp;
1163 }
1164
1d300e19
KG
1165 default:
1166 break;
9ae8ffe7
JL
1167 }
1168
1169 return 0;
1170}
1171
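/* Illustrative sketch only (kept under #if 0, not built): once
   init_alias_analysis has populated reg_base_value, the constant index
   in an address like (plus (reg N) (const_int 8)) is ignored and the
   recorded base of the register is returned.  PSEUDO_REG and SYM are
   hypothetical; SYM stands for the SYMBOL_REF recorded as the base
   value of PSEUDO_REG.  */
#if 0
static void
find_base_value_example (rtx pseudo_reg, rtx sym)
{
  rtx addr = gen_rtx_PLUS (Pmode, pseudo_reg, GEN_INT (8));

  gcc_assert (find_base_value (addr) == sym);
}
#endif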
1172/* Called from init_alias_analysis indirectly through note_stores. */
1173
d4b60170 1174/* While scanning insns to find base values, reg_seen[N] is nonzero if
9ae8ffe7
JL
1175 register N has been set in this function. */
1176static char *reg_seen;
1177
13309a5f
JC
1178/* Addresses which are known not to alias anything else are identified
1179 by a unique integer. */
ec907dd8
JL
1180static int unique_id;
1181
2a2c8203 1182static void
7bc980e1 1183record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
9ae8ffe7 1184{
b3694847 1185 unsigned regno;
9ae8ffe7 1186 rtx src;
c28b4e40 1187 int n;
9ae8ffe7 1188
f8cfc6aa 1189 if (!REG_P (dest))
9ae8ffe7
JL
1190 return;
1191
fb6754f0 1192 regno = REGNO (dest);
9ae8ffe7 1193
7a40b8b1 1194 gcc_checking_assert (regno < VEC_length (rtx, reg_base_value));
ac606739 1195
c28b4e40
JW
1196 /* If this spans multiple hard registers, then we must indicate that every
1197 register has an unusable value. */
1198 if (regno < FIRST_PSEUDO_REGISTER)
66fd46b6 1199 n = hard_regno_nregs[regno][GET_MODE (dest)];
c28b4e40
JW
1200 else
1201 n = 1;
1202 if (n != 1)
1203 {
1204 while (--n >= 0)
1205 {
1206 reg_seen[regno + n] = 1;
1207 new_reg_base_value[regno + n] = 0;
1208 }
1209 return;
1210 }
1211
9ae8ffe7
JL
1212 if (set)
1213 {
1214 /* A CLOBBER wipes out any old value but does not prevent a previously
1215 unset register from acquiring a base address (i.e. reg_seen is not
1216 set). */
1217 if (GET_CODE (set) == CLOBBER)
1218 {
ec907dd8 1219 new_reg_base_value[regno] = 0;
9ae8ffe7
JL
1220 return;
1221 }
1222 src = SET_SRC (set);
1223 }
1224 else
1225 {
9ae8ffe7
JL
1226 if (reg_seen[regno])
1227 {
ec907dd8 1228 new_reg_base_value[regno] = 0;
9ae8ffe7
JL
1229 return;
1230 }
1231 reg_seen[regno] = 1;
38a448ca
RH
1232 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
1233 GEN_INT (unique_id++));
9ae8ffe7
JL
1234 return;
1235 }
1236
5da6f168
RS
1237 /* If this is not the first set of REGNO, see whether the new value
1238 is related to the old one. There are two cases of interest:
1239
1240 (1) The register might be assigned an entirely new value
1241 that has the same base term as the original set.
1242
1243 (2) The set might be a simple self-modification that
1244 cannot change REGNO's base value.
1245
1246 If neither case holds, reject the original base value as invalid.
1247 Note that the following situation is not detected:
1248
c22cacf3 1249 extern int x, y; int *p = &x; p += (&y-&x);
5da6f168 1250
9ae8ffe7
JL
1251 ANSI C does not allow computing the difference of addresses
1252 of distinct top level objects. */
5da6f168
RS
1253 if (new_reg_base_value[regno] != 0
1254 && find_base_value (src) != new_reg_base_value[regno])
9ae8ffe7
JL
1255 switch (GET_CODE (src))
1256 {
2a2c8203 1257 case LO_SUM:
9ae8ffe7
JL
1258 case MINUS:
1259 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
ec907dd8 1260 new_reg_base_value[regno] = 0;
9ae8ffe7 1261 break;
61f0131c
R
1262 case PLUS:
1263 /* If the value we add in the PLUS is also a valid base value,
1264 this might be the actual base value, and the original value
1265 an index. */
1266 {
1267 rtx other = NULL_RTX;
1268
1269 if (XEXP (src, 0) == dest)
1270 other = XEXP (src, 1);
1271 else if (XEXP (src, 1) == dest)
1272 other = XEXP (src, 0);
1273
1274 if (! other || find_base_value (other))
1275 new_reg_base_value[regno] = 0;
1276 break;
1277 }
9ae8ffe7 1278 case AND:
481683e1 1279 if (XEXP (src, 0) != dest || !CONST_INT_P (XEXP (src, 1)))
ec907dd8 1280 new_reg_base_value[regno] = 0;
9ae8ffe7 1281 break;
9ae8ffe7 1282 default:
ec907dd8 1283 new_reg_base_value[regno] = 0;
9ae8ffe7
JL
1284 break;
1285 }
1286 /* If this is the first set of a register, record the value. */
1287 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
ec907dd8
JL
1288 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
1289 new_reg_base_value[regno] = find_base_value (src);
9ae8ffe7
JL
1290
1291 reg_seen[regno] = 1;
1292}
1293
bb1acb3e
RH
1294/* If a value is known for REGNO, return it. */
1295
c22cacf3 1296rtx
bb1acb3e
RH
1297get_reg_known_value (unsigned int regno)
1298{
1299 if (regno >= FIRST_PSEUDO_REGISTER)
1300 {
1301 regno -= FIRST_PSEUDO_REGISTER;
1302 if (regno < reg_known_value_size)
1303 return reg_known_value[regno];
1304 }
1305 return NULL;
43fe47ca
JW
1306}
1307
bb1acb3e
RH
1308/* Set it. */
1309
1310static void
1311set_reg_known_value (unsigned int regno, rtx val)
1312{
1313 if (regno >= FIRST_PSEUDO_REGISTER)
1314 {
1315 regno -= FIRST_PSEUDO_REGISTER;
1316 if (regno < reg_known_value_size)
1317 reg_known_value[regno] = val;
1318 }
1319}
1320
1321/* Similarly for reg_known_equiv_p. */
1322
1323bool
1324get_reg_known_equiv_p (unsigned int regno)
1325{
1326 if (regno >= FIRST_PSEUDO_REGISTER)
1327 {
1328 regno -= FIRST_PSEUDO_REGISTER;
1329 if (regno < reg_known_value_size)
1330 return reg_known_equiv_p[regno];
1331 }
1332 return false;
1333}
1334
1335static void
1336set_reg_known_equiv_p (unsigned int regno, bool val)
1337{
1338 if (regno >= FIRST_PSEUDO_REGISTER)
1339 {
1340 regno -= FIRST_PSEUDO_REGISTER;
1341 if (regno < reg_known_value_size)
1342 reg_known_equiv_p[regno] = val;
1343 }
1344}
1345
1346
db048faf
MM
 1347/* Returns a canonical version of X, from the point of view of alias
1348 analysis. (For example, if X is a MEM whose address is a register,
1349 and the register has a known value (say a SYMBOL_REF), then a MEM
1350 whose address is the SYMBOL_REF is returned.) */
1351
1352rtx
4682ae04 1353canon_rtx (rtx x)
9ae8ffe7
JL
1354{
1355 /* Recursively look for equivalences. */
f8cfc6aa 1356 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
bb1acb3e
RH
1357 {
1358 rtx t = get_reg_known_value (REGNO (x));
1359 if (t == x)
1360 return x;
1361 if (t)
1362 return canon_rtx (t);
1363 }
1364
1365 if (GET_CODE (x) == PLUS)
9ae8ffe7
JL
1366 {
1367 rtx x0 = canon_rtx (XEXP (x, 0));
1368 rtx x1 = canon_rtx (XEXP (x, 1));
1369
1370 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
1371 {
481683e1 1372 if (CONST_INT_P (x0))
ed8908e7 1373 return plus_constant (x1, INTVAL (x0));
481683e1 1374 else if (CONST_INT_P (x1))
ed8908e7 1375 return plus_constant (x0, INTVAL (x1));
38a448ca 1376 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
9ae8ffe7
JL
1377 }
1378 }
d4b60170 1379
9ae8ffe7
JL
1380 /* This gives us much better alias analysis when called from
1381 the loop optimizer. Note we want to leave the original
1382 MEM alone, but need to return the canonicalized MEM with
1383 all the flags with their original values. */
3c0cb5de 1384 else if (MEM_P (x))
f1ec5147 1385 x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));
d4b60170 1386
9ae8ffe7
JL
1387 return x;
1388}
1389
1390/* Return 1 if X and Y are identical-looking rtx's.
45183e03 1391 Expect that X and Y have already been canonicalized.
9ae8ffe7
JL
1392
1393 We use the data in reg_known_value above to see if two registers with
1394 different numbers are, in fact, equivalent. */
1395
1396static int
ed7a4b4b 1397rtx_equal_for_memref_p (const_rtx x, const_rtx y)
9ae8ffe7 1398{
b3694847
SS
1399 int i;
1400 int j;
1401 enum rtx_code code;
1402 const char *fmt;
9ae8ffe7
JL
1403
1404 if (x == 0 && y == 0)
1405 return 1;
1406 if (x == 0 || y == 0)
1407 return 0;
d4b60170 1408
9ae8ffe7
JL
1409 if (x == y)
1410 return 1;
1411
1412 code = GET_CODE (x);
1413 /* Rtx's of different codes cannot be equal. */
1414 if (code != GET_CODE (y))
1415 return 0;
1416
1417 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
1418 (REG:SI x) and (REG:HI x) are NOT equivalent. */
1419
1420 if (GET_MODE (x) != GET_MODE (y))
1421 return 0;
1422
db048faf
MM
1423 /* Some RTL can be compared without a recursive examination. */
1424 switch (code)
1425 {
1426 case REG:
1427 return REGNO (x) == REGNO (y);
1428
1429 case LABEL_REF:
1430 return XEXP (x, 0) == XEXP (y, 0);
ca7fd9cd 1431
db048faf
MM
1432 case SYMBOL_REF:
1433 return XSTR (x, 0) == XSTR (y, 0);
1434
40e02b4a 1435 case VALUE:
db048faf
MM
1436 case CONST_INT:
1437 case CONST_DOUBLE:
091a3ac7 1438 case CONST_FIXED:
db048faf
MM
1439 /* There's no need to compare the contents of CONST_DOUBLEs or
1440 CONST_INTs because pointer equality is a good enough
1441 comparison for these nodes. */
1442 return 0;
1443
db048faf
MM
1444 default:
1445 break;
1446 }
9ae8ffe7 1447
45183e03
JH
1448 /* canon_rtx knows how to handle plus. No need to canonicalize. */
1449 if (code == PLUS)
9ae8ffe7
JL
1450 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
1451 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
1452 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
1453 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
45183e03
JH
 1454 /* For commutative operations, the RTXs match if the operands match in any
1455 order. Also handle the simple binary and unary cases without a loop. */
ec8e098d 1456 if (COMMUTATIVE_P (x))
45183e03
JH
1457 {
1458 rtx xop0 = canon_rtx (XEXP (x, 0));
1459 rtx yop0 = canon_rtx (XEXP (y, 0));
1460 rtx yop1 = canon_rtx (XEXP (y, 1));
1461
1462 return ((rtx_equal_for_memref_p (xop0, yop0)
1463 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
1464 || (rtx_equal_for_memref_p (xop0, yop1)
1465 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
1466 }
ec8e098d 1467 else if (NON_COMMUTATIVE_P (x))
45183e03
JH
1468 {
1469 return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
4682ae04 1470 canon_rtx (XEXP (y, 0)))
45183e03
JH
1471 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
1472 canon_rtx (XEXP (y, 1))));
1473 }
ec8e098d 1474 else if (UNARY_P (x))
45183e03 1475 return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
4682ae04 1476 canon_rtx (XEXP (y, 0)));
9ae8ffe7
JL
1477
1478 /* Compare the elements. If any pair of corresponding elements
de12be17
JC
 1479 fails to match, return 0 for the whole thing.
1480
1481 Limit cases to types which actually appear in addresses. */
9ae8ffe7
JL
1482
1483 fmt = GET_RTX_FORMAT (code);
1484 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1485 {
1486 switch (fmt[i])
1487 {
9ae8ffe7
JL
1488 case 'i':
1489 if (XINT (x, i) != XINT (y, i))
1490 return 0;
1491 break;
1492
9ae8ffe7
JL
1493 case 'E':
1494 /* Two vectors must have the same length. */
1495 if (XVECLEN (x, i) != XVECLEN (y, i))
1496 return 0;
1497
1498 /* And the corresponding elements must match. */
1499 for (j = 0; j < XVECLEN (x, i); j++)
45183e03
JH
1500 if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
1501 canon_rtx (XVECEXP (y, i, j))) == 0)
9ae8ffe7
JL
1502 return 0;
1503 break;
1504
1505 case 'e':
45183e03
JH
1506 if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
1507 canon_rtx (XEXP (y, i))) == 0)
9ae8ffe7
JL
1508 return 0;
1509 break;
1510
3237ac18
AH
1511 /* This can happen for asm operands. */
1512 case 's':
1513 if (strcmp (XSTR (x, i), XSTR (y, i)))
1514 return 0;
1515 break;
1516
aee21ba9
JL
1517 /* This can happen for an asm which clobbers memory. */
1518 case '0':
1519 break;
1520
9ae8ffe7
JL
1521 /* It is believed that rtx's at this level will never
1522 contain anything but integers and other rtx's,
1523 except for within LABEL_REFs and SYMBOL_REFs. */
1524 default:
298e6adc 1525 gcc_unreachable ();
9ae8ffe7
JL
1526 }
1527 }
1528 return 1;
1529}
1530
94f24ddc 1531rtx
4682ae04 1532find_base_term (rtx x)
9ae8ffe7 1533{
eab5c70a
BS
1534 cselib_val *val;
1535 struct elt_loc_list *l;
1536
b949ea8b
JW
1537#if defined (FIND_BASE_TERM)
1538 /* Try machine-dependent ways to find the base term. */
1539 x = FIND_BASE_TERM (x);
1540#endif
1541
9ae8ffe7
JL
1542 switch (GET_CODE (x))
1543 {
1544 case REG:
1545 return REG_BASE_VALUE (x);
1546
d288e53d 1547 case TRUNCATE:
d4ebfa65
BE
 1548 /* As we do not know which address space the pointer is referring to, we can
1549 handle this only if the target does not support different pointer or
1550 address modes depending on the address space. */
1551 if (!target_default_pointer_address_modes_p ())
1552 return 0;
d288e53d 1553 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
ca7fd9cd 1554 return 0;
d288e53d 1555 /* Fall through. */
9ae8ffe7 1556 case HIGH:
6d849a2a
JL
1557 case PRE_INC:
1558 case PRE_DEC:
1559 case POST_INC:
1560 case POST_DEC:
d288e53d
DE
1561 case PRE_MODIFY:
1562 case POST_MODIFY:
6d849a2a
JL
1563 return find_base_term (XEXP (x, 0));
1564
1abade85
RK
1565 case ZERO_EXTEND:
1566 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
d4ebfa65
BE
1567 /* As we do not know which address space the pointer is referring to, we can
1568 handle this only if the target does not support different pointer or
1569 address modes depending on the address space. */
1570 if (!target_default_pointer_address_modes_p ())
1571 return 0;
1572
1abade85
RK
1573 {
1574 rtx temp = find_base_term (XEXP (x, 0));
1575
5ae6cd0d 1576 if (temp != 0 && CONSTANT_P (temp))
1abade85 1577 temp = convert_memory_address (Pmode, temp);
1abade85
RK
1578
1579 return temp;
1580 }
1581
eab5c70a
BS
1582 case VALUE:
1583 val = CSELIB_VAL_PTR (x);
40e02b4a
JH
1584 if (!val)
1585 return 0;
eab5c70a
BS
1586 for (l = val->locs; l; l = l->next)
1587 if ((x = find_base_term (l->loc)) != 0)
1588 return x;
1589 return 0;
1590
023f059b
JJ
1591 case LO_SUM:
1592 /* The standard form is (lo_sum reg sym) so look only at the
1593 second operand. */
1594 return find_base_term (XEXP (x, 1));
1595
9ae8ffe7
JL
1596 case CONST:
1597 x = XEXP (x, 0);
1598 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
1599 return 0;
938d968e 1600 /* Fall through. */
9ae8ffe7
JL
1601 case PLUS:
1602 case MINUS:
1603 {
3c567fae
JL
1604 rtx tmp1 = XEXP (x, 0);
1605 rtx tmp2 = XEXP (x, 1);
1606
f5143c46 1607 /* This is a little bit tricky since we have to determine which of
3c567fae
JL
1608 the two operands represents the real base address. Otherwise this
1609 routine may return the index register instead of the base register.
1610
1611 That may cause us to believe no aliasing was possible, when in
1612 fact aliasing is possible.
1613
1614 We use a few simple tests to guess the base register. Additional
1615 tests can certainly be added. For example, if one of the operands
1616 is a shift or multiply, then it must be the index register and the
1617 other operand is the base register. */
ca7fd9cd 1618
b949ea8b
JW
1619 if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
1620 return find_base_term (tmp2);
1621
3c567fae
JL
1622 /* If either operand is known to be a pointer, then use it
1623 to determine the base term. */
3502dc9c 1624 if (REG_P (tmp1) && REG_POINTER (tmp1))
7eba2d1f
LM
1625 {
1626 rtx base = find_base_term (tmp1);
1627 if (base)
1628 return base;
1629 }
3c567fae 1630
3502dc9c 1631 if (REG_P (tmp2) && REG_POINTER (tmp2))
7eba2d1f
LM
1632 {
1633 rtx base = find_base_term (tmp2);
1634 if (base)
1635 return base;
1636 }
3c567fae
JL
1637
1638 /* Neither operand was known to be a pointer. Go ahead and find the
1639 base term for both operands. */
1640 tmp1 = find_base_term (tmp1);
1641 tmp2 = find_base_term (tmp2);
1642
1643 /* If either base term is a named object or a special address
1644 (like an argument or stack reference), then use it for the
1645 base term. */
d4b60170 1646 if (tmp1 != 0
3c567fae
JL
1647 && (GET_CODE (tmp1) == SYMBOL_REF
1648 || GET_CODE (tmp1) == LABEL_REF
1649 || (GET_CODE (tmp1) == ADDRESS
1650 && GET_MODE (tmp1) != VOIDmode)))
1651 return tmp1;
1652
d4b60170 1653 if (tmp2 != 0
3c567fae
JL
1654 && (GET_CODE (tmp2) == SYMBOL_REF
1655 || GET_CODE (tmp2) == LABEL_REF
1656 || (GET_CODE (tmp2) == ADDRESS
1657 && GET_MODE (tmp2) != VOIDmode)))
1658 return tmp2;
1659
1660 /* We could not determine which of the two operands was the
1661 base register and which was the index. So we can determine
1662 nothing from the base alias check. */
1663 return 0;
9ae8ffe7
JL
1664 }
1665
1666 case AND:
481683e1 1667 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
d288e53d 1668 return find_base_term (XEXP (x, 0));
9ae8ffe7
JL
1669 return 0;
1670
1671 case SYMBOL_REF:
1672 case LABEL_REF:
1673 return x;
1674
1675 default:
1676 return 0;
1677 }
1678}
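The PLUS/MINUS case above has to guess which operand is the real base. A small self-contained sketch of that guessing order, using an invented toy_operand type whose reg_pointer_p and symbol_p flags stand in for the REG_POINTER and SYMBOL_REF/LABEL_REF tests:

#include <stddef.h>
#include <stdbool.h>

struct toy_operand
{
  bool reg_pointer_p;   /* Register known to hold a pointer.  */
  bool symbol_p;        /* Named object (symbol or label).  */
};

/* Return the operand guessed to be the base, or NULL if we cannot
   tell (in which case the base alias check learns nothing).  */
static const struct toy_operand *
toy_guess_base (const struct toy_operand *tmp1, const struct toy_operand *tmp2)
{
  /* If either operand is known to be a pointer, use it as the base.  */
  if (tmp1->reg_pointer_p)
    return tmp1;
  if (tmp2->reg_pointer_p)
    return tmp2;

  /* Otherwise prefer a named object over anything else.  */
  if (tmp1->symbol_p)
    return tmp1;
  if (tmp2->symbol_p)
    return tmp2;

  /* Could not determine which operand is the base.  */
  return NULL;
}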
1679
1680/* Return 0 if the addresses X and Y are known to point to different
1681 objects, 1 if they might be pointers to the same object. */
1682
1683static int
4682ae04
AJ
1684base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
1685 enum machine_mode y_mode)
9ae8ffe7
JL
1686{
1687 rtx x_base = find_base_term (x);
1688 rtx y_base = find_base_term (y);
1689
1c72c7f6
JC
1690 /* If the address itself has no known base, see if a known equivalent
1691 value has one. If either address still has no known base, nothing
1692 is known about aliasing. */
1693 if (x_base == 0)
1694 {
1695 rtx x_c;
d4b60170 1696
1c72c7f6
JC
1697 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
1698 return 1;
d4b60170 1699
1c72c7f6
JC
1700 x_base = find_base_term (x_c);
1701 if (x_base == 0)
1702 return 1;
1703 }
9ae8ffe7 1704
1c72c7f6
JC
1705 if (y_base == 0)
1706 {
1707 rtx y_c;
1708 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
1709 return 1;
d4b60170 1710
1c72c7f6
JC
1711 y_base = find_base_term (y_c);
1712 if (y_base == 0)
1713 return 1;
1714 }
1715
1716 /* If the base addresses are equal nothing is known about aliasing. */
1717 if (rtx_equal_p (x_base, y_base))
9ae8ffe7
JL
1718 return 1;
1719
435da628
UB
1720 /* The base addresses are different expressions. If they are not accessed
1721 via AND, there is no conflict. We can bring knowledge of object
1722 alignment into play here. For example, on alpha, "char a, b;" can
1723 alias one another, though "char a; long b;" cannot. AND addresses may
1724 implicitly alias surrounding objects; i.e. unaligned access in DImode
1725 via AND address can alias all surrounding object types except those
1726 with alignment 8 or higher. */
1727 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
1728 return 1;
1729 if (GET_CODE (x) == AND
481683e1 1730 && (!CONST_INT_P (XEXP (x, 1))
435da628
UB
1731 || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
1732 return 1;
1733 if (GET_CODE (y) == AND
481683e1 1734 && (!CONST_INT_P (XEXP (y, 1))
435da628
UB
1735 || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
1736 return 1;
1737
1738 /* Differing symbols not accessed via AND never alias. */
9ae8ffe7 1739 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
435da628 1740 return 0;
9ae8ffe7
JL
1741
1742 /* If one address is a stack reference there can be no alias:
1743 stack references using different base registers do not alias,
1744 a stack reference can not alias a parameter, and a stack reference
1745 can not alias a global. */
1746 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
1747 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
1748 return 0;
1749
0d3c82d6 1750 return 1;
9ae8ffe7
JL
1751}
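Stripped of the AND special cases, the decision made above can be pictured with a few flags. The toy_base type and its fields are invented for this sketch; 1 means "may alias" and 0 means "known not to alias":

struct toy_base
{
  int id;          /* Identity of the base object.  */
  int is_address;  /* Base is an ADDRESS (e.g. argument or stack).  */
  int is_stack;    /* Base is a stack/frame/argument pointer.  */
};

static int
toy_base_alias_check (const struct toy_base *x_base, const struct toy_base *y_base)
{
  /* Unknown base on either side: nothing is known.  */
  if (x_base == 0 || y_base == 0)
    return 1;

  /* Equal bases: nothing is known either; the offsets must decide.  */
  if (x_base->id == y_base->id)
    return 1;

  /* Two distinct named objects never alias.  */
  if (!x_base->is_address && !y_base->is_address)
    return 0;

  /* A stack reference cannot alias anything with a different base.  */
  if (x_base->is_stack || y_base->is_stack)
    return 0;

  /* Different bases, but disjointness cannot be proven.  */
  return 1;
}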
1752
eab5c70a
BS
1753/* Convert the address X into something we can use. This is done by returning
1754 it unchanged unless it is a value; in the latter case we call cselib to get
1755 a more useful rtx. */
3bdf5ad1 1756
a13d4ebf 1757rtx
4682ae04 1758get_addr (rtx x)
eab5c70a
BS
1759{
1760 cselib_val *v;
1761 struct elt_loc_list *l;
1762
1763 if (GET_CODE (x) != VALUE)
1764 return x;
1765 v = CSELIB_VAL_PTR (x);
40e02b4a
JH
1766 if (v)
1767 {
1768 for (l = v->locs; l; l = l->next)
1769 if (CONSTANT_P (l->loc))
1770 return l->loc;
1771 for (l = v->locs; l; l = l->next)
3c0cb5de 1772 if (!REG_P (l->loc) && !MEM_P (l->loc))
40e02b4a
JH
1773 return l->loc;
1774 if (v->locs)
1775 return v->locs->loc;
1776 }
eab5c70a
BS
1777 return x;
1778}
1779
39cec1ac
MH
1780/* Return the address of the (N_REFS + 1)th memory reference to ADDR
1781 where SIZE is the size in bytes of the memory reference. If ADDR
1782 is not modified by the memory reference then ADDR is returned. */
1783
04e2b4d3 1784static rtx
4682ae04 1785addr_side_effect_eval (rtx addr, int size, int n_refs)
39cec1ac
MH
1786{
1787 int offset = 0;
ca7fd9cd 1788
39cec1ac
MH
1789 switch (GET_CODE (addr))
1790 {
1791 case PRE_INC:
1792 offset = (n_refs + 1) * size;
1793 break;
1794 case PRE_DEC:
1795 offset = -(n_refs + 1) * size;
1796 break;
1797 case POST_INC:
1798 offset = n_refs * size;
1799 break;
1800 case POST_DEC:
1801 offset = -n_refs * size;
1802 break;
1803
1804 default:
1805 return addr;
1806 }
ca7fd9cd 1807
39cec1ac 1808 if (offset)
45183e03 1809 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
c22cacf3 1810 GEN_INT (offset));
39cec1ac
MH
1811 else
1812 addr = XEXP (addr, 0);
45183e03 1813 addr = canon_rtx (addr);
39cec1ac
MH
1814
1815 return addr;
1816}
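The offset rule above, detached from RTL for quick experimentation (the toy names are invented; SIZE is the access size in bytes and N_REFS the number of references already accounted for):

enum toy_autoinc { TOY_PRE_INC, TOY_PRE_DEC, TOY_POST_INC, TOY_POST_DEC };

static int
toy_side_effect_offset (enum toy_autoinc code, int size, int n_refs)
{
  switch (code)
    {
    case TOY_PRE_INC:  return (n_refs + 1) * size;
    case TOY_PRE_DEC:  return -(n_refs + 1) * size;
    case TOY_POST_INC: return n_refs * size;
    case TOY_POST_DEC: return -n_refs * size;
    }
  return 0;
}

/* E.g. for a 4-byte access with n_refs == 1: PRE_INC yields +8,
   POST_INC yields +4, and the DEC variants yield -8 and -4.  */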
1817
f47e08d9
RG
1818/* Return one if X and Y (memory addresses) reference the
1819 same location in memory or if the references overlap.
1820 Return zero if they are known not to overlap, and return
1821 minus one if they still might reference the same location.
1822
1823 C is an offset accumulator. When
9ae8ffe7
JL
1824 C is nonzero, we are testing aliases between X and Y + C.
1825 XSIZE is the size in bytes of the X reference,
1826 similarly YSIZE is the size in bytes for Y.
45183e03 1827 Expect that canon_rtx has already been called for X and Y.
9ae8ffe7
JL
1828
1829 If XSIZE or YSIZE is zero, we do not know the amount of memory being
1830 referenced (the reference was BLKmode), so make the most pessimistic
1831 assumptions.
1832
c02f035f
RH
1833 If XSIZE or YSIZE is negative, we may access memory outside the object
1834 being referenced as a side effect. This can happen when using AND to
1835 align memory references, as is done on the Alpha.
1836
9ae8ffe7 1837 It would be nice to notice that varying addresses cannot conflict with fp if no
f47e08d9
RG
1838 local variables had their addresses taken, but that's too hard now.
1839
1840 ??? Contrary to the tree alias oracle this does not return
1841 one for X + non-constant and Y + non-constant when X and Y are equal.
1842 If that is fixed the TBAA hack for union type-punning can be removed. */
9ae8ffe7 1843
9ae8ffe7 1844static int
4682ae04 1845memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
9ae8ffe7 1846{
eab5c70a 1847 if (GET_CODE (x) == VALUE)
5312b066
JJ
1848 {
1849 if (REG_P (y))
1850 {
24f8d71e
JJ
1851 struct elt_loc_list *l = NULL;
1852 if (CSELIB_VAL_PTR (x))
1853 for (l = CSELIB_VAL_PTR (x)->locs; l; l = l->next)
1854 if (REG_P (l->loc) && rtx_equal_for_memref_p (l->loc, y))
1855 break;
5312b066
JJ
1856 if (l)
1857 x = y;
1858 else
1859 x = get_addr (x);
1860 }
1861 /* Don't call get_addr if y is the same VALUE. */
1862 else if (x != y)
1863 x = get_addr (x);
1864 }
eab5c70a 1865 if (GET_CODE (y) == VALUE)
5312b066
JJ
1866 {
1867 if (REG_P (x))
1868 {
24f8d71e
JJ
1869 struct elt_loc_list *l = NULL;
1870 if (CSELIB_VAL_PTR (y))
1871 for (l = CSELIB_VAL_PTR (y)->locs; l; l = l->next)
1872 if (REG_P (l->loc) && rtx_equal_for_memref_p (l->loc, x))
1873 break;
5312b066
JJ
1874 if (l)
1875 y = x;
1876 else
1877 y = get_addr (y);
1878 }
1879 /* Don't call get_addr if x is the same VALUE. */
1880 else if (y != x)
1881 y = get_addr (y);
1882 }
9ae8ffe7
JL
1883 if (GET_CODE (x) == HIGH)
1884 x = XEXP (x, 0);
1885 else if (GET_CODE (x) == LO_SUM)
1886 x = XEXP (x, 1);
1887 else
45183e03 1888 x = addr_side_effect_eval (x, xsize, 0);
9ae8ffe7
JL
1889 if (GET_CODE (y) == HIGH)
1890 y = XEXP (y, 0);
1891 else if (GET_CODE (y) == LO_SUM)
1892 y = XEXP (y, 1);
1893 else
45183e03 1894 y = addr_side_effect_eval (y, ysize, 0);
9ae8ffe7
JL
1895
1896 if (rtx_equal_for_memref_p (x, y))
1897 {
c02f035f 1898 if (xsize <= 0 || ysize <= 0)
9ae8ffe7
JL
1899 return 1;
1900 if (c >= 0 && xsize > c)
1901 return 1;
1902 if (c < 0 && ysize+c > 0)
1903 return 1;
1904 return 0;
1905 }
1906
6e73e666
JC
1907 /* This code used to check for conflicts involving stack references and
1908 globals but the base address alias code now handles these cases. */
9ae8ffe7
JL
1909
1910 if (GET_CODE (x) == PLUS)
1911 {
1912 /* The fact that X is canonicalized means that this
1913 PLUS rtx is canonicalized. */
1914 rtx x0 = XEXP (x, 0);
1915 rtx x1 = XEXP (x, 1);
1916
1917 if (GET_CODE (y) == PLUS)
1918 {
1919 /* The fact that Y is canonicalized means that this
1920 PLUS rtx is canonicalized. */
1921 rtx y0 = XEXP (y, 0);
1922 rtx y1 = XEXP (y, 1);
1923
1924 if (rtx_equal_for_memref_p (x1, y1))
1925 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1926 if (rtx_equal_for_memref_p (x0, y0))
1927 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
481683e1 1928 if (CONST_INT_P (x1))
63be02db 1929 {
481683e1 1930 if (CONST_INT_P (y1))
63be02db
JM
1931 return memrefs_conflict_p (xsize, x0, ysize, y0,
1932 c - INTVAL (x1) + INTVAL (y1));
1933 else
1934 return memrefs_conflict_p (xsize, x0, ysize, y,
1935 c - INTVAL (x1));
1936 }
481683e1 1937 else if (CONST_INT_P (y1))
9ae8ffe7
JL
1938 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1939
f47e08d9 1940 return -1;
9ae8ffe7 1941 }
481683e1 1942 else if (CONST_INT_P (x1))
9ae8ffe7
JL
1943 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
1944 }
1945 else if (GET_CODE (y) == PLUS)
1946 {
1947 /* The fact that Y is canonicalized means that this
1948 PLUS rtx is canonicalized. */
1949 rtx y0 = XEXP (y, 0);
1950 rtx y1 = XEXP (y, 1);
1951
481683e1 1952 if (CONST_INT_P (y1))
9ae8ffe7
JL
1953 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1954 else
f47e08d9 1955 return -1;
9ae8ffe7
JL
1956 }
1957
1958 if (GET_CODE (x) == GET_CODE (y))
1959 switch (GET_CODE (x))
1960 {
1961 case MULT:
1962 {
1963 /* Handle cases where we expect the second operands to be the
1964 same, and check only whether the first operand would conflict
1965 or not. */
1966 rtx x0, y0;
1967 rtx x1 = canon_rtx (XEXP (x, 1));
1968 rtx y1 = canon_rtx (XEXP (y, 1));
1969 if (! rtx_equal_for_memref_p (x1, y1))
f47e08d9 1970 return -1;
9ae8ffe7
JL
1971 x0 = canon_rtx (XEXP (x, 0));
1972 y0 = canon_rtx (XEXP (y, 0));
1973 if (rtx_equal_for_memref_p (x0, y0))
1974 return (xsize == 0 || ysize == 0
1975 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1976
1977 /* Can't properly adjust our sizes. */
481683e1 1978 if (!CONST_INT_P (x1))
f47e08d9 1979 return -1;
9ae8ffe7
JL
1980 xsize /= INTVAL (x1);
1981 ysize /= INTVAL (x1);
1982 c /= INTVAL (x1);
1983 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1984 }
1d300e19
KG
1985
1986 default:
1987 break;
9ae8ffe7
JL
1988 }
1989
1990 /* Treat an access through an AND (e.g. a subword access on an Alpha)
ca7fd9cd 1991 as an access with indeterminate size. Assume that references
56ee9281
RH
1992 besides AND are aligned, so if the size of the other reference is
1993 at least as large as the alignment, assume no other overlap. */
481683e1 1994 if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1)))
56ee9281 1995 {
02e3377d 1996 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
56ee9281 1997 xsize = -1;
45183e03 1998 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
56ee9281 1999 }
481683e1 2000 if (GET_CODE (y) == AND && CONST_INT_P (XEXP (y, 1)))
c02f035f 2001 {
56ee9281 2002 /* ??? If we are indexing far enough into the array/structure, we
ca7fd9cd 2003 may yet be able to determine that we can not overlap. But we
c02f035f 2004 also need to know that we are far enough from the end not to overlap
56ee9281 2005 a following reference, so we do nothing with that for now. */
02e3377d 2006 if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
56ee9281 2007 ysize = -1;
45183e03 2008 return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c);
c02f035f 2009 }
9ae8ffe7
JL
2010
2011 if (CONSTANT_P (x))
2012 {
481683e1 2013 if (CONST_INT_P (x) && CONST_INT_P (y))
9ae8ffe7
JL
2014 {
2015 c += (INTVAL (y) - INTVAL (x));
c02f035f 2016 return (xsize <= 0 || ysize <= 0
9ae8ffe7
JL
2017 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
2018 }
2019
2020 if (GET_CODE (x) == CONST)
2021 {
2022 if (GET_CODE (y) == CONST)
2023 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
2024 ysize, canon_rtx (XEXP (y, 0)), c);
2025 else
2026 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
2027 ysize, y, c);
2028 }
2029 if (GET_CODE (y) == CONST)
2030 return memrefs_conflict_p (xsize, x, ysize,
2031 canon_rtx (XEXP (y, 0)), c);
2032
2033 if (CONSTANT_P (y))
b949ea8b 2034 return (xsize <= 0 || ysize <= 0
c02f035f 2035 || (rtx_equal_for_memref_p (x, y)
b949ea8b 2036 && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
9ae8ffe7 2037
f47e08d9 2038 return -1;
9ae8ffe7 2039 }
f47e08d9
RG
2040
2041 return -1;
9ae8ffe7
JL
2042}
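The heart of the test above, once X and Y are known to be the same base expression, is an interval check: X covers [0, xsize) while Y + C covers [C, C + ysize). Isolated, with the sizes assumed known and positive:

static int
toy_offsets_overlap_p (int xsize, int ysize, int c)
{
  /* X occupies [0, xsize); Y shifted by C occupies [c, c + ysize).  */
  return (c >= 0 && xsize > c) || (c < 0 && ysize + c > 0);
}

/* With xsize == ysize == 4: c == 0 and c == 2 overlap, while c == 4
   (adjacent above) and c == -4 (adjacent below) do not.  */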
2043
2044/* Functions to compute memory dependencies.
2045
2046 Since we process the insns in execution order, we can build tables
2047 to keep track of what registers are fixed (and not aliased), what registers
2048 are varying in known ways, and what registers are varying in unknown
2049 ways.
2050
2051 If both memory references are volatile, then there must always be a
2052 dependence between the two references, since their order can not be
2053 changed. A volatile and non-volatile reference can be interchanged
ca7fd9cd 2054 though.
9ae8ffe7 2055
dc1618bc
RK
2056 A MEM_IN_STRUCT reference at a non-AND varying address can never
2057 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We
2058 also must allow AND addresses, because they may generate accesses
2059 outside the object being referenced. This is used to generate
2060 aligned addresses from unaligned addresses, for instance, the alpha
2061 storeqi_unaligned pattern. */
9ae8ffe7
JL
2062
2063/* Read dependence: X is read after read in MEM takes place. There can
2064 only be a dependence here if both reads are volatile. */
2065
2066int
4f588890 2067read_dependence (const_rtx mem, const_rtx x)
9ae8ffe7
JL
2068{
2069 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
2070}
2071
c6df88cb
MM
2072/* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
2073 MEM2 is a reference to a structure at a varying address, or returns
2074 MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
2075 value is returned, MEM1 and MEM2 can never alias. VARIES_P is used
2076 to decide whether or not an address may vary; it should return
eab5c70a
BS
2077 nonzero whenever variation is possible.
2078 MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2. */
ca7fd9cd 2079
4f588890
KG
2080static const_rtx
2081fixed_scalar_and_varying_struct_p (const_rtx mem1, const_rtx mem2, rtx mem1_addr,
4682ae04 2082 rtx mem2_addr,
4f588890 2083 bool (*varies_p) (const_rtx, bool))
ca7fd9cd 2084{
3e0abe15
GK
2085 if (! flag_strict_aliasing)
2086 return NULL_RTX;
2087
afa8f0fb
AP
2088 if (MEM_ALIAS_SET (mem2)
2089 && MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
e38fe8e0 2090 && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
c6df88cb
MM
2091 /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
2092 varying address. */
2093 return mem1;
2094
afa8f0fb
AP
2095 if (MEM_ALIAS_SET (mem1)
2096 && MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
e38fe8e0 2097 && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
c6df88cb
MM
2098 /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
2099 varying address. */
2100 return mem2;
2101
2102 return NULL_RTX;
2103}
2104
2105/* Returns nonzero if something about the mode or address format of MEM1
2106 indicates that it might well alias *anything*. */
2107
2c72b78f 2108static int
4f588890 2109aliases_everything_p (const_rtx mem)
c6df88cb 2110{
c6df88cb 2111 if (GET_CODE (XEXP (mem, 0)) == AND)
35fd3193 2112 /* If the address is an AND, it's very hard to know at what it is
c6df88cb
MM
2113 actually pointing. */
2114 return 1;
ca7fd9cd 2115
c6df88cb
MM
2116 return 0;
2117}
2118
998d7deb
RH
2119/* Return true if we can determine that the fields referenced cannot
2120 overlap for any pair of objects. */
2121
2122static bool
4f588890 2123nonoverlapping_component_refs_p (const_tree x, const_tree y)
998d7deb 2124{
4f588890 2125 const_tree fieldx, fieldy, typex, typey, orig_y;
998d7deb 2126
4c7af939
AB
2127 if (!flag_strict_aliasing)
2128 return false;
2129
998d7deb
RH
2130 do
2131 {
2132 /* The comparison has to be done at a common type, since we don't
d6a7951f 2133 know how the inheritance hierarchy works. */
998d7deb
RH
2134 orig_y = y;
2135 do
2136 {
2137 fieldx = TREE_OPERAND (x, 1);
c05a0766 2138 typex = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldx));
998d7deb
RH
2139
2140 y = orig_y;
2141 do
2142 {
2143 fieldy = TREE_OPERAND (y, 1);
c05a0766 2144 typey = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldy));
998d7deb
RH
2145
2146 if (typex == typey)
2147 goto found;
2148
2149 y = TREE_OPERAND (y, 0);
2150 }
2151 while (y && TREE_CODE (y) == COMPONENT_REF);
2152
2153 x = TREE_OPERAND (x, 0);
2154 }
2155 while (x && TREE_CODE (x) == COMPONENT_REF);
998d7deb 2156 /* Never found a common type. */
c05a0766 2157 return false;
998d7deb
RH
2158
2159 found:
2160 /* If we're left with accessing different fields of a structure,
2161 then no overlap. */
2162 if (TREE_CODE (typex) == RECORD_TYPE
2163 && fieldx != fieldy)
2164 return true;
2165
2166 /* The comparison on the current field failed. If we're accessing
2167 a very nested structure, look at the next outer level. */
2168 x = TREE_OPERAND (x, 0);
2169 y = TREE_OPERAND (y, 0);
2170 }
2171 while (x && y
2172 && TREE_CODE (x) == COMPONENT_REF
2173 && TREE_CODE (y) == COMPONENT_REF);
ca7fd9cd 2174
998d7deb
RH
2175 return false;
2176}
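The source-level fact the function above exploits: accesses to distinct fields of the same record type occupy disjoint bytes, no matter which objects the references go through. A plain C illustration:

struct toy_pair { int first; int second; };

static int
toy_distinct_fields (struct toy_pair *p, struct toy_pair *q)
{
  int tmp = q->second;   /* Load one field...  */
  p->first = 123;        /* ...store to a different field.  Even if
                            p == q, the store cannot change TMP.  */
  return tmp;
}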
2177
2178/* Look at the bottom of the COMPONENT_REF list for a DECL, and return it. */
2179
2180static tree
4682ae04 2181decl_for_component_ref (tree x)
998d7deb
RH
2182{
2183 do
2184 {
2185 x = TREE_OPERAND (x, 0);
2186 }
2187 while (x && TREE_CODE (x) == COMPONENT_REF);
2188
2189 return x && DECL_P (x) ? x : NULL_TREE;
2190}
2191
2192/* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
2193 offset of the field reference. */
2194
2195static rtx
4682ae04 2196adjust_offset_for_component_ref (tree x, rtx offset)
998d7deb
RH
2197{
2198 HOST_WIDE_INT ioffset;
2199
2200 if (! offset)
2201 return NULL_RTX;
2202
2203 ioffset = INTVAL (offset);
ca7fd9cd 2204 do
998d7deb 2205 {
44de5aeb 2206 tree offset = component_ref_field_offset (x);
998d7deb
RH
2207 tree field = TREE_OPERAND (x, 1);
2208
44de5aeb 2209 if (! host_integerp (offset, 1))
998d7deb 2210 return NULL_RTX;
44de5aeb 2211 ioffset += (tree_low_cst (offset, 1)
998d7deb
RH
2212 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2213 / BITS_PER_UNIT));
2214
2215 x = TREE_OPERAND (x, 0);
2216 }
2217 while (x && TREE_CODE (x) == COMPONENT_REF);
2218
2219 return GEN_INT (ioffset);
2220}
2221
95bd1dd7 2222/* Return nonzero if we can determine the exprs corresponding to memrefs
c6ea834c
BM
2223 X and Y and they do not overlap.
2224 If LOOP_INVARIANT is set, skip offset-based disambiguation. */
a4311dfe 2225
2e4e39f6 2226int
c6ea834c 2227nonoverlapping_memrefs_p (const_rtx x, const_rtx y, bool loop_invariant)
a4311dfe 2228{
998d7deb 2229 tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
a4311dfe
RK
2230 rtx rtlx, rtly;
2231 rtx basex, basey;
998d7deb 2232 rtx moffsetx, moffsety;
a4311dfe
RK
2233 HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;
2234
998d7deb
RH
2235 /* Unless both have exprs, we can't tell anything. */
2236 if (exprx == 0 || expry == 0)
2237 return 0;
2b22e382
RG
2238
2239 /* For spill-slot accesses make sure we have valid offsets. */
2240 if ((exprx == get_spill_slot_decl (false)
2241 && ! MEM_OFFSET (x))
2242 || (expry == get_spill_slot_decl (false)
2243 && ! MEM_OFFSET (y)))
2244 return 0;
c22cacf3 2245
998d7deb
RH
2246 /* If both are field references, we may be able to determine something. */
2247 if (TREE_CODE (exprx) == COMPONENT_REF
2248 && TREE_CODE (expry) == COMPONENT_REF
2249 && nonoverlapping_component_refs_p (exprx, expry))
2250 return 1;
2251
c22cacf3 2252
998d7deb
RH
2253 /* If the field reference test failed, look at the DECLs involved. */
2254 moffsetx = MEM_OFFSET (x);
2255 if (TREE_CODE (exprx) == COMPONENT_REF)
2256 {
2e0c984c
RG
2257 tree t = decl_for_component_ref (exprx);
2258 if (! t)
2259 return 0;
2260 moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
2261 exprx = t;
998d7deb 2262 }
c67a1cf6 2263
998d7deb
RH
2264 moffsety = MEM_OFFSET (y);
2265 if (TREE_CODE (expry) == COMPONENT_REF)
2266 {
2e0c984c
RG
2267 tree t = decl_for_component_ref (expry);
2268 if (! t)
2269 return 0;
2270 moffsety = adjust_offset_for_component_ref (expry, moffsety);
2271 expry = t;
998d7deb
RH
2272 }
2273
2274 if (! DECL_P (exprx) || ! DECL_P (expry))
a4311dfe
RK
2275 return 0;
2276
1307c758
RG
2277 /* With invalid code we can end up storing into the constant pool.
2278 Bail out to avoid ICEing when creating RTL for this.
2279 See gfortran.dg/lto/20091028-2_0.f90. */
2280 if (TREE_CODE (exprx) == CONST_DECL
2281 || TREE_CODE (expry) == CONST_DECL)
2282 return 1;
2283
998d7deb
RH
2284 rtlx = DECL_RTL (exprx);
2285 rtly = DECL_RTL (expry);
a4311dfe 2286
1edcd60b
RK
2287 /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
2288 can't overlap unless they are the same because we never reuse that part
2289 of the stack frame used for locals for spilled pseudos. */
3c0cb5de 2290 if ((!MEM_P (rtlx) || !MEM_P (rtly))
1edcd60b 2291 && ! rtx_equal_p (rtlx, rtly))
a4311dfe
RK
2292 return 1;
2293
09e881c9
BE
2294 /* If we have MEMs referring to different address spaces (which can
2295 potentially overlap), we cannot easily tell from the addresses
2296 whether the references overlap. */
2297 if (MEM_P (rtlx) && MEM_P (rtly)
2298 && MEM_ADDR_SPACE (rtlx) != MEM_ADDR_SPACE (rtly))
2299 return 0;
2300
a4311dfe
RK
2301 /* Get the base and offsets of both decls. If either is a register, we
2302 know both are and are the same, so use that as the base. The only
2303 way we can avoid overlap is if we can deduce that they are nonoverlapping
2304 pieces of that decl, which is very rare. */
3c0cb5de 2305 basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
481683e1 2306 if (GET_CODE (basex) == PLUS && CONST_INT_P (XEXP (basex, 1)))
a4311dfe
RK
2307 offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);
2308
3c0cb5de 2309 basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
481683e1 2310 if (GET_CODE (basey) == PLUS && CONST_INT_P (XEXP (basey, 1)))
a4311dfe
RK
2311 offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);
2312
d746694a 2313 /* If the bases are different, we know they do not overlap if both
ca7fd9cd 2314 are constants or if one is a constant and the other a pointer into the
d746694a
RK
2315 stack frame. Otherwise a different base means we can't tell if they
2316 overlap or not. */
2317 if (! rtx_equal_p (basex, basey))
ca7fd9cd
KH
2318 return ((CONSTANT_P (basex) && CONSTANT_P (basey))
2319 || (CONSTANT_P (basex) && REG_P (basey)
2320 && REGNO_PTR_FRAME_P (REGNO (basey)))
2321 || (CONSTANT_P (basey) && REG_P (basex)
2322 && REGNO_PTR_FRAME_P (REGNO (basex))));
a4311dfe 2323
c6ea834c
BM
2324 /* Offset-based disambiguation is not appropriate for the loop-invariant check. */
2325 if (loop_invariant)
2326 return 0;
2327
3c0cb5de 2328 sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
a4311dfe
RK
2329 : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
2330 : -1);
3c0cb5de 2331 sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
a4311dfe
RK
2332 : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly)) :
2333 -1);
2334
0af5bc3e
RK
2335 /* If we have an offset for either memref, it can update the values computed
2336 above. */
998d7deb
RH
2337 if (moffsetx)
2338 offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
2339 if (moffsety)
2340 offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);
a4311dfe 2341
0af5bc3e 2342 /* If a memref has both a size and an offset, we can use the smaller size.
efc981bb 2343 We can't do this if the offset isn't known because we must view this
0af5bc3e 2344 memref as being anywhere inside the DECL's MEM. */
998d7deb 2345 if (MEM_SIZE (x) && moffsetx)
a4311dfe 2346 sizex = INTVAL (MEM_SIZE (x));
998d7deb 2347 if (MEM_SIZE (y) && moffsety)
a4311dfe
RK
2348 sizey = INTVAL (MEM_SIZE (y));
2349
2350 /* Put the values of the memref with the lower offset in X's values. */
2351 if (offsetx > offsety)
2352 {
2353 tem = offsetx, offsetx = offsety, offsety = tem;
2354 tem = sizex, sizex = sizey, sizey = tem;
2355 }
2356
2357 /* If we don't know the size of the lower-offset value, we can't tell
2358 if they conflict. Otherwise, we do the test. */
a6f7c915 2359 return sizex >= 0 && offsety >= offsetx + sizex;
a4311dfe
RK
2360}
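The final comparison above reduces to a one-sided interval test once the values have been swapped so that X has the lower offset. In isolation, with a negative size meaning "unknown":

static int
toy_decl_ranges_disjoint_p (long offsetx, long sizex, long offsety)
{
  /* Disjoint only if the size of the lower range is known and the
     higher range starts at or beyond its end.  */
  return sizex >= 0 && offsety >= offsetx + sizex;
}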
2361
9362286d
SB
2362/* Helper for true_dependence and canon_true_dependence.
2363 Checks for true dependence: X is read after store in MEM takes place.
9ae8ffe7 2364
9362286d
SB
2365 VARIES is the function that should be used as the rtx_varies function.
2366
2367 If MEM_CANONICALIZED is FALSE, then X_ADDR and MEM_ADDR should be
2368 NULL_RTX, and the canonical addresses of MEM and X are both computed
2369 here. If MEM_CANONICALIZED, then MEM must be already canonicalized.
2370
2371 If X_ADDR is non-NULL, it is used in preference to XEXP (x, 0).
2372
2373 Returns 1 if there is a true dependence, 0 otherwise. */
2374
2375static int
2376true_dependence_1 (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
2377 const_rtx x, rtx x_addr, bool (*varies) (const_rtx, bool),
2378 bool mem_canonicalized)
9ae8ffe7 2379{
49982682 2380 rtx base;
f47e08d9 2381 int ret;
9ae8ffe7 2382
9362286d
SB
2383 gcc_checking_assert (mem_canonicalized ? (mem_addr != NULL_RTX)
2384 : (mem_addr == NULL_RTX && x_addr == NULL_RTX));
2385
9ae8ffe7
JL
2386 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2387 return 1;
2388
c4484b8f 2389 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
ac3768f6 2390 This is used in epilogue deallocation functions, and in cselib. */
c4484b8f
RH
2391 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2392 return 1;
2393 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2394 return 1;
9cd9e512
RH
2395 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2396 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2397 return 1;
c4484b8f 2398
389fdba0
RH
2399 /* Read-only memory is by definition never modified, and therefore can't
2400 conflict with anything. We don't expect to find read-only set on MEM,
41806d92 2401 but stupid user tricks can produce them, so don't die. */
389fdba0 2402 if (MEM_READONLY_P (x))
9ae8ffe7
JL
2403 return 0;
2404
09e881c9
BE
2405 /* If we have MEMs referring to different address spaces (which can
2406 potentially overlap), we cannot easily tell from the addresses
2407 whether the references overlap. */
2408 if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
2409 return 1;
2410
9362286d
SB
2411 if (! mem_addr)
2412 {
2413 mem_addr = XEXP (mem, 0);
2414 if (mem_mode == VOIDmode)
2415 mem_mode = GET_MODE (mem);
2416 }
2417
2418 if (! x_addr)
2147c71c 2419 {
a522de15
SB
2420 x_addr = XEXP (x, 0);
2421 if (!((GET_CODE (x_addr) == VALUE
2422 && GET_CODE (mem_addr) != VALUE
2423 && reg_mentioned_p (x_addr, mem_addr))
2424 || (GET_CODE (x_addr) != VALUE
2425 && GET_CODE (mem_addr) == VALUE
2426 && reg_mentioned_p (mem_addr, x_addr))))
2427 {
2428 x_addr = get_addr (x_addr);
2429 if (! mem_canonicalized)
2430 mem_addr = get_addr (mem_addr);
2431 }
2147c71c 2432 }
eab5c70a 2433
55efb413
JW
2434 base = find_base_term (x_addr);
2435 if (base && (GET_CODE (base) == LABEL_REF
2436 || (GET_CODE (base) == SYMBOL_REF
2437 && CONSTANT_POOL_ADDRESS_P (base))))
2438 return 0;
2439
eab5c70a 2440 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
1c72c7f6
JC
2441 return 0;
2442
eab5c70a 2443 x_addr = canon_rtx (x_addr);
9362286d
SB
2444 if (!mem_canonicalized)
2445 mem_addr = canon_rtx (mem_addr);
6e73e666 2446
f47e08d9
RG
2447 if ((ret = memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2448 SIZE_FOR_MODE (x), x_addr, 0)) != -1)
2449 return ret;
2450
2451 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2452 return 0;
2453
c6ea834c 2454 if (nonoverlapping_memrefs_p (mem, x, false))
0211b6ab
JW
2455 return 0;
2456
c6df88cb 2457 if (aliases_everything_p (x))
0211b6ab
JW
2458 return 1;
2459
f5143c46 2460 /* We cannot use aliases_everything_p to test MEM, since we must look
9362286d 2461 at MEM_ADDR, rather than XEXP (mem, 0). */
c6df88cb 2462 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
a13d4ebf
AM
2463 return 1;
2464
9362286d 2465 /* ??? In true_dependence we also allow BLKmode to alias anything. Why
a13d4ebf
AM
2466 don't we do this in anti_dependence and output_dependence? */
2467 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2468 return 1;
2469
55b34b5f
RG
2470 if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies))
2471 return 0;
2472
2473 return rtx_refs_may_alias_p (x, mem, true);
a13d4ebf
AM
2474}
2475
9362286d
SB
2476/* True dependence: X is read after store in MEM takes place. */
2477
2478int
2479true_dependence (const_rtx mem, enum machine_mode mem_mode, const_rtx x,
2480 bool (*varies) (const_rtx, bool))
2481{
2482 return true_dependence_1 (mem, mem_mode, NULL_RTX,
2483 x, NULL_RTX, varies,
2484 /*mem_canonicalized=*/false);
2485}
2486
a13d4ebf 2487/* Canonical true dependence: X is read after store in MEM takes place.
ca7fd9cd
KH
2488 Variant of true_dependence which assumes MEM has already been
2489 canonicalized (hence we no longer do that here).
9362286d
SB
2490 The mem_addr argument has been added, since true_dependence_1 computed
2491 this value prior to canonicalizing. */
a13d4ebf
AM
2492
2493int
4f588890 2494canon_true_dependence (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
6216f94e 2495 const_rtx x, rtx x_addr, bool (*varies) (const_rtx, bool))
a13d4ebf 2496{
9362286d
SB
2497 return true_dependence_1 (mem, mem_mode, mem_addr,
2498 x, x_addr, varies,
2499 /*mem_canonicalized=*/true);
9ae8ffe7
JL
2500}
2501
da7d8304 2502/* Returns nonzero if a write to X might alias a previous read from
389fdba0 2503 (or, if WRITEP is nonzero, a write to) MEM. */
9ae8ffe7 2504
2c72b78f 2505static int
4f588890 2506write_dependence_p (const_rtx mem, const_rtx x, int writep)
9ae8ffe7 2507{
6e73e666 2508 rtx x_addr, mem_addr;
4f588890 2509 const_rtx fixed_scalar;
49982682 2510 rtx base;
f47e08d9 2511 int ret;
6e73e666 2512
9ae8ffe7
JL
2513 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2514 return 1;
2515
c4484b8f
RH
2516 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2517 This is used in epilogue deallocation functions. */
2518 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2519 return 1;
2520 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2521 return 1;
9cd9e512
RH
2522 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2523 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2524 return 1;
c4484b8f 2525
389fdba0
RH
2526 /* A read from read-only memory can't conflict with read-write memory. */
2527 if (!writep && MEM_READONLY_P (mem))
2528 return 0;
55efb413 2529
09e881c9
BE
2530 /* If we have MEMs referring to different address spaces (which can
2531 potentially overlap), we cannot easily tell from the addresses
2532 whether the references overlap. */
2533 if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
2534 return 1;
2535
2147c71c
L
2536 x_addr = XEXP (x, 0);
2537 mem_addr = XEXP (mem, 0);
2538 if (!((GET_CODE (x_addr) == VALUE
2539 && GET_CODE (mem_addr) != VALUE
2540 && reg_mentioned_p (x_addr, mem_addr))
2541 || (GET_CODE (x_addr) != VALUE
2542 && GET_CODE (mem_addr) == VALUE
2543 && reg_mentioned_p (mem_addr, x_addr))))
2544 {
2545 x_addr = get_addr (x_addr);
2546 mem_addr = get_addr (mem_addr);
2547 }
55efb413 2548
49982682
JW
2549 if (! writep)
2550 {
55efb413 2551 base = find_base_term (mem_addr);
49982682
JW
2552 if (base && (GET_CODE (base) == LABEL_REF
2553 || (GET_CODE (base) == SYMBOL_REF
2554 && CONSTANT_POOL_ADDRESS_P (base))))
2555 return 0;
2556 }
2557
eab5c70a
BS
2558 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
2559 GET_MODE (mem)))
41472af8
MM
2560 return 0;
2561
eab5c70a
BS
2562 x_addr = canon_rtx (x_addr);
2563 mem_addr = canon_rtx (mem_addr);
6e73e666 2564
f47e08d9
RG
2565 if ((ret = memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
2566 SIZE_FOR_MODE (x), x_addr, 0)) != -1)
2567 return ret;
2568
c6ea834c 2569 if (nonoverlapping_memrefs_p (x, mem, false))
c6df88cb
MM
2570 return 0;
2571
ca7fd9cd 2572 fixed_scalar
eab5c70a
BS
2573 = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2574 rtx_addr_varies_p);
2575
55b34b5f
RG
2576 if ((fixed_scalar == mem && !aliases_everything_p (x))
2577 || (fixed_scalar == x && !aliases_everything_p (mem)))
2578 return 0;
2579
2580 return rtx_refs_may_alias_p (x, mem, false);
c6df88cb
MM
2581}
2582
2583/* Anti dependence: X is written after read in MEM takes place. */
2584
2585int
4f588890 2586anti_dependence (const_rtx mem, const_rtx x)
c6df88cb 2587{
389fdba0 2588 return write_dependence_p (mem, x, /*writep=*/0);
9ae8ffe7
JL
2589}
2590
2591/* Output dependence: X is written after store in MEM takes place. */
2592
2593int
4f588890 2594output_dependence (const_rtx mem, const_rtx x)
9ae8ffe7 2595{
389fdba0 2596 return write_dependence_p (mem, x, /*writep=*/1);
9ae8ffe7 2597}
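A source-level picture of the three dependence kinds computed above; with p and q possibly pointing to the same object, none of these statement pairs may be reordered:

static void
toy_dependences (int *p, int *q, int *out)
{
  *p = 1;          /* store                                          */
  *out = *q;       /* read:  true (read-after-write) dependence      */

  *out += *q;      /* read                                           */
  *p = 2;          /* store: anti (write-after-read) dependence      */

  *p = 3;          /* store                                          */
  *q = 4;          /* store: output (write-after-write) dependence   */
}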
c14b9960 2598\f
6e73e666 2599
c6ea834c
BM
2600
2601/* Check whether X may be aliased with MEM. Don't do offset-based
2602 memory disambiguation & TBAA. */
2603int
2604may_alias_p (const_rtx mem, const_rtx x)
2605{
2606 rtx x_addr, mem_addr;
c6ea834c
BM
2607
2608 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2609 return 1;
2610
2611 /* ??? In true_dependence we also allow BLKmode to alias anything. */
2612 if (GET_MODE (mem) == BLKmode || GET_MODE (x) == BLKmode)
2613 return 1;
2614
2615 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2616 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2617 return 1;
2618
2619 /* Read-only memory is by definition never modified, and therefore can't
2620 conflict with anything. We don't expect to find read-only set on MEM,
2621 but stupid user tricks can produce them, so don't die. */
2622 if (MEM_READONLY_P (x))
2623 return 0;
2624
2625 /* If we have MEMs referring to different address spaces (which can
2626 potentially overlap), we cannot easily tell from the addresses
2627 whether the references overlap. */
2628 if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
2629 return 1;
2630
2631 x_addr = XEXP (x, 0);
2632 mem_addr = XEXP (mem, 0);
2633 if (!((GET_CODE (x_addr) == VALUE
2634 && GET_CODE (mem_addr) != VALUE
2635 && reg_mentioned_p (x_addr, mem_addr))
2636 || (GET_CODE (x_addr) != VALUE
2637 && GET_CODE (mem_addr) == VALUE
2638 && reg_mentioned_p (mem_addr, x_addr))))
2639 {
2640 x_addr = get_addr (x_addr);
2641 mem_addr = get_addr (mem_addr);
2642 }
2643
2644 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), GET_MODE (mem_addr)))
2645 return 0;
2646
2647 x_addr = canon_rtx (x_addr);
2648 mem_addr = canon_rtx (mem_addr);
2649
2650 if (nonoverlapping_memrefs_p (mem, x, true))
2651 return 0;
2652
2653 if (aliases_everything_p (x))
2654 return 1;
2655
2656 /* We cannot use aliases_everything_p to test MEM, since we must look
2657 at MEM_ADDR, rather than XEXP (mem, 0). */
2658 if (GET_MODE (mem) == QImode || GET_CODE (mem_addr) == AND)
2659 return 1;
2660
2661 if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2662 rtx_addr_varies_p))
2663 return 0;
2664
2665 /* TBAA is not valid for the loop-invariant check. */
2666 return rtx_refs_may_alias_p (x, mem, false);
2667}
2668
6e73e666 2669void
b5deb7b6 2670init_alias_target (void)
6e73e666 2671{
b3694847 2672 int i;
6e73e666 2673
b5deb7b6
SL
2674 memset (static_reg_base_value, 0, sizeof static_reg_base_value);
2675
6e73e666
JC
2676 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2677 /* Check whether this register can hold an incoming pointer
2678 argument. FUNCTION_ARG_REGNO_P tests outgoing register
ec5c56db 2679 numbers, so translate if necessary due to register windows. */
6e73e666
JC
2680 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
2681 && HARD_REGNO_MODE_OK (i, Pmode))
bf1660a6
JL
2682 static_reg_base_value[i]
2683 = gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));
2684
bf1660a6
JL
2685 static_reg_base_value[STACK_POINTER_REGNUM]
2686 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
2687 static_reg_base_value[ARG_POINTER_REGNUM]
2688 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
2689 static_reg_base_value[FRAME_POINTER_REGNUM]
2690 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
2691#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2692 static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
2693 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
2694#endif
2695}
2696
7b52eede
JH
2697/* Set MEMORY_MODIFIED when X modifies DATA (that is assumed
2698 to be a memory reference). */
2699static bool memory_modified;
2700static void
aa317c97 2701memory_modified_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7b52eede 2702{
3c0cb5de 2703 if (MEM_P (x))
7b52eede 2704 {
9678086d 2705 if (anti_dependence (x, (const_rtx)data) || output_dependence (x, (const_rtx)data))
7b52eede
JH
2706 memory_modified = true;
2707 }
2708}
2709
2710
2711/* Return true when INSN possibly modifies the memory contents of MEM
454ff5cb 2712 (i.e. address can be modified). */
7b52eede 2713bool
9678086d 2714memory_modified_in_insn_p (const_rtx mem, const_rtx insn)
7b52eede
JH
2715{
2716 if (!INSN_P (insn))
2717 return false;
2718 memory_modified = false;
aa317c97 2719 note_stores (PATTERN (insn), memory_modified_1, CONST_CAST_RTX(mem));
7b52eede
JH
2720 return memory_modified;
2721}
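The same callback-plus-flag pattern in a self-contained form (an invented toy model, with a plain array standing in for the stores walked by note_stores):

static int toy_modified;

/* Callback: record a conflict if the stored location is the one we
   are watching.  */
static void
toy_note_store (int stored_loc, void *data)
{
  if (stored_loc == *(int *) data)
    toy_modified = 1;
}

static int
toy_memory_modified_p (const int *stores, int n_stores, int watched)
{
  int i;

  toy_modified = 0;
  for (i = 0; i < n_stores; i++)
    toy_note_store (stores[i], &watched);
  return toy_modified;
}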
2722
c13e8210
MM
2723/* Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE
2724 array. */
2725
9ae8ffe7 2726void
4682ae04 2727init_alias_analysis (void)
9ae8ffe7 2728{
c582d54a 2729 unsigned int maxreg = max_reg_num ();
ea64ef27 2730 int changed, pass;
b3694847
SS
2731 int i;
2732 unsigned int ui;
2733 rtx insn;
9ae8ffe7 2734
0d446150
JH
2735 timevar_push (TV_ALIAS_ANALYSIS);
2736
bb1acb3e 2737 reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER;
a9429e29 2738 reg_known_value = ggc_alloc_cleared_vec_rtx (reg_known_value_size);
f883e0a7 2739 reg_known_equiv_p = XCNEWVEC (bool, reg_known_value_size);
9ae8ffe7 2740
08c79682 2741 /* If we have memory allocated from the previous run, use it. */
c582d54a 2742 if (old_reg_base_value)
08c79682
KH
2743 reg_base_value = old_reg_base_value;
2744
2745 if (reg_base_value)
2746 VEC_truncate (rtx, reg_base_value, 0);
2747
a590ac65 2748 VEC_safe_grow_cleared (rtx, gc, reg_base_value, maxreg);
ac606739 2749
5ed6ace5
MD
2750 new_reg_base_value = XNEWVEC (rtx, maxreg);
2751 reg_seen = XNEWVEC (char, maxreg);
ec907dd8
JL
2752
2753 /* The basic idea is that each pass through this loop will use the
2754 "constant" information from the previous pass to propagate alias
2755 information through another level of assignments.
2756
2757 This could get expensive if the assignment chains are long. Maybe
2758 we should throttle the number of iterations, possibly based on
6e73e666 2759 the optimization level or flag_expensive_optimizations.
ec907dd8
JL
2760
2761 We could propagate more information in the first pass by making use
6fb5fa3c 2762 of DF_REG_DEF_COUNT to determine immediately that the alias information
ea64ef27
JL
2763 for a pseudo is "constant".
2764
2765 A program with an uninitialized variable can cause an infinite loop
2766 here. Instead of doing a full dataflow analysis to detect such problems
2767 we just cap the number of iterations for the loop.
2768
2769 The state of the arrays for the set chain in question does not matter
2770 since the program has undefined behavior. */
6e73e666 2771
ea64ef27 2772 pass = 0;
6e73e666 2773 do
ec907dd8
JL
2774 {
2775 /* Assume nothing will change this iteration of the loop. */
2776 changed = 0;
2777
ec907dd8
JL
2778 /* We want to assign the same IDs each iteration of this loop, so
2779 start counting from zero each iteration of the loop. */
2780 unique_id = 0;
2781
f5143c46 2782 /* We're at the start of the function each iteration through the
ec907dd8 2783 loop, so we're copying arguments. */
83bbd9b6 2784 copying_arguments = true;
9ae8ffe7 2785
6e73e666 2786 /* Wipe the potential alias information clean for this pass. */
c582d54a 2787 memset (new_reg_base_value, 0, maxreg * sizeof (rtx));
8072f69c 2788
6e73e666 2789 /* Wipe the reg_seen array clean. */
c582d54a 2790 memset (reg_seen, 0, maxreg);
9ae8ffe7 2791
6e73e666
JC
2792 /* Mark all hard registers which may contain an address.
2793 The stack, frame and argument pointers may contain an address.
2794 An argument register which can hold a Pmode value may contain
2795 an address even if it is not in BASE_REGS.
8072f69c 2796
6e73e666
JC
2797 The address expression is VOIDmode for an argument and
2798 Pmode for other registers. */
2799
7f243674
JL
2800 memcpy (new_reg_base_value, static_reg_base_value,
2801 FIRST_PSEUDO_REGISTER * sizeof (rtx));
6e73e666 2802
ec907dd8
JL
2803 /* Walk the insns adding values to the new_reg_base_value array. */
2804 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9ae8ffe7 2805 {
2c3c49de 2806 if (INSN_P (insn))
ec907dd8 2807 {
6e73e666 2808 rtx note, set;
efc9bd41
RK
2809
2810#if defined (HAVE_prologue) || defined (HAVE_epilogue)
f5143c46 2811 /* The prologue/epilogue insns are not threaded onto the
657959ca
JL
2812 insn chain until after reload has completed. Thus,
2813 there is no sense wasting time checking if INSN is in
2814 the prologue/epilogue until after reload has completed. */
2815 if (reload_completed
2816 && prologue_epilogue_contains (insn))
efc9bd41
RK
2817 continue;
2818#endif
2819
ec907dd8 2820 /* If this insn has a noalias note, process it. Otherwise,
c22cacf3
MS
2821 scan for sets. A simple set will have no side effects
2822 which could change the base value of any other register. */
6e73e666 2823
ec907dd8 2824 if (GET_CODE (PATTERN (insn)) == SET
efc9bd41
RK
2825 && REG_NOTES (insn) != 0
2826 && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
84832317 2827 record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
ec907dd8 2828 else
84832317 2829 note_stores (PATTERN (insn), record_set, NULL);
6e73e666
JC
2830
2831 set = single_set (insn);
2832
2833 if (set != 0
f8cfc6aa 2834 && REG_P (SET_DEST (set))
fb6754f0 2835 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
6e73e666 2836 {
fb6754f0 2837 unsigned int regno = REGNO (SET_DEST (set));
713f41f9 2838 rtx src = SET_SRC (set);
bb1acb3e 2839 rtx t;
713f41f9 2840
a31830a7
SB
2841 note = find_reg_equal_equiv_note (insn);
2842 if (note && REG_NOTE_KIND (note) == REG_EQUAL
6fb5fa3c 2843 && DF_REG_DEF_COUNT (regno) != 1)
a31830a7
SB
2844 note = NULL_RTX;
2845
2846 if (note != NULL_RTX
713f41f9 2847 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
bb2cf916 2848 && ! rtx_varies_p (XEXP (note, 0), 1)
bb1acb3e
RH
2849 && ! reg_overlap_mentioned_p (SET_DEST (set),
2850 XEXP (note, 0)))
713f41f9 2851 {
bb1acb3e
RH
2852 set_reg_known_value (regno, XEXP (note, 0));
2853 set_reg_known_equiv_p (regno,
2854 REG_NOTE_KIND (note) == REG_EQUIV);
713f41f9 2855 }
6fb5fa3c 2856 else if (DF_REG_DEF_COUNT (regno) == 1
713f41f9 2857 && GET_CODE (src) == PLUS
f8cfc6aa 2858 && REG_P (XEXP (src, 0))
bb1acb3e 2859 && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
481683e1 2860 && CONST_INT_P (XEXP (src, 1)))
713f41f9 2861 {
bb1acb3e
RH
2862 t = plus_constant (t, INTVAL (XEXP (src, 1)));
2863 set_reg_known_value (regno, t);
2864 set_reg_known_equiv_p (regno, 0);
713f41f9 2865 }
6fb5fa3c 2866 else if (DF_REG_DEF_COUNT (regno) == 1
713f41f9
BS
2867 && ! rtx_varies_p (src, 1))
2868 {
bb1acb3e
RH
2869 set_reg_known_value (regno, src);
2870 set_reg_known_equiv_p (regno, 0);
713f41f9 2871 }
6e73e666 2872 }
ec907dd8 2873 }
4b4bf941 2874 else if (NOTE_P (insn)
a38e7aa5 2875 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
83bbd9b6 2876 copying_arguments = false;
6e73e666 2877 }
ec907dd8 2878
6e73e666 2879 /* Now propagate values from new_reg_base_value to reg_base_value. */
62e5bf5d 2880 gcc_assert (maxreg == (unsigned int) max_reg_num ());
c22cacf3 2881
c582d54a 2882 for (ui = 0; ui < maxreg; ui++)
6e73e666 2883 {
e51712db 2884 if (new_reg_base_value[ui]
08c79682 2885 && new_reg_base_value[ui] != VEC_index (rtx, reg_base_value, ui)
c582d54a 2886 && ! rtx_equal_p (new_reg_base_value[ui],
08c79682 2887 VEC_index (rtx, reg_base_value, ui)))
ec907dd8 2888 {
08c79682 2889 VEC_replace (rtx, reg_base_value, ui, new_reg_base_value[ui]);
6e73e666 2890 changed = 1;
ec907dd8 2891 }
9ae8ffe7 2892 }
9ae8ffe7 2893 }
6e73e666 2894 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
9ae8ffe7
JL
2895
2896 /* Fill in the remaining entries. */
bb1acb3e 2897 for (i = 0; i < (int)reg_known_value_size; i++)
9ae8ffe7 2898 if (reg_known_value[i] == 0)
bb1acb3e 2899 reg_known_value[i] = regno_reg_rtx[i + FIRST_PSEUDO_REGISTER];
9ae8ffe7 2900
e05e2395
MM
2901 /* Clean up. */
2902 free (new_reg_base_value);
ec907dd8 2903 new_reg_base_value = 0;
e05e2395 2904 free (reg_seen);
9ae8ffe7 2905 reg_seen = 0;
0d446150 2906 timevar_pop (TV_ALIAS_ANALYSIS);
9ae8ffe7
JL
2907}
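The overall shape of the propagation loop above, detached from the register tables: iterate to a fixed point, but cap the number of passes so that undefined programs (the uninitialized-variable case mentioned in the comment) cannot loop forever. TOY_MAX_PASSES is an invented stand-in for MAX_ALIAS_LOOP_PASSES:

#define TOY_MAX_PASSES 10

static void
toy_propagate_to_fixed_point (int (*one_pass) (void))
{
  int pass = 0;
  int changed;

  do
    changed = one_pass ();   /* Nonzero if any base value changed.  */
  while (changed && ++pass < TOY_MAX_PASSES);
}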
2908
2909void
4682ae04 2910end_alias_analysis (void)
9ae8ffe7 2911{
c582d54a 2912 old_reg_base_value = reg_base_value;
bb1acb3e 2913 ggc_free (reg_known_value);
9ae8ffe7 2914 reg_known_value = 0;
ac606739 2915 reg_known_value_size = 0;
bb1acb3e 2916 free (reg_known_equiv_p);
e05e2395 2917 reg_known_equiv_p = 0;
9ae8ffe7 2918}
e2500fed
GK
2919
2920#include "gt-alias.h"