/* Alias analysis for GNU C
   Copyright (C) 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
   Contributed by John Carr (jfc@mit.edu).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "output.h"
#include "toplev.h"
#include "cselib.h"
#include "splay-tree.h"
#include "ggc.h"
#include "langhooks.h"
#include "target.h"

/* The alias sets assigned to MEMs assist the back-end in determining
   which MEMs can alias which other MEMs.  In general, two MEMs in
   different alias sets cannot alias each other, with one important
   exception.  Consider something like:

     struct S { int i; double d; };

   a store to an `S' can alias something of either type `int' or type
   `double'.  (However, a store to an `int' cannot alias a `double'
   and vice versa.)  We indicate this via a tree structure that looks
   like:
                struct S
                 /   \
                /     \
              |/_     _\|
              int    double

   (The arrows are directed and point downwards.)
   In this situation we say the alias set for `struct S' is the
   `superset' and that those for `int' and `double' are `subsets'.

   To see whether two alias sets can point to the same memory, we must
   see if either alias set is a subset of the other.  We need not trace
   past immediate descendants, however, since we propagate all
   grandchildren up one level.

   Alias set zero is implicitly a superset of all other alias sets.
   However, there is no actual entry for alias set zero.  It is an
   error to attempt to explicitly construct a subset of zero.  */
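/* For example (an illustrative sketch, not code from this file): a
   front end describing the lattice above would effectively do

     HOST_WIDE_INT s_set = get_alias_set (s_type);
     record_alias_subset (s_set, get_alias_set (integer_type_node));
     record_alias_subset (s_set, get_alias_set (double_type_node));

   after which alias_sets_conflict_p returns 1 for (`struct S', `int')
   and (`struct S', `double'), but 0 for (`int', `double').  */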

typedef struct alias_set_entry
{
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  HOST_WIDE_INT alias_set;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all descendants.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  splay_tree children;

  /* Nonzero if it would have a child of zero: this effectively makes this
     alias set the same as alias set zero.  */
  int has_zero_child;
} *alias_set_entry;

static int rtx_equal_for_memref_p PARAMS ((rtx, rtx));
static rtx find_symbolic_term PARAMS ((rtx));
rtx get_addr PARAMS ((rtx));
static int memrefs_conflict_p PARAMS ((int, rtx, int, rtx,
                                       HOST_WIDE_INT));
static void record_set PARAMS ((rtx, rtx, void *));
static rtx find_base_term PARAMS ((rtx));
static int base_alias_check PARAMS ((rtx, rtx, enum machine_mode,
                                     enum machine_mode));
static rtx find_base_value PARAMS ((rtx));
static int mems_in_disjoint_alias_sets_p PARAMS ((rtx, rtx));
static int insert_subset_children PARAMS ((splay_tree_node, void*));
static tree find_base_decl PARAMS ((tree));
static alias_set_entry get_alias_set_entry PARAMS ((HOST_WIDE_INT));
static rtx fixed_scalar_and_varying_struct_p PARAMS ((rtx, rtx, rtx, rtx,
                                                      int (*) (rtx, int)));
static int aliases_everything_p PARAMS ((rtx));
static bool nonoverlapping_component_refs_p PARAMS ((tree, tree));
static tree decl_for_component_ref PARAMS ((tree));
static rtx adjust_offset_for_component_ref PARAMS ((tree, rtx));
static int nonoverlapping_memrefs_p PARAMS ((rtx, rtx));
static int write_dependence_p PARAMS ((rtx, rtx, int));

static int nonlocal_mentioned_p_1 PARAMS ((rtx *, void *));
static int nonlocal_mentioned_p PARAMS ((rtx));
static int nonlocal_referenced_p_1 PARAMS ((rtx *, void *));
static int nonlocal_referenced_p PARAMS ((rtx));
static int nonlocal_set_p_1 PARAMS ((rtx *, void *));
static int nonlocal_set_p PARAMS ((rtx));

/* Set up all info needed to perform alias analysis on memory references.  */

/* Returns the size in bytes of the mode of X.  */
#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))

/* Returns nonzero if MEM1 and MEM2 do not alias because they are in
   different alias sets.  We ignore alias sets in functions making use
   of variable arguments because the va_arg macros on some systems are
   not legal ANSI C.  */
#define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
  mems_in_disjoint_alias_sets_p (MEM1, MEM2)

/* Cap the number of passes we make over the insns propagating alias
   information through set chains.  10 is a completely arbitrary choice.  */
#define MAX_ALIAS_LOOP_PASSES 10

/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent certain special values: function arguments and
   the stack, frame, and argument pointers.

   The contents of an ADDRESS are not normally used; the mode of the
   ADDRESS determines whether the ADDRESS is a function argument or some
   other special value.  Pointer equality, not rtx_equal_p, determines whether
   two ADDRESS expressions refer to the same base address.

   The only use of the contents of an ADDRESS is for determining if the
   current function performs nonlocal memory references for the
   purposes of marking the function as a constant function.  */
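/* A sketch of the intended invariant (illustrative, not code from this
   file): after

     (set (reg 100) (symbol_ref "x"))
     (set (reg 100) (plus (reg 100) (const_int 4)))

   reg_base_value[100] is still the SYMBOL_REF for `x', because the
   second set merely offsets the pointer within the same top level
   object.  */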

static GTY((length ("reg_base_value_size"))) rtx *reg_base_value;
static rtx *new_reg_base_value;
static unsigned int reg_base_value_size; /* size of reg_base_value array */

#define REG_BASE_VALUE(X) \
  (REGNO (X) < reg_base_value_size \
   ? reg_base_value[REGNO (X)] : 0)

/* Vector of known invariant relationships between registers.  Set in
   loop unrolling.  Indexed by register number, if nonzero the value
   is an expression describing this register in terms of another.

   The length of this array is REG_BASE_VALUE_SIZE.

   Because this array contains only pseudo registers it has no effect
   after reload.  */
static rtx *alias_invariant;

/* Vector indexed by N giving the initial (unchanging) value known for
   pseudo-register N.  This array is initialized in
   init_alias_analysis, and does not change until end_alias_analysis
   is called.  */
rtx *reg_known_value;

/* Indicates number of valid entries in reg_known_value.  */
static unsigned int reg_known_value_size;

/* Vector recording for each reg_known_value whether it is due to a
   REG_EQUIV note.  Future passes (viz., reload) may replace the
   pseudo with the equivalent expression and so we account for the
   dependences that would be introduced if that happens.

   The REG_EQUIV notes created in assign_parms may mention the arg
   pointer, and there are explicit insns in the RTL that modify the
   arg pointer.  Thus we must ensure that such insns don't get
   scheduled across each other because that would invalidate the
   REG_EQUIV notes.  One could argue that the REG_EQUIV notes are
   wrong, but solving the problem in the scheduler will likely give
   better code, so we do it here.  */
char *reg_known_equiv_p;

/* True when scanning insns from the start of the rtl to the
   NOTE_INSN_FUNCTION_BEG note.  */
static int copying_arguments;

/* The splay-tree used to store the various alias set entries.  */
static splay_tree alias_sets;
\f
/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
   such an entry, or NULL otherwise.  */

static alias_set_entry
get_alias_set_entry (alias_set)
     HOST_WIDE_INT alias_set;
{
  splay_tree_node sn
    = splay_tree_lookup (alias_sets, (splay_tree_key) alias_set);

  return sn != 0 ? ((alias_set_entry) sn->value) : 0;
}

/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
   the two MEMs cannot alias each other.  */

static int
mems_in_disjoint_alias_sets_p (mem1, mem2)
     rtx mem1;
     rtx mem2;
{
#ifdef ENABLE_CHECKING
  /* Perform a basic sanity check.  Namely, that there are no alias sets
     if we're not using strict aliasing.  This helps to catch bugs
     whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
     where a MEM is allocated in some way other than by the use of
     gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared.  If we begin to
     use alias sets to indicate that spilled registers cannot alias each
     other, we might need to remove this check.  */
  if (! flag_strict_aliasing
      && (MEM_ALIAS_SET (mem1) != 0 || MEM_ALIAS_SET (mem2) != 0))
    abort ();
#endif

  return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
}

/* Insert the NODE into the splay tree given by DATA.  Used by
   record_alias_subset via splay_tree_foreach.  */

static int
insert_subset_children (node, data)
     splay_tree_node node;
     void *data;
{
  splay_tree_insert ((splay_tree) data, node->key, node->value);

  return 0;
}

/* Return 1 if the two specified alias sets may conflict.  */

int
alias_sets_conflict_p (set1, set2)
     HOST_WIDE_INT set1, set2;
{
  alias_set_entry ase;

  /* If we have no alias set information for one of the operands, we have
     to assume it can alias anything.  */
  if (set1 == 0 || set2 == 0
      /* If the two alias sets are the same, they may alias.  */
      || set1 == set2)
    return 1;

  /* See if the first alias set is a subset of the second.  */
  ase = get_alias_set_entry (set1);
  if (ase != 0
      && (ase->has_zero_child
          || splay_tree_lookup (ase->children,
                                (splay_tree_key) set2)))
    return 1;

  /* Now do the same, but with the alias sets reversed.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && (ase->has_zero_child
          || splay_tree_lookup (ase->children,
                                (splay_tree_key) set1)))
    return 1;

  /* The two alias sets are distinct and neither one is the
     child of the other.  Therefore, they cannot alias.  */
  return 0;
}
\f
/* Return 1 if TYPE is a RECORD_TYPE, UNION_TYPE, or QUAL_UNION_TYPE and
   has any readonly fields.  If any of the fields have types that
   contain readonly fields, return true as well.  */
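/* For instance (an illustrative sketch):

     struct r { const int id; int val; };

   readonly_fields_p returns 1 for `struct r', since `id' is a readonly
   FIELD_DECL, and likewise for any aggregate containing a `struct r'
   field.  */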

int
readonly_fields_p (type)
     tree type;
{
  tree field;

  if (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
      && TREE_CODE (type) != QUAL_UNION_TYPE)
    return 0;

  for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL
        && (TREE_READONLY (field)
            || readonly_fields_p (TREE_TYPE (field))))
      return 1;

  return 0;
}
\f
/* Return 1 if any MEM object of type T1 will always conflict (using the
   dependency routines in this file) with any MEM object of type T2.
   This is used when allocating temporary storage.  If T1 and/or T2 are
   NULL_TREE, it means we know nothing about the storage.  */

int
objects_must_conflict_p (t1, t2)
     tree t1, t2;
{
  /* If neither has a type specified, we don't know if they'll conflict
     because we may be using them to store objects of various types, for
     example the argument and local variables areas of inlined functions.  */
  if (t1 == 0 && t2 == 0)
    return 0;

  /* If one or the other has readonly fields or is readonly,
     then they may not conflict.  */
  if ((t1 != 0 && readonly_fields_p (t1))
      || (t2 != 0 && readonly_fields_p (t2))
      || (t1 != 0 && TYPE_READONLY (t1))
      || (t2 != 0 && TYPE_READONLY (t2)))
    return 0;

  /* If they are the same type, they must conflict.  */
  if (t1 == t2
      /* Likewise if both are volatile.  */
      || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
    return 1;

  /* If one is aggregate and the other is scalar then they may not
     conflict.  */
  if ((t1 != 0 && AGGREGATE_TYPE_P (t1))
      != (t2 != 0 && AGGREGATE_TYPE_P (t2)))
    return 0;

  /* Otherwise they conflict only if the alias sets conflict.  */
  return alias_sets_conflict_p (t1 ? get_alias_set (t1) : 0,
                                t2 ? get_alias_set (t2) : 0);
}
\f
/* T is an expression with pointer type.  Find the DECL on which this
   expression is based.  (For example, in `a[i]' this would be `a'.)
   If there is no such DECL, or a unique decl cannot be determined,
   NULL_TREE is returned.  */

static tree
find_base_decl (t)
     tree t;
{
  tree d0, d1, d2;

  if (t == 0 || t == error_mark_node || ! POINTER_TYPE_P (TREE_TYPE (t)))
    return 0;

  /* If this is a declaration, return it.  */
  if (TREE_CODE_CLASS (TREE_CODE (t)) == 'd')
    return t;

  /* Handle general expressions.  It would be nice to deal with
     COMPONENT_REFs here.  If we could tell that `a' and `b' were the
     same, then `a->f' and `b->f' are also the same.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case '1':
      return find_base_decl (TREE_OPERAND (t, 0));

    case '2':
      /* Return 0 if a decl is found in neither operand, or if the two
         operands yield different decls; otherwise return the one decl.  */
      d0 = find_base_decl (TREE_OPERAND (t, 0));
      d1 = find_base_decl (TREE_OPERAND (t, 1));
      if (d0 == d1)
        return d0;
      else if (d0 == 0)
        return d1;
      else if (d1 == 0)
        return d0;
      else
        return 0;

    case '3':
      d0 = find_base_decl (TREE_OPERAND (t, 0));
      d1 = find_base_decl (TREE_OPERAND (t, 1));
      d2 = find_base_decl (TREE_OPERAND (t, 2));

      /* Set any nonzero values from the last, then from the first.  */
      if (d1 == 0) d1 = d2;
      if (d0 == 0) d0 = d1;
      if (d1 == 0) d1 = d0;
      if (d2 == 0) d2 = d1;

      /* At this point all are nonzero or all are zero.  If all three are the
         same, return it.  Otherwise, return zero.  */
      return (d0 == d1 && d1 == d2) ? d0 : 0;

    default:
      return 0;
    }
}

/* Return 1 if all the nested component references handled by
   get_inner_reference in T are such that we can address the object in T.  */

int
can_address_p (t)
     tree t;
{
  /* If we're at the end, it is vacuously addressable.  */
  if (! handled_component_p (t))
    return 1;

  /* Bitfields are never addressable.  */
  else if (TREE_CODE (t) == BIT_FIELD_REF)
    return 0;

  /* Fields are addressable unless they are marked as nonaddressable or
     the containing type has alias set 0.  */
  else if (TREE_CODE (t) == COMPONENT_REF
           && ! DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1))
           && get_alias_set (TREE_TYPE (TREE_OPERAND (t, 0))) != 0
           && can_address_p (TREE_OPERAND (t, 0)))
    return 1;

  /* Likewise for arrays.  */
  else if ((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
           && ! TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0)))
           && get_alias_set (TREE_TYPE (TREE_OPERAND (t, 0))) != 0
           && can_address_p (TREE_OPERAND (t, 0)))
    return 1;

  return 0;
}

/* Return the alias set for T, which may be either a type or an
   expression.  Call language-specific routine for help, if needed.  */

HOST_WIDE_INT
get_alias_set (t)
     tree t;
{
  HOST_WIDE_INT set;

  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  Also return 0 if this or its type is
     an error.  */
  if (! flag_strict_aliasing || t == error_mark_node
      || (! TYPE_P (t)
          && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
    return 0;

  /* We can be passed either an expression or a type.  This and the
     language-specific routine may make mutually-recursive calls to each other
     to figure out what to do.  At each juncture, we see if this is a tree
     that the language may need to handle specially.  First handle things that
     aren't types.  */
  if (! TYPE_P (t))
    {
      tree inner = t;
      tree placeholder_ptr = 0;

      /* Remove any nops, then give the language a chance to do
         something with this tree before we look at it.  */
      STRIP_NOPS (t);
      set = (*lang_hooks.get_alias_set) (t);
      if (set != -1)
        return set;

      /* First see if the actual object referenced is an INDIRECT_REF from a
         restrict-qualified pointer or a "void *".  Replace
         PLACEHOLDER_EXPRs.  */
      while (TREE_CODE (inner) == PLACEHOLDER_EXPR
             || handled_component_p (inner))
        {
          if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
            inner = find_placeholder (inner, &placeholder_ptr);
          else
            inner = TREE_OPERAND (inner, 0);

          STRIP_NOPS (inner);
        }

      /* Check for accesses through restrict-qualified pointers.  */
      if (TREE_CODE (inner) == INDIRECT_REF)
        {
          tree decl = find_base_decl (TREE_OPERAND (inner, 0));

          if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl))
            {
              /* If we haven't computed the actual alias set, do it now.  */
              if (DECL_POINTER_ALIAS_SET (decl) == -2)
                {
                  /* No two restricted pointers can point at the same thing.
                     However, a restricted pointer can point at the same thing
                     as an unrestricted pointer, if that unrestricted pointer
                     is based on the restricted pointer.  So, we make the
                     alias set for the restricted pointer a subset of the
                     alias set for the type pointed to by the type of the
                     decl.  */
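                  /* For example (an illustrative sketch): given a
                     parameter `int *restrict p', the set chosen here
                     for `*p' becomes a fresh subset of the alias set
                     of `int', so accesses through two distinct
                     restricted pointers land in disjoint alias sets.  */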
                  HOST_WIDE_INT pointed_to_alias_set
                    = get_alias_set (TREE_TYPE (TREE_TYPE (decl)));

                  if (pointed_to_alias_set == 0)
                    /* It's not legal to make a subset of alias set zero.  */
                    ;
                  else
                    {
                      DECL_POINTER_ALIAS_SET (decl) = new_alias_set ();
                      record_alias_subset (pointed_to_alias_set,
                                           DECL_POINTER_ALIAS_SET (decl));
                    }
                }

              /* We use the alias set indicated in the declaration.  */
              return DECL_POINTER_ALIAS_SET (decl);
            }

          /* If we have an INDIRECT_REF via a void pointer, we don't
             know anything about what that might alias.  */
          else if (TREE_CODE (TREE_TYPE (inner)) == VOID_TYPE)
            return 0;
        }

      /* Otherwise, pick up the outermost object that we could have a pointer
         to, processing conversion and PLACEHOLDER_EXPR as above.  */
      placeholder_ptr = 0;
      while (TREE_CODE (t) == PLACEHOLDER_EXPR
             || (handled_component_p (t) && ! can_address_p (t)))
        {
          if (TREE_CODE (t) == PLACEHOLDER_EXPR)
            t = find_placeholder (t, &placeholder_ptr);
          else
            t = TREE_OPERAND (t, 0);

          STRIP_NOPS (t);
        }

      /* If we've already determined the alias set for a decl, just return
         it.  This is necessary for C++ anonymous unions, whose component
         variables don't look like union members (boo!).  */
      if (TREE_CODE (t) == VAR_DECL
          && DECL_RTL_SET_P (t) && GET_CODE (DECL_RTL (t)) == MEM)
        return MEM_ALIAS_SET (DECL_RTL (t));

      /* Now all we care about is the type.  */
      t = TREE_TYPE (t);
    }

  /* Variant qualifiers don't affect the alias set, so get the main
     variant.  If this is a type with a known alias set, return it.  */
  t = TYPE_MAIN_VARIANT (t);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    return TYPE_ALIAS_SET (t);

  /* See if the language has special handling for this type.  */
  set = (*lang_hooks.get_alias_set) (t);
  if (set != -1)
    return set;

  /* There are no objects of FUNCTION_TYPE, so there's no point in
     using up an alias set for them.  (There are, of course, pointers
     and references to functions, but that's different.)  */
  else if (TREE_CODE (t) == FUNCTION_TYPE)
    set = 0;
  else
    /* Otherwise make a new alias set for this type.  */
    set = new_alias_set ();

  TYPE_ALIAS_SET (t) = set;

  /* If this is an aggregate type, we must record any component aliasing
     information.  */
  if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    record_component_aliases (t);

  return set;
}

/* Return a brand-new alias set.  */

HOST_WIDE_INT
new_alias_set ()
{
  static HOST_WIDE_INT last_alias_set;

  if (flag_strict_aliasing)
    return ++last_alias_set;
  else
    return 0;
}

/* Indicate that things in SUBSET can alias things in SUPERSET, but
   not vice versa.  For example, in C, a store to an `int' can alias a
   structure containing an `int', but not vice versa.  Here, the
   structure would be the SUPERSET and `int' the SUBSET.  This
   function should be called only once per SUPERSET/SUBSET pair.

   It is illegal for SUPERSET to be zero; everything is implicitly a
   subset of alias set zero.  */

void
record_alias_subset (superset, subset)
     HOST_WIDE_INT superset;
     HOST_WIDE_INT subset;
{
  alias_set_entry superset_entry;
  alias_set_entry subset_entry;

  /* It is possible in complex type situations for both sets to be the same,
     in which case we can ignore this operation.  */
  if (superset == subset)
    return;

  if (superset == 0)
    abort ();

  superset_entry = get_alias_set_entry (superset);
  if (superset_entry == 0)
    {
      /* Create an entry for the SUPERSET, so that we have a place to
         attach the SUBSET.  */
      superset_entry
        = (alias_set_entry) xmalloc (sizeof (struct alias_set_entry));
      superset_entry->alias_set = superset;
      superset_entry->children
        = splay_tree_new (splay_tree_compare_ints, 0, 0);
      superset_entry->has_zero_child = 0;
      splay_tree_insert (alias_sets, (splay_tree_key) superset,
                         (splay_tree_value) superset_entry);
    }

  if (subset == 0)
    superset_entry->has_zero_child = 1;
  else
    {
      subset_entry = get_alias_set_entry (subset);
      /* If there is an entry for the subset, enter all of its children
         (if they are not already present) as children of the SUPERSET.  */
      if (subset_entry)
        {
          if (subset_entry->has_zero_child)
            superset_entry->has_zero_child = 1;

          splay_tree_foreach (subset_entry->children, insert_subset_children,
                              superset_entry->children);
        }

      /* Enter the SUBSET itself as a child of the SUPERSET.  */
      splay_tree_insert (superset_entry->children,
                         (splay_tree_key) subset, 0);
    }
}

/* Record that component types of TYPE, if any, are part of that type for
   aliasing purposes.  For record types, we only record component types
   for fields that are marked addressable.  For array types, we always
   record the component types, so the front end should not call this
   function if the individual components aren't addressable.  */

void
record_component_aliases (type)
     tree type;
{
  HOST_WIDE_INT superset = get_alias_set (type);
  tree field;

  if (superset == 0)
    return;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      if (! TYPE_NONALIASED_COMPONENT (type))
        record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Recursively record aliases for the base classes, if there are any.  */
      if (TYPE_BINFO (type) != NULL && TYPE_BINFO_BASETYPES (type) != NULL)
        {
          int i;
          for (i = 0; i < TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (type)); i++)
            {
              tree binfo = TREE_VEC_ELT (TYPE_BINFO_BASETYPES (type), i);
              record_alias_subset (superset,
                                   get_alias_set (BINFO_TYPE (binfo)));
            }
        }
      for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
        if (TREE_CODE (field) == FIELD_DECL && ! DECL_NONADDRESSABLE_P (field))
          record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
      break;

    case COMPLEX_TYPE:
      record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    default:
      break;
    }
}

/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  */

HOST_WIDE_INT
get_varargs_alias_set ()
{
  static HOST_WIDE_INT set = -1;

  if (set == -1)
    set = new_alias_set ();

  return set;
}

/* Likewise, but used for the fixed portions of the frame, e.g., register
   save areas.  */

HOST_WIDE_INT
get_frame_alias_set ()
{
  static HOST_WIDE_INT set = -1;

  if (set == -1)
    set = new_alias_set ();

  return set;
}

/* Inside SRC, the source of a SET, find a base address.  */
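/* E.g. (an illustrative sketch of the cases below): for SRC of the form
   (plus (reg 100) (const_int 8)), the base is whatever base is already
   recorded for register 100, while for a SYMBOL_REF or LABEL_REF the
   expression itself is the base.  */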

static rtx
find_base_value (src)
     rtx src;
{
  unsigned int regno;

  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return src;

    case REG:
      regno = REGNO (src);
      /* At the start of a function, argument registers have known base
         values which may be lost later.  Returning an ADDRESS
         expression here allows optimization based on argument values
         even when the argument registers are used for other purposes.  */
      if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
        return new_reg_base_value[regno];

      /* If a pseudo has a known base value, return it.  Do not do this
         for non-fixed hard regs since it can result in a circular
         dependency chain for registers which have values at function entry.

         The test above is not sufficient because the scheduler may move
         a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN.  */
      if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
          && regno < reg_base_value_size
          && reg_base_value[regno])
        return reg_base_value[regno];

      return src;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
         copying-arguments block; it is too hard to track changes
         otherwise.  */
      if (copying_arguments
          && (XEXP (src, 0) == arg_pointer_rtx
              || (GET_CODE (XEXP (src, 0)) == PLUS
                  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
        return gen_rtx_ADDRESS (VOIDmode, src);
      return 0;

    case CONST:
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
        break;

      /* ... fall through ...  */

    case PLUS:
    case MINUS:
      {
        rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

        /* If either operand is a REG that is a known pointer, then it
           is the base.  */
        if (REG_P (src_0) && REG_POINTER (src_0))
          return find_base_value (src_0);
        if (REG_P (src_1) && REG_POINTER (src_1))
          return find_base_value (src_1);

        /* If either operand is a REG, then see if we already have
           a known value for it.  */
        if (REG_P (src_0))
          {
            temp = find_base_value (src_0);
            if (temp != 0)
              src_0 = temp;
          }

        if (REG_P (src_1))
          {
            temp = find_base_value (src_1);
            if (temp != 0)
              src_1 = temp;
          }

        /* If either base is a named object or a special address
           (like an argument or stack reference), then use it for the
           base term.  */
        if (src_0 != 0
            && (GET_CODE (src_0) == SYMBOL_REF
                || GET_CODE (src_0) == LABEL_REF
                || (GET_CODE (src_0) == ADDRESS
                    && GET_MODE (src_0) != VOIDmode)))
          return src_0;

        if (src_1 != 0
            && (GET_CODE (src_1) == SYMBOL_REF
                || GET_CODE (src_1) == LABEL_REF
                || (GET_CODE (src_1) == ADDRESS
                    && GET_MODE (src_1) != VOIDmode)))
          return src_1;

        /* Guess which operand is the base address:
           If either operand is a symbol, then it is the base.  If
           either operand is a CONST_INT, then the other is the base.  */
        if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
          return find_base_value (src_0);
        else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
          return find_base_value (src_1);

        return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
         second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* If the second operand is constant set the base
         address to the first operand.  */
      if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
        return find_base_value (XEXP (src, 0));
      return 0;

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
        break;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_value (XEXP (src, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* used for NT/Alpha pointers */
      {
        rtx temp = find_base_value (XEXP (src, 0));

#ifdef POINTERS_EXTEND_UNSIGNED
        if (temp != 0 && CONSTANT_P (temp) && GET_MODE (temp) != Pmode)
          temp = convert_memory_address (Pmode, temp);
#endif

        return temp;
      }

    default:
      break;
    }

  return 0;
}

/* Called from init_alias_analysis indirectly through note_stores.  */

/* While scanning insns to find base values, reg_seen[N] is nonzero if
   register N has been set in this function.  */
static char *reg_seen;

/* Addresses which are known not to alias anything else are identified
   by a unique integer.  */
static int unique_id;

static void
record_set (dest, set, data)
     rtx dest, set;
     void *data ATTRIBUTE_UNUSED;
{
  unsigned regno;
  rtx src;

  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);

  if (regno >= reg_base_value_size)
    abort ();

  if (set)
    {
      /* A CLOBBER wipes out any old value but does not prevent a previously
         unset register from acquiring a base address (i.e. reg_seen is not
         set).  */
      if (GET_CODE (set) == CLOBBER)
        {
          new_reg_base_value[regno] = 0;
          return;
        }
      src = SET_SRC (set);
    }
  else
    {
      if (reg_seen[regno])
        {
          new_reg_base_value[regno] = 0;
          return;
        }
      reg_seen[regno] = 1;
      new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
                                                   GEN_INT (unique_id++));
      return;
    }

  /* This is not the first set.  If the new value is not related to the
     old value, forget the base value.  Note that the following code is
     not detected:
     extern int x, y;  int *p = &x;  p += (&y-&x);
     ANSI C does not allow computing the difference of addresses
     of distinct top level objects.  */
  if (new_reg_base_value[regno])
    switch (GET_CODE (src))
      {
      case LO_SUM:
      case MINUS:
        if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
          new_reg_base_value[regno] = 0;
        break;
      case PLUS:
        /* If the value we add in the PLUS is also a valid base value,
           this might be the actual base value, and the original value
           an index.  */
        {
          rtx other = NULL_RTX;

          if (XEXP (src, 0) == dest)
            other = XEXP (src, 1);
          else if (XEXP (src, 1) == dest)
            other = XEXP (src, 0);

          if (! other || find_base_value (other))
            new_reg_base_value[regno] = 0;
          break;
        }
      case AND:
        if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
          new_reg_base_value[regno] = 0;
        break;
      default:
        new_reg_base_value[regno] = 0;
        break;
      }
  /* If this is the first set of a register, record the value.  */
  else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
           && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
    new_reg_base_value[regno] = find_base_value (src);

  reg_seen[regno] = 1;
}

/* Called from loop optimization when a new pseudo-register is
   created.  It indicates that REGNO is being set to VAL.  If INVARIANT
   is true then this value also describes an invariant relationship
   which can be used to deduce that two registers with unknown values
   are different.  */

void
record_base_value (regno, val, invariant)
     unsigned int regno;
     rtx val;
     int invariant;
{
  if (regno >= reg_base_value_size)
    return;

  if (invariant && alias_invariant)
    alias_invariant[regno] = val;

  if (GET_CODE (val) == REG)
    {
      if (REGNO (val) < reg_base_value_size)
        reg_base_value[regno] = reg_base_value[REGNO (val)];

      return;
    }

  reg_base_value[regno] = find_base_value (val);
}

/* Clear alias info for a register.  This is used if an RTL transformation
   changes the value of a register.  This is used in flow by AUTO_INC_DEC
   optimizations.  We don't need to clear reg_base_value, since flow only
   changes the offset.  */

void
clear_reg_alias_info (reg)
     rtx reg;
{
  unsigned int regno = REGNO (reg);

  if (regno < reg_known_value_size && regno >= FIRST_PSEUDO_REGISTER)
    reg_known_value[regno] = reg;
}

/* Returns a canonical version of X, from the point of view of alias
   analysis.  (For example, if X is a MEM whose address is a register,
   and the register has a known value (say a SYMBOL_REF), then a MEM
   whose address is the SYMBOL_REF is returned.)  */

rtx
canon_rtx (x)
     rtx x;
{
  /* Recursively look for equivalences.  */
  if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && REGNO (x) < reg_known_value_size)
    return reg_known_value[REGNO (x)] == x
      ? x : canon_rtx (reg_known_value[REGNO (x)]);
  else if (GET_CODE (x) == PLUS)
    {
      rtx x0 = canon_rtx (XEXP (x, 0));
      rtx x1 = canon_rtx (XEXP (x, 1));

      if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
        {
          if (GET_CODE (x0) == CONST_INT)
            return plus_constant (x1, INTVAL (x0));
          else if (GET_CODE (x1) == CONST_INT)
            return plus_constant (x0, INTVAL (x1));
          return gen_rtx_PLUS (GET_MODE (x), x0, x1);
        }
    }

  /* This gives us much better alias analysis when called from
     the loop optimizer.  Note we want to leave the original
     MEM alone, but need to return the canonicalized MEM with
     all the flags with their original values.  */
  else if (GET_CODE (x) == MEM)
    x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));

  return x;
}

/* Return 1 if X and Y are identical-looking rtx's.

   We use the data in reg_known_value above to see if two registers with
   different numbers are, in fact, equivalent.  */
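/* For instance (a sketch): if reg_known_value maps both (reg 100) and
   (reg 101) to the same SYMBOL_REF, the two registers compare equal
   here even though their register numbers differ.  */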

static int
rtx_equal_for_memref_p (x, y)
     rtx x, y;
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0 && y == 0)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  x = canon_rtx (x);
  y = canon_rtx (y);

  if (x == y)
    return 1;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Some RTL can be compared without a recursive examination.  */
  switch (code)
    {
    case VALUE:
      return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);

    case REG:
      return REGNO (x) == REGNO (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CONST_INT:
    case CONST_DOUBLE:
      /* There's no need to compare the contents of CONST_DOUBLEs or
         CONST_INTs because pointer equality is a good enough
         comparison for these nodes.  */
      return 0;

    case ADDRESSOF:
      return (XINT (x, 1) == XINT (y, 1)
              && rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0)));

    default:
      break;
    }

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
             && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
            || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
                && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
            && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.

     Limit cases to types which actually appear in addresses.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;

          /* And the corresponding elements must match.  */
          for (j = 0; j < XVECLEN (x, i); j++)
            if (rtx_equal_for_memref_p (XVECEXP (x, i, j),
                                        XVECEXP (y, i, j)) == 0)
              return 0;
          break;

        case 'e':
          if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
            return 0;
          break;

        /* This can happen for asm operands.  */
        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        /* This can happen for an asm which clobbers memory.  */
        case '0':
          break;

        /* It is believed that rtx's at this level will never
           contain anything but integers and other rtx's,
           except for within LABEL_REFs and SYMBOL_REFs.  */
        default:
          abort ();
        }
    }
  return 1;
}

/* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
   X and return it, or return 0 if none found.  */

static rtx
find_symbolic_term (x)
     rtx x;
{
  int i;
  enum rtx_code code;
  const char *fmt;

  code = GET_CODE (x);
  if (code == SYMBOL_REF || code == LABEL_REF)
    return x;
  if (GET_RTX_CLASS (code) == 'o')
    return 0;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      rtx t;

      if (fmt[i] == 'e')
        {
          t = find_symbolic_term (XEXP (x, i));
          if (t != 0)
            return t;
        }
      else if (fmt[i] == 'E')
        break;
    }
  return 0;
}

static rtx
find_base_term (x)
     rtx x;
{
  cselib_val *val;
  struct elt_loc_list *l;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  x = FIND_BASE_TERM (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
      return REG_BASE_VALUE (x);

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
        return 0;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_term (XEXP (x, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* Used for Alpha/NT pointers */
      {
        rtx temp = find_base_term (XEXP (x, 0));

#ifdef POINTERS_EXTEND_UNSIGNED
        if (temp != 0 && CONSTANT_P (temp) && GET_MODE (temp) != Pmode)
          temp = convert_memory_address (Pmode, temp);
#endif

        return temp;
      }

    case VALUE:
      val = CSELIB_VAL_PTR (x);
      for (l = val->locs; l; l = l->next)
        if ((x = find_base_term (l->loc)) != 0)
          return x;
      return 0;

    case CONST:
      x = XEXP (x, 0);
      if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
        return 0;
      /* Fall through.  */
    case LO_SUM:
    case PLUS:
    case MINUS:
      {
        rtx tmp1 = XEXP (x, 0);
        rtx tmp2 = XEXP (x, 1);

        /* This is a little bit tricky since we have to determine which of
           the two operands represents the real base address.  Otherwise this
           routine may return the index register instead of the base register.

           That may cause us to believe no aliasing was possible, when in
           fact aliasing is possible.

           We use a few simple tests to guess the base register.  Additional
           tests can certainly be added.  For example, if one of the operands
           is a shift or multiply, then it must be the index register and the
           other operand is the base register.  */

        if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
          return find_base_term (tmp2);

        /* If either operand is known to be a pointer, then use it
           to determine the base term.  */
        if (REG_P (tmp1) && REG_POINTER (tmp1))
          return find_base_term (tmp1);

        if (REG_P (tmp2) && REG_POINTER (tmp2))
          return find_base_term (tmp2);

        /* Neither operand was known to be a pointer.  Go ahead and find the
           base term for both operands.  */
        tmp1 = find_base_term (tmp1);
        tmp2 = find_base_term (tmp2);

        /* If either base term is a named object or a special address
           (like an argument or stack reference), then use it for the
           base term.  */
        if (tmp1 != 0
            && (GET_CODE (tmp1) == SYMBOL_REF
                || GET_CODE (tmp1) == LABEL_REF
                || (GET_CODE (tmp1) == ADDRESS
                    && GET_MODE (tmp1) != VOIDmode)))
          return tmp1;

        if (tmp2 != 0
            && (GET_CODE (tmp2) == SYMBOL_REF
                || GET_CODE (tmp2) == LABEL_REF
                || (GET_CODE (tmp2) == ADDRESS
                    && GET_MODE (tmp2) != VOIDmode)))
          return tmp2;

        /* We could not determine which of the two operands was the
           base register and which was the index.  So we can determine
           nothing from the base alias check.  */
        return 0;
      }

    case AND:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) != 0)
        return find_base_term (XEXP (x, 0));
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    case ADDRESSOF:
      return REG_BASE_VALUE (frame_pointer_rtx);

    default:
      return 0;
    }
}

/* Return 0 if the addresses X and Y are known to point to different
   objects, 1 if they might be pointers to the same object.  */

static int
base_alias_check (x, y, x_mode, y_mode)
     rtx x, y;
     enum machine_mode x_mode, y_mode;
{
  rtx x_base = find_base_term (x);
  rtx y_base = find_base_term (y);

  /* If the address itself has no known base see if a known equivalent
     value has one.  If either address still has no known base, nothing
     is known about aliasing.  */
  if (x_base == 0)
    {
      rtx x_c;

      if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
        return 1;

      x_base = find_base_term (x_c);
      if (x_base == 0)
        return 1;
    }

  if (y_base == 0)
    {
      rtx y_c;
      if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
        return 1;

      y_base = find_base_term (y_c);
      if (y_base == 0)
        return 1;
    }

  /* If the base addresses are equal nothing is known about aliasing.  */
  if (rtx_equal_p (x_base, y_base))
    return 1;

  /* The base addresses of the read and write are different expressions.
     If they are both symbols and they are not accessed via AND, there is
     no conflict.  We can bring knowledge of object alignment into play
     here.  For example, on alpha, "char a, b;" can alias one another,
     though "char a; long b;" cannot.  */
  if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
    {
      if (GET_CODE (x) == AND && GET_CODE (y) == AND)
        return 1;
      if (GET_CODE (x) == AND
          && (GET_CODE (XEXP (x, 1)) != CONST_INT
              || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
        return 1;
      if (GET_CODE (y) == AND
          && (GET_CODE (XEXP (y, 1)) != CONST_INT
              || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
        return 1;
      /* Differing symbols never alias.  */
      return 0;
    }

  /* If one address is a stack reference there can be no alias:
     stack references using different base registers do not alias,
     a stack reference can not alias a parameter, and a stack reference
     can not alias a global.  */
  if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
      || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
    return 0;

  if (! flag_argument_noalias)
    return 1;

  if (flag_argument_noalias > 1)
    return 0;

  /* Weak noalias assertion (arguments are distinct, but may match globals).  */
  return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
}

/* Convert the address X into something we can use.  This is done by returning
   it unchanged unless it is a value; in the latter case we call cselib to get
   a more useful rtx.  */

rtx
get_addr (x)
     rtx x;
{
  cselib_val *v;
  struct elt_loc_list *l;

  if (GET_CODE (x) != VALUE)
    return x;
  v = CSELIB_VAL_PTR (x);
  for (l = v->locs; l; l = l->next)
    if (CONSTANT_P (l->loc))
      return l->loc;
  for (l = v->locs; l; l = l->next)
    if (GET_CODE (l->loc) != REG && GET_CODE (l->loc) != MEM)
      return l->loc;
  if (v->locs)
    return v->locs->loc;
  return x;
}

/* Return the address of the (N_REFS + 1)th memory reference to ADDR
   where SIZE is the size in bytes of the memory reference.  If ADDR
   is not modified by the memory reference then ADDR is returned.  */
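/* E.g. (a worked sketch): for ADDR (post_inc (reg 100)) with SIZE 4,
   the first reference (N_REFS 0) is at (reg 100) itself, while the
   second (N_REFS 1) is at (plus (reg 100) (const_int 4)); for
   (pre_inc (reg 100)) even the first reference is already at
   (plus (reg 100) (const_int 4)).  */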

rtx
addr_side_effect_eval (addr, size, n_refs)
     rtx addr;
     int size;
     int n_refs;
{
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case PRE_INC:
      offset = (n_refs + 1) * size;
      break;
    case PRE_DEC:
      offset = -(n_refs + 1) * size;
      break;
    case POST_INC:
      offset = n_refs * size;
      break;
    case POST_DEC:
      offset = -n_refs * size;
      break;

    default:
      return addr;
    }

  if (offset)
    addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0), GEN_INT (offset));
  else
    addr = XEXP (addr, 0);

  return addr;
}

/* Return nonzero if X and Y (memory addresses) could reference the
   same location in memory.  C is an offset accumulator.  When
   C is nonzero, we are testing aliases between X and Y + C.
   XSIZE is the size in bytes of the X reference,
   similarly YSIZE is the size in bytes for Y.

   If XSIZE or YSIZE is zero, we do not know the amount of memory being
   referenced (the reference was BLKmode), so make the most pessimistic
   assumptions.

   If XSIZE or YSIZE is negative, we may access memory outside the object
   being referenced as a side effect.  This can happen when using AND to
   align memory references, as is done on the Alpha.

   Nice to notice that varying addresses cannot conflict with fp if no
   local variables had their addresses taken, but that's too hard now.  */
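/* A worked sketch: memrefs_conflict_p (4, p, 4, p, 2) asks whether a
   4-byte reference at P overlaps a 4-byte reference at P+2; the bases
   match and XSIZE (4) > C (2), so the result is 1.  With C == 4 the
   references are merely adjacent and the result is 0.  */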

static int
memrefs_conflict_p (xsize, x, ysize, y, c)
     rtx x, y;
     int xsize, ysize;
     HOST_WIDE_INT c;
{
  if (GET_CODE (x) == VALUE)
    x = get_addr (x);
  if (GET_CODE (y) == VALUE)
    y = get_addr (y);
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);
  else if (GET_CODE (x) == LO_SUM)
    x = XEXP (x, 1);
  else
    x = canon_rtx (addr_side_effect_eval (x, xsize, 0));
  if (GET_CODE (y) == HIGH)
    y = XEXP (y, 0);
  else if (GET_CODE (y) == LO_SUM)
    y = XEXP (y, 1);
  else
    y = canon_rtx (addr_side_effect_eval (y, ysize, 0));

  if (rtx_equal_for_memref_p (x, y))
    {
      if (xsize <= 0 || ysize <= 0)
        return 1;
      if (c >= 0 && xsize > c)
        return 1;
      if (c < 0 && ysize+c > 0)
        return 1;
      return 0;
    }

  /* This code used to check for conflicts involving stack references and
     globals but the base address alias code now handles these cases.  */

  if (GET_CODE (x) == PLUS)
    {
      /* The fact that X is canonicalized means that this
         PLUS rtx is canonicalized.  */
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      if (GET_CODE (y) == PLUS)
        {
          /* The fact that Y is canonicalized means that this
             PLUS rtx is canonicalized.  */
          rtx y0 = XEXP (y, 0);
          rtx y1 = XEXP (y, 1);

          if (rtx_equal_for_memref_p (x1, y1))
            return memrefs_conflict_p (xsize, x0, ysize, y0, c);
          if (rtx_equal_for_memref_p (x0, y0))
            return memrefs_conflict_p (xsize, x1, ysize, y1, c);
          if (GET_CODE (x1) == CONST_INT)
            {
              if (GET_CODE (y1) == CONST_INT)
                return memrefs_conflict_p (xsize, x0, ysize, y0,
                                           c - INTVAL (x1) + INTVAL (y1));
              else
                return memrefs_conflict_p (xsize, x0, ysize, y,
                                           c - INTVAL (x1));
            }
          else if (GET_CODE (y1) == CONST_INT)
            return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));

          return 1;
        }
      else if (GET_CODE (x1) == CONST_INT)
        return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
    }
  else if (GET_CODE (y) == PLUS)
    {
      /* The fact that Y is canonicalized means that this
         PLUS rtx is canonicalized.  */
      rtx y0 = XEXP (y, 0);
      rtx y1 = XEXP (y, 1);

      if (GET_CODE (y1) == CONST_INT)
        return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
      else
        return 1;
    }

  if (GET_CODE (x) == GET_CODE (y))
    switch (GET_CODE (x))
      {
      case MULT:
        {
          /* Handle cases where we expect the second operands to be the
             same, and check only whether the first operand would conflict
             or not.  */
          rtx x0, y0;
          rtx x1 = canon_rtx (XEXP (x, 1));
          rtx y1 = canon_rtx (XEXP (y, 1));
          if (! rtx_equal_for_memref_p (x1, y1))
            return 1;
          x0 = canon_rtx (XEXP (x, 0));
          y0 = canon_rtx (XEXP (y, 0));
          if (rtx_equal_for_memref_p (x0, y0))
            return (xsize == 0 || ysize == 0
                    || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));

          /* Can't properly adjust our sizes.  */
          if (GET_CODE (x1) != CONST_INT)
            return 1;
          xsize /= INTVAL (x1);
          ysize /= INTVAL (x1);
          c /= INTVAL (x1);
          return memrefs_conflict_p (xsize, x0, ysize, y0, c);
        }

      case REG:
        /* Are these registers known not to be equal?  */
        if (alias_invariant)
          {
            unsigned int r_x = REGNO (x), r_y = REGNO (y);
            rtx i_x, i_y;	/* invariant relationships of X and Y */

            i_x = r_x >= reg_base_value_size ? 0 : alias_invariant[r_x];
            i_y = r_y >= reg_base_value_size ? 0 : alias_invariant[r_y];

            if (i_x == 0 && i_y == 0)
              break;

            if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
                                      ysize, i_y ? i_y : y, c))
              return 0;
          }
        break;

      default:
        break;
      }

  /* Treat an access through an AND (e.g. a subword access on an Alpha)
     as an access with indeterminate size.  Assume that references
     besides AND are aligned, so if the size of the other reference is
     at least as large as the alignment, assume no other overlap.  */
  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
        xsize = -1;
      return memrefs_conflict_p (xsize, XEXP (x, 0), ysize, y, c);
    }
  if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
    {
      /* ??? If we are indexing far enough into the array/structure, we
         may yet be able to determine that we can't overlap.  But we
         also need to know that we are far enough from the end not to
         overlap a following reference, so we do nothing with that for now.  */
      if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
        ysize = -1;
      return memrefs_conflict_p (xsize, x, ysize, XEXP (y, 0), c);
    }

  if (GET_CODE (x) == ADDRESSOF)
    {
      if (y == frame_pointer_rtx
          || GET_CODE (y) == ADDRESSOF)
        return xsize <= 0 || ysize <= 0;
    }
  if (GET_CODE (y) == ADDRESSOF)
    {
      if (x == frame_pointer_rtx)
        return xsize <= 0 || ysize <= 0;
    }

  if (CONSTANT_P (x))
    {
      if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
        {
          c += (INTVAL (y) - INTVAL (x));
          return (xsize <= 0 || ysize <= 0
                  || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
        }

      if (GET_CODE (x) == CONST)
        {
          if (GET_CODE (y) == CONST)
            return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
                                       ysize, canon_rtx (XEXP (y, 0)), c);
          else
            return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
                                       ysize, y, c);
        }
      if (GET_CODE (y) == CONST)
        return memrefs_conflict_p (xsize, x, ysize,
                                   canon_rtx (XEXP (y, 0)), c);

      if (CONSTANT_P (y))
        return (xsize <= 0 || ysize <= 0
                || (rtx_equal_for_memref_p (x, y)
                    && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));

      return 1;
    }
  return 1;
}

/* Functions to compute memory dependencies.

   Since we process the insns in execution order, we can build tables
   to keep track of what registers are fixed (and not aliased), what registers
   are varying in known ways, and what registers are varying in unknown
   ways.

   If both memory references are volatile, then there must always be a
   dependence between the two references, since their order can not be
   changed.  A volatile and non-volatile reference can be interchanged
   though.

   A MEM_IN_STRUCT reference at a non-AND varying address can never
   conflict with a non-MEM_IN_STRUCT reference at a fixed address.  We
   also must allow AND addresses, because they may generate accesses
   outside the object being referenced.  This is used to generate
   aligned addresses from unaligned addresses, for instance, the alpha
   storeqi_unaligned pattern.  */

/* Read dependence: X is read after read in MEM takes place.  There can
   only be a dependence here if both reads are volatile.  */

int
read_dependence (mem, x)
     rtx mem;
     rtx x;
{
  return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
}

/* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
   MEM2 is a reference to a structure at a varying address, or returns
   MEM2 if vice versa.  Otherwise, returns NULL_RTX.  If a non-NULL
   value is returned MEM1 and MEM2 can never alias.  VARIES_P is used
   to decide whether or not an address may vary; it should return
   nonzero whenever variation is possible.
   MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2.  */

static rtx
fixed_scalar_and_varying_struct_p (mem1, mem2, mem1_addr, mem2_addr, varies_p)
     rtx mem1, mem2;
     rtx mem1_addr, mem2_addr;
     int (*varies_p) PARAMS ((rtx, int));
{
  if (! flag_strict_aliasing)
    return NULL_RTX;

  if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
      && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
    /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
       varying address.  */
    return mem1;

  if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
      && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
    /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
       varying address.  */
    return mem2;

  return NULL_RTX;
}

/* Returns nonzero if something about the mode or address format MEM1
   indicates that it might well alias *anything*.  */

static int
aliases_everything_p (mem)
     rtx mem;
{
  if (GET_CODE (XEXP (mem, 0)) == AND)
    /* If the address is an AND, it's very hard to know what it is
       actually pointing at.  */
    return 1;

  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */
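/* E.g. (a sketch): for X == `s.a.i' and Y == `t.b.j', if `a' and `b'
   are distinct fields of the same RECORD_TYPE, the references cannot
   overlap no matter which objects `s' and `t' denote.  */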
1813
1814 static bool
1815 nonoverlapping_component_refs_p (x, y)
1816 tree x, y;
1817 {
1818 tree fieldx, fieldy, typex, typey, orig_y;
1819
1820 do
1821 {
1822 /* The comparison has to be done at a common type, since we don't
1823 know how the inheritance hierarchy works. */
1824 orig_y = y;
1825 do
1826 {
1827 fieldx = TREE_OPERAND (x, 1);
1828 typex = DECL_FIELD_CONTEXT (fieldx);
1829
1830 y = orig_y;
1831 do
1832 {
1833 fieldy = TREE_OPERAND (y, 1);
1834 typey = DECL_FIELD_CONTEXT (fieldy);
1835
1836 if (typex == typey)
1837 goto found;
1838
1839 y = TREE_OPERAND (y, 0);
1840 }
1841 while (y && TREE_CODE (y) == COMPONENT_REF);
1842
1843 x = TREE_OPERAND (x, 0);
1844 }
1845 while (x && TREE_CODE (x) == COMPONENT_REF);
1846
1847 /* Never found a common type. */
1848 return false;
1849
1850 found:
1851 /* If we're left with accessing different fields of a structure,
1852 then no overlap. */
1853 if (TREE_CODE (typex) == RECORD_TYPE
1854 && fieldx != fieldy)
1855 return true;
1856
1857 /* The comparison on the current field failed. If we're accessing
1858 a deeply nested structure, look at the next outer level. */
1859 x = TREE_OPERAND (x, 0);
1860 y = TREE_OPERAND (y, 0);
1861 }
1862 while (x && y
1863 && TREE_CODE (x) == COMPONENT_REF
1864 && TREE_CODE (y) == COMPONENT_REF);
1865
1866 return false;
1867 }
1868
1869 /* Look at the bottom of the COMPONENT_REF list for a DECL, and return it. */
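/* E.g. for `x.a.b' this strips the COMPONENT_REFs down to `x' and
   returns its decl; for `p->a.b' the base is an INDIRECT_REF, not a
   DECL, and NULL_TREE is returned. */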
1870
1871 static tree
1872 decl_for_component_ref (x)
1873 tree x;
1874 {
1875 do
1876 {
1877 x = TREE_OPERAND (x, 0);
1878 }
1879 while (x && TREE_CODE (x) == COMPONENT_REF);
1880
1881 return x && DECL_P (x) ? x : NULL_TREE;
1882 }
1883
1884 /* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
1885 offset of the field reference. */
1886
1887 static rtx
1888 adjust_offset_for_component_ref (x, offset)
1889 tree x;
1890 rtx offset;
1891 {
1892 HOST_WIDE_INT ioffset;
1893
1894 if (! offset)
1895 return NULL_RTX;
1896
1897 ioffset = INTVAL (offset);
1898 do
1899 {
1900 tree field = TREE_OPERAND (x, 1);
1901
1902 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
1903 return NULL_RTX;
1904 ioffset += (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
1905 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1906 / BITS_PER_UNIT));
1907
1908 x = TREE_OPERAND (x, 0);
1909 }
1910 while (x && TREE_CODE (x) == COMPONENT_REF);
1911
1912 return GEN_INT (ioffset);
1913 }
1914
1915 /* Return nonzero if we can determine the exprs corresponding to memrefs
1916 X and Y and they do not overlap. */
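/* E.g. MEMs whose MEM_EXPRs name distinct fields of one structure,
   or disjoint byte ranges of the same decl, are known not to
   overlap; a MEM lacking a MEM_EXPR tells us nothing. */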
1917
1918 static int
1919 nonoverlapping_memrefs_p (x, y)
1920 rtx x, y;
1921 {
1922 tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
1923 rtx rtlx, rtly;
1924 rtx basex, basey;
1925 rtx moffsetx, moffsety;
1926 HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;
1927
1928 /* Unless both have exprs, we can't tell anything. */
1929 if (exprx == 0 || expry == 0)
1930 return 0;
1931
1932 /* If both are field references, we may be able to determine something. */
1933 if (TREE_CODE (exprx) == COMPONENT_REF
1934 && TREE_CODE (expry) == COMPONENT_REF
1935 && nonoverlapping_component_refs_p (exprx, expry))
1936 return 1;
1937
1938 /* If the field reference test failed, look at the DECLs involved. */
1939 moffsetx = MEM_OFFSET (x);
1940 if (TREE_CODE (exprx) == COMPONENT_REF)
1941 {
1942 tree t = decl_for_component_ref (exprx);
1943 if (! t)
1944 return 0;
1945 moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
1946 exprx = t;
1947 }
1948 moffsety = MEM_OFFSET (y);
1949 if (TREE_CODE (expry) == COMPONENT_REF)
1950 {
1951 tree t = decl_for_component_ref (expry);
1952 if (! t)
1953 return 0;
1954 moffsety = adjust_offset_for_component_ref (expry, moffsety);
1955 expry = t;
1956 }
1957
1958 if (! DECL_P (exprx) || ! DECL_P (expry))
1959 return 0;
1960
1961 rtlx = DECL_RTL (exprx);
1962 rtly = DECL_RTL (expry);
1963
1964 /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
1965 can't overlap unless they are the same, since we never reuse the part
1966 of the stack frame used for locals for spilled pseudos. */
1967 if ((GET_CODE (rtlx) != MEM || GET_CODE (rtly) != MEM)
1968 && ! rtx_equal_p (rtlx, rtly))
1969 return 1;
1970
1971 /* Get the base and offsets of both decls. If either is a register, we
1972 know both are and are the same, so use that as the base. The only
1973 way we can rule out overlap is by deducing that they are nonoverlapping
1974 pieces of that decl, which is very rare. */
1975 basex = GET_CODE (rtlx) == MEM ? XEXP (rtlx, 0) : rtlx;
1976 if (GET_CODE (basex) == PLUS && GET_CODE (XEXP (basex, 1)) == CONST_INT)
1977 offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);
1978
1979 basey = GET_CODE (rtly) == MEM ? XEXP (rtly, 0) : rtly;
1980 if (GET_CODE (basey) == PLUS && GET_CODE (XEXP (basey, 1)) == CONST_INT)
1981 offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);
1982
1983 /* If the bases are different, we know they do not overlap if both
1984 are constants or if one is a constant and the other a pointer into the
1985 stack frame. Otherwise a different base means we can't tell if they
1986 overlap or not. */
1987 if (! rtx_equal_p (basex, basey))
1988 return ((CONSTANT_P (basex) && CONSTANT_P (basey))
1989 || (CONSTANT_P (basex) && REG_P (basey)
1990 && REGNO_PTR_FRAME_P (REGNO (basey)))
1991 || (CONSTANT_P (basey) && REG_P (basex)
1992 && REGNO_PTR_FRAME_P (REGNO (basex))));
1993
1994 sizex = (GET_CODE (rtlx) != MEM ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
1995 : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
1996 : -1);
1997 sizey = (GET_CODE (rtly) != MEM ? (int) GET_MODE_SIZE (GET_MODE (rtly))
1998 : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly)) :
1999 -1);
2000
2001 /* If we have an offset for either memref, use it to update the values
2002 computed above. */
2003 if (moffsetx)
2004 offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
2005 if (moffsety)
2006 offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);
2007
2008 /* If a memref has both a size and an offset, we can use the smaller size.
2009 We can't do this if the offset isn't known because we must view this
2010 memref as being anywhere inside the DECL's MEM. */
2011 if (MEM_SIZE (x) && moffsetx)
2012 sizex = INTVAL (MEM_SIZE (x));
2013 if (MEM_SIZE (y) && moffsety)
2014 sizey = INTVAL (MEM_SIZE (y));
2015
2016 /* Put the values of the memref with the lower offset in X's values. */
2017 if (offsetx > offsety)
2018 {
2019 tem = offsetx, offsetx = offsety, offsety = tem;
2020 tem = sizex, sizex = sizey, sizey = tem;
2021 }
2022
2023 /* If we don't know the size of the lower-offset value, we can't tell
2024 if they conflict. Otherwise, we do the test. */
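/* For instance (made-up numbers): if X covers bytes [0, 8) of the
   decl and Y covers [8, 12), then offsety >= offsetx + sizex and
   the references are disjoint. */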
2025 return sizex >= 0 && offsety >= offsetx + sizex;
2026 }
2027
2028 /* True dependence: X is read after store in MEM takes place. */
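/* For example, in a source-level sequence like

      *p = 1;   store through MEM
      t = *q;   later read X

   X truly depends on the store whenever P and Q might point at the
   same object, so the two cannot be reordered. */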
2029
2030 int
2031 true_dependence (mem, mem_mode, x, varies)
2032 rtx mem;
2033 enum machine_mode mem_mode;
2034 rtx x;
2035 int (*varies) PARAMS ((rtx, int));
2036 {
2037 rtx x_addr, mem_addr;
2038 rtx base;
2039
2040 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2041 return 1;
2042
2043 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2044 This is used in epilogue deallocation functions. */
2045 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2046 return 1;
2047 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2048 return 1;
2049
2050 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2051 return 0;
2052
2053 /* Unchanging memory can't conflict with non-unchanging memory.
2054 A non-unchanging read can conflict with a non-unchanging write.
2055 An unchanging read can conflict with an unchanging write since
2056 there may be a single store to this address to initialize it.
2057 Note that an unchanging store can conflict with a non-unchanging read
2058 since we have to make conservative assumptions when we have a
2059 record with readonly fields and we are copying the whole thing.
2060 Just fall through to the code below to resolve potential conflicts.
2061 This won't handle all cases optimally, but the possible performance
2062 loss should be negligible. */
2063 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
2064 return 0;
2065
2066 if (nonoverlapping_memrefs_p (mem, x))
2067 return 0;
2068
2069 if (mem_mode == VOIDmode)
2070 mem_mode = GET_MODE (mem);
2071
2072 x_addr = get_addr (XEXP (x, 0));
2073 mem_addr = get_addr (XEXP (mem, 0));
2074
2075 base = find_base_term (x_addr);
2076 if (base && (GET_CODE (base) == LABEL_REF
2077 || (GET_CODE (base) == SYMBOL_REF
2078 && CONSTANT_POOL_ADDRESS_P (base))))
2079 return 0;
2080
2081 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2082 return 0;
2083
2084 x_addr = canon_rtx (x_addr);
2085 mem_addr = canon_rtx (mem_addr);
2086
2087 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2088 SIZE_FOR_MODE (x), x_addr, 0))
2089 return 0;
2090
2091 if (aliases_everything_p (x))
2092 return 1;
2093
2094 /* We cannot use aliases_everything_p to test MEM, since we must look
2095 at MEM_MODE, rather than GET_MODE (MEM). */
2096 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2097 return 1;
2098
2099 /* In true_dependence we also allow BLKmode to alias anything. Why
2100 don't we do this in anti_dependence and output_dependence? */
2101 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2102 return 1;
2103
2104 return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2105 varies);
2106 }
2107
2108 /* Canonical true dependence: X is read after store in MEM takes place.
2109 Variant of true_dependence which assumes MEM has already been
2110 canonicalized (hence we no longer do that here).
2111 The mem_addr argument has been added, since true_dependence computed
2112 this value prior to canonicalizing. */
2113
2114 int
2115 canon_true_dependence (mem, mem_mode, mem_addr, x, varies)
2116 rtx mem, mem_addr, x;
2117 enum machine_mode mem_mode;
2118 int (*varies) PARAMS ((rtx, int));
2119 {
2120 rtx x_addr;
2121
2122 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2123 return 1;
2124
2125 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2126 This is used in epilogue deallocation functions. */
2127 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2128 return 1;
2129 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2130 return 1;
2131
2132 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2133 return 0;
2134
2135 /* If X is an unchanging read, then it can't possibly conflict with any
2136 non-unchanging store. It may conflict with an unchanging write though,
2137 because there may be a single store to this address to initialize it.
2138 Just fall through to the code below to resolve the case where we have
2139 both an unchanging read and an unchanging write. This won't handle all
2140 cases optimally, but the possible performance loss should be
2141 negligible. */
2142 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
2143 return 0;
2144
2145 if (nonoverlapping_memrefs_p (x, mem))
2146 return 0;
2147
2148 x_addr = get_addr (XEXP (x, 0));
2149
2150 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2151 return 0;
2152
2153 x_addr = canon_rtx (x_addr);
2154 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2155 SIZE_FOR_MODE (x), x_addr, 0))
2156 return 0;
2157
2158 if (aliases_everything_p (x))
2159 return 1;
2160
2161 /* We cannot use aliases_everything_p to test MEM, since we must look
2162 at MEM_MODE, rather than GET_MODE (MEM). */
2163 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2164 return 1;
2165
2166 /* In true_dependence we also allow BLKmode to alias anything. Why
2167 don't we do this in anti_dependence and output_dependence? */
2168 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2169 return 1;
2170
2171 return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2172 varies);
2173 }
2174
2175 /* Returns non-zero if a write to X might alias a previous read from
2176 (or, if WRITEP is non-zero, a write to) MEM. */
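/* E.g. `t = *p; *q = 1;' is an anti dependence (WRITEP == 0), and
   `*p = 1; *q = 2;' an output dependence (WRITEP != 0); either
   forbids reordering when P and Q may alias. */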
2177
2178 static int
2179 write_dependence_p (mem, x, writep)
2180 rtx mem;
2181 rtx x;
2182 int writep;
2183 {
2184 rtx x_addr, mem_addr;
2185 rtx fixed_scalar;
2186 rtx base;
2187
2188 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2189 return 1;
2190
2191 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2192 This is used in epilogue deallocation functions. */
2193 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2194 return 1;
2195 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2196 return 1;
2197
2198 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2199 return 0;
2200
2201 /* Unchanging memory can't conflict with non-unchanging memory. */
2202 if (RTX_UNCHANGING_P (x) != RTX_UNCHANGING_P (mem))
2203 return 0;
2204
2205 /* If MEM is an unchanging read, then it can't possibly conflict with
2206 the store to X, because there is at most one store to MEM, and it must
2207 have occurred somewhere before MEM. */
2208 if (! writep && RTX_UNCHANGING_P (mem))
2209 return 0;
2210
2211 if (nonoverlapping_memrefs_p (x, mem))
2212 return 0;
2213
2214 x_addr = get_addr (XEXP (x, 0));
2215 mem_addr = get_addr (XEXP (mem, 0));
2216
2217 if (! writep)
2218 {
2219 base = find_base_term (mem_addr);
2220 if (base && (GET_CODE (base) == LABEL_REF
2221 || (GET_CODE (base) == SYMBOL_REF
2222 && CONSTANT_POOL_ADDRESS_P (base))))
2223 return 0;
2224 }
2225
2226 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
2227 GET_MODE (mem)))
2228 return 0;
2229
2230 x_addr = canon_rtx (x_addr);
2231 mem_addr = canon_rtx (mem_addr);
2232
2233 if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
2234 SIZE_FOR_MODE (x), x_addr, 0))
2235 return 0;
2236
2237 fixed_scalar
2238 = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2239 rtx_addr_varies_p);
2240
2241 return (!(fixed_scalar == mem && !aliases_everything_p (x))
2242 && !(fixed_scalar == x && !aliases_everything_p (mem)));
2243 }
2244
2245 /* Anti dependence: X is written after read in MEM takes place. */
2246
2247 int
2248 anti_dependence (mem, x)
2249 rtx mem;
2250 rtx x;
2251 {
2252 return write_dependence_p (mem, x, /*writep=*/0);
2253 }
2254
2255 /* Output dependence: X is written after store in MEM takes place. */
2256
2257 int
2258 output_dependence (mem, x)
2259 rtx mem;
2260 rtx x;
2261 {
2262 return write_dependence_p (mem, x, /*writep=*/1);
2263 }
2264 \f
2265 /* A subroutine of nonlocal_mentioned_p, returns 1 if *LOC mentions
2266 something which is not local to the function and is not constant. */
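/* Nonlocal here covers, e.g., a global hard register, a SYMBOL_REF
   outside the function's constant pool, a volatile MEM, or a MEM
   whose base is not the stack, frame, or argument pointer. */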
2267
2268 static int
2269 nonlocal_mentioned_p_1 (loc, data)
2270 rtx *loc;
2271 void *data ATTRIBUTE_UNUSED;
2272 {
2273 rtx x = *loc;
2274 rtx base;
2275 int regno;
2276
2277 if (! x)
2278 return 0;
2279
2280 switch (GET_CODE (x))
2281 {
2282 case SUBREG:
2283 if (GET_CODE (SUBREG_REG (x)) == REG)
2284 {
2285 /* Global registers are not local. */
2286 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
2287 && global_regs[subreg_regno (x)])
2288 return 1;
2289 return 0;
2290 }
2291 break;
2292
2293 case REG:
2294 regno = REGNO (x);
2295 /* Global registers are not local. */
2296 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2297 return 1;
2298 return 0;
2299
2300 case SCRATCH:
2301 case PC:
2302 case CC0:
2303 case CONST_INT:
2304 case CONST_DOUBLE:
2305 case CONST_VECTOR:
2306 case CONST:
2307 case LABEL_REF:
2308 return 0;
2309
2310 case SYMBOL_REF:
2311 /* Constants in the function's constant pool are constant. */
2312 if (CONSTANT_POOL_ADDRESS_P (x))
2313 return 0;
2314 return 1;
2315
2316 case CALL:
2317 /* Non-constant calls and recursion are not local. */
2318 return 1;
2319
2320 case MEM:
2321 /* Be overly conservative and consider any volatile memory
2322 reference as not local. */
2323 if (MEM_VOLATILE_P (x))
2324 return 1;
2325 base = find_base_term (XEXP (x, 0));
2326 if (base)
2327 {
2328 /* A Pmode ADDRESS could be a reference via the structure value
2329 address or static chain. Such memory references are nonlocal.
2330
2331 Thus, we have to examine the contents of the ADDRESS to find
2332 out if this is a local reference or not. */
2333 if (GET_CODE (base) == ADDRESS
2334 && GET_MODE (base) == Pmode
2335 && (XEXP (base, 0) == stack_pointer_rtx
2336 || XEXP (base, 0) == arg_pointer_rtx
2337 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2338 || XEXP (base, 0) == hard_frame_pointer_rtx
2339 #endif
2340 || XEXP (base, 0) == frame_pointer_rtx))
2341 return 0;
2342 /* Constants in the function's constant pool are constant. */
2343 if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
2344 return 0;
2345 }
2346 return 1;
2347
2348 case UNSPEC_VOLATILE:
2349 case ASM_INPUT:
2350 return 1;
2351
2352 case ASM_OPERANDS:
2353 if (MEM_VOLATILE_P (x))
2354 return 1;
2355
2356 /* FALLTHROUGH */
2357
2358 default:
2359 break;
2360 }
2361
2362 return 0;
2363 }
2364
2365 /* Returns non-zero if X might mention something which is not
2366 local to the function and is not constant. */
2367
2368 static int
2369 nonlocal_mentioned_p (x)
2370 rtx x;
2371 {
2372
2373 if (INSN_P (x))
2374 {
2375 if (GET_CODE (x) == CALL_INSN)
2376 {
2377 if (! CONST_OR_PURE_CALL_P (x))
2378 return 1;
2379 x = CALL_INSN_FUNCTION_USAGE (x);
2380 if (x == 0)
2381 return 0;
2382 }
2383 else
2384 x = PATTERN (x);
2385 }
2386
2387 return for_each_rtx (&x, nonlocal_mentioned_p_1, NULL);
2388 }
2389
2390 /* A subroutine of nonlocal_referenced_p, returns 1 if *LOC references
2391 something which is not local to the function and is not constant. */
2392
2393 static int
2394 nonlocal_referenced_p_1 (loc, data)
2395 rtx *loc;
2396 void *data ATTRIBUTE_UNUSED;
2397 {
2398 rtx x = *loc;
2399
2400 if (! x)
2401 return 0;
2402
2403 switch (GET_CODE (x))
2404 {
2405 case MEM:
2406 case REG:
2407 case SYMBOL_REF:
2408 case SUBREG:
2409 return nonlocal_mentioned_p (x);
2410
2411 case CALL:
2412 /* Non-constant calls and recursion are not local. */
2413 return 1;
2414
2415 case SET:
2416 if (nonlocal_mentioned_p (SET_SRC (x)))
2417 return 1;
2418
2419 if (GET_CODE (SET_DEST (x)) == MEM)
2420 return nonlocal_mentioned_p (XEXP (SET_DEST (x), 0));
2421
2422 /* If the destination is anything other than a CC0, PC,
2423 MEM, REG, or a SUBREG of a REG that occupies all of
2424 the REG, then X references nonlocal memory if it is
2425 mentioned in the destination. */
2426 if (GET_CODE (SET_DEST (x)) != CC0
2427 && GET_CODE (SET_DEST (x)) != PC
2428 && GET_CODE (SET_DEST (x)) != REG
2429 && ! (GET_CODE (SET_DEST (x)) == SUBREG
2430 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
2431 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
2432 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
2433 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2434 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
2435 return nonlocal_mentioned_p (SET_DEST (x));
2436 return 0;
2437
2438 case CLOBBER:
2439 if (GET_CODE (XEXP (x, 0)) == MEM)
2440 return nonlocal_mentioned_p (XEXP (XEXP (x, 0), 0));
2441 return 0;
2442
2443 case USE:
2444 return nonlocal_mentioned_p (XEXP (x, 0));
2445
2446 case ASM_INPUT:
2447 case UNSPEC_VOLATILE:
2448 return 1;
2449
2450 case ASM_OPERANDS:
2451 if (MEM_VOLATILE_P (x))
2452 return 1;
2453
2454 /* FALLTHROUGH */
2455
2456 default:
2457 break;
2458 }
2459
2460 return 0;
2461 }
2462
2463 /* Returns non-zero if X might reference something which is not
2464 local to the function and is not constant. */
2465
2466 static int
2467 nonlocal_referenced_p (x)
2468 rtx x;
2469 {
2470
2471 if (INSN_P (x))
2472 {
2473 if (GET_CODE (x) == CALL_INSN)
2474 {
2475 if (! CONST_OR_PURE_CALL_P (x))
2476 return 1;
2477 x = CALL_INSN_FUNCTION_USAGE (x);
2478 if (x == 0)
2479 return 0;
2480 }
2481 else
2482 x = PATTERN (x);
2483 }
2484
2485 return for_each_rtx (&x, nonlocal_referenced_p_1, NULL);
2486 }
2487
2488 /* A subroutine of nonlocal_set_p, returns 1 if *LOC sets
2489 something which is not local to the function and is not constant. */
2490
2491 static int
2492 nonlocal_set_p_1 (loc, data)
2493 rtx *loc;
2494 void *data ATTRIBUTE_UNUSED;
2495 {
2496 rtx x = *loc;
2497
2498 if (! x)
2499 return 0;
2500
2501 switch (GET_CODE (x))
2502 {
2503 case CALL:
2504 /* Non-constant calls and recursion are not local. */
2505 return 1;
2506
2507 case PRE_INC:
2508 case PRE_DEC:
2509 case POST_INC:
2510 case POST_DEC:
2511 case PRE_MODIFY:
2512 case POST_MODIFY:
2513 return nonlocal_mentioned_p (XEXP (x, 0));
2514
2515 case SET:
2516 if (nonlocal_mentioned_p (SET_DEST (x)))
2517 return 1;
2518 return nonlocal_set_p (SET_SRC (x));
2519
2520 case CLOBBER:
2521 return nonlocal_mentioned_p (XEXP (x, 0));
2522
2523 case USE:
2524 return 0;
2525
2526 case ASM_INPUT:
2527 case UNSPEC_VOLATILE:
2528 return 1;
2529
2530 case ASM_OPERANDS:
2531 if (MEM_VOLATILE_P (x))
2532 return 1;
2533
2534 /* FALLTHROUGH */
2535
2536 default:
2537 break;
2538 }
2539
2540 return 0;
2541 }
2542
2543 /* Returns non-zero if X might set something which is not
2544 local to the function and is not constant. */
2545
2546 static int
2547 nonlocal_set_p (x)
2548 rtx x;
2549 {
2550
2551 if (INSN_P (x))
2552 {
2553 if (GET_CODE (x) == CALL_INSN)
2554 {
2555 if (! CONST_OR_PURE_CALL_P (x))
2556 return 1;
2557 x = CALL_INSN_FUNCTION_USAGE (x);
2558 if (x == 0)
2559 return 0;
2560 }
2561 else
2562 x = PATTERN (x);
2563 }
2564
2565 return for_each_rtx (&x, nonlocal_set_p_1, NULL);
2566 }
2567
2568 /* Mark the function if it is constant. */
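/* Roughly: if no insn sets nonlocal state or touches volatile
   memory, the function becomes TREE_READONLY (const) when it also
   reads no nonlocal memory, and DECL_IS_PURE when it only reads it.
   A sketch of the two outcomes (hypothetical functions):

      int sq (int x) { return x * x; }          const
      int get (void) { return a_global; }       pure  */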
2569
2570 void
2571 mark_constant_function ()
2572 {
2573 rtx insn;
2574 int nonlocal_memory_referenced;
2575
2576 if (TREE_READONLY (current_function_decl)
2577 || DECL_IS_PURE (current_function_decl)
2578 || TREE_THIS_VOLATILE (current_function_decl)
2579 || TYPE_MODE (TREE_TYPE (current_function_decl)) == VOIDmode
2580 || current_function_has_nonlocal_goto
2581 || !(*targetm.binds_local_p) (current_function_decl))
2582 return;
2583
2584 /* A loop might not return, which counts as a side effect. */
2585 if (mark_dfs_back_edges ())
2586 return;
2587
2588 nonlocal_memory_referenced = 0;
2589
2590 init_alias_analysis ();
2591
2592 /* Determine if this is a constant or pure function. */
2593
2594 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2595 {
2596 if (! INSN_P (insn))
2597 continue;
2598
2599 if (nonlocal_set_p (insn) || global_reg_mentioned_p (insn)
2600 || volatile_refs_p (PATTERN (insn)))
2601 break;
2602
2603 if (! nonlocal_memory_referenced)
2604 nonlocal_memory_referenced = nonlocal_referenced_p (insn);
2605 }
2606
2607 end_alias_analysis ();
2608
2609 /* Mark the function. */
2610
2611 if (insn)
2612 ; /* The scan stopped early: the function has side effects. */
2613 else if (nonlocal_memory_referenced)
2614 DECL_IS_PURE (current_function_decl) = 1;
2615 else
2616 TREE_READONLY (current_function_decl) = 1;
2617 }
2618 \f
2619
2620 static HARD_REG_SET argument_registers;
2621
2622 void
2623 init_alias_once ()
2624 {
2625 int i;
2626
2627 #ifndef OUTGOING_REGNO
2628 #define OUTGOING_REGNO(N) N
2629 #endif
2630 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2631 /* Check whether this register can hold an incoming pointer
2632 argument. FUNCTION_ARG_REGNO_P tests outgoing register
2633 numbers, so translate if necessary due to register windows. */
2634 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
2635 && HARD_REGNO_MODE_OK (i, Pmode))
2636 SET_HARD_REG_BIT (argument_registers, i);
2637
2638 alias_sets = splay_tree_new (splay_tree_compare_ints, 0, 0);
2639 }
2640
2641 /* Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE
2642 array. */
2643
2644 void
2645 init_alias_analysis ()
2646 {
2647 int maxreg = max_reg_num ();
2648 int changed, pass;
2649 int i;
2650 unsigned int ui;
2651 rtx insn;
2652
2653 reg_known_value_size = maxreg;
2654
2655 reg_known_value
2656 = (rtx *) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (rtx))
2657 - FIRST_PSEUDO_REGISTER;
2658 reg_known_equiv_p
2659 = (char*) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (char))
2660 - FIRST_PSEUDO_REGISTER;
2661
2662 /* Overallocate reg_base_value to allow some growth during loop
2663 optimization. Loop unrolling can create a large number of
2664 registers. */
2665 reg_base_value_size = maxreg * 2;
2666 reg_base_value = (rtx *) ggc_alloc_cleared (reg_base_value_size
2667 * sizeof (rtx));
2668
2669 new_reg_base_value = (rtx *) xmalloc (reg_base_value_size * sizeof (rtx));
2670 reg_seen = (char *) xmalloc (reg_base_value_size);
2671 if (! reload_completed && flag_unroll_loops)
2672 {
2673 /* ??? Why are we realloc'ing if we're just going to zero it? */
2674 alias_invariant = (rtx *)xrealloc (alias_invariant,
2675 reg_base_value_size * sizeof (rtx));
2676 memset ((char *)alias_invariant, 0, reg_base_value_size * sizeof (rtx));
2677 }
2678
2679 /* The basic idea is that each pass through this loop will use the
2680 "constant" information from the previous pass to propagate alias
2681 information through another level of assignments.
2682
2683 This could get expensive if the assignment chains are long. Maybe
2684 we should throttle the number of iterations, possibly based on
2685 the optimization level or flag_expensive_optimizations.
2686
2687 We could propagate more information in the first pass by making use
2688 of REG_N_SETS to determine immediately that the alias information
2689 for a pseudo is "constant".
2690
2691 A program with an uninitialized variable can cause an infinite loop
2692 here. Instead of doing a full dataflow analysis to detect such problems,
2693 we just cap the number of iterations for the loop.
2694
2695 The state of the arrays for the set chain in question does not matter
2696 since the program has undefined behavior. */
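/* E.g. with a copy chain of (hypothetical) pseudos

      r101 = &obj
      r102 = r101
      r103 = r102

   each pass pushes the known base value one assignment further down
   the chain, so the iteration repeats until nothing changes or the
   MAX_ALIAS_LOOP_PASSES cap is hit. */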
2697
2698 pass = 0;
2699 do
2700 {
2701 /* Assume nothing will change this iteration of the loop. */
2702 changed = 0;
2703
2704 /* We want to assign the same IDs each iteration of this loop, so
2705 start counting from zero each time. */
2706 unique_id = 0;
2707
2708 /* We're at the start of the function each iteration through the
2709 loop, so we're copying arguments. */
2710 copying_arguments = 1;
2711
2712 /* Wipe the potential alias information clean for this pass. */
2713 memset ((char *) new_reg_base_value, 0, reg_base_value_size * sizeof (rtx));
2714
2715 /* Wipe the reg_seen array clean. */
2716 memset ((char *) reg_seen, 0, reg_base_value_size);
2717
2718 /* Mark all hard registers which may contain an address.
2719 The stack, frame and argument pointers may contain an address.
2720 An argument register which can hold a Pmode value may contain
2721 an address even if it is not in BASE_REGS.
2722
2723 The address expression is VOIDmode for an argument and
2724 Pmode for other registers. */
2725
2726 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2727 if (TEST_HARD_REG_BIT (argument_registers, i))
2728 new_reg_base_value[i] = gen_rtx_ADDRESS (VOIDmode,
2729 gen_rtx_REG (Pmode, i));
2730
2731 new_reg_base_value[STACK_POINTER_REGNUM]
2732 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
2733 new_reg_base_value[ARG_POINTER_REGNUM]
2734 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
2735 new_reg_base_value[FRAME_POINTER_REGNUM]
2736 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
2737 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2738 new_reg_base_value[HARD_FRAME_POINTER_REGNUM]
2739 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
2740 #endif
2741
2742 /* Walk the insns adding values to the new_reg_base_value array. */
2743 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2744 {
2745 if (INSN_P (insn))
2746 {
2747 rtx note, set;
2748
2749 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
2750 /* The prologue/epilogue insns are not threaded onto the
2751 insn chain until after reload has completed. Thus,
2752 there is no sense wasting time checking if INSN is in
2753 the prologue/epilogue until after reload has completed. */
2754 if (reload_completed
2755 && prologue_epilogue_contains (insn))
2756 continue;
2757 #endif
2758
2759 /* If this insn has a noalias note, process it. Otherwise,
2760 scan for sets. A simple set will have no side effects
2761 which could change the base value of any other register. */
2762
2763 if (GET_CODE (PATTERN (insn)) == SET
2764 && REG_NOTES (insn) != 0
2765 && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
2766 record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
2767 else
2768 note_stores (PATTERN (insn), record_set, NULL);
2769
2770 set = single_set (insn);
2771
2772 if (set != 0
2773 && GET_CODE (SET_DEST (set)) == REG
2774 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
2775 {
2776 unsigned int regno = REGNO (SET_DEST (set));
2777 rtx src = SET_SRC (set);
2778
2779 if (REG_NOTES (insn) != 0
2780 && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
2781 && REG_N_SETS (regno) == 1)
2782 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
2783 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
2784 && ! rtx_varies_p (XEXP (note, 0), 1)
2785 && ! reg_overlap_mentioned_p (SET_DEST (set), XEXP (note, 0)))
2786 {
2787 reg_known_value[regno] = XEXP (note, 0);
2788 reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
2789 }
2790 else if (REG_N_SETS (regno) == 1
2791 && GET_CODE (src) == PLUS
2792 && GET_CODE (XEXP (src, 0)) == REG
2793 && REGNO (XEXP (src, 0)) >= FIRST_PSEUDO_REGISTER
2794 && (reg_known_value[REGNO (XEXP (src, 0))])
2795 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2796 {
2797 rtx op0 = XEXP (src, 0);
2798 op0 = reg_known_value[REGNO (op0)];
2799 reg_known_value[regno]
2800 = plus_constant (op0, INTVAL (XEXP (src, 1)));
2801 reg_known_equiv_p[regno] = 0;
2802 }
2803 else if (REG_N_SETS (regno) == 1
2804 && ! rtx_varies_p (src, 1))
2805 {
2806 reg_known_value[regno] = src;
2807 reg_known_equiv_p[regno] = 0;
2808 }
2809 }
2810 }
2811 else if (GET_CODE (insn) == NOTE
2812 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
2813 copying_arguments = 0;
2814 }
2815
2816 /* Now propagate values from new_reg_base_value to reg_base_value. */
2817 for (ui = 0; ui < reg_base_value_size; ui++)
2818 {
2819 if (new_reg_base_value[ui]
2820 && new_reg_base_value[ui] != reg_base_value[ui]
2821 && ! rtx_equal_p (new_reg_base_value[ui], reg_base_value[ui]))
2822 {
2823 reg_base_value[ui] = new_reg_base_value[ui];
2824 changed = 1;
2825 }
2826 }
2827 }
2828 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
2829
2830 /* Fill in the remaining entries. */
2831 for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
2832 if (reg_known_value[i] == 0)
2833 reg_known_value[i] = regno_reg_rtx[i];
2834
2835 /* Simplify the reg_base_value array so that no register refers to
2836 another register, except to special registers indirectly through
2837 ADDRESS expressions.
2838
2839 In theory this loop can take as long as O(registers^2), but unless
2840 there are very long dependency chains it will run in close to linear
2841 time.
2842
2843 This loop may not be needed any longer now that the main loop does
2844 a better job at propagating alias information. */
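/* I.e. if reg_base_value[7] were (reg 5) and reg_base_value[5] an
   (address (symbol_ref ...)), entry 7 is collapsed to that ADDRESS
   directly (the register numbers are illustrative). */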
2845 pass = 0;
2846 do
2847 {
2848 changed = 0;
2849 pass++;
2850 for (ui = 0; ui < reg_base_value_size; ui++)
2851 {
2852 rtx base = reg_base_value[ui];
2853 if (base && GET_CODE (base) == REG)
2854 {
2855 unsigned int base_regno = REGNO (base);
2856 if (base_regno == ui) /* register set from itself */
2857 reg_base_value[ui] = 0;
2858 else
2859 reg_base_value[ui] = reg_base_value[base_regno];
2860 changed = 1;
2861 }
2862 }
2863 }
2864 while (changed && pass < MAX_ALIAS_LOOP_PASSES);
2865
2866 /* Clean up. */
2867 free (new_reg_base_value);
2868 new_reg_base_value = 0;
2869 free (reg_seen);
2870 reg_seen = 0;
2871 }
2872
2873 void
2874 end_alias_analysis ()
2875 {
2876 free (reg_known_value + FIRST_PSEUDO_REGISTER);
2877 reg_known_value = 0;
2878 reg_known_value_size = 0;
2879 free (reg_known_equiv_p + FIRST_PSEUDO_REGISTER);
2880 reg_known_equiv_p = 0;
2881 reg_base_value = 0;
2882 reg_base_value_size = 0;
2883 if (alias_invariant)
2884 {
2885 free (alias_invariant);
2886 alias_invariant = 0;
2887 }
2888 }
2889
2890 #include "gt-alias.h"