]>
Commit | Line | Data |
---|---|---|
9ae8ffe7 JL |
1 | /* Alias analysis for GNU C |
2 | Copyright (C) 1997 Free Software Foundation, Inc. | |
3 | Contributed by John Carr (jfc@mit.edu). | |
4 | ||
5 | This file is part of GNU CC. | |
6 | ||
7 | GNU CC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9 | the Free Software Foundation; either version 2, or (at your option) | |
10 | any later version. | |
11 | ||
12 | GNU CC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GNU CC; see the file COPYING. If not, write to | |
19 | the Free Software Foundation, 59 Temple Place - Suite 330, | |
20 | Boston, MA 02111-1307, USA. */ | |
21 | ||
22 | #include "config.h" | |
23 | #include "rtl.h" | |
24 | #include "expr.h" | |
25 | #include "regs.h" | |
26 | #include "hard-reg-set.h" | |
27 | #include "flags.h" | |
28 | ||
/* Forward declarations for static helpers defined later in this file.  */
static rtx canon_rtx			PROTO((rtx));
static int rtx_equal_for_memref_p	PROTO((rtx, rtx));
static rtx find_symbolic_term		PROTO((rtx));
static int memrefs_conflict_p		PROTO((int, rtx, int, rtx,
					       HOST_WIDE_INT));

/* Set up all info needed to perform alias analysis on memory references.  */

/* Size in bytes of the memory reference X, taken from its machine mode.  */
#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))

/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent certain special values: function arguments and
   the stack, frame, and argument pointers.  The contents of an address
   expression are not used (but they are descriptive for debugging);
   only the address and mode matter.  Pointer equality, not rtx_equal_p,
   determines whether two ADDRESS expressions refer to the same base
   address.  The mode determines whether it is a function argument or
   other special value.  */

rtx *reg_base_value;
unsigned int reg_base_value_size;	/* size of reg_base_value array */

/* The base value recorded for register X, or 0 when X's number is out
   of range of the reg_base_value table.  */
#define REG_BASE_VALUE(X) \
	(REGNO (X) < reg_base_value_size ? reg_base_value[REGNO (X)] : 0)

/* Vector indexed by N giving the initial (unchanging) value known
   for pseudo-register N.  */
rtx *reg_known_value;

/* Indicates number of valid entries in reg_known_value.  */
static int reg_known_value_size;

/* Vector recording for each reg_known_value whether it is due to a
   REG_EQUIV note.  Future passes (viz., reload) may replace the
   pseudo with the equivalent expression and so we account for the
   dependences that would be introduced if that happens.  */
/* ??? This is a problem only on the Convex.  The REG_EQUIV notes created in
   assign_parms mention the arg pointer, and there are explicit insns in the
   RTL that modify the arg pointer.  Thus we must ensure that such insns don't
   get scheduled across each other because that would invalidate the REG_EQUIV
   notes.  One could argue that the REG_EQUIV notes are wrong, but solving
   the problem in the scheduler will likely give better code, so we do it
   here.  */
char *reg_known_equiv_p;

/* True when scanning insns from the start of the rtl to the
   NOTE_INSN_FUNCTION_BEG note.  */

static int copying_arguments;
2a2c8203 JC |
/* Inside SRC, the source of a SET, find a base address.  Returns the
   base rtx (SYMBOL_REF, LABEL_REF, REG, or ADDRESS) or 0 when no base
   can be determined.  */

static rtx
find_base_value (src)
     register rtx src;
{
  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      /* A symbol or label is its own base.  */
      return src;

    case REG:
      /* At the start of a function argument registers have known base
	 values which may be lost later.  Returning an ADDRESS
	 expression here allows optimization based on argument values
	 even when the argument registers are used for other purposes.  */
      if (REGNO (src) < FIRST_PSEUDO_REGISTER && copying_arguments)
	return reg_base_value[REGNO (src)];
      return src;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
	 copying-arguments block; it is too hard to track changes
	 otherwise.  */
      if (copying_arguments
	  && (XEXP (src, 0) == arg_pointer_rtx
	      || (GET_CODE (XEXP (src, 0)) == PLUS
		  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
	return gen_rtx (ADDRESS, VOIDmode, src);
      return 0;

    case CONST:
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
	break;
      /* fall through */

    case PLUS:
    case MINUS:
      {
	rtx src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

	/* Guess which operand is the base address.

	   If the first operand is a symbol or the second operand is
	   an integer, the first operand is the base address.  Else if
	   either operand is a register marked as a pointer, it is the
	   base address.  */

	if (GET_CODE (src_1) == CONST_INT
	    || GET_CODE (src_0) == SYMBOL_REF
	    || GET_CODE (src_0) == LABEL_REF
	    || GET_CODE (src_0) == CONST)
	  return find_base_value (src_0);

	if (GET_CODE (src_0) == REG && REGNO_POINTER_FLAG (REGNO (src_0)))
	  return find_base_value (src_0);

	if (GET_CODE (src_1) == REG && REGNO_POINTER_FLAG (REGNO (src_1)))
	  return find_base_value (src_1);

	/* Neither operand looks like a base address; give up.  */
	return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* If the second operand is constant set the base
	 address to the first operand.  A zero mask would discard all
	 address bits, so it is excluded.  */
      if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
	return find_base_value (XEXP (src, 0));
      return 0;

    case HIGH:
      /* Look through the high-part wrapper.  */
      return find_base_value (XEXP (src, 0));
    }

  return 0;
}
167 | ||
/* Called from init_alias_analysis indirectly through note_stores.  */

/* while scanning insns to find base values, reg_seen[N] is nonzero if
   register N has been set in this function.  */
static char *reg_seen;

/* Record the effect on reg_base_value of a store to DEST.  SET is the
   SET or CLOBBER rtx performing the store, or 0 (NOTE(review): 0
   appears to mean a side-effect store such as auto-increment — confirm
   against note_stores' contract).  */

static void
record_set (dest, set)
     rtx dest, set;
{
  register int regno;
  rtx src;

  /* Only register destinations carry base values.  */
  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);

  if (set)
    {
      /* A CLOBBER wipes out any old value but does not prevent a previously
	 unset register from acquiring a base address (i.e. reg_seen is not
	 set).  */
      if (GET_CODE (set) == CLOBBER)
	{
	  reg_base_value[regno] = 0;
	  return;
	}
      src = SET_SRC (set);
    }
  else
    {
      /* No SET rtx: invent a unique ADDRESS so this register never
	 compares equal to any other base.  */
      static int unique_id;
      if (reg_seen[regno])
	{
	  /* Set more than once: no single base value applies.  */
	  reg_base_value[regno] = 0;
	  return;
	}
      reg_seen[regno] = 1;
      reg_base_value[regno] = gen_rtx (ADDRESS, Pmode,
				       GEN_INT (unique_id++));
      return;
    }

  /* This is not the first set.  If the new value is not related to the
     old value, forget the base value.  Note that the following code is
     not detected:
     extern int x, y;  int *p = &x; p += (&y-&x);
     ANSI C does not allow computing the difference of addresses
     of distinct top level objects.  */
  if (reg_base_value[regno])
    switch (GET_CODE (src))
      {
      case LO_SUM:
      case PLUS:
      case MINUS:
	/* Adding/subtracting from the old value keeps the same base
	   only if the register itself is one of the operands.  */
	if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
	  reg_base_value[regno] = 0;
	break;
      case AND:
	/* Masking with a constant (alignment) preserves the base.  */
	if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
	  reg_base_value[regno] = 0;
	break;
      default:
	reg_base_value[regno] = 0;
	break;
      }
  /* If this is the first set of a register, record the value.  */
  else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
	   && ! reg_seen[regno] && reg_base_value[regno] == 0)
    reg_base_value[regno] = find_base_value (src);

  reg_seen[regno] = 1;
}
242 | ||
243 | /* Called from loop optimization when a new pseudo-register is created. */ | |
244 | void | |
245 | record_base_value (regno, val) | |
246 | int regno; | |
247 | rtx val; | |
248 | { | |
249 | if (!flag_alias_check || regno >= reg_base_value_size) | |
250 | return; | |
251 | if (GET_CODE (val) == REG) | |
252 | { | |
253 | if (REGNO (val) < reg_base_value_size) | |
254 | reg_base_value[regno] = reg_base_value[REGNO (val)]; | |
255 | return; | |
256 | } | |
257 | reg_base_value[regno] = find_base_value (val); | |
258 | } | |
259 | ||
260 | static rtx | |
261 | canon_rtx (x) | |
262 | rtx x; | |
263 | { | |
264 | /* Recursively look for equivalences. */ | |
265 | if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER | |
266 | && REGNO (x) < reg_known_value_size) | |
267 | return reg_known_value[REGNO (x)] == x | |
268 | ? x : canon_rtx (reg_known_value[REGNO (x)]); | |
269 | else if (GET_CODE (x) == PLUS) | |
270 | { | |
271 | rtx x0 = canon_rtx (XEXP (x, 0)); | |
272 | rtx x1 = canon_rtx (XEXP (x, 1)); | |
273 | ||
274 | if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1)) | |
275 | { | |
276 | /* We can tolerate LO_SUMs being offset here; these | |
277 | rtl are used for nothing other than comparisons. */ | |
278 | if (GET_CODE (x0) == CONST_INT) | |
279 | return plus_constant_for_output (x1, INTVAL (x0)); | |
280 | else if (GET_CODE (x1) == CONST_INT) | |
281 | return plus_constant_for_output (x0, INTVAL (x1)); | |
282 | return gen_rtx (PLUS, GET_MODE (x), x0, x1); | |
283 | } | |
284 | } | |
285 | /* This gives us much better alias analysis when called from | |
286 | the loop optimizer. Note we want to leave the original | |
287 | MEM alone, but need to return the canonicalized MEM with | |
288 | all the flags with their original values. */ | |
289 | else if (GET_CODE (x) == MEM) | |
290 | { | |
291 | rtx addr = canon_rtx (XEXP (x, 0)); | |
292 | if (addr != XEXP (x, 0)) | |
293 | { | |
294 | rtx new = gen_rtx (MEM, GET_MODE (x), addr); | |
295 | MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x); | |
296 | RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x); | |
297 | MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x); | |
298 | x = new; | |
299 | } | |
300 | } | |
301 | return x; | |
302 | } | |
303 | ||
/* Return 1 if X and Y are identical-looking rtx's.

   We use the data in reg_known_value above to see if two registers with
   different numbers are, in fact, equivalent.  */

static int
rtx_equal_for_memref_p (x, y)
     rtx x, y;
{
  register int i;
  register int j;
  register enum rtx_code code;
  register char *fmt;

  /* Two null pointers compare equal; one null pointer does not.  */
  if (x == 0 && y == 0)
    return 1;
  if (x == 0 || y == 0)
    return 0;
  /* Canonicalize both sides so equivalent forms compare equal.  */
  x = canon_rtx (x);
  y = canon_rtx (y);

  if (x == y)
    return 1;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* REG, LABEL_REF, and SYMBOL_REF can be compared nonrecursively.  */

  if (code == REG)
    return REGNO (x) == REGNO (y);
  if (code == LABEL_REF)
    return XEXP (x, 0) == XEXP (y, 0);
  if (code == SYMBOL_REF)
    /* Symbol names are shared, so pointer comparison suffices.  */
    return XSTR (x, 0) == XSTR (y, 0);

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  The operand kinds
     are taken from the rtx format string.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'w':
	  /* A HOST_WIDE_INT operand.  */
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  /* An integer operand.  */
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_memref_p (XVECEXP (x, i, j), XVECEXP (y, i, j)) == 0)
	      return 0;
	  break;

	case 'e':
	  /* A recursive rtx operand.  */
	  if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
	    return 0;
	  break;

	case 'S':
	case 's':
	  /* String operands compare by content.  */
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}
419 | ||
420 | /* Given an rtx X, find a SYMBOL_REF or LABEL_REF within | |
421 | X and return it, or return 0 if none found. */ | |
422 | ||
423 | static rtx | |
424 | find_symbolic_term (x) | |
425 | rtx x; | |
426 | { | |
427 | register int i; | |
428 | register enum rtx_code code; | |
429 | register char *fmt; | |
430 | ||
431 | code = GET_CODE (x); | |
432 | if (code == SYMBOL_REF || code == LABEL_REF) | |
433 | return x; | |
434 | if (GET_RTX_CLASS (code) == 'o') | |
435 | return 0; | |
436 | ||
437 | fmt = GET_RTX_FORMAT (code); | |
438 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
439 | { | |
440 | rtx t; | |
441 | ||
442 | if (fmt[i] == 'e') | |
443 | { | |
444 | t = find_symbolic_term (XEXP (x, i)); | |
445 | if (t != 0) | |
446 | return t; | |
447 | } | |
448 | else if (fmt[i] == 'E') | |
449 | break; | |
450 | } | |
451 | return 0; | |
452 | } | |
453 | ||
/* Return the base address term underlying address X (a SYMBOL_REF,
   LABEL_REF, or ADDRESS rtx from reg_base_value), or 0 when no base
   can be determined.  */

static rtx
find_base_term (x)
     register rtx x;
{
  switch (GET_CODE (x))
    {
    case REG:
      /* Registers report the base recorded during the scan.  */
      return REG_BASE_VALUE (x);

    case HIGH:
      return find_base_term (XEXP (x, 0));

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
      /* Auto-modify addresses have the same base as their operand.  */
      return find_base_term (XEXP (x, 0));

    case CONST:
      x = XEXP (x, 0);
      if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
	return 0;
      /* fall through */
    case LO_SUM:
    case PLUS:
    case MINUS:
      {
	/* The base is in whichever operand yields one; try the first
	   operand, then the second.  */
	rtx tmp = find_base_term (XEXP (x, 0));
	if (tmp)
	  return tmp;
	return find_base_term (XEXP (x, 1));
      }

    case AND:
      /* An alignment mask applied to a register keeps that register's
	 base.  */
      if (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (XEXP (x, 1)) == CONST_INT)
	return REG_BASE_VALUE (XEXP (x, 0));
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    default:
      return 0;
    }
}
500 | ||
/* Return 0 if the addresses X and Y are known to point to different
   objects, 1 if they might be pointers to the same object.  */

static int
base_alias_check (x, y)
     rtx x, y;
{
  rtx x_base = find_base_term (x);
  rtx y_base = find_base_term (y);

  /* If either base address is unknown or the base addresses are equal,
     nothing is known about aliasing.  */

  if (x_base == 0 || y_base == 0 || rtx_equal_p (x_base, y_base))
    return 1;

  /* The base addresses of the read and write are different
     expressions.  If they are both symbols and they are not accessed
     via AND, there is no conflict.  */
  /* XXX: We can bring knowledge of object alignment and offset into
     play here.  For example, on alpha, "char a, b;" can alias one
     another, though "char a; long b;" cannot.  Similarly, offsets
     into structures may be brought into play.  Given "char a, b[40];",
     a and b[1] may overlap, but a and b[20] do not.  */
  if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
    {
      /* AND addresses may reach outside the named object, so they can
	 still conflict even with distinct symbol bases.  */
      return GET_CODE (x) == AND || GET_CODE (y) == AND;
    }

  /* If one address is a stack reference there can be no alias:
     stack references using different base registers do not alias,
     a stack reference can not alias a parameter, and a stack reference
     can not alias a global.  */
  if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
      || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
    return 0;

  /* Without -fargument-noalias, arguments may alias each other and
     globals; assume a conflict.  */
  if (! flag_argument_noalias)
    return 1;

  /* -fargument-noalias-global: arguments alias nothing.  */
  if (flag_argument_noalias > 1)
    return 0;

  /* Weak noalias assertion (arguments are distinct, but may match globals).  */
  return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
}
547 | ||
548 | /* Return nonzero if X and Y (memory addresses) could reference the | |
549 | same location in memory. C is an offset accumulator. When | |
550 | C is nonzero, we are testing aliases between X and Y + C. | |
551 | XSIZE is the size in bytes of the X reference, | |
552 | similarly YSIZE is the size in bytes for Y. | |
553 | ||
554 | If XSIZE or YSIZE is zero, we do not know the amount of memory being | |
555 | referenced (the reference was BLKmode), so make the most pessimistic | |
556 | assumptions. | |
557 | ||
c02f035f RH |
558 | If XSIZE or YSIZE is negative, we may access memory outside the object |
559 | being referenced as a side effect. This can happen when using AND to | |
560 | align memory references, as is done on the Alpha. | |
561 | ||
9ae8ffe7 JL |
562 | We recognize the following cases of non-conflicting memory: |
563 | ||
564 | (1) addresses involving the frame pointer cannot conflict | |
565 | with addresses involving static variables. | |
566 | (2) static variables with different addresses cannot conflict. | |
567 | ||
568 | Nice to notice that varying addresses cannot conflict with fp if no | |
569 | local variables had their addresses taken, but that's too hard now. */ | |
570 | ||
571 | ||
572 | static int | |
573 | memrefs_conflict_p (xsize, x, ysize, y, c) | |
574 | register rtx x, y; | |
575 | int xsize, ysize; | |
576 | HOST_WIDE_INT c; | |
577 | { | |
578 | if (GET_CODE (x) == HIGH) | |
579 | x = XEXP (x, 0); | |
580 | else if (GET_CODE (x) == LO_SUM) | |
581 | x = XEXP (x, 1); | |
582 | else | |
583 | x = canon_rtx (x); | |
584 | if (GET_CODE (y) == HIGH) | |
585 | y = XEXP (y, 0); | |
586 | else if (GET_CODE (y) == LO_SUM) | |
587 | y = XEXP (y, 1); | |
588 | else | |
589 | y = canon_rtx (y); | |
590 | ||
591 | if (rtx_equal_for_memref_p (x, y)) | |
592 | { | |
c02f035f | 593 | if (xsize <= 0 || ysize <= 0) |
9ae8ffe7 JL |
594 | return 1; |
595 | if (c >= 0 && xsize > c) | |
596 | return 1; | |
597 | if (c < 0 && ysize+c > 0) | |
598 | return 1; | |
599 | return 0; | |
600 | } | |
601 | ||
602 | if (y == frame_pointer_rtx || y == hard_frame_pointer_rtx | |
96286722 | 603 | || y == stack_pointer_rtx || y == arg_pointer_rtx) |
9ae8ffe7 JL |
604 | { |
605 | rtx t = y; | |
606 | int tsize = ysize; | |
607 | y = x; ysize = xsize; | |
608 | x = t; xsize = tsize; | |
609 | } | |
610 | ||
611 | if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx | |
96286722 | 612 | || x == stack_pointer_rtx || x == arg_pointer_rtx) |
9ae8ffe7 JL |
613 | { |
614 | rtx y1; | |
615 | ||
616 | if (CONSTANT_P (y)) | |
617 | return 0; | |
618 | ||
619 | if (GET_CODE (y) == PLUS | |
620 | && canon_rtx (XEXP (y, 0)) == x | |
621 | && (y1 = canon_rtx (XEXP (y, 1))) | |
622 | && GET_CODE (y1) == CONST_INT) | |
623 | { | |
624 | c += INTVAL (y1); | |
c02f035f | 625 | return (xsize <= 0 || ysize <= 0 |
9ae8ffe7 JL |
626 | || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0)); |
627 | } | |
628 | ||
629 | if (GET_CODE (y) == PLUS | |
630 | && (y1 = canon_rtx (XEXP (y, 0))) | |
631 | && CONSTANT_P (y1)) | |
632 | return 0; | |
633 | ||
634 | return 1; | |
635 | } | |
636 | ||
637 | if (GET_CODE (x) == PLUS) | |
638 | { | |
639 | /* The fact that X is canonicalized means that this | |
640 | PLUS rtx is canonicalized. */ | |
641 | rtx x0 = XEXP (x, 0); | |
642 | rtx x1 = XEXP (x, 1); | |
643 | ||
644 | if (GET_CODE (y) == PLUS) | |
645 | { | |
646 | /* The fact that Y is canonicalized means that this | |
647 | PLUS rtx is canonicalized. */ | |
648 | rtx y0 = XEXP (y, 0); | |
649 | rtx y1 = XEXP (y, 1); | |
650 | ||
651 | if (rtx_equal_for_memref_p (x1, y1)) | |
652 | return memrefs_conflict_p (xsize, x0, ysize, y0, c); | |
653 | if (rtx_equal_for_memref_p (x0, y0)) | |
654 | return memrefs_conflict_p (xsize, x1, ysize, y1, c); | |
655 | if (GET_CODE (x1) == CONST_INT) | |
656 | if (GET_CODE (y1) == CONST_INT) | |
657 | return memrefs_conflict_p (xsize, x0, ysize, y0, | |
658 | c - INTVAL (x1) + INTVAL (y1)); | |
659 | else | |
660 | return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1)); | |
661 | else if (GET_CODE (y1) == CONST_INT) | |
662 | return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1)); | |
663 | ||
664 | /* Handle case where we cannot understand iteration operators, | |
665 | but we notice that the base addresses are distinct objects. */ | |
666 | /* ??? Is this still necessary? */ | |
667 | x = find_symbolic_term (x); | |
668 | if (x == 0) | |
669 | return 1; | |
670 | y = find_symbolic_term (y); | |
671 | if (y == 0) | |
672 | return 1; | |
673 | return rtx_equal_for_memref_p (x, y); | |
674 | } | |
675 | else if (GET_CODE (x1) == CONST_INT) | |
676 | return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1)); | |
677 | } | |
678 | else if (GET_CODE (y) == PLUS) | |
679 | { | |
680 | /* The fact that Y is canonicalized means that this | |
681 | PLUS rtx is canonicalized. */ | |
682 | rtx y0 = XEXP (y, 0); | |
683 | rtx y1 = XEXP (y, 1); | |
684 | ||
685 | if (GET_CODE (y1) == CONST_INT) | |
686 | return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1)); | |
687 | else | |
688 | return 1; | |
689 | } | |
690 | ||
691 | if (GET_CODE (x) == GET_CODE (y)) | |
692 | switch (GET_CODE (x)) | |
693 | { | |
694 | case MULT: | |
695 | { | |
696 | /* Handle cases where we expect the second operands to be the | |
697 | same, and check only whether the first operand would conflict | |
698 | or not. */ | |
699 | rtx x0, y0; | |
700 | rtx x1 = canon_rtx (XEXP (x, 1)); | |
701 | rtx y1 = canon_rtx (XEXP (y, 1)); | |
702 | if (! rtx_equal_for_memref_p (x1, y1)) | |
703 | return 1; | |
704 | x0 = canon_rtx (XEXP (x, 0)); | |
705 | y0 = canon_rtx (XEXP (y, 0)); | |
706 | if (rtx_equal_for_memref_p (x0, y0)) | |
707 | return (xsize == 0 || ysize == 0 | |
708 | || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0)); | |
709 | ||
710 | /* Can't properly adjust our sizes. */ | |
711 | if (GET_CODE (x1) != CONST_INT) | |
712 | return 1; | |
713 | xsize /= INTVAL (x1); | |
714 | ysize /= INTVAL (x1); | |
715 | c /= INTVAL (x1); | |
716 | return memrefs_conflict_p (xsize, x0, ysize, y0, c); | |
717 | } | |
718 | } | |
719 | ||
720 | /* Treat an access through an AND (e.g. a subword access on an Alpha) | |
721 | as an access with indeterminate size. */ | |
722 | if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT) | |
c02f035f | 723 | return memrefs_conflict_p (-1, XEXP (x, 0), ysize, y, c); |
9ae8ffe7 | 724 | if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT) |
c02f035f RH |
725 | { |
726 | /* XXX: If we are indexing far enough into the array/structure, we | |
727 | may yet be able to determine that we can not overlap. But we | |
728 | also need to that we are far enough from the end not to overlap | |
729 | a following reference, so we do nothing for now. */ | |
730 | return memrefs_conflict_p (xsize, x, -1, XEXP (y, 0), c); | |
731 | } | |
9ae8ffe7 JL |
732 | |
733 | if (CONSTANT_P (x)) | |
734 | { | |
735 | if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT) | |
736 | { | |
737 | c += (INTVAL (y) - INTVAL (x)); | |
c02f035f | 738 | return (xsize <= 0 || ysize <= 0 |
9ae8ffe7 JL |
739 | || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0)); |
740 | } | |
741 | ||
742 | if (GET_CODE (x) == CONST) | |
743 | { | |
744 | if (GET_CODE (y) == CONST) | |
745 | return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), | |
746 | ysize, canon_rtx (XEXP (y, 0)), c); | |
747 | else | |
748 | return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), | |
749 | ysize, y, c); | |
750 | } | |
751 | if (GET_CODE (y) == CONST) | |
752 | return memrefs_conflict_p (xsize, x, ysize, | |
753 | canon_rtx (XEXP (y, 0)), c); | |
754 | ||
755 | if (CONSTANT_P (y)) | |
c02f035f RH |
756 | return (xsize < 0 || ysize < 0 |
757 | || (rtx_equal_for_memref_p (x, y) | |
758 | && (xsize == 0 || ysize == 0 | |
759 | || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0)))); | |
9ae8ffe7 JL |
760 | |
761 | return 1; | |
762 | } | |
763 | return 1; | |
764 | } | |
765 | ||
766 | /* Functions to compute memory dependencies. | |
767 | ||
768 | Since we process the insns in execution order, we can build tables | |
769 | to keep track of what registers are fixed (and not aliased), what registers | |
770 | are varying in known ways, and what registers are varying in unknown | |
771 | ways. | |
772 | ||
773 | If both memory references are volatile, then there must always be a | |
774 | dependence between the two references, since their order can not be | |
775 | changed. A volatile and non-volatile reference can be interchanged | |
776 | though. | |
777 | ||
fa8b6024 | 778 | A MEM_IN_STRUCT reference at a non-QImode non-AND varying address can never |
9ae8ffe7 JL |
779 | conflict with a non-MEM_IN_STRUCT reference at a fixed address. We must |
780 | allow QImode aliasing because the ANSI C standard allows character | |
781 | pointers to alias anything. We are assuming that characters are | |
fa8b6024 JW |
782 | always QImode here. We also must allow AND addresses, because they may |
783 | generate accesses outside the object being referenced. This is used to | |
784 | generate aligned addresses from unaligned addresses, for instance, the | |
785 | alpha storeqi_unaligned pattern. */ | |
9ae8ffe7 JL |
786 | |
787 | /* Read dependence: X is read after read in MEM takes place. There can | |
788 | only be a dependence here if both reads are volatile. */ | |
789 | ||
790 | int | |
791 | read_dependence (mem, x) | |
792 | rtx mem; | |
793 | rtx x; | |
794 | { | |
795 | return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem); | |
796 | } | |
797 | ||
/* True dependence: X is read after store in MEM takes place.
   MEM_MODE is the mode of the store (VOIDmode means take it from MEM);
   VARIES is a predicate telling whether an address varies at runtime.
   Returns nonzero when the read may depend on the store.  */

int
true_dependence (mem, mem_mode, x, varies)
     rtx mem;
     enum machine_mode mem_mode;
     rtx x;
     int (*varies)();
{
  rtx x_addr, mem_addr;

  /* Two volatile accesses always depend on each other.  */
  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  x_addr = XEXP (x, 0);
  mem_addr = XEXP (mem, 0);

  /* Distinct base objects cannot alias.  */
  if (flag_alias_check && ! base_alias_check (x_addr, mem_addr))
    return 0;

  /* If X is an unchanging read, then it can't possibly conflict with any
     non-unchanging store.  It may conflict with an unchanging write though,
     because there may be a single store to this address to initialize it.
     Just fall through to the code below to resolve the case where we have
     both an unchanging read and an unchanging write.  This won't handle all
     cases optimally, but the possible performance loss should be
     negligible.  */
  if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);
  if (mem_mode == VOIDmode)
    mem_mode = GET_MODE (mem);

  /* Offset/size analysis may prove the accesses disjoint.  */
  if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
			    SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  /* If both references are struct references, or both are not, nothing
     is known about aliasing.

     If either reference is QImode or BLKmode, ANSI C permits aliasing.

     If both addresses are constant, or both are not, nothing is known
     about aliasing.  */
  if (MEM_IN_STRUCT_P (x) == MEM_IN_STRUCT_P (mem)
      || mem_mode == QImode || mem_mode == BLKmode
      || GET_MODE (x) == QImode || GET_MODE (x) == BLKmode
      || GET_CODE (x_addr) == AND || GET_CODE (mem_addr) == AND
      || varies (x_addr) == varies (mem_addr))
    return 1;

  /* One memory reference is to a constant address, one is not.
     One is to a structure, the other is not.

     If either memory reference is a variable structure the other is a
     fixed scalar and there is no aliasing.  */
  if ((MEM_IN_STRUCT_P (mem) && varies (mem_addr))
      || (MEM_IN_STRUCT_P (x) && varies (x_addr)))
    return 0;

  return 1;
}
862 | ||
863 | /* Anti dependence: X is written after read in MEM takes place. */ | |
864 | ||
865 | int | |
866 | anti_dependence (mem, x) | |
867 | rtx mem; | |
868 | rtx x; | |
869 | { | |
870 | if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem)) | |
871 | return 1; | |
872 | ||
873 | if (flag_alias_check && ! base_alias_check (XEXP (x, 0), XEXP (mem, 0))) | |
874 | return 0; | |
875 | ||
876 | /* If MEM is an unchanging read, then it can't possibly conflict with | |
877 | the store to X, because there is at most one store to MEM, and it must | |
878 | have occurred somewhere before MEM. */ | |
879 | x = canon_rtx (x); | |
880 | mem = canon_rtx (mem); | |
881 | if (RTX_UNCHANGING_P (mem)) | |
882 | return 0; | |
883 | ||
884 | return (memrefs_conflict_p (SIZE_FOR_MODE (mem), XEXP (mem, 0), | |
885 | SIZE_FOR_MODE (x), XEXP (x, 0), 0) | |
886 | && ! (MEM_IN_STRUCT_P (mem) && rtx_addr_varies_p (mem) | |
887 | && GET_MODE (mem) != QImode | |
fa8b6024 | 888 | && GET_CODE (XEXP (mem, 0)) != AND |
9ae8ffe7 JL |
889 | && ! MEM_IN_STRUCT_P (x) && ! rtx_addr_varies_p (x)) |
890 | && ! (MEM_IN_STRUCT_P (x) && rtx_addr_varies_p (x) | |
891 | && GET_MODE (x) != QImode | |
fa8b6024 | 892 | && GET_CODE (XEXP (x, 0)) != AND |
9ae8ffe7 JL |
893 | && ! MEM_IN_STRUCT_P (mem) && ! rtx_addr_varies_p (mem))); |
894 | } | |
895 | ||
896 | /* Output dependence: X is written after store in MEM takes place. */ | |
897 | ||
898 | int | |
899 | output_dependence (mem, x) | |
900 | register rtx mem; | |
901 | register rtx x; | |
902 | { | |
903 | if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem)) | |
904 | return 1; | |
905 | ||
906 | if (flag_alias_check && !base_alias_check (XEXP (x, 0), XEXP (mem, 0))) | |
907 | return 0; | |
908 | ||
909 | x = canon_rtx (x); | |
910 | mem = canon_rtx (mem); | |
911 | return (memrefs_conflict_p (SIZE_FOR_MODE (mem), XEXP (mem, 0), | |
912 | SIZE_FOR_MODE (x), XEXP (x, 0), 0) | |
913 | && ! (MEM_IN_STRUCT_P (mem) && rtx_addr_varies_p (mem) | |
914 | && GET_MODE (mem) != QImode | |
fa8b6024 | 915 | && GET_CODE (XEXP (mem, 0)) != AND |
9ae8ffe7 JL |
916 | && ! MEM_IN_STRUCT_P (x) && ! rtx_addr_varies_p (x)) |
917 | && ! (MEM_IN_STRUCT_P (x) && rtx_addr_varies_p (x) | |
918 | && GET_MODE (x) != QImode | |
fa8b6024 | 919 | && GET_CODE (XEXP (x, 0)) != AND |
9ae8ffe7 JL |
920 | && ! MEM_IN_STRUCT_P (mem) && ! rtx_addr_varies_p (mem))); |
921 | } | |
922 | ||
/* Set up everything the alias-analysis routines above need:
   reg_known_value / reg_known_equiv_p for pseudo registers, and (when
   -falias-check is on) reg_base_value, which records the base object each
   register can point to.  Must be called before the dependence tests are
   used; pair with end_alias_analysis.  */

void
init_alias_analysis ()
{
  int maxreg = max_reg_num ();
  int changed;			/* propagation loop made progress */
  register int i;
  register rtx insn;
  rtx note;
  rtx set;

  reg_known_value_size = maxreg;

  /* Both tables are biased by FIRST_PSEUDO_REGISTER: only pseudos are
     tracked, and subtracting the bias lets them be indexed directly by
     register number.  Only indices >= FIRST_PSEUDO_REGISTER are valid.  */
  reg_known_value
    = (rtx *) oballoc ((maxreg - FIRST_PSEUDO_REGISTER) * sizeof (rtx))
    - FIRST_PSEUDO_REGISTER;
  reg_known_equiv_p =
    oballoc (maxreg - FIRST_PSEUDO_REGISTER) - FIRST_PSEUDO_REGISTER;
  bzero ((char *) (reg_known_value + FIRST_PSEUDO_REGISTER),
	 (maxreg-FIRST_PSEUDO_REGISTER) * sizeof (rtx));
  bzero (reg_known_equiv_p + FIRST_PSEUDO_REGISTER,
	 (maxreg - FIRST_PSEUDO_REGISTER) * sizeof (char));

  if (flag_alias_check)
    {
      /* Overallocate reg_base_value to allow some growth during loop
	 optimization.  Loop unrolling can create a large number of
	 registers.  */
      reg_base_value_size = maxreg * 2;
      reg_base_value = (rtx *)oballoc (reg_base_value_size * sizeof (rtx));
      /* NOTE(review): reg_seen is alloca'd, so it lives only for the
	 duration of this call's stack frame; it is cleared to 0 at the
	 end of this function before returning.  */
      reg_seen = (char *)alloca (reg_base_value_size);
      bzero ((char *) reg_base_value, reg_base_value_size * sizeof (rtx));
      bzero (reg_seen, reg_base_value_size);

      /* Mark all hard registers which may contain an address.
	 The stack, frame and argument pointers may contain an address.
	 An argument register which can hold a Pmode value may contain
	 an address even if it is not in BASE_REGS.

	 The address expression is VOIDmode for an argument and
	 Pmode for other registers.  */
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	/* Check whether this register can hold an incoming pointer
	   argument.  FUNCTION_ARG_REGNO_P tests outgoing register
	   numbers, so translate if necessary due to register windows.  */
	if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i)) && HARD_REGNO_MODE_OK (i, Pmode))
	  reg_base_value[i] = gen_rtx (ADDRESS, VOIDmode,
				       gen_rtx (REG, Pmode, i));

      /* The stack, argument and frame pointers are always distinct
	 base objects.  */
      reg_base_value[STACK_POINTER_REGNUM]
	= gen_rtx (ADDRESS, Pmode, stack_pointer_rtx);
      reg_base_value[ARG_POINTER_REGNUM]
	= gen_rtx (ADDRESS, Pmode, arg_pointer_rtx);
      reg_base_value[FRAME_POINTER_REGNUM]
	= gen_rtx (ADDRESS, Pmode, frame_pointer_rtx);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      reg_base_value[HARD_FRAME_POINTER_REGNUM]
	= gen_rtx (ADDRESS, Pmode, hard_frame_pointer_rtx);
#endif
    }

  /* copying_arguments stays set until NOTE_INSN_FUNCTION_BEG, i.e. while
     scanning the prologue insns that copy incoming arguments.  */
  copying_arguments = 1;
  /* Fill in the entries with known constant values.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (flag_alias_check && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* If this insn has a noalias note, process it, Otherwise,
	     scan for sets.  A simple set will have no side effects
	     which could change the base value of any other register. */
	  rtx noalias_note;
	  if (GET_CODE (PATTERN (insn)) == SET
	      && (noalias_note = find_reg_note (insn, REG_NOALIAS, NULL_RTX)))
	    record_set (SET_DEST (PATTERN (insn)), 0);
	  else
	    note_stores (PATTERN (insn), record_set);
	}
      else if (GET_CODE (insn) == NOTE
	       && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
	copying_arguments = 0;

      /* Record a known value for a pseudo that is set once and has a
	 REG_EQUAL note, or has a REG_EQUIV note.  EXPR_LIST notes are
	 skipped — presumably they do not describe a usable value;
	 TODO confirm against find_reg_note callers.  */
      if ((set = single_set (insn)) != 0
	  && GET_CODE (SET_DEST (set)) == REG
	  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
	  && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
	       && REG_N_SETS (REGNO (SET_DEST (set))) == 1)
	      || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
	  && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
	{
	  int regno = REGNO (SET_DEST (set));
	  reg_known_value[regno] = XEXP (note, 0);
	  reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
	}
    }

  /* Fill in the remaining entries: a pseudo with no known value is its
     own canonical value.  */
  for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
    if (reg_known_value[i] == 0)
      reg_known_value[i] = regno_reg_rtx[i];

  if (! flag_alias_check)
    return;

  /* Simplify the reg_base_value array so that no register refers to
     another register, except to special registers indirectly through
     ADDRESS expressions.

     In theory this loop can take as long as O(registers^2), but unless
     there are very long dependency chains it will run in close to linear
     time.  */
  do
    {
      changed = 0;
      for (i = 0; i < reg_base_value_size; i++)
	{
	  rtx base = reg_base_value[i];
	  if (base && GET_CODE (base) == REG)
	    {
	      int base_regno = REGNO (base);
	      if (base_regno == i) /* register set from itself */
		reg_base_value[i] = 0;
	      else
		reg_base_value[i] = reg_base_value[base_regno];
	      changed = 1;
	    }
	}
    }
  while (changed);

  /* The alloca'd reg_seen buffer dies with this frame; clear the
     pointer so nothing dangles.  */
  reg_seen = 0;
}
1056 | ||
1057 | void | |
1058 | end_alias_analysis () | |
1059 | { | |
1060 | reg_known_value = 0; | |
1061 | reg_base_value = 0; | |
1062 | reg_base_value_size = 0; | |
1063 | } |