]>
Commit | Line | Data |
---|---|---|
af082de3 | 1 | /* Analyze RTL for GNU compiler. |
af841dbd | 2 | Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, |
a446b4e8 | 3 | 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software |
af082de3 | 4 | Foundation, Inc. |
2c88418c | 5 | |
1322177d | 6 | This file is part of GCC. |
2c88418c | 7 | |
1322177d LB |
8 | GCC is free software; you can redistribute it and/or modify it under |
9 | the terms of the GNU General Public License as published by the Free | |
9dcd6f09 | 10 | Software Foundation; either version 3, or (at your option) any later |
1322177d | 11 | version. |
2c88418c | 12 | |
1322177d LB |
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 | for more details. | |
2c88418c RS |
17 | |
18 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
19 | along with GCC; see the file COPYING3. If not see |
20 | <http://www.gnu.org/licenses/>. */ | |
2c88418c RS |
21 | |
22 | ||
23 | #include "config.h" | |
670ee920 | 24 | #include "system.h" |
4977bab6 ZW |
25 | #include "coretypes.h" |
26 | #include "tm.h" | |
e35b9579 | 27 | #include "toplev.h" |
2c88418c | 28 | #include "rtl.h" |
3335f1d9 | 29 | #include "hard-reg-set.h" |
bc204393 RH |
30 | #include "insn-config.h" |
31 | #include "recog.h" | |
f894b69b PB |
32 | #include "target.h" |
33 | #include "output.h" | |
91ea4f8d | 34 | #include "tm_p.h" |
f5eb5fd0 | 35 | #include "flags.h" |
52bfebf0 | 36 | #include "real.h" |
66fd46b6 | 37 | #include "regs.h" |
2f93eea8 | 38 | #include "function.h" |
6fb5fa3c | 39 | #include "df.h" |
7ffb5e78 | 40 | #include "tree.h" |
2c88418c | 41 | |
f1f4e530 JM |
42 | /* Information about a subreg of a hard register. */ |
43 | struct subreg_info | |
44 | { | |
45 | /* Offset of first hard register involved in the subreg. */ | |
46 | int offset; | |
47 | /* Number of hard registers involved in the subreg. */ | |
48 | int nregs; | |
49 | /* Whether this subreg can be represented as a hard reg with the new | |
50 | mode. */ | |
51 | bool representable_p; | |
52 | }; | |
53 | ||
e2373f95 | 54 | /* Forward declarations */ |
7bc980e1 | 55 | static void set_of_1 (rtx, const_rtx, void *); |
f7d504c2 KG |
56 | static bool covers_regno_p (const_rtx, unsigned int); |
57 | static bool covers_regno_no_parallel_p (const_rtx, unsigned int); | |
0c20a65f | 58 | static int rtx_referenced_p_1 (rtx *, void *); |
f7d504c2 | 59 | static int computed_jump_p_1 (const_rtx); |
7bc980e1 | 60 | static void parms_set (rtx, const_rtx, void *); |
f1f4e530 JM |
61 | static void subreg_get_info (unsigned int, enum machine_mode, |
62 | unsigned int, enum machine_mode, | |
63 | struct subreg_info *); | |
2a1777af | 64 | |
fa233e34 KG |
65 | static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode, |
66 | const_rtx, enum machine_mode, | |
2f93eea8 | 67 | unsigned HOST_WIDE_INT); |
fa233e34 KG |
68 | static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode, |
69 | const_rtx, enum machine_mode, | |
2f93eea8 | 70 | unsigned HOST_WIDE_INT); |
fa233e34 | 71 | static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx, |
2f93eea8 PB |
72 | enum machine_mode, |
73 | unsigned int); | |
fa233e34 | 74 | static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx, |
2f93eea8 PB |
75 | enum machine_mode, unsigned int); |
76 | ||
cf94b0fc PB |
77 | /* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or |
78 | -1 if a code has no such operand. */ | |
79 | static int non_rtx_starting_operands[NUM_RTX_CODE]; | |
80 | ||
2c88418c RS |
81 | /* Bit flags that specify the machine subtype we are compiling for. |
82 | Bits are tested using macros TARGET_... defined in the tm.h file | |
83 | and set by `-m...' switches. Must be defined in rtlanal.c. */ | |
84 | ||
85 | int target_flags; | |
b12cbf2c AN |
86 | |
87 | /* Truncation narrows the mode from SOURCE mode to DESTINATION mode. | |
88 | If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is | |
89 | SIGN_EXTEND then while narrowing we also have to enforce the | |
90 | representation and sign-extend the value to mode DESTINATION_REP. | |
91 | ||
92 | If the value is already sign-extended to DESTINATION_REP mode we | |
93 | can just switch to DESTINATION mode on it. For each pair of | |
94 | integral modes SOURCE and DESTINATION, when truncating from SOURCE | |
95 | to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION] | |
96 | contains the number of high-order bits in SOURCE that have to be | |
97 | copies of the sign-bit so that we can do this mode-switch to | |
98 | DESTINATION. */ | |
99 | ||
100 | static unsigned int | |
101 | num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1]; | |
2c88418c RS |
102 | \f |
103 | /* Return 1 if the value of X is unstable | |
104 | (would be different at a different point in the program). | |
105 | The frame pointer, arg pointer, etc. are considered stable | |
106 | (within one function) and so is anything marked `unchanging'. */ | |
107 | ||
108 | int | |
f7d504c2 | 109 | rtx_unstable_p (const_rtx x) |
2c88418c | 110 | { |
f7d504c2 | 111 | const RTX_CODE code = GET_CODE (x); |
b3694847 SS |
112 | int i; |
113 | const char *fmt; | |
2c88418c | 114 | |
ae0fb1b9 JW |
115 | switch (code) |
116 | { | |
117 | case MEM: | |
389fdba0 | 118 | return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0)); |
2c88418c | 119 | |
ae0fb1b9 JW |
120 | case CONST: |
121 | case CONST_INT: | |
122 | case CONST_DOUBLE: | |
091a3ac7 | 123 | case CONST_FIXED: |
69ef87e2 | 124 | case CONST_VECTOR: |
ae0fb1b9 JW |
125 | case SYMBOL_REF: |
126 | case LABEL_REF: | |
127 | return 0; | |
2c88418c | 128 | |
ae0fb1b9 JW |
129 | case REG: |
130 | /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */ | |
c0fc376b | 131 | if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx |
3335f1d9 | 132 | /* The arg pointer varies if it is not a fixed register. */ |
389fdba0 | 133 | || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) |
c0fc376b RH |
134 | return 0; |
135 | #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED | |
136 | /* ??? When call-clobbered, the value is stable modulo the restore | |
137 | that must happen after a call. This currently screws up local-alloc | |
138 | into believing that the restore is not needed. */ | |
139 | if (x == pic_offset_table_rtx) | |
140 | return 0; | |
141 | #endif | |
142 | return 1; | |
ae0fb1b9 JW |
143 | |
144 | case ASM_OPERANDS: | |
145 | if (MEM_VOLATILE_P (x)) | |
146 | return 1; | |
147 | ||
5d3cc252 | 148 | /* Fall through. */ |
ae0fb1b9 JW |
149 | |
150 | default: | |
151 | break; | |
152 | } | |
2c88418c RS |
153 | |
154 | fmt = GET_RTX_FORMAT (code); | |
155 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
156 | if (fmt[i] == 'e') | |
9c82ac6b JW |
157 | { |
158 | if (rtx_unstable_p (XEXP (x, i))) | |
159 | return 1; | |
160 | } | |
161 | else if (fmt[i] == 'E') | |
162 | { | |
163 | int j; | |
164 | for (j = 0; j < XVECLEN (x, i); j++) | |
165 | if (rtx_unstable_p (XVECEXP (x, i, j))) | |
166 | return 1; | |
167 | } | |
168 | ||
2c88418c RS |
169 | return 0; |
170 | } | |
171 | ||
172 | /* Return 1 if X has a value that can vary even between two | |
173 | executions of the program. 0 means X can be compared reliably | |
174 | against certain constants or near-constants. | |
e38fe8e0 BS |
175 | FOR_ALIAS is nonzero if we are called from alias analysis; if it is |
176 | zero, we are slightly more conservative. | |
2c88418c RS |
177 | The frame pointer and the arg pointer are considered constant. */ |
178 | ||
4f588890 KG |
179 | bool |
180 | rtx_varies_p (const_rtx x, bool for_alias) | |
2c88418c | 181 | { |
e978d62e | 182 | RTX_CODE code; |
b3694847 SS |
183 | int i; |
184 | const char *fmt; | |
2c88418c | 185 | |
e978d62e PB |
186 | if (!x) |
187 | return 0; | |
188 | ||
189 | code = GET_CODE (x); | |
2c88418c RS |
190 | switch (code) |
191 | { | |
192 | case MEM: | |
389fdba0 | 193 | return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias); |
55efb413 | 194 | |
2c88418c RS |
195 | case CONST: |
196 | case CONST_INT: | |
197 | case CONST_DOUBLE: | |
091a3ac7 | 198 | case CONST_FIXED: |
69ef87e2 | 199 | case CONST_VECTOR: |
2c88418c RS |
200 | case SYMBOL_REF: |
201 | case LABEL_REF: | |
202 | return 0; | |
203 | ||
204 | case REG: | |
205 | /* Note that we have to test for the actual rtx used for the frame | |
206 | and arg pointers and not just the register number in case we have | |
207 | eliminated the frame and/or arg pointer and are using it | |
208 | for pseudos. */ | |
c0fc376b | 209 | if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx |
3335f1d9 JL |
210 | /* The arg pointer varies if it is not a fixed register. */ |
211 | || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) | |
c0fc376b | 212 | return 0; |
e38fe8e0 BS |
213 | if (x == pic_offset_table_rtx |
214 | #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED | |
215 | /* ??? When call-clobbered, the value is stable modulo the restore | |
216 | that must happen after a call. This currently screws up | |
217 | local-alloc into believing that the restore is not needed, so we | |
218 | must return 0 only if we are called from alias analysis. */ | |
219 | && for_alias | |
c0fc376b | 220 | #endif |
e38fe8e0 BS |
221 | ) |
222 | return 0; | |
c0fc376b | 223 | return 1; |
2c88418c RS |
224 | |
225 | case LO_SUM: | |
226 | /* The operand 0 of a LO_SUM is considered constant | |
e7d96a83 JW |
227 | (in fact it is related specifically to operand 1) |
228 | during alias analysis. */ | |
229 | return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias)) | |
230 | || rtx_varies_p (XEXP (x, 1), for_alias); | |
a6a2274a | 231 | |
ae0fb1b9 JW |
232 | case ASM_OPERANDS: |
233 | if (MEM_VOLATILE_P (x)) | |
234 | return 1; | |
235 | ||
5d3cc252 | 236 | /* Fall through. */ |
ae0fb1b9 | 237 | |
e9a25f70 JL |
238 | default: |
239 | break; | |
2c88418c RS |
240 | } |
241 | ||
242 | fmt = GET_RTX_FORMAT (code); | |
243 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
244 | if (fmt[i] == 'e') | |
9c82ac6b | 245 | { |
e38fe8e0 | 246 | if (rtx_varies_p (XEXP (x, i), for_alias)) |
9c82ac6b JW |
247 | return 1; |
248 | } | |
249 | else if (fmt[i] == 'E') | |
250 | { | |
251 | int j; | |
252 | for (j = 0; j < XVECLEN (x, i); j++) | |
e38fe8e0 | 253 | if (rtx_varies_p (XVECEXP (x, i, j), for_alias)) |
9c82ac6b JW |
254 | return 1; |
255 | } | |
256 | ||
2c88418c RS |
257 | return 0; |
258 | } | |
259 | ||
2358ff91 EB |
260 | /* Return nonzero if the use of X as an address in a MEM can cause a trap. |
261 | MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls | |
262 | whether nonzero is returned for unaligned memory accesses on strict | |
263 | alignment machines. */ | |
2c88418c | 264 | |
2358ff91 | 265 | static int |
f7d504c2 | 266 | rtx_addr_can_trap_p_1 (const_rtx x, enum machine_mode mode, bool unaligned_mems) |
2c88418c | 267 | { |
b3694847 | 268 | enum rtx_code code = GET_CODE (x); |
2c88418c RS |
269 | |
270 | switch (code) | |
271 | { | |
272 | case SYMBOL_REF: | |
ff0b6b99 FS |
273 | return SYMBOL_REF_WEAK (x); |
274 | ||
2c88418c | 275 | case LABEL_REF: |
2c88418c RS |
276 | return 0; |
277 | ||
278 | case REG: | |
279 | /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */ | |
4f73495e RH |
280 | if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx |
281 | || x == stack_pointer_rtx | |
282 | /* The arg pointer varies if it is not a fixed register. */ | |
283 | || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) | |
284 | return 0; | |
285 | /* All of the virtual frame registers are stack references. */ | |
286 | if (REGNO (x) >= FIRST_VIRTUAL_REGISTER | |
287 | && REGNO (x) <= LAST_VIRTUAL_REGISTER) | |
288 | return 0; | |
289 | return 1; | |
2c88418c RS |
290 | |
291 | case CONST: | |
2358ff91 | 292 | return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems); |
2c88418c RS |
293 | |
294 | case PLUS: | |
2358ff91 EB |
295 | /* An address is assumed not to trap if: |
296 | - it is an address that can't trap plus a constant integer, | |
297 | with the proper remainder modulo the mode size if we are | |
298 | considering unaligned memory references. */ | |
299 | if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems) | |
300 | && GET_CODE (XEXP (x, 1)) == CONST_INT) | |
301 | { | |
302 | HOST_WIDE_INT offset; | |
303 | ||
bb11103a EB |
304 | if (!STRICT_ALIGNMENT |
305 | || !unaligned_mems | |
306 | || GET_MODE_SIZE (mode) == 0) | |
2358ff91 EB |
307 | return 0; |
308 | ||
309 | offset = INTVAL (XEXP (x, 1)); | |
310 | ||
311 | #ifdef SPARC_STACK_BOUNDARY_HACK | |
312 | /* ??? The SPARC port may claim a STACK_BOUNDARY higher than | |
313 | the real alignment of %sp. However, when it does this, the | |
314 | alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */ | |
315 | if (SPARC_STACK_BOUNDARY_HACK | |
316 | && (XEXP (x, 0) == stack_pointer_rtx | |
317 | || XEXP (x, 0) == hard_frame_pointer_rtx)) | |
318 | offset -= STACK_POINTER_OFFSET; | |
319 | #endif | |
320 | ||
321 | return offset % GET_MODE_SIZE (mode) != 0; | |
322 | } | |
323 | ||
324 | /* - or it is the pic register plus a constant. */ | |
325 | if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1))) | |
326 | return 0; | |
327 | ||
328 | return 1; | |
2c88418c RS |
329 | |
330 | case LO_SUM: | |
4f73495e | 331 | case PRE_MODIFY: |
2358ff91 | 332 | return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems); |
4f73495e RH |
333 | |
334 | case PRE_DEC: | |
335 | case PRE_INC: | |
336 | case POST_DEC: | |
337 | case POST_INC: | |
338 | case POST_MODIFY: | |
2358ff91 | 339 | return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems); |
4f73495e | 340 | |
e9a25f70 JL |
341 | default: |
342 | break; | |
2c88418c RS |
343 | } |
344 | ||
345 | /* If it isn't one of the case above, it can cause a trap. */ | |
346 | return 1; | |
347 | } | |
348 | ||
2358ff91 EB |
349 | /* Return nonzero if the use of X as an address in a MEM can cause a trap. */ |
350 | ||
351 | int | |
f7d504c2 | 352 | rtx_addr_can_trap_p (const_rtx x) |
2358ff91 EB |
353 | { |
354 | return rtx_addr_can_trap_p_1 (x, VOIDmode, false); | |
355 | } | |
356 | ||
4977bab6 ZW |
357 | /* Return true if X is an address that is known to not be zero. */ |
358 | ||
359 | bool | |
f7d504c2 | 360 | nonzero_address_p (const_rtx x) |
4977bab6 | 361 | { |
f7d504c2 | 362 | const enum rtx_code code = GET_CODE (x); |
4977bab6 ZW |
363 | |
364 | switch (code) | |
365 | { | |
366 | case SYMBOL_REF: | |
367 | return !SYMBOL_REF_WEAK (x); | |
368 | ||
369 | case LABEL_REF: | |
370 | return true; | |
371 | ||
4977bab6 ZW |
372 | case REG: |
373 | /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */ | |
374 | if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx | |
375 | || x == stack_pointer_rtx | |
376 | || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) | |
377 | return true; | |
378 | /* All of the virtual frame registers are stack references. */ | |
379 | if (REGNO (x) >= FIRST_VIRTUAL_REGISTER | |
380 | && REGNO (x) <= LAST_VIRTUAL_REGISTER) | |
381 | return true; | |
382 | return false; | |
383 | ||
384 | case CONST: | |
385 | return nonzero_address_p (XEXP (x, 0)); | |
386 | ||
387 | case PLUS: | |
388 | if (GET_CODE (XEXP (x, 1)) == CONST_INT) | |
942d7821 | 389 | return nonzero_address_p (XEXP (x, 0)); |
4977bab6 ZW |
390 | /* Handle PIC references. */ |
391 | else if (XEXP (x, 0) == pic_offset_table_rtx | |
392 | && CONSTANT_P (XEXP (x, 1))) | |
393 | return true; | |
394 | return false; | |
395 | ||
396 | case PRE_MODIFY: | |
397 | /* Similar to the above; allow positive offsets. Further, since | |
398 | auto-inc is only allowed in memories, the register must be a | |
399 | pointer. */ | |
400 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
401 | && INTVAL (XEXP (x, 1)) > 0) | |
402 | return true; | |
403 | return nonzero_address_p (XEXP (x, 0)); | |
404 | ||
405 | case PRE_INC: | |
406 | /* Similarly. Further, the offset is always positive. */ | |
407 | return true; | |
408 | ||
409 | case PRE_DEC: | |
410 | case POST_DEC: | |
411 | case POST_INC: | |
412 | case POST_MODIFY: | |
413 | return nonzero_address_p (XEXP (x, 0)); | |
414 | ||
415 | case LO_SUM: | |
416 | return nonzero_address_p (XEXP (x, 1)); | |
417 | ||
418 | default: | |
419 | break; | |
420 | } | |
421 | ||
422 | /* If it isn't one of the case above, might be zero. */ | |
423 | return false; | |
424 | } | |
425 | ||
a6a2274a | 426 | /* Return 1 if X refers to a memory location whose address |
2c88418c | 427 | cannot be compared reliably with constant addresses, |
a6a2274a | 428 | or if X refers to a BLKmode memory object. |
e38fe8e0 BS |
429 | FOR_ALIAS is nonzero if we are called from alias analysis; if it is |
430 | zero, we are slightly more conservative. */ | |
2c88418c | 431 | |
4f588890 KG |
432 | bool |
433 | rtx_addr_varies_p (const_rtx x, bool for_alias) | |
2c88418c | 434 | { |
b3694847 SS |
435 | enum rtx_code code; |
436 | int i; | |
437 | const char *fmt; | |
2c88418c RS |
438 | |
439 | if (x == 0) | |
440 | return 0; | |
441 | ||
442 | code = GET_CODE (x); | |
443 | if (code == MEM) | |
e38fe8e0 | 444 | return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias); |
2c88418c RS |
445 | |
446 | fmt = GET_RTX_FORMAT (code); | |
447 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
448 | if (fmt[i] == 'e') | |
833c0b26 | 449 | { |
e38fe8e0 | 450 | if (rtx_addr_varies_p (XEXP (x, i), for_alias)) |
833c0b26 RK |
451 | return 1; |
452 | } | |
453 | else if (fmt[i] == 'E') | |
454 | { | |
455 | int j; | |
456 | for (j = 0; j < XVECLEN (x, i); j++) | |
e38fe8e0 | 457 | if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias)) |
833c0b26 RK |
458 | return 1; |
459 | } | |
2c88418c RS |
460 | return 0; |
461 | } | |
462 | \f | |
463 | /* Return the value of the integer term in X, if one is apparent; | |
464 | otherwise return 0. | |
465 | Only obvious integer terms are detected. | |
3ef42a0c | 466 | This is used in cse.c with the `related_value' field. */ |
2c88418c | 467 | |
c166a311 | 468 | HOST_WIDE_INT |
f7d504c2 | 469 | get_integer_term (const_rtx x) |
2c88418c RS |
470 | { |
471 | if (GET_CODE (x) == CONST) | |
472 | x = XEXP (x, 0); | |
473 | ||
474 | if (GET_CODE (x) == MINUS | |
475 | && GET_CODE (XEXP (x, 1)) == CONST_INT) | |
476 | return - INTVAL (XEXP (x, 1)); | |
477 | if (GET_CODE (x) == PLUS | |
478 | && GET_CODE (XEXP (x, 1)) == CONST_INT) | |
479 | return INTVAL (XEXP (x, 1)); | |
480 | return 0; | |
481 | } | |
482 | ||
483 | /* If X is a constant, return the value sans apparent integer term; | |
484 | otherwise return 0. | |
485 | Only obvious integer terms are detected. */ | |
486 | ||
487 | rtx | |
f7d504c2 | 488 | get_related_value (const_rtx x) |
2c88418c RS |
489 | { |
490 | if (GET_CODE (x) != CONST) | |
491 | return 0; | |
492 | x = XEXP (x, 0); | |
493 | if (GET_CODE (x) == PLUS | |
494 | && GET_CODE (XEXP (x, 1)) == CONST_INT) | |
495 | return XEXP (x, 0); | |
496 | else if (GET_CODE (x) == MINUS | |
497 | && GET_CODE (XEXP (x, 1)) == CONST_INT) | |
498 | return XEXP (x, 0); | |
499 | return 0; | |
500 | } | |
501 | \f | |
7ffb5e78 RS |
502 | /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points |
503 | to somewhere in the same object or object_block as SYMBOL. */ | |
504 | ||
505 | bool | |
f7d504c2 | 506 | offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset) |
7ffb5e78 RS |
507 | { |
508 | tree decl; | |
509 | ||
510 | if (GET_CODE (symbol) != SYMBOL_REF) | |
511 | return false; | |
512 | ||
513 | if (offset == 0) | |
514 | return true; | |
515 | ||
516 | if (offset > 0) | |
517 | { | |
518 | if (CONSTANT_POOL_ADDRESS_P (symbol) | |
519 | && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol))) | |
520 | return true; | |
521 | ||
522 | decl = SYMBOL_REF_DECL (symbol); | |
523 | if (decl && offset < int_size_in_bytes (TREE_TYPE (decl))) | |
524 | return true; | |
525 | } | |
526 | ||
527 | if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) | |
528 | && SYMBOL_REF_BLOCK (symbol) | |
529 | && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0 | |
530 | && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol) | |
531 | < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size)) | |
532 | return true; | |
533 | ||
534 | return false; | |
535 | } | |
536 | ||
537 | /* Split X into a base and a constant offset, storing them in *BASE_OUT | |
538 | and *OFFSET_OUT respectively. */ | |
539 | ||
540 | void | |
541 | split_const (rtx x, rtx *base_out, rtx *offset_out) | |
542 | { | |
543 | if (GET_CODE (x) == CONST) | |
544 | { | |
545 | x = XEXP (x, 0); | |
546 | if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT) | |
547 | { | |
548 | *base_out = XEXP (x, 0); | |
549 | *offset_out = XEXP (x, 1); | |
550 | return; | |
551 | } | |
552 | } | |
553 | *base_out = x; | |
554 | *offset_out = const0_rtx; | |
555 | } | |
556 | \f | |
4b983fdc RH |
557 | /* Return the number of places FIND appears within X. If COUNT_DEST is |
558 | zero, we do not count occurrences inside the destination of a SET. */ | |
559 | ||
560 | int | |
f7d504c2 | 561 | count_occurrences (const_rtx x, const_rtx find, int count_dest) |
4b983fdc RH |
562 | { |
563 | int i, j; | |
564 | enum rtx_code code; | |
565 | const char *format_ptr; | |
566 | int count; | |
567 | ||
568 | if (x == find) | |
569 | return 1; | |
570 | ||
571 | code = GET_CODE (x); | |
572 | ||
573 | switch (code) | |
574 | { | |
575 | case REG: | |
576 | case CONST_INT: | |
577 | case CONST_DOUBLE: | |
091a3ac7 | 578 | case CONST_FIXED: |
69ef87e2 | 579 | case CONST_VECTOR: |
4b983fdc RH |
580 | case SYMBOL_REF: |
581 | case CODE_LABEL: | |
582 | case PC: | |
583 | case CC0: | |
584 | return 0; | |
585 | ||
2372a062 BS |
586 | case EXPR_LIST: |
587 | count = count_occurrences (XEXP (x, 0), find, count_dest); | |
588 | if (XEXP (x, 1)) | |
589 | count += count_occurrences (XEXP (x, 1), find, count_dest); | |
590 | return count; | |
591 | ||
4b983fdc | 592 | case MEM: |
3c0cb5de | 593 | if (MEM_P (find) && rtx_equal_p (x, find)) |
4b983fdc RH |
594 | return 1; |
595 | break; | |
596 | ||
597 | case SET: | |
598 | if (SET_DEST (x) == find && ! count_dest) | |
599 | return count_occurrences (SET_SRC (x), find, count_dest); | |
600 | break; | |
601 | ||
602 | default: | |
603 | break; | |
604 | } | |
605 | ||
606 | format_ptr = GET_RTX_FORMAT (code); | |
607 | count = 0; | |
608 | ||
609 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
610 | { | |
611 | switch (*format_ptr++) | |
612 | { | |
613 | case 'e': | |
614 | count += count_occurrences (XEXP (x, i), find, count_dest); | |
615 | break; | |
616 | ||
617 | case 'E': | |
618 | for (j = 0; j < XVECLEN (x, i); j++) | |
619 | count += count_occurrences (XVECEXP (x, i, j), find, count_dest); | |
620 | break; | |
621 | } | |
622 | } | |
623 | return count; | |
624 | } | |
6fb5fa3c | 625 | |
4b983fdc | 626 | \f |
2c88418c RS |
627 | /* Nonzero if register REG appears somewhere within IN. |
628 | Also works if REG is not a register; in this case it checks | |
629 | for a subexpression of IN that is Lisp "equal" to REG. */ | |
630 | ||
631 | int | |
f7d504c2 | 632 | reg_mentioned_p (const_rtx reg, const_rtx in) |
2c88418c | 633 | { |
b3694847 SS |
634 | const char *fmt; |
635 | int i; | |
636 | enum rtx_code code; | |
2c88418c RS |
637 | |
638 | if (in == 0) | |
639 | return 0; | |
640 | ||
641 | if (reg == in) | |
642 | return 1; | |
643 | ||
644 | if (GET_CODE (in) == LABEL_REF) | |
645 | return reg == XEXP (in, 0); | |
646 | ||
647 | code = GET_CODE (in); | |
648 | ||
649 | switch (code) | |
650 | { | |
651 | /* Compare registers by number. */ | |
652 | case REG: | |
f8cfc6aa | 653 | return REG_P (reg) && REGNO (in) == REGNO (reg); |
2c88418c RS |
654 | |
655 | /* These codes have no constituent expressions | |
656 | and are unique. */ | |
657 | case SCRATCH: | |
658 | case CC0: | |
659 | case PC: | |
660 | return 0; | |
661 | ||
662 | case CONST_INT: | |
69ef87e2 | 663 | case CONST_VECTOR: |
2c88418c | 664 | case CONST_DOUBLE: |
091a3ac7 | 665 | case CONST_FIXED: |
2c88418c RS |
666 | /* These are kept unique for a given value. */ |
667 | return 0; | |
a6a2274a | 668 | |
e9a25f70 JL |
669 | default: |
670 | break; | |
2c88418c RS |
671 | } |
672 | ||
673 | if (GET_CODE (reg) == code && rtx_equal_p (reg, in)) | |
674 | return 1; | |
675 | ||
676 | fmt = GET_RTX_FORMAT (code); | |
677 | ||
678 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
679 | { | |
680 | if (fmt[i] == 'E') | |
681 | { | |
b3694847 | 682 | int j; |
2c88418c RS |
683 | for (j = XVECLEN (in, i) - 1; j >= 0; j--) |
684 | if (reg_mentioned_p (reg, XVECEXP (in, i, j))) | |
685 | return 1; | |
686 | } | |
687 | else if (fmt[i] == 'e' | |
688 | && reg_mentioned_p (reg, XEXP (in, i))) | |
689 | return 1; | |
690 | } | |
691 | return 0; | |
692 | } | |
693 | \f | |
694 | /* Return 1 if in between BEG and END, exclusive of BEG and END, there is | |
695 | no CODE_LABEL insn. */ | |
696 | ||
697 | int | |
f7d504c2 | 698 | no_labels_between_p (const_rtx beg, const_rtx end) |
2c88418c | 699 | { |
b3694847 | 700 | rtx p; |
978f547f JH |
701 | if (beg == end) |
702 | return 0; | |
2c88418c | 703 | for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p)) |
4b4bf941 | 704 | if (LABEL_P (p)) |
2c88418c RS |
705 | return 0; |
706 | return 1; | |
707 | } | |
708 | ||
709 | /* Nonzero if register REG is used in an insn between | |
710 | FROM_INSN and TO_INSN (exclusive of those two). */ | |
711 | ||
712 | int | |
f7d504c2 | 713 | reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn) |
2c88418c | 714 | { |
b3694847 | 715 | rtx insn; |
2c88418c RS |
716 | |
717 | if (from_insn == to_insn) | |
718 | return 0; | |
719 | ||
720 | for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn)) | |
2c3c49de | 721 | if (INSN_P (insn) |
8f3e7a26 | 722 | && (reg_overlap_mentioned_p (reg, PATTERN (insn)) |
76dd5923 | 723 | || (CALL_P (insn) && find_reg_fusage (insn, USE, reg)))) |
2c88418c RS |
724 | return 1; |
725 | return 0; | |
726 | } | |
727 | \f | |
728 | /* Nonzero if the old value of X, a register, is referenced in BODY. If X | |
729 | is entirely replaced by a new value and the only use is as a SET_DEST, | |
730 | we do not consider it a reference. */ | |
731 | ||
732 | int | |
f7d504c2 | 733 | reg_referenced_p (const_rtx x, const_rtx body) |
2c88418c RS |
734 | { |
735 | int i; | |
736 | ||
737 | switch (GET_CODE (body)) | |
738 | { | |
739 | case SET: | |
740 | if (reg_overlap_mentioned_p (x, SET_SRC (body))) | |
741 | return 1; | |
742 | ||
743 | /* If the destination is anything other than CC0, PC, a REG or a SUBREG | |
744 | of a REG that occupies all of the REG, the insn references X if | |
745 | it is mentioned in the destination. */ | |
746 | if (GET_CODE (SET_DEST (body)) != CC0 | |
747 | && GET_CODE (SET_DEST (body)) != PC | |
f8cfc6aa | 748 | && !REG_P (SET_DEST (body)) |
2c88418c | 749 | && ! (GET_CODE (SET_DEST (body)) == SUBREG |
f8cfc6aa | 750 | && REG_P (SUBREG_REG (SET_DEST (body))) |
2c88418c RS |
751 | && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body)))) |
752 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD) | |
753 | == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body))) | |
754 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))) | |
755 | && reg_overlap_mentioned_p (x, SET_DEST (body))) | |
756 | return 1; | |
e9a25f70 | 757 | return 0; |
2c88418c RS |
758 | |
759 | case ASM_OPERANDS: | |
760 | for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--) | |
761 | if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i))) | |
762 | return 1; | |
e9a25f70 | 763 | return 0; |
2c88418c RS |
764 | |
765 | case CALL: | |
766 | case USE: | |
14a774a9 | 767 | case IF_THEN_ELSE: |
2c88418c RS |
768 | return reg_overlap_mentioned_p (x, body); |
769 | ||
770 | case TRAP_IF: | |
771 | return reg_overlap_mentioned_p (x, TRAP_CONDITION (body)); | |
772 | ||
21b8482a JJ |
773 | case PREFETCH: |
774 | return reg_overlap_mentioned_p (x, XEXP (body, 0)); | |
775 | ||
2ac4fed0 RK |
776 | case UNSPEC: |
777 | case UNSPEC_VOLATILE: | |
2f9fb4c2 R |
778 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) |
779 | if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i))) | |
780 | return 1; | |
781 | return 0; | |
782 | ||
2c88418c RS |
783 | case PARALLEL: |
784 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
785 | if (reg_referenced_p (x, XVECEXP (body, 0, i))) | |
786 | return 1; | |
e9a25f70 | 787 | return 0; |
a6a2274a | 788 | |
0d3ffb5a | 789 | case CLOBBER: |
3c0cb5de | 790 | if (MEM_P (XEXP (body, 0))) |
0d3ffb5a GK |
791 | if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0))) |
792 | return 1; | |
793 | return 0; | |
794 | ||
0c99ec5c RH |
795 | case COND_EXEC: |
796 | if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body))) | |
797 | return 1; | |
798 | return reg_referenced_p (x, COND_EXEC_CODE (body)); | |
799 | ||
e9a25f70 JL |
800 | default: |
801 | return 0; | |
2c88418c | 802 | } |
2c88418c | 803 | } |
2c88418c RS |
804 | \f |
805 | /* Nonzero if register REG is set or clobbered in an insn between | |
806 | FROM_INSN and TO_INSN (exclusive of those two). */ | |
807 | ||
808 | int | |
ed7a4b4b | 809 | reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn) |
2c88418c | 810 | { |
ed7a4b4b | 811 | const_rtx insn; |
2c88418c RS |
812 | |
813 | if (from_insn == to_insn) | |
814 | return 0; | |
815 | ||
816 | for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn)) | |
2c3c49de | 817 | if (INSN_P (insn) && reg_set_p (reg, insn)) |
2c88418c RS |
818 | return 1; |
819 | return 0; | |
820 | } | |
821 | ||
822 | /* Internals of reg_set_between_p. */ | |
2c88418c | 823 | int |
ed7a4b4b | 824 | reg_set_p (const_rtx reg, const_rtx insn) |
2c88418c | 825 | { |
2c88418c RS |
826 | /* We can be passed an insn or part of one. If we are passed an insn, |
827 | check if a side-effect of the insn clobbers REG. */ | |
4977bab6 ZW |
828 | if (INSN_P (insn) |
829 | && (FIND_REG_INC_NOTE (insn, reg) | |
4b4bf941 | 830 | || (CALL_P (insn) |
f8cfc6aa | 831 | && ((REG_P (reg) |
4f1605d2 | 832 | && REGNO (reg) < FIRST_PSEUDO_REGISTER |
5da20cfe RS |
833 | && overlaps_hard_reg_set_p (regs_invalidated_by_call, |
834 | GET_MODE (reg), REGNO (reg))) | |
3c0cb5de | 835 | || MEM_P (reg) |
4977bab6 ZW |
836 | || find_reg_fusage (insn, CLOBBER, reg))))) |
837 | return 1; | |
2c88418c | 838 | |
91b2d119 | 839 | return set_of (reg, insn) != NULL_RTX; |
2c88418c RS |
840 | } |
841 | ||
842 | /* Similar to reg_set_between_p, but check all registers in X. Return 0 | |
843 | only if none of them are modified between START and END. Return 1 if | |
7b52eede | 844 | X contains a MEM; this routine does usememory aliasing. */ |
2c88418c RS |
845 | |
846 | int | |
9678086d | 847 | modified_between_p (const_rtx x, const_rtx start, const_rtx end) |
2c88418c | 848 | { |
9678086d | 849 | const enum rtx_code code = GET_CODE (x); |
6f7d635c | 850 | const char *fmt; |
f8163c92 | 851 | int i, j; |
7b52eede JH |
852 | rtx insn; |
853 | ||
854 | if (start == end) | |
855 | return 0; | |
2c88418c RS |
856 | |
857 | switch (code) | |
858 | { | |
859 | case CONST_INT: | |
860 | case CONST_DOUBLE: | |
091a3ac7 | 861 | case CONST_FIXED: |
69ef87e2 | 862 | case CONST_VECTOR: |
2c88418c RS |
863 | case CONST: |
864 | case SYMBOL_REF: | |
865 | case LABEL_REF: | |
866 | return 0; | |
867 | ||
868 | case PC: | |
869 | case CC0: | |
870 | return 1; | |
871 | ||
872 | case MEM: | |
7b52eede | 873 | if (modified_between_p (XEXP (x, 0), start, end)) |
2c88418c | 874 | return 1; |
550b7784 KK |
875 | if (MEM_READONLY_P (x)) |
876 | return 0; | |
7b52eede JH |
877 | for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn)) |
878 | if (memory_modified_in_insn_p (x, insn)) | |
879 | return 1; | |
880 | return 0; | |
2c88418c RS |
881 | break; |
882 | ||
883 | case REG: | |
884 | return reg_set_between_p (x, start, end); | |
a6a2274a | 885 | |
e9a25f70 JL |
886 | default: |
887 | break; | |
2c88418c RS |
888 | } |
889 | ||
890 | fmt = GET_RTX_FORMAT (code); | |
891 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
f8163c92 RK |
892 | { |
893 | if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end)) | |
894 | return 1; | |
895 | ||
d4757e6a | 896 | else if (fmt[i] == 'E') |
f8163c92 RK |
897 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
898 | if (modified_between_p (XVECEXP (x, i, j), start, end)) | |
899 | return 1; | |
900 | } | |
901 | ||
902 | return 0; | |
903 | } | |
904 | ||
905 | /* Similar to reg_set_p, but check all registers in X. Return 0 only if none | |
906 | of them are modified in INSN. Return 1 if X contains a MEM; this routine | |
7b52eede | 907 | does use memory aliasing. */ |
f8163c92 RK |
908 | |
909 | int | |
9678086d | 910 | modified_in_p (const_rtx x, const_rtx insn) |
f8163c92 | 911 | { |
9678086d | 912 | const enum rtx_code code = GET_CODE (x); |
6f7d635c | 913 | const char *fmt; |
f8163c92 RK |
914 | int i, j; |
915 | ||
916 | switch (code) | |
917 | { | |
918 | case CONST_INT: | |
919 | case CONST_DOUBLE: | |
091a3ac7 | 920 | case CONST_FIXED: |
69ef87e2 | 921 | case CONST_VECTOR: |
f8163c92 RK |
922 | case CONST: |
923 | case SYMBOL_REF: | |
924 | case LABEL_REF: | |
925 | return 0; | |
926 | ||
927 | case PC: | |
928 | case CC0: | |
2c88418c RS |
929 | return 1; |
930 | ||
f8163c92 | 931 | case MEM: |
7b52eede | 932 | if (modified_in_p (XEXP (x, 0), insn)) |
f8163c92 | 933 | return 1; |
550b7784 KK |
934 | if (MEM_READONLY_P (x)) |
935 | return 0; | |
7b52eede JH |
936 | if (memory_modified_in_insn_p (x, insn)) |
937 | return 1; | |
938 | return 0; | |
f8163c92 RK |
939 | break; |
940 | ||
941 | case REG: | |
942 | return reg_set_p (x, insn); | |
e9a25f70 JL |
943 | |
944 | default: | |
945 | break; | |
f8163c92 RK |
946 | } |
947 | ||
948 | fmt = GET_RTX_FORMAT (code); | |
949 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
950 | { | |
951 | if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn)) | |
952 | return 1; | |
953 | ||
d4757e6a | 954 | else if (fmt[i] == 'E') |
f8163c92 RK |
955 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
956 | if (modified_in_p (XVECEXP (x, i, j), insn)) | |
957 | return 1; | |
958 | } | |
959 | ||
2c88418c RS |
960 | return 0; |
961 | } | |
962 | \f | |
91b2d119 JH |
963 | /* Helper function for set_of. */ |
964 | struct set_of_data | |
965 | { | |
7bc980e1 KG |
966 | const_rtx found; |
967 | const_rtx pat; | |
91b2d119 JH |
968 | }; |
969 | ||
970 | static void | |
7bc980e1 | 971 | set_of_1 (rtx x, const_rtx pat, void *data1) |
91b2d119 | 972 | { |
7bc980e1 KG |
973 | struct set_of_data *const data = (struct set_of_data *) (data1); |
974 | if (rtx_equal_p (x, data->pat) | |
975 | || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x))) | |
976 | data->found = pat; | |
91b2d119 JH |
977 | } |
978 | ||
979 | /* Give an INSN, return a SET or CLOBBER expression that does modify PAT | |
eaec9b3d | 980 | (either directly or via STRICT_LOW_PART and similar modifiers). */ |
7bc980e1 KG |
981 | const_rtx |
982 | set_of (const_rtx pat, const_rtx insn) | |
91b2d119 JH |
983 | { |
984 | struct set_of_data data; | |
985 | data.found = NULL_RTX; | |
986 | data.pat = pat; | |
987 | note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data); | |
988 | return data.found; | |
989 | } | |
990 | \f | |
2c88418c RS |
991 | /* Given an INSN, return a SET expression if this insn has only a single SET. |
992 | It may also have CLOBBERs, USEs, or SET whose output | |
993 | will not be used, which we ignore. */ | |
994 | ||
995 | rtx | |
f7d504c2 | 996 | single_set_2 (const_rtx insn, const_rtx pat) |
2c88418c | 997 | { |
c9b89a21 JH |
998 | rtx set = NULL; |
999 | int set_verified = 1; | |
2c88418c | 1000 | int i; |
c9b89a21 | 1001 | |
b1cdafbb | 1002 | if (GET_CODE (pat) == PARALLEL) |
2c88418c | 1003 | { |
c9b89a21 | 1004 | for (i = 0; i < XVECLEN (pat, 0); i++) |
b1cdafbb | 1005 | { |
c9b89a21 JH |
1006 | rtx sub = XVECEXP (pat, 0, i); |
1007 | switch (GET_CODE (sub)) | |
1008 | { | |
1009 | case USE: | |
1010 | case CLOBBER: | |
1011 | break; | |
1012 | ||
1013 | case SET: | |
1014 | /* We can consider insns having multiple sets, where all | |
1015 | but one are dead as single set insns. In common case | |
1016 | only single set is present in the pattern so we want | |
f63d1bf7 | 1017 | to avoid checking for REG_UNUSED notes unless necessary. |
c9b89a21 JH |
1018 | |
1019 | When we reach set first time, we just expect this is | |
1020 | the single set we are looking for and only when more | |
1021 | sets are found in the insn, we check them. */ | |
1022 | if (!set_verified) | |
1023 | { | |
1024 | if (find_reg_note (insn, REG_UNUSED, SET_DEST (set)) | |
1025 | && !side_effects_p (set)) | |
1026 | set = NULL; | |
1027 | else | |
1028 | set_verified = 1; | |
1029 | } | |
1030 | if (!set) | |
1031 | set = sub, set_verified = 0; | |
1032 | else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub)) | |
1033 | || side_effects_p (sub)) | |
1034 | return NULL_RTX; | |
1035 | break; | |
1036 | ||
1037 | default: | |
1038 | return NULL_RTX; | |
1039 | } | |
787ccee0 | 1040 | } |
2c88418c | 1041 | } |
c9b89a21 | 1042 | return set; |
2c88418c | 1043 | } |
941c63ac JL |
1044 | |
1045 | /* Given an INSN, return nonzero if it has more than one SET, else return | |
1046 | zero. */ | |
1047 | ||
5f7d3786 | 1048 | int |
f7d504c2 | 1049 | multiple_sets (const_rtx insn) |
941c63ac | 1050 | { |
cae8acdd | 1051 | int found; |
941c63ac | 1052 | int i; |
a6a2274a | 1053 | |
941c63ac | 1054 | /* INSN must be an insn. */ |
2c3c49de | 1055 | if (! INSN_P (insn)) |
941c63ac JL |
1056 | return 0; |
1057 | ||
1058 | /* Only a PARALLEL can have multiple SETs. */ | |
1059 | if (GET_CODE (PATTERN (insn)) == PARALLEL) | |
1060 | { | |
1061 | for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++) | |
1062 | if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET) | |
1063 | { | |
1064 | /* If we have already found a SET, then return now. */ | |
1065 | if (found) | |
1066 | return 1; | |
1067 | else | |
1068 | found = 1; | |
1069 | } | |
1070 | } | |
a6a2274a | 1071 | |
941c63ac JL |
1072 | /* Either zero or one SET. */ |
1073 | return 0; | |
1074 | } | |
2c88418c | 1075 | \f |
7142e318 JW |
1076 | /* Return nonzero if the destination of SET equals the source |
1077 | and there are no side effects. */ | |
1078 | ||
1079 | int | |
f7d504c2 | 1080 | set_noop_p (const_rtx set) |
7142e318 JW |
1081 | { |
1082 | rtx src = SET_SRC (set); | |
1083 | rtx dst = SET_DEST (set); | |
1084 | ||
371b8fc0 JH |
1085 | if (dst == pc_rtx && src == pc_rtx) |
1086 | return 1; | |
1087 | ||
3c0cb5de | 1088 | if (MEM_P (dst) && MEM_P (src)) |
cd648cec JH |
1089 | return rtx_equal_p (dst, src) && !side_effects_p (dst); |
1090 | ||
46d096a3 | 1091 | if (GET_CODE (dst) == ZERO_EXTRACT) |
7142e318 | 1092 | return rtx_equal_p (XEXP (dst, 0), src) |
cd648cec JH |
1093 | && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx |
1094 | && !side_effects_p (src); | |
7142e318 JW |
1095 | |
1096 | if (GET_CODE (dst) == STRICT_LOW_PART) | |
1097 | dst = XEXP (dst, 0); | |
1098 | ||
1099 | if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG) | |
1100 | { | |
1101 | if (SUBREG_BYTE (src) != SUBREG_BYTE (dst)) | |
1102 | return 0; | |
1103 | src = SUBREG_REG (src); | |
1104 | dst = SUBREG_REG (dst); | |
1105 | } | |
1106 | ||
f8cfc6aa | 1107 | return (REG_P (src) && REG_P (dst) |
7142e318 JW |
1108 | && REGNO (src) == REGNO (dst)); |
1109 | } | |
0005550b JH |
1110 | \f |
1111 | /* Return nonzero if an insn consists only of SETs, each of which only sets a | |
1112 | value to itself. */ | |
1113 | ||
1114 | int | |
fa233e34 | 1115 | noop_move_p (const_rtx insn) |
0005550b JH |
1116 | { |
1117 | rtx pat = PATTERN (insn); | |
1118 | ||
b5832b43 JH |
1119 | if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE) |
1120 | return 1; | |
1121 | ||
0005550b JH |
1122 | /* Insns carrying these notes are useful later on. */ |
1123 | if (find_reg_note (insn, REG_EQUAL, NULL_RTX)) | |
1124 | return 0; | |
1125 | ||
eb9d8e4d JW |
1126 | /* For now treat an insn with a REG_RETVAL note as a |
1127 | a special insn which should not be considered a no-op. */ | |
1128 | if (find_reg_note (insn, REG_RETVAL, NULL_RTX)) | |
1129 | return 0; | |
1130 | ||
0005550b JH |
1131 | if (GET_CODE (pat) == SET && set_noop_p (pat)) |
1132 | return 1; | |
1133 | ||
1134 | if (GET_CODE (pat) == PARALLEL) | |
1135 | { | |
1136 | int i; | |
1137 | /* If nothing but SETs of registers to themselves, | |
1138 | this insn can also be deleted. */ | |
1139 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
1140 | { | |
1141 | rtx tem = XVECEXP (pat, 0, i); | |
1142 | ||
1143 | if (GET_CODE (tem) == USE | |
1144 | || GET_CODE (tem) == CLOBBER) | |
1145 | continue; | |
1146 | ||
1147 | if (GET_CODE (tem) != SET || ! set_noop_p (tem)) | |
1148 | return 0; | |
1149 | } | |
1150 | ||
1151 | return 1; | |
1152 | } | |
1153 | return 0; | |
1154 | } | |
1155 | \f | |
7142e318 | 1156 | |
63be01fb JW |
1157 | /* Return the last thing that X was assigned from before *PINSN. If VALID_TO |
1158 | is not NULL_RTX then verify that the object is not modified up to VALID_TO. | |
1159 | If the object was modified, if we hit a partial assignment to X, or hit a | |
1160 | CODE_LABEL first, return X. If we found an assignment, update *PINSN to | |
1161 | point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to | |
1162 | be the src. */ | |
2c88418c RS |
1163 | |
1164 | rtx | |
0c20a65f | 1165 | find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg) |
2c88418c RS |
1166 | { |
1167 | rtx p; | |
1168 | ||
4b4bf941 | 1169 | for (p = PREV_INSN (*pinsn); p && !LABEL_P (p); |
2c88418c | 1170 | p = PREV_INSN (p)) |
2c3c49de | 1171 | if (INSN_P (p)) |
2c88418c RS |
1172 | { |
1173 | rtx set = single_set (p); | |
c166a311 | 1174 | rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX); |
2c88418c RS |
1175 | |
1176 | if (set && rtx_equal_p (x, SET_DEST (set))) | |
1177 | { | |
1178 | rtx src = SET_SRC (set); | |
1179 | ||
1180 | if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST) | |
1181 | src = XEXP (note, 0); | |
1182 | ||
63be01fb JW |
1183 | if ((valid_to == NULL_RTX |
1184 | || ! modified_between_p (src, PREV_INSN (p), valid_to)) | |
2c88418c RS |
1185 | /* Reject hard registers because we don't usually want |
1186 | to use them; we'd rather use a pseudo. */ | |
f8cfc6aa | 1187 | && (! (REG_P (src) |
89d3d442 | 1188 | && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg)) |
2c88418c RS |
1189 | { |
1190 | *pinsn = p; | |
1191 | return src; | |
1192 | } | |
1193 | } | |
a6a2274a | 1194 | |
2c88418c RS |
1195 | /* If set in non-simple way, we don't have a value. */ |
1196 | if (reg_set_p (x, p)) | |
1197 | break; | |
1198 | } | |
1199 | ||
1200 | return x; | |
a6a2274a | 1201 | } |
2c88418c RS |
1202 | \f |
1203 | /* Return nonzero if register in range [REGNO, ENDREGNO) | |
1204 | appears either explicitly or implicitly in X | |
1205 | other than being stored into. | |
1206 | ||
1207 | References contained within the substructure at LOC do not count. | |
1208 | LOC may be zero, meaning don't ignore anything. */ | |
1209 | ||
1210 | int | |
f7d504c2 | 1211 | refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x, |
0c20a65f | 1212 | rtx *loc) |
2c88418c | 1213 | { |
770ae6cc RK |
1214 | int i; |
1215 | unsigned int x_regno; | |
1216 | RTX_CODE code; | |
1217 | const char *fmt; | |
2c88418c RS |
1218 | |
1219 | repeat: | |
1220 | /* The contents of a REG_NONNEG note is always zero, so we must come here | |
1221 | upon repeat in case the last REG_NOTE is a REG_NONNEG note. */ | |
1222 | if (x == 0) | |
1223 | return 0; | |
1224 | ||
1225 | code = GET_CODE (x); | |
1226 | ||
1227 | switch (code) | |
1228 | { | |
1229 | case REG: | |
770ae6cc | 1230 | x_regno = REGNO (x); |
f8163c92 RK |
1231 | |
1232 | /* If we modifying the stack, frame, or argument pointer, it will | |
1233 | clobber a virtual register. In fact, we could be more precise, | |
1234 | but it isn't worth it. */ | |
770ae6cc | 1235 | if ((x_regno == STACK_POINTER_REGNUM |
f8163c92 | 1236 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM |
770ae6cc | 1237 | || x_regno == ARG_POINTER_REGNUM |
f8163c92 | 1238 | #endif |
770ae6cc | 1239 | || x_regno == FRAME_POINTER_REGNUM) |
f8163c92 RK |
1240 | && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER) |
1241 | return 1; | |
1242 | ||
09e18274 | 1243 | return endregno > x_regno && regno < END_REGNO (x); |
2c88418c RS |
1244 | |
1245 | case SUBREG: | |
1246 | /* If this is a SUBREG of a hard reg, we can see exactly which | |
1247 | registers are being modified. Otherwise, handle normally. */ | |
f8cfc6aa | 1248 | if (REG_P (SUBREG_REG (x)) |
2c88418c RS |
1249 | && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER) |
1250 | { | |
ddef6bc7 | 1251 | unsigned int inner_regno = subreg_regno (x); |
770ae6cc | 1252 | unsigned int inner_endregno |
403c659c | 1253 | = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER |
f1f4e530 | 1254 | ? subreg_nregs (x) : 1); |
2c88418c RS |
1255 | |
1256 | return endregno > inner_regno && regno < inner_endregno; | |
1257 | } | |
1258 | break; | |
1259 | ||
1260 | case CLOBBER: | |
1261 | case SET: | |
1262 | if (&SET_DEST (x) != loc | |
1263 | /* Note setting a SUBREG counts as referring to the REG it is in for | |
1264 | a pseudo but not for hard registers since we can | |
1265 | treat each word individually. */ | |
1266 | && ((GET_CODE (SET_DEST (x)) == SUBREG | |
1267 | && loc != &SUBREG_REG (SET_DEST (x)) | |
f8cfc6aa | 1268 | && REG_P (SUBREG_REG (SET_DEST (x))) |
2c88418c RS |
1269 | && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER |
1270 | && refers_to_regno_p (regno, endregno, | |
1271 | SUBREG_REG (SET_DEST (x)), loc)) | |
f8cfc6aa | 1272 | || (!REG_P (SET_DEST (x)) |
2c88418c RS |
1273 | && refers_to_regno_p (regno, endregno, SET_DEST (x), loc)))) |
1274 | return 1; | |
1275 | ||
1276 | if (code == CLOBBER || loc == &SET_SRC (x)) | |
1277 | return 0; | |
1278 | x = SET_SRC (x); | |
1279 | goto repeat; | |
e9a25f70 JL |
1280 | |
1281 | default: | |
1282 | break; | |
2c88418c RS |
1283 | } |
1284 | ||
1285 | /* X does not match, so try its subexpressions. */ | |
1286 | ||
1287 | fmt = GET_RTX_FORMAT (code); | |
1288 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
1289 | { | |
1290 | if (fmt[i] == 'e' && loc != &XEXP (x, i)) | |
1291 | { | |
1292 | if (i == 0) | |
1293 | { | |
1294 | x = XEXP (x, 0); | |
1295 | goto repeat; | |
1296 | } | |
1297 | else | |
1298 | if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc)) | |
1299 | return 1; | |
1300 | } | |
1301 | else if (fmt[i] == 'E') | |
1302 | { | |
b3694847 | 1303 | int j; |
6a87d634 | 1304 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
2c88418c RS |
1305 | if (loc != &XVECEXP (x, i, j) |
1306 | && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc)) | |
1307 | return 1; | |
1308 | } | |
1309 | } | |
1310 | return 0; | |
1311 | } | |
1312 | ||
1313 | /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG, | |
1314 | we check if any register number in X conflicts with the relevant register | |
1315 | numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN | |
1316 | contains a MEM (we don't bother checking for memory addresses that can't | |
1317 | conflict because we expect this to be a rare case. */ | |
1318 | ||
1319 | int | |
f7d504c2 | 1320 | reg_overlap_mentioned_p (const_rtx x, const_rtx in) |
2c88418c | 1321 | { |
770ae6cc | 1322 | unsigned int regno, endregno; |
2c88418c | 1323 | |
6f626d1b PB |
1324 | /* If either argument is a constant, then modifying X can not |
1325 | affect IN. Here we look at IN, we can profitably combine | |
1326 | CONSTANT_P (x) with the switch statement below. */ | |
1327 | if (CONSTANT_P (in)) | |
b98b49ac | 1328 | return 0; |
0c99ec5c | 1329 | |
6f626d1b | 1330 | recurse: |
0c99ec5c | 1331 | switch (GET_CODE (x)) |
2c88418c | 1332 | { |
6f626d1b PB |
1333 | case STRICT_LOW_PART: |
1334 | case ZERO_EXTRACT: | |
1335 | case SIGN_EXTRACT: | |
1336 | /* Overly conservative. */ | |
1337 | x = XEXP (x, 0); | |
1338 | goto recurse; | |
1339 | ||
0c99ec5c | 1340 | case SUBREG: |
2c88418c RS |
1341 | regno = REGNO (SUBREG_REG (x)); |
1342 | if (regno < FIRST_PSEUDO_REGISTER) | |
ddef6bc7 | 1343 | regno = subreg_regno (x); |
f1f4e530 JM |
1344 | endregno = regno + (regno < FIRST_PSEUDO_REGISTER |
1345 | ? subreg_nregs (x) : 1); | |
0c99ec5c | 1346 | goto do_reg; |
2c88418c | 1347 | |
0c99ec5c RH |
1348 | case REG: |
1349 | regno = REGNO (x); | |
09e18274 | 1350 | endregno = END_REGNO (x); |
f1f4e530 | 1351 | do_reg: |
8e2e89f7 | 1352 | return refers_to_regno_p (regno, endregno, in, (rtx*) 0); |
2c88418c | 1353 | |
0c99ec5c RH |
1354 | case MEM: |
1355 | { | |
1356 | const char *fmt; | |
1357 | int i; | |
2c88418c | 1358 | |
3c0cb5de | 1359 | if (MEM_P (in)) |
2c88418c RS |
1360 | return 1; |
1361 | ||
0c99ec5c RH |
1362 | fmt = GET_RTX_FORMAT (GET_CODE (in)); |
1363 | for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--) | |
3b009185 RH |
1364 | if (fmt[i] == 'e') |
1365 | { | |
1366 | if (reg_overlap_mentioned_p (x, XEXP (in, i))) | |
1367 | return 1; | |
1368 | } | |
1369 | else if (fmt[i] == 'E') | |
1370 | { | |
1371 | int j; | |
1372 | for (j = XVECLEN (in, i) - 1; j >= 0; --j) | |
1373 | if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j))) | |
1374 | return 1; | |
1375 | } | |
c0222c21 | 1376 | |
0c99ec5c RH |
1377 | return 0; |
1378 | } | |
1379 | ||
1380 | case SCRATCH: | |
1381 | case PC: | |
1382 | case CC0: | |
1383 | return reg_mentioned_p (x, in); | |
1384 | ||
1385 | case PARALLEL: | |
37ceff9d | 1386 | { |
90d036a0 | 1387 | int i; |
37ceff9d RH |
1388 | |
1389 | /* If any register in here refers to it we return true. */ | |
7193d1dc RK |
1390 | for (i = XVECLEN (x, 0) - 1; i >= 0; i--) |
1391 | if (XEXP (XVECEXP (x, 0, i), 0) != 0 | |
1392 | && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in)) | |
6f626d1b | 1393 | return 1; |
7193d1dc | 1394 | return 0; |
37ceff9d | 1395 | } |
2c88418c | 1396 | |
0c99ec5c | 1397 | default: |
41374e13 | 1398 | gcc_assert (CONSTANT_P (x)); |
6f626d1b PB |
1399 | return 0; |
1400 | } | |
2c88418c RS |
1401 | } |
1402 | \f | |
2c88418c | 1403 | /* Call FUN on each register or MEM that is stored into or clobbered by X. |
c3a1ef9d MM |
1404 | (X would be the pattern of an insn). DATA is an arbitrary pointer, |
1405 | ignored by note_stores, but passed to FUN. | |
1406 | ||
1407 | FUN receives three arguments: | |
1408 | 1. the REG, MEM, CC0 or PC being stored in or clobbered, | |
1409 | 2. the SET or CLOBBER rtx that does the store, | |
1410 | 3. the pointer DATA provided to note_stores. | |
2c88418c RS |
1411 | |
1412 | If the item being stored in or clobbered is a SUBREG of a hard register, | |
1413 | the SUBREG will be passed. */ | |
a6a2274a | 1414 | |
2c88418c | 1415 | void |
7bc980e1 | 1416 | note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data) |
2c88418c | 1417 | { |
aa317c97 | 1418 | int i; |
90d036a0 | 1419 | |
aa317c97 KG |
1420 | if (GET_CODE (x) == COND_EXEC) |
1421 | x = COND_EXEC_CODE (x); | |
90d036a0 | 1422 | |
aa317c97 KG |
1423 | if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER) |
1424 | { | |
1425 | rtx dest = SET_DEST (x); | |
1426 | ||
1427 | while ((GET_CODE (dest) == SUBREG | |
1428 | && (!REG_P (SUBREG_REG (dest)) | |
1429 | || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER)) | |
1430 | || GET_CODE (dest) == ZERO_EXTRACT | |
1431 | || GET_CODE (dest) == STRICT_LOW_PART) | |
1432 | dest = XEXP (dest, 0); | |
1433 | ||
1434 | /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions, | |
1435 | each of whose first operand is a register. */ | |
1436 | if (GET_CODE (dest) == PARALLEL) | |
1437 | { | |
1438 | for (i = XVECLEN (dest, 0) - 1; i >= 0; i--) | |
1439 | if (XEXP (XVECEXP (dest, 0, i), 0) != 0) | |
1440 | (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data); | |
1441 | } | |
1442 | else | |
1443 | (*fun) (dest, x, data); | |
1444 | } | |
770ae6cc | 1445 | |
aa317c97 KG |
1446 | else if (GET_CODE (x) == PARALLEL) |
1447 | for (i = XVECLEN (x, 0) - 1; i >= 0; i--) | |
1448 | note_stores (XVECEXP (x, 0, i), fun, data); | |
1449 | } | |
2c88418c | 1450 | \f |
e2373f95 RK |
1451 | /* Like notes_stores, but call FUN for each expression that is being |
1452 | referenced in PBODY, a pointer to the PATTERN of an insn. We only call | |
1453 | FUN for each expression, not any interior subexpressions. FUN receives a | |
1454 | pointer to the expression and the DATA passed to this function. | |
1455 | ||
1456 | Note that this is not quite the same test as that done in reg_referenced_p | |
1457 | since that considers something as being referenced if it is being | |
1458 | partially set, while we do not. */ | |
1459 | ||
1460 | void | |
0c20a65f | 1461 | note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data) |
e2373f95 RK |
1462 | { |
1463 | rtx body = *pbody; | |
1464 | int i; | |
1465 | ||
1466 | switch (GET_CODE (body)) | |
1467 | { | |
1468 | case COND_EXEC: | |
1469 | (*fun) (&COND_EXEC_TEST (body), data); | |
1470 | note_uses (&COND_EXEC_CODE (body), fun, data); | |
1471 | return; | |
1472 | ||
1473 | case PARALLEL: | |
1474 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
1475 | note_uses (&XVECEXP (body, 0, i), fun, data); | |
1476 | return; | |
1477 | ||
bbbc206e BS |
1478 | case SEQUENCE: |
1479 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
1480 | note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data); | |
1481 | return; | |
1482 | ||
e2373f95 RK |
1483 | case USE: |
1484 | (*fun) (&XEXP (body, 0), data); | |
1485 | return; | |
1486 | ||
1487 | case ASM_OPERANDS: | |
1488 | for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--) | |
1489 | (*fun) (&ASM_OPERANDS_INPUT (body, i), data); | |
1490 | return; | |
1491 | ||
1492 | case TRAP_IF: | |
1493 | (*fun) (&TRAP_CONDITION (body), data); | |
1494 | return; | |
1495 | ||
21b8482a JJ |
1496 | case PREFETCH: |
1497 | (*fun) (&XEXP (body, 0), data); | |
1498 | return; | |
1499 | ||
e2373f95 RK |
1500 | case UNSPEC: |
1501 | case UNSPEC_VOLATILE: | |
1502 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
1503 | (*fun) (&XVECEXP (body, 0, i), data); | |
1504 | return; | |
1505 | ||
1506 | case CLOBBER: | |
3c0cb5de | 1507 | if (MEM_P (XEXP (body, 0))) |
e2373f95 RK |
1508 | (*fun) (&XEXP (XEXP (body, 0), 0), data); |
1509 | return; | |
1510 | ||
1511 | case SET: | |
1512 | { | |
1513 | rtx dest = SET_DEST (body); | |
1514 | ||
1515 | /* For sets we replace everything in source plus registers in memory | |
1516 | expression in store and operands of a ZERO_EXTRACT. */ | |
1517 | (*fun) (&SET_SRC (body), data); | |
1518 | ||
1519 | if (GET_CODE (dest) == ZERO_EXTRACT) | |
1520 | { | |
1521 | (*fun) (&XEXP (dest, 1), data); | |
1522 | (*fun) (&XEXP (dest, 2), data); | |
1523 | } | |
1524 | ||
1525 | while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART) | |
1526 | dest = XEXP (dest, 0); | |
1527 | ||
3c0cb5de | 1528 | if (MEM_P (dest)) |
e2373f95 RK |
1529 | (*fun) (&XEXP (dest, 0), data); |
1530 | } | |
1531 | return; | |
1532 | ||
1533 | default: | |
1534 | /* All the other possibilities never store. */ | |
1535 | (*fun) (pbody, data); | |
1536 | return; | |
1537 | } | |
1538 | } | |
1539 | \f | |
2c88418c RS |
1540 | /* Return nonzero if X's old contents don't survive after INSN. |
1541 | This will be true if X is (cc0) or if X is a register and | |
1542 | X dies in INSN or because INSN entirely sets X. | |
1543 | ||
46d096a3 SB |
1544 | "Entirely set" means set directly and not through a SUBREG, or |
1545 | ZERO_EXTRACT, so no trace of the old contents remains. | |
2c88418c RS |
1546 | Likewise, REG_INC does not count. |
1547 | ||
1548 | REG may be a hard or pseudo reg. Renumbering is not taken into account, | |
1549 | but for this use that makes no difference, since regs don't overlap | |
1550 | during their lifetimes. Therefore, this function may be used | |
6fb5fa3c | 1551 | at any time after deaths have been computed. |
2c88418c RS |
1552 | |
1553 | If REG is a hard reg that occupies multiple machine registers, this | |
1554 | function will only return 1 if each of those registers will be replaced | |
1555 | by INSN. */ | |
1556 | ||
1557 | int | |
f7d504c2 | 1558 | dead_or_set_p (const_rtx insn, const_rtx x) |
2c88418c | 1559 | { |
09e18274 | 1560 | unsigned int regno, end_regno; |
770ae6cc | 1561 | unsigned int i; |
2c88418c RS |
1562 | |
1563 | /* Can't use cc0_rtx below since this file is used by genattrtab.c. */ | |
1564 | if (GET_CODE (x) == CC0) | |
1565 | return 1; | |
1566 | ||
41374e13 | 1567 | gcc_assert (REG_P (x)); |
2c88418c RS |
1568 | |
1569 | regno = REGNO (x); | |
09e18274 RS |
1570 | end_regno = END_REGNO (x); |
1571 | for (i = regno; i < end_regno; i++) | |
2c88418c RS |
1572 | if (! dead_or_set_regno_p (insn, i)) |
1573 | return 0; | |
1574 | ||
1575 | return 1; | |
1576 | } | |
1577 | ||
194acded HPN |
1578 | /* Return TRUE iff DEST is a register or subreg of a register and |
1579 | doesn't change the number of words of the inner register, and any | |
1580 | part of the register is TEST_REGNO. */ | |
1581 | ||
1582 | static bool | |
f7d504c2 | 1583 | covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno) |
194acded HPN |
1584 | { |
1585 | unsigned int regno, endregno; | |
1586 | ||
1587 | if (GET_CODE (dest) == SUBREG | |
1588 | && (((GET_MODE_SIZE (GET_MODE (dest)) | |
1589 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD) | |
1590 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) | |
1591 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD))) | |
1592 | dest = SUBREG_REG (dest); | |
1593 | ||
1594 | if (!REG_P (dest)) | |
1595 | return false; | |
1596 | ||
1597 | regno = REGNO (dest); | |
09e18274 | 1598 | endregno = END_REGNO (dest); |
194acded HPN |
1599 | return (test_regno >= regno && test_regno < endregno); |
1600 | } | |
1601 | ||
1602 | /* Like covers_regno_no_parallel_p, but also handles PARALLELs where | |
1603 | any member matches the covers_regno_no_parallel_p criteria. */ | |
1604 | ||
1605 | static bool | |
f7d504c2 | 1606 | covers_regno_p (const_rtx dest, unsigned int test_regno) |
194acded HPN |
1607 | { |
1608 | if (GET_CODE (dest) == PARALLEL) | |
1609 | { | |
1610 | /* Some targets place small structures in registers for return | |
1611 | values of functions, and those registers are wrapped in | |
1612 | PARALLELs that we may see as the destination of a SET. */ | |
1613 | int i; | |
1614 | ||
1615 | for (i = XVECLEN (dest, 0) - 1; i >= 0; i--) | |
1616 | { | |
1617 | rtx inner = XEXP (XVECEXP (dest, 0, i), 0); | |
1618 | if (inner != NULL_RTX | |
1619 | && covers_regno_no_parallel_p (inner, test_regno)) | |
1620 | return true; | |
1621 | } | |
1622 | ||
1623 | return false; | |
1624 | } | |
1625 | else | |
1626 | return covers_regno_no_parallel_p (dest, test_regno); | |
1627 | } | |
1628 | ||
6fb5fa3c | 1629 | /* Utility function for dead_or_set_p to check an individual register. */ |
2c88418c RS |
1630 | |
1631 | int | |
f7d504c2 | 1632 | dead_or_set_regno_p (const_rtx insn, unsigned int test_regno) |
2c88418c | 1633 | { |
f7d504c2 | 1634 | const_rtx pattern; |
2c88418c | 1635 | |
0a2287bf RH |
1636 | /* See if there is a death note for something that includes TEST_REGNO. */ |
1637 | if (find_regno_note (insn, REG_DEAD, test_regno)) | |
1638 | return 1; | |
2c88418c | 1639 | |
4b4bf941 | 1640 | if (CALL_P (insn) |
8f3e7a26 RK |
1641 | && find_regno_fusage (insn, CLOBBER, test_regno)) |
1642 | return 1; | |
1643 | ||
0c99ec5c RH |
1644 | pattern = PATTERN (insn); |
1645 | ||
1646 | if (GET_CODE (pattern) == COND_EXEC) | |
1647 | pattern = COND_EXEC_CODE (pattern); | |
1648 | ||
1649 | if (GET_CODE (pattern) == SET) | |
194acded | 1650 | return covers_regno_p (SET_DEST (pattern), test_regno); |
0c99ec5c | 1651 | else if (GET_CODE (pattern) == PARALLEL) |
2c88418c | 1652 | { |
b3694847 | 1653 | int i; |
2c88418c | 1654 | |
0c99ec5c | 1655 | for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--) |
2c88418c | 1656 | { |
0c99ec5c RH |
1657 | rtx body = XVECEXP (pattern, 0, i); |
1658 | ||
1659 | if (GET_CODE (body) == COND_EXEC) | |
1660 | body = COND_EXEC_CODE (body); | |
2c88418c | 1661 | |
194acded HPN |
1662 | if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER) |
1663 | && covers_regno_p (SET_DEST (body), test_regno)) | |
1664 | return 1; | |
2c88418c RS |
1665 | } |
1666 | } | |
1667 | ||
1668 | return 0; | |
1669 | } | |
1670 | ||
1671 | /* Return the reg-note of kind KIND in insn INSN, if there is one. | |
1672 | If DATUM is nonzero, look for one whose datum is DATUM. */ | |
1673 | ||
1674 | rtx | |
f7d504c2 | 1675 | find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum) |
2c88418c | 1676 | { |
b3694847 | 1677 | rtx link; |
2c88418c | 1678 | |
af082de3 BE |
1679 | gcc_assert (insn); |
1680 | ||
ae78d276 | 1681 | /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */ |
2c3c49de | 1682 | if (! INSN_P (insn)) |
ae78d276 | 1683 | return 0; |
cd798543 AP |
1684 | if (datum == 0) |
1685 | { | |
1686 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
1687 | if (REG_NOTE_KIND (link) == kind) | |
1688 | return link; | |
1689 | return 0; | |
1690 | } | |
ae78d276 | 1691 | |
2c88418c | 1692 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
cd798543 | 1693 | if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0)) |
2c88418c RS |
1694 | return link; |
1695 | return 0; | |
1696 | } | |
1697 | ||
1698 | /* Return the reg-note of kind KIND in insn INSN which applies to register | |
99309f3b RK |
1699 | number REGNO, if any. Return 0 if there is no such reg-note. Note that |
1700 | the REGNO of this NOTE need not be REGNO if REGNO is a hard register; | |
1701 | it might be the case that the note overlaps REGNO. */ | |
2c88418c RS |
1702 | |
1703 | rtx | |
f7d504c2 | 1704 | find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno) |
2c88418c | 1705 | { |
b3694847 | 1706 | rtx link; |
2c88418c | 1707 | |
ae78d276 | 1708 | /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */ |
2c3c49de | 1709 | if (! INSN_P (insn)) |
ae78d276 MM |
1710 | return 0; |
1711 | ||
2c88418c RS |
1712 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
1713 | if (REG_NOTE_KIND (link) == kind | |
1714 | /* Verify that it is a register, so that scratch and MEM won't cause a | |
1715 | problem here. */ | |
f8cfc6aa | 1716 | && REG_P (XEXP (link, 0)) |
99309f3b | 1717 | && REGNO (XEXP (link, 0)) <= regno |
09e18274 | 1718 | && END_REGNO (XEXP (link, 0)) > regno) |
2c88418c RS |
1719 | return link; |
1720 | return 0; | |
1721 | } | |
8f3e7a26 | 1722 | |
d9c695ff RK |
1723 | /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and |
1724 | has such a note. */ | |
1725 | ||
1726 | rtx | |
f7d504c2 | 1727 | find_reg_equal_equiv_note (const_rtx insn) |
d9c695ff | 1728 | { |
cd648cec | 1729 | rtx link; |
d9c695ff | 1730 | |
cd648cec | 1731 | if (!INSN_P (insn)) |
d9c695ff | 1732 | return 0; |
ea8f106d | 1733 | |
cd648cec JH |
1734 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
1735 | if (REG_NOTE_KIND (link) == REG_EQUAL | |
1736 | || REG_NOTE_KIND (link) == REG_EQUIV) | |
1737 | { | |
ea8f106d SB |
1738 | /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on |
1739 | insns that have multiple sets. Checking single_set to | |
1740 | make sure of this is not the proper check, as explained | |
1741 | in the comment in set_unique_reg_note. | |
1742 | ||
1743 | This should be changed into an assert. */ | |
1744 | if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn)) | |
cd648cec JH |
1745 | return 0; |
1746 | return link; | |
1747 | } | |
1748 | return NULL; | |
d9c695ff RK |
1749 | } |
1750 | ||
2a450639 RS |
1751 | /* Check whether INSN is a single_set whose source is known to be |
1752 | equivalent to a constant. Return that constant if so, otherwise | |
1753 | return null. */ | |
1754 | ||
1755 | rtx | |
f7d504c2 | 1756 | find_constant_src (const_rtx insn) |
2a450639 RS |
1757 | { |
1758 | rtx note, set, x; | |
1759 | ||
1760 | set = single_set (insn); | |
1761 | if (set) | |
1762 | { | |
1763 | x = avoid_constant_pool_reference (SET_SRC (set)); | |
1764 | if (CONSTANT_P (x)) | |
1765 | return x; | |
1766 | } | |
1767 | ||
1768 | note = find_reg_equal_equiv_note (insn); | |
1769 | if (note && CONSTANT_P (XEXP (note, 0))) | |
1770 | return XEXP (note, 0); | |
1771 | ||
1772 | return NULL_RTX; | |
1773 | } | |
1774 | ||
8f3e7a26 RK |
1775 | /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found |
1776 | in the CALL_INSN_FUNCTION_USAGE information of INSN. */ | |
1777 | ||
1778 | int | |
f7d504c2 | 1779 | find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum) |
8f3e7a26 RK |
1780 | { |
1781 | /* If it's not a CALL_INSN, it can't possibly have a | |
1782 | CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */ | |
4b4bf941 | 1783 | if (!CALL_P (insn)) |
8f3e7a26 RK |
1784 | return 0; |
1785 | ||
41374e13 | 1786 | gcc_assert (datum); |
8f3e7a26 | 1787 | |
f8cfc6aa | 1788 | if (!REG_P (datum)) |
8f3e7a26 | 1789 | { |
b3694847 | 1790 | rtx link; |
8f3e7a26 RK |
1791 | |
1792 | for (link = CALL_INSN_FUNCTION_USAGE (insn); | |
a6a2274a | 1793 | link; |
8f3e7a26 | 1794 | link = XEXP (link, 1)) |
a6a2274a | 1795 | if (GET_CODE (XEXP (link, 0)) == code |
cc863bea | 1796 | && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0))) |
a6a2274a | 1797 | return 1; |
8f3e7a26 RK |
1798 | } |
1799 | else | |
1800 | { | |
770ae6cc | 1801 | unsigned int regno = REGNO (datum); |
8f3e7a26 RK |
1802 | |
1803 | /* CALL_INSN_FUNCTION_USAGE information cannot contain references | |
1804 | to pseudo registers, so don't bother checking. */ | |
1805 | ||
1806 | if (regno < FIRST_PSEUDO_REGISTER) | |
a6a2274a | 1807 | { |
09e18274 | 1808 | unsigned int end_regno = END_HARD_REGNO (datum); |
770ae6cc | 1809 | unsigned int i; |
8f3e7a26 RK |
1810 | |
1811 | for (i = regno; i < end_regno; i++) | |
1812 | if (find_regno_fusage (insn, code, i)) | |
1813 | return 1; | |
a6a2274a | 1814 | } |
8f3e7a26 RK |
1815 | } |
1816 | ||
1817 | return 0; | |
1818 | } | |
1819 | ||
1820 | /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found | |
1821 | in the CALL_INSN_FUNCTION_USAGE information of INSN. */ | |
1822 | ||
1823 | int | |
f7d504c2 | 1824 | find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno) |
8f3e7a26 | 1825 | { |
b3694847 | 1826 | rtx link; |
8f3e7a26 RK |
1827 | |
1828 | /* CALL_INSN_FUNCTION_USAGE information cannot contain references | |
1829 | to pseudo registers, so don't bother checking. */ | |
1830 | ||
1831 | if (regno >= FIRST_PSEUDO_REGISTER | |
4b4bf941 | 1832 | || !CALL_P (insn) ) |
8f3e7a26 RK |
1833 | return 0; |
1834 | ||
1835 | for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1)) | |
83ab3839 | 1836 | { |
770ae6cc | 1837 | rtx op, reg; |
83ab3839 RH |
1838 | |
1839 | if (GET_CODE (op = XEXP (link, 0)) == code | |
f8cfc6aa | 1840 | && REG_P (reg = XEXP (op, 0)) |
09e18274 RS |
1841 | && REGNO (reg) <= regno |
1842 | && END_HARD_REGNO (reg) > regno) | |
83ab3839 RH |
1843 | return 1; |
1844 | } | |
8f3e7a26 RK |
1845 | |
1846 | return 0; | |
1847 | } | |
a6a063b8 AM |
1848 | |
1849 | /* Return true if INSN is a call to a pure function. */ | |
1850 | ||
1851 | int | |
f7d504c2 | 1852 | pure_call_p (const_rtx insn) |
a6a063b8 | 1853 | { |
f7d504c2 | 1854 | const_rtx link; |
a6a063b8 | 1855 | |
4b4bf941 | 1856 | if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn)) |
a6a063b8 AM |
1857 | return 0; |
1858 | ||
1859 | /* Look for the note that differentiates const and pure functions. */ | |
1860 | for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1)) | |
1861 | { | |
1862 | rtx u, m; | |
1863 | ||
1864 | if (GET_CODE (u = XEXP (link, 0)) == USE | |
3c0cb5de | 1865 | && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode |
a6a063b8 AM |
1866 | && GET_CODE (XEXP (m, 0)) == SCRATCH) |
1867 | return 1; | |
1868 | } | |
1869 | ||
1870 | return 0; | |
1871 | } | |
2c88418c RS |
1872 | \f |
1873 | /* Remove register note NOTE from the REG_NOTES of INSN. */ | |
1874 | ||
1875 | void | |
f7d504c2 | 1876 | remove_note (rtx insn, const_rtx note) |
2c88418c | 1877 | { |
b3694847 | 1878 | rtx link; |
2c88418c | 1879 | |
49c3bb12 RH |
1880 | if (note == NULL_RTX) |
1881 | return; | |
1882 | ||
2c88418c | 1883 | if (REG_NOTES (insn) == note) |
6fb5fa3c DB |
1884 | REG_NOTES (insn) = XEXP (note, 1); |
1885 | else | |
1886 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
1887 | if (XEXP (link, 1) == note) | |
1888 | { | |
1889 | XEXP (link, 1) = XEXP (note, 1); | |
1890 | break; | |
1891 | } | |
1892 | ||
1893 | switch (REG_NOTE_KIND (note)) | |
2c88418c | 1894 | { |
6fb5fa3c DB |
1895 | case REG_EQUAL: |
1896 | case REG_EQUIV: | |
1897 | df_notes_rescan (insn); | |
1898 | break; | |
1899 | default: | |
1900 | break; | |
2c88418c | 1901 | } |
2c88418c | 1902 | } |
55a98783 | 1903 | |
7cd689bc SB |
1904 | /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. */ |
1905 | ||
1906 | void | |
1907 | remove_reg_equal_equiv_notes (rtx insn) | |
1908 | { | |
1909 | rtx *loc; | |
1910 | ||
1911 | loc = ®_NOTES (insn); | |
1912 | while (*loc) | |
1913 | { | |
1914 | enum reg_note kind = REG_NOTE_KIND (*loc); | |
1915 | if (kind == REG_EQUAL || kind == REG_EQUIV) | |
1916 | *loc = XEXP (*loc, 1); | |
1917 | else | |
1918 | loc = &XEXP (*loc, 1); | |
1919 | } | |
1920 | } | |
1921 | ||
5f0d2358 RK |
1922 | /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and |
1923 | return 1 if it is found. A simple equality test is used to determine if | |
1924 | NODE matches. */ | |
1925 | ||
1926 | int | |
f7d504c2 | 1927 | in_expr_list_p (const_rtx listp, const_rtx node) |
5f0d2358 | 1928 | { |
f7d504c2 | 1929 | const_rtx x; |
5f0d2358 RK |
1930 | |
1931 | for (x = listp; x; x = XEXP (x, 1)) | |
1932 | if (node == XEXP (x, 0)) | |
1933 | return 1; | |
1934 | ||
1935 | return 0; | |
1936 | } | |
1937 | ||
dd248abd RK |
1938 | /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and |
1939 | remove that entry from the list if it is found. | |
55a98783 | 1940 | |
dd248abd | 1941 | A simple equality test is used to determine if NODE matches. */ |
55a98783 JL |
1942 | |
1943 | void | |
f7d504c2 | 1944 | remove_node_from_expr_list (const_rtx node, rtx *listp) |
55a98783 JL |
1945 | { |
1946 | rtx temp = *listp; | |
1947 | rtx prev = NULL_RTX; | |
1948 | ||
1949 | while (temp) | |
1950 | { | |
1951 | if (node == XEXP (temp, 0)) | |
1952 | { | |
1953 | /* Splice the node out of the list. */ | |
1954 | if (prev) | |
1955 | XEXP (prev, 1) = XEXP (temp, 1); | |
1956 | else | |
1957 | *listp = XEXP (temp, 1); | |
1958 | ||
1959 | return; | |
1960 | } | |
dd248abd RK |
1961 | |
1962 | prev = temp; | |
55a98783 JL |
1963 | temp = XEXP (temp, 1); |
1964 | } | |
1965 | } | |
2c88418c | 1966 | \f |
2b067faf RS |
1967 | /* Nonzero if X contains any volatile instructions. These are instructions |
1968 | which may cause unpredictable machine state instructions, and thus no | |
1969 | instructions should be moved or combined across them. This includes | |
1970 | only volatile asms and UNSPEC_VOLATILE instructions. */ | |
1971 | ||
1972 | int | |
f7d504c2 | 1973 | volatile_insn_p (const_rtx x) |
2b067faf | 1974 | { |
f7d504c2 | 1975 | const RTX_CODE code = GET_CODE (x); |
2b067faf RS |
1976 | switch (code) |
1977 | { | |
1978 | case LABEL_REF: | |
1979 | case SYMBOL_REF: | |
1980 | case CONST_INT: | |
1981 | case CONST: | |
1982 | case CONST_DOUBLE: | |
091a3ac7 | 1983 | case CONST_FIXED: |
69ef87e2 | 1984 | case CONST_VECTOR: |
2b067faf RS |
1985 | case CC0: |
1986 | case PC: | |
1987 | case REG: | |
1988 | case SCRATCH: | |
1989 | case CLOBBER: | |
2b067faf RS |
1990 | case ADDR_VEC: |
1991 | case ADDR_DIFF_VEC: | |
1992 | case CALL: | |
1993 | case MEM: | |
1994 | return 0; | |
1995 | ||
1996 | case UNSPEC_VOLATILE: | |
1997 | /* case TRAP_IF: This isn't clear yet. */ | |
1998 | return 1; | |
1999 | ||
4c46ea23 | 2000 | case ASM_INPUT: |
2b067faf RS |
2001 | case ASM_OPERANDS: |
2002 | if (MEM_VOLATILE_P (x)) | |
2003 | return 1; | |
e9a25f70 JL |
2004 | |
2005 | default: | |
2006 | break; | |
2b067faf RS |
2007 | } |
2008 | ||
2009 | /* Recursively scan the operands of this expression. */ | |
2010 | ||
2011 | { | |
f7d504c2 | 2012 | const char *const fmt = GET_RTX_FORMAT (code); |
b3694847 | 2013 | int i; |
a6a2274a | 2014 | |
2b067faf RS |
2015 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
2016 | { | |
2017 | if (fmt[i] == 'e') | |
2018 | { | |
31001f72 | 2019 | if (volatile_insn_p (XEXP (x, i))) |
2b067faf RS |
2020 | return 1; |
2021 | } | |
d4757e6a | 2022 | else if (fmt[i] == 'E') |
2b067faf | 2023 | { |
b3694847 | 2024 | int j; |
2b067faf | 2025 | for (j = 0; j < XVECLEN (x, i); j++) |
31001f72 | 2026 | if (volatile_insn_p (XVECEXP (x, i, j))) |
2b067faf RS |
2027 | return 1; |
2028 | } | |
2029 | } | |
2030 | } | |
2031 | return 0; | |
2032 | } | |
2033 | ||
2c88418c | 2034 | /* Nonzero if X contains any volatile memory references |
2ac4fed0 | 2035 | UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */ |
2c88418c RS |
2036 | |
2037 | int | |
f7d504c2 | 2038 | volatile_refs_p (const_rtx x) |
2c88418c | 2039 | { |
f7d504c2 | 2040 | const RTX_CODE code = GET_CODE (x); |
2c88418c RS |
2041 | switch (code) |
2042 | { | |
2043 | case LABEL_REF: | |
2044 | case SYMBOL_REF: | |
2045 | case CONST_INT: | |
2046 | case CONST: | |
2047 | case CONST_DOUBLE: | |
091a3ac7 | 2048 | case CONST_FIXED: |
69ef87e2 | 2049 | case CONST_VECTOR: |
2c88418c RS |
2050 | case CC0: |
2051 | case PC: | |
2052 | case REG: | |
2053 | case SCRATCH: | |
2054 | case CLOBBER: | |
2c88418c RS |
2055 | case ADDR_VEC: |
2056 | case ADDR_DIFF_VEC: | |
2057 | return 0; | |
2058 | ||
2ac4fed0 | 2059 | case UNSPEC_VOLATILE: |
2c88418c RS |
2060 | return 1; |
2061 | ||
2062 | case MEM: | |
4c46ea23 | 2063 | case ASM_INPUT: |
2c88418c RS |
2064 | case ASM_OPERANDS: |
2065 | if (MEM_VOLATILE_P (x)) | |
2066 | return 1; | |
e9a25f70 JL |
2067 | |
2068 | default: | |
2069 | break; | |
2c88418c RS |
2070 | } |
2071 | ||
2072 | /* Recursively scan the operands of this expression. */ | |
2073 | ||
2074 | { | |
f7d504c2 | 2075 | const char *const fmt = GET_RTX_FORMAT (code); |
b3694847 | 2076 | int i; |
a6a2274a | 2077 | |
2c88418c RS |
2078 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
2079 | { | |
2080 | if (fmt[i] == 'e') | |
2081 | { | |
2082 | if (volatile_refs_p (XEXP (x, i))) | |
2083 | return 1; | |
2084 | } | |
d4757e6a | 2085 | else if (fmt[i] == 'E') |
2c88418c | 2086 | { |
b3694847 | 2087 | int j; |
2c88418c RS |
2088 | for (j = 0; j < XVECLEN (x, i); j++) |
2089 | if (volatile_refs_p (XVECEXP (x, i, j))) | |
2090 | return 1; | |
2091 | } | |
2092 | } | |
2093 | } | |
2094 | return 0; | |
2095 | } | |
2096 | ||
2097 | /* Similar to above, except that it also rejects register pre- and post- | |
2098 | incrementing. */ | |
2099 | ||
2100 | int | |
f7d504c2 | 2101 | side_effects_p (const_rtx x) |
2c88418c | 2102 | { |
f7d504c2 | 2103 | const RTX_CODE code = GET_CODE (x); |
2c88418c RS |
2104 | switch (code) |
2105 | { | |
2106 | case LABEL_REF: | |
2107 | case SYMBOL_REF: | |
2108 | case CONST_INT: | |
2109 | case CONST: | |
2110 | case CONST_DOUBLE: | |
091a3ac7 | 2111 | case CONST_FIXED: |
69ef87e2 | 2112 | case CONST_VECTOR: |
2c88418c RS |
2113 | case CC0: |
2114 | case PC: | |
2115 | case REG: | |
2116 | case SCRATCH: | |
2c88418c RS |
2117 | case ADDR_VEC: |
2118 | case ADDR_DIFF_VEC: | |
2119 | return 0; | |
2120 | ||
2121 | case CLOBBER: | |
2122 | /* Reject CLOBBER with a non-VOID mode. These are made by combine.c | |
2123 | when some combination can't be done. If we see one, don't think | |
2124 | that we can simplify the expression. */ | |
2125 | return (GET_MODE (x) != VOIDmode); | |
2126 | ||
2127 | case PRE_INC: | |
2128 | case PRE_DEC: | |
2129 | case POST_INC: | |
2130 | case POST_DEC: | |
1fb9c5cd MH |
2131 | case PRE_MODIFY: |
2132 | case POST_MODIFY: | |
2c88418c | 2133 | case CALL: |
2ac4fed0 | 2134 | case UNSPEC_VOLATILE: |
2c88418c RS |
2135 | /* case TRAP_IF: This isn't clear yet. */ |
2136 | return 1; | |
2137 | ||
2138 | case MEM: | |
4c46ea23 | 2139 | case ASM_INPUT: |
2c88418c RS |
2140 | case ASM_OPERANDS: |
2141 | if (MEM_VOLATILE_P (x)) | |
2142 | return 1; | |
e9a25f70 JL |
2143 | |
2144 | default: | |
2145 | break; | |
2c88418c RS |
2146 | } |
2147 | ||
2148 | /* Recursively scan the operands of this expression. */ | |
2149 | ||
2150 | { | |
b3694847 SS |
2151 | const char *fmt = GET_RTX_FORMAT (code); |
2152 | int i; | |
a6a2274a | 2153 | |
2c88418c RS |
2154 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
2155 | { | |
2156 | if (fmt[i] == 'e') | |
2157 | { | |
2158 | if (side_effects_p (XEXP (x, i))) | |
2159 | return 1; | |
2160 | } | |
d4757e6a | 2161 | else if (fmt[i] == 'E') |
2c88418c | 2162 | { |
b3694847 | 2163 | int j; |
2c88418c RS |
2164 | for (j = 0; j < XVECLEN (x, i); j++) |
2165 | if (side_effects_p (XVECEXP (x, i, j))) | |
2166 | return 1; | |
2167 | } | |
2168 | } | |
2169 | } | |
2170 | return 0; | |
2171 | } | |
2172 | \f | |
e755fcf5 ZD |
2173 | enum may_trap_p_flags |
2174 | { | |
2175 | MTP_UNALIGNED_MEMS = 1, | |
2176 | MTP_AFTER_MOVE = 2 | |
2177 | }; | |
2178 | /* Return nonzero if evaluating rtx X might cause a trap. | |
2179 | (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for | |
2180 | unaligned memory accesses on strict alignment machines. If | |
2181 | (FLAGS & AFTER_MOVE) is true, returns nonzero even in case the expression | |
2182 | cannot trap at its current location, but it might become trapping if moved | |
2183 | elsewhere. */ | |
2c88418c | 2184 | |
2358ff91 | 2185 | static int |
f7d504c2 | 2186 | may_trap_p_1 (const_rtx x, unsigned flags) |
2c88418c RS |
2187 | { |
2188 | int i; | |
2189 | enum rtx_code code; | |
6f7d635c | 2190 | const char *fmt; |
e755fcf5 | 2191 | bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0; |
2c88418c RS |
2192 | |
2193 | if (x == 0) | |
2194 | return 0; | |
2195 | code = GET_CODE (x); | |
2196 | switch (code) | |
2197 | { | |
2198 | /* Handle these cases quickly. */ | |
2199 | case CONST_INT: | |
2200 | case CONST_DOUBLE: | |
091a3ac7 | 2201 | case CONST_FIXED: |
69ef87e2 | 2202 | case CONST_VECTOR: |
2c88418c RS |
2203 | case SYMBOL_REF: |
2204 | case LABEL_REF: | |
2205 | case CONST: | |
2206 | case PC: | |
2207 | case CC0: | |
2208 | case REG: | |
2209 | case SCRATCH: | |
2210 | return 0; | |
2211 | ||
22aa60a1 | 2212 | case ASM_INPUT: |
2ac4fed0 | 2213 | case UNSPEC_VOLATILE: |
2c88418c RS |
2214 | case TRAP_IF: |
2215 | return 1; | |
2216 | ||
22aa60a1 RH |
2217 | case ASM_OPERANDS: |
2218 | return MEM_VOLATILE_P (x); | |
2219 | ||
2c88418c RS |
2220 | /* Memory ref can trap unless it's a static var or a stack slot. */ |
2221 | case MEM: | |
e755fcf5 ZD |
2222 | if (/* MEM_NOTRAP_P only relates to the actual position of the memory |
2223 | reference; moving it out of condition might cause its address | |
2224 | become invalid. */ | |
2225 | !(flags & MTP_AFTER_MOVE) | |
2226 | && MEM_NOTRAP_P (x) | |
2358ff91 | 2227 | && (!STRICT_ALIGNMENT || !unaligned_mems)) |
4da2eb6b | 2228 | return 0; |
2358ff91 EB |
2229 | return |
2230 | rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems); | |
2c88418c RS |
2231 | |
2232 | /* Division by a non-constant might trap. */ | |
2233 | case DIV: | |
2234 | case MOD: | |
2235 | case UDIV: | |
2236 | case UMOD: | |
52bfebf0 RS |
2237 | if (HONOR_SNANS (GET_MODE (x))) |
2238 | return 1; | |
3d8bf70f | 2239 | if (SCALAR_FLOAT_MODE_P (GET_MODE (x))) |
f9013075 DE |
2240 | return flag_trapping_math; |
2241 | if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx)) | |
2c88418c | 2242 | return 1; |
e9a25f70 JL |
2243 | break; |
2244 | ||
b278301b RK |
2245 | case EXPR_LIST: |
2246 | /* An EXPR_LIST is used to represent a function call. This | |
2247 | certainly may trap. */ | |
2248 | return 1; | |
e9a25f70 | 2249 | |
734508ea JW |
2250 | case GE: |
2251 | case GT: | |
2252 | case LE: | |
2253 | case LT: | |
19aec195 | 2254 | case LTGT: |
55143861 | 2255 | case COMPARE: |
734508ea | 2256 | /* Some floating point comparisons may trap. */ |
f5eb5fd0 JH |
2257 | if (!flag_trapping_math) |
2258 | break; | |
734508ea JW |
2259 | /* ??? There is no machine independent way to check for tests that trap |
2260 | when COMPARE is used, though many targets do make this distinction. | |
2261 | For instance, sparc uses CCFPE for compares which generate exceptions | |
2262 | and CCFP for compares which do not generate exceptions. */ | |
52bfebf0 | 2263 | if (HONOR_NANS (GET_MODE (x))) |
55143861 JJ |
2264 | return 1; |
2265 | /* But often the compare has some CC mode, so check operand | |
2266 | modes as well. */ | |
52bfebf0 RS |
2267 | if (HONOR_NANS (GET_MODE (XEXP (x, 0))) |
2268 | || HONOR_NANS (GET_MODE (XEXP (x, 1)))) | |
2269 | return 1; | |
2270 | break; | |
2271 | ||
2272 | case EQ: | |
2273 | case NE: | |
2274 | if (HONOR_SNANS (GET_MODE (x))) | |
2275 | return 1; | |
2276 | /* Often comparison is CC mode, so check operand modes. */ | |
2277 | if (HONOR_SNANS (GET_MODE (XEXP (x, 0))) | |
2278 | || HONOR_SNANS (GET_MODE (XEXP (x, 1)))) | |
55143861 JJ |
2279 | return 1; |
2280 | break; | |
2281 | ||
22fd5743 FH |
2282 | case FIX: |
2283 | /* Conversion of floating point might trap. */ | |
2284 | if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0)))) | |
2285 | return 1; | |
2286 | break; | |
2287 | ||
05cc23e8 RH |
2288 | case NEG: |
2289 | case ABS: | |
e3947b34 | 2290 | case SUBREG: |
05cc23e8 RH |
2291 | /* These operations don't trap even with floating point. */ |
2292 | break; | |
2293 | ||
2c88418c RS |
2294 | default: |
2295 | /* Any floating arithmetic may trap. */ | |
3d8bf70f | 2296 | if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) |
f5eb5fd0 | 2297 | && flag_trapping_math) |
2c88418c RS |
2298 | return 1; |
2299 | } | |
2300 | ||
2301 | fmt = GET_RTX_FORMAT (code); | |
2302 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2303 | { | |
2304 | if (fmt[i] == 'e') | |
2305 | { | |
e755fcf5 | 2306 | if (may_trap_p_1 (XEXP (x, i), flags)) |
2c88418c RS |
2307 | return 1; |
2308 | } | |
2309 | else if (fmt[i] == 'E') | |
2310 | { | |
b3694847 | 2311 | int j; |
2c88418c | 2312 | for (j = 0; j < XVECLEN (x, i); j++) |
e755fcf5 | 2313 | if (may_trap_p_1 (XVECEXP (x, i, j), flags)) |
2c88418c RS |
2314 | return 1; |
2315 | } | |
2316 | } | |
2317 | return 0; | |
2318 | } | |
2358ff91 EB |
2319 | |
2320 | /* Return nonzero if evaluating rtx X might cause a trap. */ | |
2321 | ||
2322 | int | |
f7d504c2 | 2323 | may_trap_p (const_rtx x) |
2358ff91 | 2324 | { |
e755fcf5 ZD |
2325 | return may_trap_p_1 (x, 0); |
2326 | } | |
2327 | ||
2328 | /* Return nonzero if evaluating rtx X might cause a trap, when the expression | |
2329 | is moved from its current location by some optimization. */ | |
2330 | ||
2331 | int | |
f7d504c2 | 2332 | may_trap_after_code_motion_p (const_rtx x) |
e755fcf5 ZD |
2333 | { |
2334 | return may_trap_p_1 (x, MTP_AFTER_MOVE); | |
2358ff91 EB |
2335 | } |
2336 | ||
c0220ea4 | 2337 | /* Same as above, but additionally return nonzero if evaluating rtx X might |
2358ff91 EB |
2338 | cause a fault. We define a fault for the purpose of this function as a |
2339 | erroneous execution condition that cannot be encountered during the normal | |
2340 | execution of a valid program; the typical example is an unaligned memory | |
2341 | access on a strict alignment machine. The compiler guarantees that it | |
2342 | doesn't generate code that will fault from a valid program, but this | |
2343 | guarantee doesn't mean anything for individual instructions. Consider | |
2344 | the following example: | |
2345 | ||
2346 | struct S { int d; union { char *cp; int *ip; }; }; | |
2347 | ||
2348 | int foo(struct S *s) | |
2349 | { | |
2350 | if (s->d == 1) | |
2351 | return *s->ip; | |
2352 | else | |
2353 | return *s->cp; | |
2354 | } | |
2355 | ||
2356 | on a strict alignment machine. In a valid program, foo will never be | |
2357 | invoked on a structure for which d is equal to 1 and the underlying | |
2358 | unique field of the union not aligned on a 4-byte boundary, but the | |
2359 | expression *s->ip might cause a fault if considered individually. | |
2360 | ||
2361 | At the RTL level, potentially problematic expressions will almost always | |
2362 | verify may_trap_p; for example, the above dereference can be emitted as | |
2363 | (mem:SI (reg:P)) and this expression is may_trap_p for a generic register. | |
2364 | However, suppose that foo is inlined in a caller that causes s->cp to | |
2365 | point to a local character variable and guarantees that s->d is not set | |
2366 | to 1; foo may have been effectively translated into pseudo-RTL as: | |
2367 | ||
2368 | if ((reg:SI) == 1) | |
2369 | (set (reg:SI) (mem:SI (%fp - 7))) | |
2370 | else | |
2371 | (set (reg:QI) (mem:QI (%fp - 7))) | |
2372 | ||
2373 | Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a | |
2374 | memory reference to a stack slot, but it will certainly cause a fault | |
2375 | on a strict alignment machine. */ | |
2376 | ||
2377 | int | |
f7d504c2 | 2378 | may_trap_or_fault_p (const_rtx x) |
2358ff91 | 2379 | { |
e755fcf5 | 2380 | return may_trap_p_1 (x, MTP_UNALIGNED_MEMS); |
2358ff91 | 2381 | } |
2c88418c RS |
2382 | \f |
2383 | /* Return nonzero if X contains a comparison that is not either EQ or NE, | |
2384 | i.e., an inequality. */ | |
2385 | ||
2386 | int | |
f7d504c2 | 2387 | inequality_comparisons_p (const_rtx x) |
2c88418c | 2388 | { |
b3694847 SS |
2389 | const char *fmt; |
2390 | int len, i; | |
f7d504c2 | 2391 | const enum rtx_code code = GET_CODE (x); |
2c88418c RS |
2392 | |
2393 | switch (code) | |
2394 | { | |
2395 | case REG: | |
2396 | case SCRATCH: | |
2397 | case PC: | |
2398 | case CC0: | |
2399 | case CONST_INT: | |
2400 | case CONST_DOUBLE: | |
091a3ac7 | 2401 | case CONST_FIXED: |
69ef87e2 | 2402 | case CONST_VECTOR: |
2c88418c RS |
2403 | case CONST: |
2404 | case LABEL_REF: | |
2405 | case SYMBOL_REF: | |
2406 | return 0; | |
2407 | ||
2408 | case LT: | |
2409 | case LTU: | |
2410 | case GT: | |
2411 | case GTU: | |
2412 | case LE: | |
2413 | case LEU: | |
2414 | case GE: | |
2415 | case GEU: | |
2416 | return 1; | |
a6a2274a | 2417 | |
e9a25f70 JL |
2418 | default: |
2419 | break; | |
2c88418c RS |
2420 | } |
2421 | ||
2422 | len = GET_RTX_LENGTH (code); | |
2423 | fmt = GET_RTX_FORMAT (code); | |
2424 | ||
2425 | for (i = 0; i < len; i++) | |
2426 | { | |
2427 | if (fmt[i] == 'e') | |
2428 | { | |
2429 | if (inequality_comparisons_p (XEXP (x, i))) | |
2430 | return 1; | |
2431 | } | |
2432 | else if (fmt[i] == 'E') | |
2433 | { | |
b3694847 | 2434 | int j; |
2c88418c RS |
2435 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
2436 | if (inequality_comparisons_p (XVECEXP (x, i, j))) | |
2437 | return 1; | |
2438 | } | |
2439 | } | |
a6a2274a | 2440 | |
2c88418c RS |
2441 | return 0; |
2442 | } | |
2443 | \f | |
1ed0205e VM |
2444 | /* Replace any occurrence of FROM in X with TO. The function does |
2445 | not enter into CONST_DOUBLE for the replace. | |
2c88418c RS |
2446 | |
2447 | Note that copying is not done so X must not be shared unless all copies | |
2448 | are to be modified. */ | |
2449 | ||
2450 | rtx | |
0c20a65f | 2451 | replace_rtx (rtx x, rtx from, rtx to) |
2c88418c | 2452 | { |
b3694847 SS |
2453 | int i, j; |
2454 | const char *fmt; | |
2c88418c | 2455 | |
1ed0205e | 2456 | /* The following prevents loops occurrence when we change MEM in |
dc297297 | 2457 | CONST_DOUBLE onto the same CONST_DOUBLE. */ |
1ed0205e VM |
2458 | if (x != 0 && GET_CODE (x) == CONST_DOUBLE) |
2459 | return x; | |
2460 | ||
2c88418c RS |
2461 | if (x == from) |
2462 | return to; | |
2463 | ||
2464 | /* Allow this function to make replacements in EXPR_LISTs. */ | |
2465 | if (x == 0) | |
2466 | return 0; | |
2467 | ||
9dd791c8 AO |
2468 | if (GET_CODE (x) == SUBREG) |
2469 | { | |
2470 | rtx new = replace_rtx (SUBREG_REG (x), from, to); | |
2471 | ||
2472 | if (GET_CODE (new) == CONST_INT) | |
2473 | { | |
2474 | x = simplify_subreg (GET_MODE (x), new, | |
2475 | GET_MODE (SUBREG_REG (x)), | |
2476 | SUBREG_BYTE (x)); | |
41374e13 | 2477 | gcc_assert (x); |
9dd791c8 AO |
2478 | } |
2479 | else | |
2480 | SUBREG_REG (x) = new; | |
2481 | ||
2482 | return x; | |
2483 | } | |
2484 | else if (GET_CODE (x) == ZERO_EXTEND) | |
2485 | { | |
2486 | rtx new = replace_rtx (XEXP (x, 0), from, to); | |
2487 | ||
2488 | if (GET_CODE (new) == CONST_INT) | |
2489 | { | |
2490 | x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x), | |
2491 | new, GET_MODE (XEXP (x, 0))); | |
41374e13 | 2492 | gcc_assert (x); |
9dd791c8 AO |
2493 | } |
2494 | else | |
2495 | XEXP (x, 0) = new; | |
2496 | ||
2497 | return x; | |
2498 | } | |
2499 | ||
2c88418c RS |
2500 | fmt = GET_RTX_FORMAT (GET_CODE (x)); |
2501 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--) | |
2502 | { | |
2503 | if (fmt[i] == 'e') | |
2504 | XEXP (x, i) = replace_rtx (XEXP (x, i), from, to); | |
2505 | else if (fmt[i] == 'E') | |
2506 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
2507 | XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to); | |
2508 | } | |
2509 | ||
2510 | return x; | |
a6a2274a | 2511 | } |
2c88418c | 2512 | \f |
39811184 | 2513 | /* Replace occurrences of the old label in *X with the new one. |
4af16369 | 2514 | DATA is a REPLACE_LABEL_DATA containing the old and new labels. */ |
39811184 JZ |
2515 | |
2516 | int | |
0c20a65f | 2517 | replace_label (rtx *x, void *data) |
39811184 JZ |
2518 | { |
2519 | rtx l = *x; | |
4af16369 JZ |
2520 | rtx old_label = ((replace_label_data *) data)->r1; |
2521 | rtx new_label = ((replace_label_data *) data)->r2; | |
2522 | bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses; | |
39811184 JZ |
2523 | |
2524 | if (l == NULL_RTX) | |
2525 | return 0; | |
2526 | ||
173cd571 JZ |
2527 | if (GET_CODE (l) == SYMBOL_REF |
2528 | && CONSTANT_POOL_ADDRESS_P (l)) | |
4af16369 | 2529 | { |
173cd571 | 2530 | rtx c = get_pool_constant (l); |
4af16369 JZ |
2531 | if (rtx_referenced_p (old_label, c)) |
2532 | { | |
2533 | rtx new_c, new_l; | |
2534 | replace_label_data *d = (replace_label_data *) data; | |
0c20a65f | 2535 | |
4af16369 JZ |
2536 | /* Create a copy of constant C; replace the label inside |
2537 | but do not update LABEL_NUSES because uses in constant pool | |
2538 | are not counted. */ | |
2539 | new_c = copy_rtx (c); | |
2540 | d->update_label_nuses = false; | |
2541 | for_each_rtx (&new_c, replace_label, data); | |
2542 | d->update_label_nuses = update_label_nuses; | |
2543 | ||
2544 | /* Add the new constant NEW_C to constant pool and replace | |
2545 | the old reference to constant by new reference. */ | |
173cd571 | 2546 | new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0); |
4af16369 JZ |
2547 | *x = replace_rtx (l, l, new_l); |
2548 | } | |
2549 | return 0; | |
2550 | } | |
2551 | ||
39811184 JZ |
2552 | /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL |
2553 | field. This is not handled by for_each_rtx because it doesn't | |
2554 | handle unprinted ('0') fields. */ | |
4b4bf941 | 2555 | if (JUMP_P (l) && JUMP_LABEL (l) == old_label) |
39811184 | 2556 | JUMP_LABEL (l) = new_label; |
39811184 | 2557 | |
4af16369 JZ |
2558 | if ((GET_CODE (l) == LABEL_REF |
2559 | || GET_CODE (l) == INSN_LIST) | |
2560 | && XEXP (l, 0) == old_label) | |
2561 | { | |
2562 | XEXP (l, 0) = new_label; | |
2563 | if (update_label_nuses) | |
2564 | { | |
2565 | ++LABEL_NUSES (new_label); | |
2566 | --LABEL_NUSES (old_label); | |
2567 | } | |
2568 | return 0; | |
2569 | } | |
39811184 JZ |
2570 | |
2571 | return 0; | |
2572 | } | |
2573 | ||
4af16369 JZ |
2574 | /* When *BODY is equal to X or X is directly referenced by *BODY |
2575 | return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero | |
2576 | too, otherwise FOR_EACH_RTX continues traversing *BODY. */ | |
39811184 JZ |
2577 | |
2578 | static int | |
0c20a65f | 2579 | rtx_referenced_p_1 (rtx *body, void *x) |
39811184 | 2580 | { |
4af16369 JZ |
2581 | rtx y = (rtx) x; |
2582 | ||
2583 | if (*body == NULL_RTX) | |
2584 | return y == NULL_RTX; | |
2585 | ||
2586 | /* Return true if a label_ref *BODY refers to label Y. */ | |
4b4bf941 | 2587 | if (GET_CODE (*body) == LABEL_REF && LABEL_P (y)) |
4af16369 JZ |
2588 | return XEXP (*body, 0) == y; |
2589 | ||
2590 | /* If *BODY is a reference to pool constant traverse the constant. */ | |
2591 | if (GET_CODE (*body) == SYMBOL_REF | |
2592 | && CONSTANT_POOL_ADDRESS_P (*body)) | |
2593 | return rtx_referenced_p (y, get_pool_constant (*body)); | |
2594 | ||
2595 | /* By default, compare the RTL expressions. */ | |
2596 | return rtx_equal_p (*body, y); | |
39811184 JZ |
2597 | } |
2598 | ||
4af16369 | 2599 | /* Return true if X is referenced in BODY. */ |
39811184 JZ |
2600 | |
2601 | int | |
0c20a65f | 2602 | rtx_referenced_p (rtx x, rtx body) |
39811184 | 2603 | { |
4af16369 | 2604 | return for_each_rtx (&body, rtx_referenced_p_1, x); |
39811184 JZ |
2605 | } |
2606 | ||
ee735eef JZ |
2607 | /* If INSN is a tablejump return true and store the label (before jump table) to |
2608 | *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */ | |
39811184 JZ |
2609 | |
2610 | bool | |
f7d504c2 | 2611 | tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep) |
39811184 | 2612 | { |
ee735eef JZ |
2613 | rtx label, table; |
2614 | ||
4b4bf941 | 2615 | if (JUMP_P (insn) |
ee735eef JZ |
2616 | && (label = JUMP_LABEL (insn)) != NULL_RTX |
2617 | && (table = next_active_insn (label)) != NULL_RTX | |
4b4bf941 | 2618 | && JUMP_P (table) |
ee735eef JZ |
2619 | && (GET_CODE (PATTERN (table)) == ADDR_VEC |
2620 | || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC)) | |
39811184 | 2621 | { |
ee735eef JZ |
2622 | if (labelp) |
2623 | *labelp = label; | |
2624 | if (tablep) | |
2625 | *tablep = table; | |
39811184 JZ |
2626 | return true; |
2627 | } | |
2628 | return false; | |
2629 | } | |
2630 | ||
fce7e199 RH |
2631 | /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or |
2632 | constant that is not in the constant pool and not in the condition | |
2633 | of an IF_THEN_ELSE. */ | |
2a1777af JL |
2634 | |
2635 | static int | |
f7d504c2 | 2636 | computed_jump_p_1 (const_rtx x) |
2a1777af | 2637 | { |
f7d504c2 | 2638 | const enum rtx_code code = GET_CODE (x); |
2a1777af | 2639 | int i, j; |
6f7d635c | 2640 | const char *fmt; |
2a1777af JL |
2641 | |
2642 | switch (code) | |
2643 | { | |
2a1777af JL |
2644 | case LABEL_REF: |
2645 | case PC: | |
2646 | return 0; | |
2647 | ||
fce7e199 RH |
2648 | case CONST: |
2649 | case CONST_INT: | |
2650 | case CONST_DOUBLE: | |
091a3ac7 | 2651 | case CONST_FIXED: |
69ef87e2 | 2652 | case CONST_VECTOR: |
fce7e199 | 2653 | case SYMBOL_REF: |
2a1777af JL |
2654 | case REG: |
2655 | return 1; | |
2656 | ||
2657 | case MEM: | |
2658 | return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF | |
2659 | && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0))); | |
2660 | ||
2661 | case IF_THEN_ELSE: | |
fce7e199 RH |
2662 | return (computed_jump_p_1 (XEXP (x, 1)) |
2663 | || computed_jump_p_1 (XEXP (x, 2))); | |
1d300e19 KG |
2664 | |
2665 | default: | |
2666 | break; | |
2a1777af JL |
2667 | } |
2668 | ||
2669 | fmt = GET_RTX_FORMAT (code); | |
2670 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2671 | { | |
2672 | if (fmt[i] == 'e' | |
fce7e199 | 2673 | && computed_jump_p_1 (XEXP (x, i))) |
2a1777af JL |
2674 | return 1; |
2675 | ||
d4757e6a | 2676 | else if (fmt[i] == 'E') |
2a1777af | 2677 | for (j = 0; j < XVECLEN (x, i); j++) |
fce7e199 | 2678 | if (computed_jump_p_1 (XVECEXP (x, i, j))) |
2a1777af JL |
2679 | return 1; |
2680 | } | |
2681 | ||
2682 | return 0; | |
2683 | } | |
2684 | ||
2685 | /* Return nonzero if INSN is an indirect jump (aka computed jump). | |
2686 | ||
2687 | Tablejumps and casesi insns are not considered indirect jumps; | |
4eb00163 | 2688 | we can recognize them by a (use (label_ref)). */ |
2a1777af JL |
2689 | |
2690 | int | |
f7d504c2 | 2691 | computed_jump_p (const_rtx insn) |
2a1777af JL |
2692 | { |
2693 | int i; | |
4b4bf941 | 2694 | if (JUMP_P (insn)) |
2a1777af JL |
2695 | { |
2696 | rtx pat = PATTERN (insn); | |
2a1777af | 2697 | |
cf7c4aa6 HPN |
2698 | /* If we have a JUMP_LABEL set, we're not a computed jump. */ |
2699 | if (JUMP_LABEL (insn) != NULL) | |
f759eb8b | 2700 | return 0; |
cf7c4aa6 HPN |
2701 | |
2702 | if (GET_CODE (pat) == PARALLEL) | |
2a1777af JL |
2703 | { |
2704 | int len = XVECLEN (pat, 0); | |
2705 | int has_use_labelref = 0; | |
2706 | ||
2707 | for (i = len - 1; i >= 0; i--) | |
2708 | if (GET_CODE (XVECEXP (pat, 0, i)) == USE | |
2709 | && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) | |
2710 | == LABEL_REF)) | |
2711 | has_use_labelref = 1; | |
2712 | ||
2713 | if (! has_use_labelref) | |
2714 | for (i = len - 1; i >= 0; i--) | |
2715 | if (GET_CODE (XVECEXP (pat, 0, i)) == SET | |
2716 | && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx | |
fce7e199 | 2717 | && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i)))) |
2a1777af JL |
2718 | return 1; |
2719 | } | |
2720 | else if (GET_CODE (pat) == SET | |
2721 | && SET_DEST (pat) == pc_rtx | |
fce7e199 | 2722 | && computed_jump_p_1 (SET_SRC (pat))) |
2a1777af JL |
2723 | return 1; |
2724 | } | |
2725 | return 0; | |
2726 | } | |
ccc2d6d0 | 2727 | |
cf94b0fc PB |
2728 | /* Optimized loop of for_each_rtx, trying to avoid useless recursive |
2729 | calls. Processes the subexpressions of EXP and passes them to F. */ | |
2730 | static int | |
2731 | for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data) | |
2732 | { | |
2733 | int result, i, j; | |
2734 | const char *format = GET_RTX_FORMAT (GET_CODE (exp)); | |
2735 | rtx *x; | |
2736 | ||
2737 | for (; format[n] != '\0'; n++) | |
2738 | { | |
2739 | switch (format[n]) | |
2740 | { | |
2741 | case 'e': | |
2742 | /* Call F on X. */ | |
2743 | x = &XEXP (exp, n); | |
2744 | result = (*f) (x, data); | |
2745 | if (result == -1) | |
2746 | /* Do not traverse sub-expressions. */ | |
2747 | continue; | |
2748 | else if (result != 0) | |
2749 | /* Stop the traversal. */ | |
2750 | return result; | |
2751 | ||
2752 | if (*x == NULL_RTX) | |
2753 | /* There are no sub-expressions. */ | |
2754 | continue; | |
2755 | ||
2756 | i = non_rtx_starting_operands[GET_CODE (*x)]; | |
2757 | if (i >= 0) | |
2758 | { | |
2759 | result = for_each_rtx_1 (*x, i, f, data); | |
2760 | if (result != 0) | |
2761 | return result; | |
2762 | } | |
2763 | break; | |
2764 | ||
2765 | case 'V': | |
2766 | case 'E': | |
2767 | if (XVEC (exp, n) == 0) | |
2768 | continue; | |
2769 | for (j = 0; j < XVECLEN (exp, n); ++j) | |
2770 | { | |
2771 | /* Call F on X. */ | |
2772 | x = &XVECEXP (exp, n, j); | |
2773 | result = (*f) (x, data); | |
2774 | if (result == -1) | |
2775 | /* Do not traverse sub-expressions. */ | |
2776 | continue; | |
2777 | else if (result != 0) | |
2778 | /* Stop the traversal. */ | |
2779 | return result; | |
2780 | ||
2781 | if (*x == NULL_RTX) | |
2782 | /* There are no sub-expressions. */ | |
2783 | continue; | |
2784 | ||
2785 | i = non_rtx_starting_operands[GET_CODE (*x)]; | |
2786 | if (i >= 0) | |
2787 | { | |
2788 | result = for_each_rtx_1 (*x, i, f, data); | |
2789 | if (result != 0) | |
2790 | return result; | |
2791 | } | |
2792 | } | |
2793 | break; | |
2794 | ||
2795 | default: | |
2796 | /* Nothing to do. */ | |
2797 | break; | |
2798 | } | |
2799 | } | |
2800 | ||
2801 | return 0; | |
2802 | } | |
2803 | ||
ccc2d6d0 MM |
2804 | /* Traverse X via depth-first search, calling F for each |
2805 | sub-expression (including X itself). F is also passed the DATA. | |
2806 | If F returns -1, do not traverse sub-expressions, but continue | |
2807 | traversing the rest of the tree. If F ever returns any other | |
40f03658 | 2808 | nonzero value, stop the traversal, and return the value returned |
ccc2d6d0 MM |
2809 | by F. Otherwise, return 0. This function does not traverse inside |
2810 | tree structure that contains RTX_EXPRs, or into sub-expressions | |
2811 | whose format code is `0' since it is not known whether or not those | |
2812 | codes are actually RTL. | |
2813 | ||
2814 | This routine is very general, and could (should?) be used to | |
2815 | implement many of the other routines in this file. */ | |
2816 | ||
ae0b51ef | 2817 | int |
0c20a65f | 2818 | for_each_rtx (rtx *x, rtx_function f, void *data) |
ccc2d6d0 MM |
2819 | { |
2820 | int result; | |
ccc2d6d0 MM |
2821 | int i; |
2822 | ||
2823 | /* Call F on X. */ | |
b987f237 | 2824 | result = (*f) (x, data); |
ccc2d6d0 MM |
2825 | if (result == -1) |
2826 | /* Do not traverse sub-expressions. */ | |
2827 | return 0; | |
2828 | else if (result != 0) | |
2829 | /* Stop the traversal. */ | |
2830 | return result; | |
2831 | ||
2832 | if (*x == NULL_RTX) | |
2833 | /* There are no sub-expressions. */ | |
2834 | return 0; | |
2835 | ||
cf94b0fc PB |
2836 | i = non_rtx_starting_operands[GET_CODE (*x)]; |
2837 | if (i < 0) | |
2838 | return 0; | |
ccc2d6d0 | 2839 | |
cf94b0fc | 2840 | return for_each_rtx_1 (*x, i, f, data); |
ccc2d6d0 | 2841 | } |
3ec2b590 | 2842 | |
cf94b0fc | 2843 | |
777b1b71 RH |
2844 | /* Searches X for any reference to REGNO, returning the rtx of the |
2845 | reference found if any. Otherwise, returns NULL_RTX. */ | |
2846 | ||
2847 | rtx | |
0c20a65f | 2848 | regno_use_in (unsigned int regno, rtx x) |
777b1b71 | 2849 | { |
b3694847 | 2850 | const char *fmt; |
777b1b71 RH |
2851 | int i, j; |
2852 | rtx tem; | |
2853 | ||
f8cfc6aa | 2854 | if (REG_P (x) && REGNO (x) == regno) |
777b1b71 RH |
2855 | return x; |
2856 | ||
2857 | fmt = GET_RTX_FORMAT (GET_CODE (x)); | |
2858 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--) | |
2859 | { | |
2860 | if (fmt[i] == 'e') | |
2861 | { | |
2862 | if ((tem = regno_use_in (regno, XEXP (x, i)))) | |
2863 | return tem; | |
2864 | } | |
2865 | else if (fmt[i] == 'E') | |
2866 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
2867 | if ((tem = regno_use_in (regno , XVECEXP (x, i, j)))) | |
2868 | return tem; | |
2869 | } | |
2870 | ||
2871 | return NULL_RTX; | |
2872 | } | |
2dfa9a87 | 2873 | |
e5c56fd9 JH |
2874 | /* Return a value indicating whether OP, an operand of a commutative |
2875 | operation, is preferred as the first or second operand. The higher | |
2876 | the value, the stronger the preference for being the first operand. | |
2877 | We use negative values to indicate a preference for the first operand | |
2878 | and positive values for the second operand. */ | |
2879 | ||
9b3bd424 | 2880 | int |
0c20a65f | 2881 | commutative_operand_precedence (rtx op) |
e5c56fd9 | 2882 | { |
e3d6e740 | 2883 | enum rtx_code code = GET_CODE (op); |
e3d6e740 | 2884 | |
e5c56fd9 | 2885 | /* Constants always come the second operand. Prefer "nice" constants. */ |
e3d6e740 | 2886 | if (code == CONST_INT) |
7e0b4eae | 2887 | return -8; |
e3d6e740 | 2888 | if (code == CONST_DOUBLE) |
7e0b4eae | 2889 | return -7; |
091a3ac7 CF |
2890 | if (code == CONST_FIXED) |
2891 | return -7; | |
9ce79a7a | 2892 | op = avoid_constant_pool_reference (op); |
79b82df3 | 2893 | code = GET_CODE (op); |
ec8e098d PB |
2894 | |
2895 | switch (GET_RTX_CLASS (code)) | |
2896 | { | |
2897 | case RTX_CONST_OBJ: | |
2898 | if (code == CONST_INT) | |
7e0b4eae | 2899 | return -6; |
ec8e098d | 2900 | if (code == CONST_DOUBLE) |
7e0b4eae | 2901 | return -5; |
091a3ac7 CF |
2902 | if (code == CONST_FIXED) |
2903 | return -5; | |
7e0b4eae | 2904 | return -4; |
ec8e098d PB |
2905 | |
2906 | case RTX_EXTRA: | |
2907 | /* SUBREGs of objects should come second. */ | |
2908 | if (code == SUBREG && OBJECT_P (SUBREG_REG (op))) | |
7e0b4eae | 2909 | return -3; |
6fb5fa3c | 2910 | return 0; |
ec8e098d PB |
2911 | |
2912 | case RTX_OBJ: | |
2913 | /* Complex expressions should be the first, so decrease priority | |
7e0b4eae PB |
2914 | of objects. Prefer pointer objects over non pointer objects. */ |
2915 | if ((REG_P (op) && REG_POINTER (op)) | |
2916 | || (MEM_P (op) && MEM_POINTER (op))) | |
2917 | return -1; | |
2918 | return -2; | |
ec8e098d PB |
2919 | |
2920 | case RTX_COMM_ARITH: | |
2921 | /* Prefer operands that are themselves commutative to be first. | |
2922 | This helps to make things linear. In particular, | |
2923 | (and (and (reg) (reg)) (not (reg))) is canonical. */ | |
2924 | return 4; | |
2925 | ||
2926 | case RTX_BIN_ARITH: | |
2927 | /* If only one operand is a binary expression, it will be the first | |
2928 | operand. In particular, (plus (minus (reg) (reg)) (neg (reg))) | |
2929 | is canonical, although it will usually be further simplified. */ | |
2930 | return 2; | |
e3d6e740 | 2931 | |
ec8e098d PB |
2932 | case RTX_UNARY: |
2933 | /* Then prefer NEG and NOT. */ | |
2934 | if (code == NEG || code == NOT) | |
2935 | return 1; | |
e5c56fd9 | 2936 | |
ec8e098d PB |
2937 | default: |
2938 | return 0; | |
2939 | } | |
e5c56fd9 JH |
2940 | } |
2941 | ||
f63d1bf7 | 2942 | /* Return 1 iff it is necessary to swap operands of commutative operation |
e5c56fd9 JH |
2943 | in order to canonicalize expression. */ |
2944 | ||
7e0b4eae | 2945 | bool |
0c20a65f | 2946 | swap_commutative_operands_p (rtx x, rtx y) |
e5c56fd9 | 2947 | { |
9b3bd424 RH |
2948 | return (commutative_operand_precedence (x) |
2949 | < commutative_operand_precedence (y)); | |
e5c56fd9 | 2950 | } |
2dfa9a87 MH |
2951 | |
2952 | /* Return 1 if X is an autoincrement side effect and the register is | |
2953 | not the stack pointer. */ | |
2954 | int | |
f7d504c2 | 2955 | auto_inc_p (const_rtx x) |
2dfa9a87 MH |
2956 | { |
2957 | switch (GET_CODE (x)) | |
2958 | { | |
2959 | case PRE_INC: | |
2960 | case POST_INC: | |
2961 | case PRE_DEC: | |
2962 | case POST_DEC: | |
2963 | case PRE_MODIFY: | |
2964 | case POST_MODIFY: | |
2965 | /* There are no REG_INC notes for SP. */ | |
2966 | if (XEXP (x, 0) != stack_pointer_rtx) | |
2967 | return 1; | |
2968 | default: | |
2969 | break; | |
2970 | } | |
2971 | return 0; | |
2972 | } | |
3b10cf4b | 2973 | |
f9da5064 | 2974 | /* Return nonzero if IN contains a piece of rtl that has the address LOC. */ |
db7ba742 | 2975 | int |
f7d504c2 | 2976 | loc_mentioned_in_p (rtx *loc, const_rtx in) |
db7ba742 | 2977 | { |
a52b023a PB |
2978 | enum rtx_code code; |
2979 | const char *fmt; | |
db7ba742 R |
2980 | int i, j; |
2981 | ||
a52b023a PB |
2982 | if (!in) |
2983 | return 0; | |
2984 | ||
2985 | code = GET_CODE (in); | |
2986 | fmt = GET_RTX_FORMAT (code); | |
db7ba742 R |
2987 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
2988 | { | |
9ce88f5e | 2989 | if (loc == &in->u.fld[i].rt_rtx) |
db7ba742 R |
2990 | return 1; |
2991 | if (fmt[i] == 'e') | |
2992 | { | |
2993 | if (loc_mentioned_in_p (loc, XEXP (in, i))) | |
2994 | return 1; | |
2995 | } | |
2996 | else if (fmt[i] == 'E') | |
2997 | for (j = XVECLEN (in, i) - 1; j >= 0; j--) | |
2998 | if (loc_mentioned_in_p (loc, XVECEXP (in, i, j))) | |
2999 | return 1; | |
3000 | } | |
3001 | return 0; | |
3002 | } | |
ddef6bc7 | 3003 | |
bb51e270 RS |
3004 | /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE, |
3005 | and SUBREG_BYTE, return the bit offset where the subreg begins | |
3006 | (counting from the least significant bit of the operand). */ | |
33aceff2 JW |
3007 | |
3008 | unsigned int | |
bb51e270 RS |
3009 | subreg_lsb_1 (enum machine_mode outer_mode, |
3010 | enum machine_mode inner_mode, | |
3011 | unsigned int subreg_byte) | |
33aceff2 | 3012 | { |
33aceff2 JW |
3013 | unsigned int bitpos; |
3014 | unsigned int byte; | |
3015 | unsigned int word; | |
3016 | ||
3017 | /* A paradoxical subreg begins at bit position 0. */ | |
bb51e270 | 3018 | if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode)) |
33aceff2 JW |
3019 | return 0; |
3020 | ||
3021 | if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN) | |
3022 | /* If the subreg crosses a word boundary ensure that | |
3023 | it also begins and ends on a word boundary. */ | |
41374e13 NS |
3024 | gcc_assert (!((subreg_byte % UNITS_PER_WORD |
3025 | + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD | |
3026 | && (subreg_byte % UNITS_PER_WORD | |
3027 | || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD))); | |
33aceff2 JW |
3028 | |
3029 | if (WORDS_BIG_ENDIAN) | |
3030 | word = (GET_MODE_SIZE (inner_mode) | |
bb51e270 | 3031 | - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD; |
33aceff2 | 3032 | else |
bb51e270 | 3033 | word = subreg_byte / UNITS_PER_WORD; |
33aceff2 JW |
3034 | bitpos = word * BITS_PER_WORD; |
3035 | ||
3036 | if (BYTES_BIG_ENDIAN) | |
3037 | byte = (GET_MODE_SIZE (inner_mode) | |
bb51e270 | 3038 | - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD; |
33aceff2 | 3039 | else |
bb51e270 | 3040 | byte = subreg_byte % UNITS_PER_WORD; |
33aceff2 JW |
3041 | bitpos += byte * BITS_PER_UNIT; |
3042 | ||
3043 | return bitpos; | |
3044 | } | |
3045 | ||
bb51e270 RS |
3046 | /* Given a subreg X, return the bit offset where the subreg begins |
3047 | (counting from the least significant bit of the reg). */ | |
3048 | ||
3049 | unsigned int | |
f7d504c2 | 3050 | subreg_lsb (const_rtx x) |
bb51e270 RS |
3051 | { |
3052 | return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)), | |
3053 | SUBREG_BYTE (x)); | |
3054 | } | |
3055 | ||
f1f4e530 | 3056 | /* Fill in information about a subreg of a hard register. |
ddef6bc7 JJ |
3057 | xregno - A regno of an inner hard subreg_reg (or what will become one). |
3058 | xmode - The mode of xregno. | |
3059 | offset - The byte offset. | |
3060 | ymode - The mode of a top level SUBREG (or what may become one). | |
f1f4e530 JM |
3061 | info - Pointer to structure to fill in. */ |
3062 | static void | |
3063 | subreg_get_info (unsigned int xregno, enum machine_mode xmode, | |
3064 | unsigned int offset, enum machine_mode ymode, | |
3065 | struct subreg_info *info) | |
04c5580f | 3066 | { |
8521c414 | 3067 | int nregs_xmode, nregs_ymode; |
04c5580f | 3068 | int mode_multiple, nregs_multiple; |
f1f4e530 | 3069 | int offset_adj, y_offset, y_offset_adj; |
8521c414 | 3070 | int regsize_xmode, regsize_ymode; |
f1f4e530 | 3071 | bool rknown; |
04c5580f | 3072 | |
41374e13 | 3073 | gcc_assert (xregno < FIRST_PSEUDO_REGISTER); |
04c5580f | 3074 | |
f1f4e530 JM |
3075 | rknown = false; |
3076 | ||
dd79bb7e GK |
3077 | /* If there are holes in a non-scalar mode in registers, we expect |
3078 | that it is made up of its units concatenated together. */ | |
8521c414 | 3079 | if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)) |
dd79bb7e | 3080 | { |
8521c414 JM |
3081 | enum machine_mode xmode_unit; |
3082 | ||
3083 | nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode); | |
3084 | if (GET_MODE_INNER (xmode) == VOIDmode) | |
3085 | xmode_unit = xmode; | |
3086 | else | |
3087 | xmode_unit = GET_MODE_INNER (xmode); | |
3088 | gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit)); | |
3089 | gcc_assert (nregs_xmode | |
3090 | == (GET_MODE_NUNITS (xmode) | |
3091 | * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit))); | |
3092 | gcc_assert (hard_regno_nregs[xregno][xmode] | |
3093 | == (hard_regno_nregs[xregno][xmode_unit] | |
3094 | * GET_MODE_NUNITS (xmode))); | |
dd79bb7e GK |
3095 | |
3096 | /* You can only ask for a SUBREG of a value with holes in the middle | |
3097 | if you don't cross the holes. (Such a SUBREG should be done by | |
3098 | picking a different register class, or doing it in memory if | |
3099 | necessary.) An example of a value with holes is XCmode on 32-bit | |
3100 | x86 with -m128bit-long-double; it's represented in 6 32-bit registers, | |
3101 | 3 for each part, but in memory it's two 128-bit parts. | |
3102 | Padding is assumed to be at the end (not necessarily the 'high part') | |
3103 | of each unit. */ | |
8521c414 JM |
3104 | if ((offset / GET_MODE_SIZE (xmode_unit) + 1 |
3105 | < GET_MODE_NUNITS (xmode)) | |
3106 | && (offset / GET_MODE_SIZE (xmode_unit) | |
dd79bb7e | 3107 | != ((offset + GET_MODE_SIZE (ymode) - 1) |
8521c414 | 3108 | / GET_MODE_SIZE (xmode_unit)))) |
f1f4e530 JM |
3109 | { |
3110 | info->representable_p = false; | |
3111 | rknown = true; | |
3112 | } | |
dd79bb7e GK |
3113 | } |
3114 | else | |
3115 | nregs_xmode = hard_regno_nregs[xregno][xmode]; | |
3116 | ||
66fd46b6 | 3117 | nregs_ymode = hard_regno_nregs[xregno][ymode]; |
04c5580f | 3118 | |
dd79bb7e | 3119 | /* Paradoxical subregs are otherwise valid. */ |
f1f4e530 JM |
3120 | if (!rknown |
3121 | && offset == 0 | |
3122 | && GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode)) | |
3123 | { | |
3124 | info->representable_p = true; | |
3125 | /* If this is a big endian paradoxical subreg, which uses more | |
3126 | actual hard registers than the original register, we must | |
3127 | return a negative offset so that we find the proper highpart | |
3128 | of the register. */ | |
3129 | if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD | |
3130 | ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN) | |
3131 | info->offset = nregs_xmode - nregs_ymode; | |
3132 | else | |
3133 | info->offset = 0; | |
3134 | info->nregs = nregs_ymode; | |
3135 | return; | |
3136 | } | |
04c5580f | 3137 | |
8521c414 JM |
3138 | /* If registers store different numbers of bits in the different |
3139 | modes, we cannot generally form this subreg. */ | |
f1f4e530 | 3140 | if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode) |
5f7fc2b8 JM |
3141 | && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode) |
3142 | && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0 | |
3143 | && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0) | |
f1f4e530 JM |
3144 | { |
3145 | regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode; | |
f1f4e530 | 3146 | regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode; |
f1f4e530 JM |
3147 | if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1) |
3148 | { | |
3149 | info->representable_p = false; | |
3150 | info->nregs | |
3151 | = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode; | |
3152 | info->offset = offset / regsize_xmode; | |
3153 | return; | |
3154 | } | |
3155 | if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1) | |
3156 | { | |
3157 | info->representable_p = false; | |
3158 | info->nregs | |
3159 | = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode; | |
3160 | info->offset = offset / regsize_xmode; | |
3161 | return; | |
3162 | } | |
3163 | } | |
8521c414 | 3164 | |
dd79bb7e | 3165 | /* Lowpart subregs are otherwise valid. */ |
f1f4e530 JM |
3166 | if (!rknown && offset == subreg_lowpart_offset (ymode, xmode)) |
3167 | { | |
3168 | info->representable_p = true; | |
3169 | rknown = true; | |
a446b4e8 JM |
3170 | |
3171 | if (offset == 0 || nregs_xmode == nregs_ymode) | |
3172 | { | |
3173 | info->offset = 0; | |
3174 | info->nregs = nregs_ymode; | |
3175 | return; | |
3176 | } | |
f1f4e530 | 3177 | } |
04c5580f | 3178 | |
dd79bb7e GK |
3179 | /* This should always pass, otherwise we don't know how to verify |
3180 | the constraint. These conditions may be relaxed but | |
3181 | subreg_regno_offset would need to be redesigned. */ | |
41374e13 | 3182 | gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0); |
41374e13 | 3183 | gcc_assert ((nregs_xmode % nregs_ymode) == 0); |
04c5580f | 3184 | |
b20b352b | 3185 | /* The XMODE value can be seen as a vector of NREGS_XMODE |
dcc24678 | 3186 | values. The subreg must represent a lowpart of given field. |
04c5580f | 3187 | Compute what field it is. */ |
f1f4e530 JM |
3188 | offset_adj = offset; |
3189 | offset_adj -= subreg_lowpart_offset (ymode, | |
3190 | mode_for_size (GET_MODE_BITSIZE (xmode) | |
3191 | / nregs_xmode, | |
3192 | MODE_INT, 0)); | |
04c5580f | 3193 | |
dd79bb7e | 3194 | /* Size of ymode must not be greater than the size of xmode. */ |
04c5580f | 3195 | mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode); |
41374e13 | 3196 | gcc_assert (mode_multiple != 0); |
04c5580f JH |
3197 | |
3198 | y_offset = offset / GET_MODE_SIZE (ymode); | |
f1f4e530 JM |
3199 | y_offset_adj = offset_adj / GET_MODE_SIZE (ymode); |
3200 | nregs_multiple = nregs_xmode / nregs_ymode; | |
41374e13 | 3201 | |
f1f4e530 | 3202 | gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0); |
41374e13 NS |
3203 | gcc_assert ((mode_multiple % nregs_multiple) == 0); |
3204 | ||
f1f4e530 JM |
3205 | if (!rknown) |
3206 | { | |
3207 | info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple))); | |
3208 | rknown = true; | |
3209 | } | |
3210 | info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode; | |
3211 | info->nregs = nregs_ymode; | |
3212 | } | |
3213 | ||
3214 | /* This function returns the regno offset of a subreg expression. | |
3215 | xregno - A regno of an inner hard subreg_reg (or what will become one). | |
3216 | xmode - The mode of xregno. | |
3217 | offset - The byte offset. | |
3218 | ymode - The mode of a top level SUBREG (or what may become one). | |
3219 | RETURN - The regno offset which would be used. */ | |
3220 | unsigned int | |
3221 | subreg_regno_offset (unsigned int xregno, enum machine_mode xmode, | |
3222 | unsigned int offset, enum machine_mode ymode) | |
3223 | { | |
3224 | struct subreg_info info; | |
3225 | subreg_get_info (xregno, xmode, offset, ymode, &info); | |
3226 | return info.offset; | |
3227 | } | |
3228 | ||
3229 | /* This function returns true when the offset is representable via | |
3230 | subreg_offset in the given regno. | |
3231 | xregno - A regno of an inner hard subreg_reg (or what will become one). | |
3232 | xmode - The mode of xregno. | |
3233 | offset - The byte offset. | |
3234 | ymode - The mode of a top level SUBREG (or what may become one). | |
3235 | RETURN - Whether the offset is representable. */ | |
3236 | bool | |
3237 | subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode, | |
3238 | unsigned int offset, enum machine_mode ymode) | |
3239 | { | |
3240 | struct subreg_info info; | |
3241 | subreg_get_info (xregno, xmode, offset, ymode, &info); | |
3242 | return info.representable_p; | |
04c5580f JH |
3243 | } |
3244 | ||
dc297297 | 3245 | /* Return the final regno that a subreg expression refers to. */ |
a6a2274a | 3246 | unsigned int |
f7d504c2 | 3247 | subreg_regno (const_rtx x) |
ddef6bc7 JJ |
3248 | { |
3249 | unsigned int ret; | |
3250 | rtx subreg = SUBREG_REG (x); | |
3251 | int regno = REGNO (subreg); | |
3252 | ||
a6a2274a KH |
3253 | ret = regno + subreg_regno_offset (regno, |
3254 | GET_MODE (subreg), | |
ddef6bc7 JJ |
3255 | SUBREG_BYTE (x), |
3256 | GET_MODE (x)); | |
3257 | return ret; | |
3258 | ||
3259 | } | |
f1f4e530 JM |
3260 | |
3261 | /* Return the number of registers that a subreg expression refers | |
3262 | to. */ | |
3263 | unsigned int | |
f7d504c2 | 3264 | subreg_nregs (const_rtx x) |
f1f4e530 JM |
3265 | { |
3266 | struct subreg_info info; | |
3267 | rtx subreg = SUBREG_REG (x); | |
3268 | int regno = REGNO (subreg); | |
3269 | ||
3270 | subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x), | |
3271 | &info); | |
3272 | return info.nregs; | |
3273 | } | |
3274 | ||
833366d6 JH |
3275 | struct parms_set_data |
3276 | { | |
3277 | int nregs; | |
3278 | HARD_REG_SET regs; | |
3279 | }; | |
3280 | ||
3281 | /* Helper function for noticing stores to parameter registers. */ | |
3282 | static void | |
7bc980e1 | 3283 | parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data) |
833366d6 JH |
3284 | { |
3285 | struct parms_set_data *d = data; | |
3286 | if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER | |
3287 | && TEST_HARD_REG_BIT (d->regs, REGNO (x))) | |
3288 | { | |
3289 | CLEAR_HARD_REG_BIT (d->regs, REGNO (x)); | |
3290 | d->nregs--; | |
3291 | } | |
3292 | } | |
3293 | ||
a6a2274a | 3294 | /* Look backward for first parameter to be loaded. |
b2df20b4 DJ |
3295 | Note that loads of all parameters will not necessarily be |
3296 | found if CSE has eliminated some of them (e.g., an argument | |
3297 | to the outer function is passed down as a parameter). | |
833366d6 JH |
3298 | Do not skip BOUNDARY. */ |
3299 | rtx | |
0c20a65f | 3300 | find_first_parameter_load (rtx call_insn, rtx boundary) |
833366d6 JH |
3301 | { |
3302 | struct parms_set_data parm; | |
b2df20b4 | 3303 | rtx p, before, first_set; |
833366d6 JH |
3304 | |
3305 | /* Since different machines initialize their parameter registers | |
3306 | in different orders, assume nothing. Collect the set of all | |
3307 | parameter registers. */ | |
3308 | CLEAR_HARD_REG_SET (parm.regs); | |
3309 | parm.nregs = 0; | |
3310 | for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1)) | |
3311 | if (GET_CODE (XEXP (p, 0)) == USE | |
f8cfc6aa | 3312 | && REG_P (XEXP (XEXP (p, 0), 0))) |
833366d6 | 3313 | { |
41374e13 | 3314 | gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER); |
833366d6 JH |
3315 | |
3316 | /* We only care about registers which can hold function | |
3317 | arguments. */ | |
3318 | if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0)))) | |
3319 | continue; | |
3320 | ||
3321 | SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0))); | |
3322 | parm.nregs++; | |
3323 | } | |
3324 | before = call_insn; | |
b2df20b4 | 3325 | first_set = call_insn; |
833366d6 JH |
3326 | |
3327 | /* Search backward for the first set of a register in this set. */ | |
3328 | while (parm.nregs && before != boundary) | |
3329 | { | |
3330 | before = PREV_INSN (before); | |
3331 | ||
3332 | /* It is possible that some loads got CSEed from one call to | |
3333 | another. Stop in that case. */ | |
4b4bf941 | 3334 | if (CALL_P (before)) |
833366d6 JH |
3335 | break; |
3336 | ||
dbc1a163 | 3337 | /* Our caller needs either ensure that we will find all sets |
833366d6 | 3338 | (in case code has not been optimized yet), or take care |
eaec9b3d | 3339 | for possible labels in a way by setting boundary to preceding |
833366d6 | 3340 | CODE_LABEL. */ |
4b4bf941 | 3341 | if (LABEL_P (before)) |
dbc1a163 | 3342 | { |
41374e13 | 3343 | gcc_assert (before == boundary); |
dbc1a163 RH |
3344 | break; |
3345 | } | |
833366d6 | 3346 | |
0d025d43 | 3347 | if (INSN_P (before)) |
b2df20b4 DJ |
3348 | { |
3349 | int nregs_old = parm.nregs; | |
3350 | note_stores (PATTERN (before), parms_set, &parm); | |
3351 | /* If we found something that did not set a parameter reg, | |
3352 | we're done. Do not keep going, as that might result | |
3353 | in hoisting an insn before the setting of a pseudo | |
3354 | that is used by the hoisted insn. */ | |
3355 | if (nregs_old != parm.nregs) | |
3356 | first_set = before; | |
3357 | else | |
3358 | break; | |
3359 | } | |
833366d6 | 3360 | } |
b2df20b4 | 3361 | return first_set; |
833366d6 | 3362 | } |
3dec4024 | 3363 | |
14b493d6 | 3364 | /* Return true if we should avoid inserting code between INSN and preceding |
3dec4024 JH |
3365 | call instruction. */ |
3366 | ||
3367 | bool | |
9678086d | 3368 | keep_with_call_p (const_rtx insn) |
3dec4024 JH |
3369 | { |
3370 | rtx set; | |
3371 | ||
3372 | if (INSN_P (insn) && (set = single_set (insn)) != NULL) | |
3373 | { | |
f8cfc6aa | 3374 | if (REG_P (SET_DEST (set)) |
5df533b3 | 3375 | && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER |
3dec4024 JH |
3376 | && fixed_regs[REGNO (SET_DEST (set))] |
3377 | && general_operand (SET_SRC (set), VOIDmode)) | |
3378 | return true; | |
f8cfc6aa | 3379 | if (REG_P (SET_SRC (set)) |
3dec4024 | 3380 | && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set))) |
f8cfc6aa | 3381 | && REG_P (SET_DEST (set)) |
3dec4024 JH |
3382 | && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER) |
3383 | return true; | |
bc204393 RH |
3384 | /* There may be a stack pop just after the call and before the store |
3385 | of the return register. Search for the actual store when deciding | |
3386 | if we can break or not. */ | |
3dec4024 JH |
3387 | if (SET_DEST (set) == stack_pointer_rtx) |
3388 | { | |
75547801 KG |
3389 | /* This CONST_CAST is okay because next_nonnote_insn just |
3390 | returns it's argument and we assign it to a const_rtx | |
3391 | variable. */ | |
b1d5455a | 3392 | const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX(insn)); |
bc204393 | 3393 | if (i2 && keep_with_call_p (i2)) |
3dec4024 JH |
3394 | return true; |
3395 | } | |
3396 | } | |
3397 | return false; | |
3398 | } | |
71d2c5bd | 3399 | |
432f982f JH |
3400 | /* Return true if LABEL is a target of JUMP_INSN. This applies only |
3401 | to non-complex jumps. That is, direct unconditional, conditional, | |
3402 | and tablejumps, but not computed jumps or returns. It also does | |
3403 | not apply to the fallthru case of a conditional jump. */ | |
3404 | ||
3405 | bool | |
f7d504c2 | 3406 | label_is_jump_target_p (const_rtx label, const_rtx jump_insn) |
432f982f JH |
3407 | { |
3408 | rtx tmp = JUMP_LABEL (jump_insn); | |
3409 | ||
3410 | if (label == tmp) | |
3411 | return true; | |
3412 | ||
3413 | if (tablejump_p (jump_insn, NULL, &tmp)) | |
3414 | { | |
3415 | rtvec vec = XVEC (PATTERN (tmp), | |
3416 | GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC); | |
3417 | int i, veclen = GET_NUM_ELEM (vec); | |
3418 | ||
3419 | for (i = 0; i < veclen; ++i) | |
3420 | if (XEXP (RTVEC_ELT (vec, i), 0) == label) | |
3421 | return true; | |
3422 | } | |
3423 | ||
3424 | return false; | |
3425 | } | |
3426 | ||
f894b69b PB |
3427 | \f |
3428 | /* Return an estimate of the cost of computing rtx X. | |
3429 | One use is in cse, to decide which expression to keep in the hash table. | |
3430 | Another is in rtl generation, to pick the cheapest way to multiply. | |
3431 | Other uses like the latter are expected in the future. */ | |
3432 | ||
3433 | int | |
3434 | rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED) | |
3435 | { | |
3436 | int i, j; | |
3437 | enum rtx_code code; | |
3438 | const char *fmt; | |
3439 | int total; | |
3440 | ||
3441 | if (x == 0) | |
3442 | return 0; | |
3443 | ||
3444 | /* Compute the default costs of certain things. | |
3445 | Note that targetm.rtx_costs can override the defaults. */ | |
3446 | ||
3447 | code = GET_CODE (x); | |
3448 | switch (code) | |
3449 | { | |
3450 | case MULT: | |
3451 | total = COSTS_N_INSNS (5); | |
3452 | break; | |
3453 | case DIV: | |
3454 | case UDIV: | |
3455 | case MOD: | |
3456 | case UMOD: | |
3457 | total = COSTS_N_INSNS (7); | |
3458 | break; | |
3459 | case USE: | |
db3edc20 | 3460 | /* Used in combine.c as a marker. */ |
f894b69b PB |
3461 | total = 0; |
3462 | break; | |
3463 | default: | |
3464 | total = COSTS_N_INSNS (1); | |
3465 | } | |
3466 | ||
3467 | switch (code) | |
3468 | { | |
3469 | case REG: | |
3470 | return 0; | |
3471 | ||
3472 | case SUBREG: | |
edb81165 | 3473 | total = 0; |
f894b69b PB |
3474 | /* If we can't tie these modes, make this expensive. The larger |
3475 | the mode, the more expensive it is. */ | |
3476 | if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x)))) | |
3477 | return COSTS_N_INSNS (2 | |
3478 | + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD); | |
3479 | break; | |
3480 | ||
3481 | default: | |
5fd9b178 | 3482 | if (targetm.rtx_costs (x, code, outer_code, &total)) |
f894b69b PB |
3483 | return total; |
3484 | break; | |
3485 | } | |
3486 | ||
3487 | /* Sum the costs of the sub-rtx's, plus cost of this operation, | |
3488 | which is already in total. */ | |
3489 | ||
3490 | fmt = GET_RTX_FORMAT (code); | |
3491 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
3492 | if (fmt[i] == 'e') | |
3493 | total += rtx_cost (XEXP (x, i), code); | |
3494 | else if (fmt[i] == 'E') | |
3495 | for (j = 0; j < XVECLEN (x, i); j++) | |
3496 | total += rtx_cost (XVECEXP (x, i, j), code); | |
3497 | ||
3498 | return total; | |
3499 | } | |
3500 | \f | |
3501 | /* Return cost of address expression X. | |
3502 | Expect that X is properly formed address reference. */ | |
3503 | ||
3504 | int | |
3505 | address_cost (rtx x, enum machine_mode mode) | |
3506 | { | |
f894b69b PB |
3507 | /* We may be asked for cost of various unusual addresses, such as operands |
3508 | of push instruction. It is not worthwhile to complicate writing | |
3509 | of the target hook by such cases. */ | |
3510 | ||
3511 | if (!memory_address_p (mode, x)) | |
3512 | return 1000; | |
3513 | ||
5fd9b178 | 3514 | return targetm.address_cost (x); |
f894b69b PB |
3515 | } |
3516 | ||
3517 | /* If the target doesn't override, compute the cost as with arithmetic. */ | |
3518 | ||
3519 | int | |
3520 | default_address_cost (rtx x) | |
3521 | { | |
3522 | return rtx_cost (x, MEM); | |
3523 | } | |
2f93eea8 PB |
3524 | \f |
3525 | ||
3526 | unsigned HOST_WIDE_INT | |
fa233e34 | 3527 | nonzero_bits (const_rtx x, enum machine_mode mode) |
2f93eea8 PB |
3528 | { |
3529 | return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0); | |
3530 | } | |
3531 | ||
3532 | unsigned int | |
fa233e34 | 3533 | num_sign_bit_copies (const_rtx x, enum machine_mode mode) |
2f93eea8 PB |
3534 | { |
3535 | return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0); | |
3536 | } | |
3537 | ||
3538 | /* The function cached_nonzero_bits is a wrapper around nonzero_bits1. | |
3539 | It avoids exponential behavior in nonzero_bits1 when X has | |
3540 | identical subexpressions on the first or the second level. */ | |
3541 | ||
3542 | static unsigned HOST_WIDE_INT | |
fa233e34 | 3543 | cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x, |
2f93eea8 PB |
3544 | enum machine_mode known_mode, |
3545 | unsigned HOST_WIDE_INT known_ret) | |
3546 | { | |
3547 | if (x == known_x && mode == known_mode) | |
3548 | return known_ret; | |
3549 | ||
3550 | /* Try to find identical subexpressions. If found call | |
3551 | nonzero_bits1 on X with the subexpressions as KNOWN_X and the | |
3552 | precomputed value for the subexpression as KNOWN_RET. */ | |
3553 | ||
3554 | if (ARITHMETIC_P (x)) | |
3555 | { | |
3556 | rtx x0 = XEXP (x, 0); | |
3557 | rtx x1 = XEXP (x, 1); | |
3558 | ||
3559 | /* Check the first level. */ | |
3560 | if (x0 == x1) | |
3561 | return nonzero_bits1 (x, mode, x0, mode, | |
3562 | cached_nonzero_bits (x0, mode, known_x, | |
3563 | known_mode, known_ret)); | |
3564 | ||
3565 | /* Check the second level. */ | |
3566 | if (ARITHMETIC_P (x0) | |
3567 | && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1))) | |
3568 | return nonzero_bits1 (x, mode, x1, mode, | |
3569 | cached_nonzero_bits (x1, mode, known_x, | |
3570 | known_mode, known_ret)); | |
3571 | ||
3572 | if (ARITHMETIC_P (x1) | |
3573 | && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1))) | |
3574 | return nonzero_bits1 (x, mode, x0, mode, | |
3575 | cached_nonzero_bits (x0, mode, known_x, | |
3576 | known_mode, known_ret)); | |
3577 | } | |
3578 | ||
3579 | return nonzero_bits1 (x, mode, known_x, known_mode, known_ret); | |
3580 | } | |
3581 | ||
3582 | /* We let num_sign_bit_copies recur into nonzero_bits as that is useful. | |
3583 | We don't let nonzero_bits recur into num_sign_bit_copies, because that | |
3584 | is less useful. We can't allow both, because that results in exponential | |
3585 | run time recursion. There is a nullstone testcase that triggered | |
3586 | this. This macro avoids accidental uses of num_sign_bit_copies. */ | |
3587 | #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior | |
3588 | ||
3589 | /* Given an expression, X, compute which bits in X can be nonzero. | |
3590 | We don't care about bits outside of those defined in MODE. | |
3591 | ||
3592 | For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is | |
3593 | an arithmetic operation, we can do better. */ | |
3594 | ||
3595 | static unsigned HOST_WIDE_INT | |
fa233e34 | 3596 | nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, |
2f93eea8 PB |
3597 | enum machine_mode known_mode, |
3598 | unsigned HOST_WIDE_INT known_ret) | |
3599 | { | |
3600 | unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode); | |
3601 | unsigned HOST_WIDE_INT inner_nz; | |
3602 | enum rtx_code code; | |
3603 | unsigned int mode_width = GET_MODE_BITSIZE (mode); | |
3604 | ||
3605 | /* For floating-point values, assume all bits are needed. */ | |
3606 | if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)) | |
3607 | return nonzero; | |
3608 | ||
3609 | /* If X is wider than MODE, use its mode instead. */ | |
3610 | if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width) | |
3611 | { | |
3612 | mode = GET_MODE (x); | |
3613 | nonzero = GET_MODE_MASK (mode); | |
3614 | mode_width = GET_MODE_BITSIZE (mode); | |
3615 | } | |
3616 | ||
3617 | if (mode_width > HOST_BITS_PER_WIDE_INT) | |
3618 | /* Our only callers in this case look for single bit values. So | |
3619 | just return the mode mask. Those tests will then be false. */ | |
3620 | return nonzero; | |
3621 | ||
3622 | #ifndef WORD_REGISTER_OPERATIONS | |
3623 | /* If MODE is wider than X, but both are a single word for both the host | |
3624 | and target machines, we can compute this from which bits of the | |
3625 | object might be nonzero in its own mode, taking into account the fact | |
3626 | that on many CISC machines, accessing an object in a wider mode | |
3627 | causes the high-order bits to become undefined. So they are | |
3628 | not known to be zero. */ | |
3629 | ||
3630 | if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode | |
3631 | && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD | |
3632 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
3633 | && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x))) | |
3634 | { | |
3635 | nonzero &= cached_nonzero_bits (x, GET_MODE (x), | |
3636 | known_x, known_mode, known_ret); | |
3637 | nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)); | |
3638 | return nonzero; | |
3639 | } | |
3640 | #endif | |
3641 | ||
3642 | code = GET_CODE (x); | |
3643 | switch (code) | |
3644 | { | |
3645 | case REG: | |
3646 | #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) | |
3647 | /* If pointers extend unsigned and this is a pointer in Pmode, say that | |
3648 | all the bits above ptr_mode are known to be zero. */ | |
3649 | if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode | |
3650 | && REG_POINTER (x)) | |
3651 | nonzero &= GET_MODE_MASK (ptr_mode); | |
3652 | #endif | |
3653 | ||
3654 | /* Include declared information about alignment of pointers. */ | |
3655 | /* ??? We don't properly preserve REG_POINTER changes across | |
3656 | pointer-to-integer casts, so we can't trust it except for | |
3657 | things that we know must be pointers. See execute/960116-1.c. */ | |
3658 | if ((x == stack_pointer_rtx | |
3659 | || x == frame_pointer_rtx | |
3660 | || x == arg_pointer_rtx) | |
3661 | && REGNO_POINTER_ALIGN (REGNO (x))) | |
3662 | { | |
3663 | unsigned HOST_WIDE_INT alignment | |
3664 | = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT; | |
3665 | ||
3666 | #ifdef PUSH_ROUNDING | |
3667 | /* If PUSH_ROUNDING is defined, it is possible for the | |
3668 | stack to be momentarily aligned only to that amount, | |
3669 | so we pick the least alignment. */ | |
3670 | if (x == stack_pointer_rtx && PUSH_ARGS) | |
3671 | alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1), | |
3672 | alignment); | |
3673 | #endif | |
3674 | ||
3675 | nonzero &= ~(alignment - 1); | |
3676 | } | |
3677 | ||
3678 | { | |
3679 | unsigned HOST_WIDE_INT nonzero_for_hook = nonzero; | |
3680 | rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x, | |
3681 | known_mode, known_ret, | |
3682 | &nonzero_for_hook); | |
3683 | ||
3684 | if (new) | |
3685 | nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x, | |
3686 | known_mode, known_ret); | |
3687 | ||
3688 | return nonzero_for_hook; | |
3689 | } | |
3690 | ||
3691 | case CONST_INT: | |
3692 | #ifdef SHORT_IMMEDIATES_SIGN_EXTEND | |
3693 | /* If X is negative in MODE, sign-extend the value. */ | |
3694 | if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD | |
3695 | && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1)))) | |
3696 | return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width)); | |
3697 | #endif | |
3698 | ||
3699 | return INTVAL (x); | |
3700 | ||
3701 | case MEM: | |
3702 | #ifdef LOAD_EXTEND_OP | |
3703 | /* In many, if not most, RISC machines, reading a byte from memory | |
3704 | zeros the rest of the register. Noticing that fact saves a lot | |
3705 | of extra zero-extends. */ | |
3706 | if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND) | |
3707 | nonzero &= GET_MODE_MASK (GET_MODE (x)); | |
3708 | #endif | |
3709 | break; | |
3710 | ||
3711 | case EQ: case NE: | |
3712 | case UNEQ: case LTGT: | |
3713 | case GT: case GTU: case UNGT: | |
3714 | case LT: case LTU: case UNLT: | |
3715 | case GE: case GEU: case UNGE: | |
3716 | case LE: case LEU: case UNLE: | |
3717 | case UNORDERED: case ORDERED: | |
2f93eea8 PB |
3718 | /* If this produces an integer result, we know which bits are set. |
3719 | Code here used to clear bits outside the mode of X, but that is | |
3720 | now done above. */ | |
505ac507 RH |
3721 | /* Mind that MODE is the mode the caller wants to look at this |
3722 | operation in, and not the actual operation mode. We can wind | |
3723 | up with (subreg:DI (gt:V4HI x y)), and we don't have anything | |
3724 | that describes the results of a vector compare. */ | |
3725 | if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT | |
2f93eea8 PB |
3726 | && mode_width <= HOST_BITS_PER_WIDE_INT) |
3727 | nonzero = STORE_FLAG_VALUE; | |
3728 | break; | |
3729 | ||
3730 | case NEG: | |
3731 | #if 0 | |
3732 | /* Disabled to avoid exponential mutual recursion between nonzero_bits | |
3733 | and num_sign_bit_copies. */ | |
3734 | if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) | |
3735 | == GET_MODE_BITSIZE (GET_MODE (x))) | |
3736 | nonzero = 1; | |
3737 | #endif | |
3738 | ||
3739 | if (GET_MODE_SIZE (GET_MODE (x)) < mode_width) | |
3740 | nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x))); | |
3741 | break; | |
3742 | ||
3743 | case ABS: | |
3744 | #if 0 | |
3745 | /* Disabled to avoid exponential mutual recursion between nonzero_bits | |
3746 | and num_sign_bit_copies. */ | |
3747 | if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) | |
3748 | == GET_MODE_BITSIZE (GET_MODE (x))) | |
3749 | nonzero = 1; | |
3750 | #endif | |
3751 | break; | |
3752 | ||
3753 | case TRUNCATE: | |
3754 | nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode, | |
3755 | known_x, known_mode, known_ret) | |
3756 | & GET_MODE_MASK (mode)); | |
3757 | break; | |
3758 | ||
3759 | case ZERO_EXTEND: | |
3760 | nonzero &= cached_nonzero_bits (XEXP (x, 0), mode, | |
3761 | known_x, known_mode, known_ret); | |
3762 | if (GET_MODE (XEXP (x, 0)) != VOIDmode) | |
3763 | nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); | |
3764 | break; | |
3765 | ||
3766 | case SIGN_EXTEND: | |
3767 | /* If the sign bit is known clear, this is the same as ZERO_EXTEND. | |
3768 | Otherwise, show all the bits in the outer mode but not the inner | |
3769 | may be nonzero. */ | |
3770 | inner_nz = cached_nonzero_bits (XEXP (x, 0), mode, | |
3771 | known_x, known_mode, known_ret); | |
3772 | if (GET_MODE (XEXP (x, 0)) != VOIDmode) | |
3773 | { | |
3774 | inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); | |
3775 | if (inner_nz | |
3776 | & (((HOST_WIDE_INT) 1 | |
3777 | << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))) | |
3778 | inner_nz |= (GET_MODE_MASK (mode) | |
3779 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))); | |
3780 | } | |
3781 | ||
3782 | nonzero &= inner_nz; | |
3783 | break; | |
3784 | ||
3785 | case AND: | |
3786 | nonzero &= cached_nonzero_bits (XEXP (x, 0), mode, | |
3787 | known_x, known_mode, known_ret) | |
3788 | & cached_nonzero_bits (XEXP (x, 1), mode, | |
3789 | known_x, known_mode, known_ret); | |
3790 | break; | |
3791 | ||
3792 | case XOR: case IOR: | |
3793 | case UMIN: case UMAX: case SMIN: case SMAX: | |
3794 | { | |
3795 | unsigned HOST_WIDE_INT nonzero0 = | |
3796 | cached_nonzero_bits (XEXP (x, 0), mode, | |
3797 | known_x, known_mode, known_ret); | |
3798 | ||
3799 | /* Don't call nonzero_bits for the second time if it cannot change | |
3800 | anything. */ | |
3801 | if ((nonzero & nonzero0) != nonzero) | |
3802 | nonzero &= nonzero0 | |
3803 | | cached_nonzero_bits (XEXP (x, 1), mode, | |
3804 | known_x, known_mode, known_ret); | |
3805 | } | |
3806 | break; | |
3807 | ||
3808 | case PLUS: case MINUS: | |
3809 | case MULT: | |
3810 | case DIV: case UDIV: | |
3811 | case MOD: case UMOD: | |
3812 | /* We can apply the rules of arithmetic to compute the number of | |
3813 | high- and low-order zero bits of these operations. We start by | |
3814 | computing the width (position of the highest-order nonzero bit) | |
3815 | and the number of low-order zero bits for each value. */ | |
3816 | { | |
3817 | unsigned HOST_WIDE_INT nz0 = | |
3818 | cached_nonzero_bits (XEXP (x, 0), mode, | |
3819 | known_x, known_mode, known_ret); | |
3820 | unsigned HOST_WIDE_INT nz1 = | |
3821 | cached_nonzero_bits (XEXP (x, 1), mode, | |
3822 | known_x, known_mode, known_ret); | |
3823 | int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1; | |
3824 | int width0 = floor_log2 (nz0) + 1; | |
3825 | int width1 = floor_log2 (nz1) + 1; | |
3826 | int low0 = floor_log2 (nz0 & -nz0); | |
3827 | int low1 = floor_log2 (nz1 & -nz1); | |
3828 | HOST_WIDE_INT op0_maybe_minusp | |
3829 | = (nz0 & ((HOST_WIDE_INT) 1 << sign_index)); | |
3830 | HOST_WIDE_INT op1_maybe_minusp | |
3831 | = (nz1 & ((HOST_WIDE_INT) 1 << sign_index)); | |
3832 | unsigned int result_width = mode_width; | |
3833 | int result_low = 0; | |
3834 | ||
3835 | switch (code) | |
3836 | { | |
3837 | case PLUS: | |
3838 | result_width = MAX (width0, width1) + 1; | |
3839 | result_low = MIN (low0, low1); | |
3840 | break; | |
3841 | case MINUS: | |
3842 | result_low = MIN (low0, low1); | |
3843 | break; | |
3844 | case MULT: | |
3845 | result_width = width0 + width1; | |
3846 | result_low = low0 + low1; | |
3847 | break; | |
3848 | case DIV: | |
3849 | if (width1 == 0) | |
3850 | break; | |
3851 | if (! op0_maybe_minusp && ! op1_maybe_minusp) | |
3852 | result_width = width0; | |
3853 | break; | |
3854 | case UDIV: | |
3855 | if (width1 == 0) | |
3856 | break; | |
3857 | result_width = width0; | |
3858 | break; | |
3859 | case MOD: | |
3860 | if (width1 == 0) | |
3861 | break; | |
3862 | if (! op0_maybe_minusp && ! op1_maybe_minusp) | |
3863 | result_width = MIN (width0, width1); | |
3864 | result_low = MIN (low0, low1); | |
3865 | break; | |
3866 | case UMOD: | |
3867 | if (width1 == 0) | |
3868 | break; | |
3869 | result_width = MIN (width0, width1); | |
3870 | result_low = MIN (low0, low1); | |
3871 | break; | |
3872 | default: | |
41374e13 | 3873 | gcc_unreachable (); |
2f93eea8 PB |
3874 | } |
3875 | ||
3876 | if (result_width < mode_width) | |
3877 | nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1; | |
3878 | ||
3879 | if (result_low > 0) | |
3880 | nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1); | |
3881 | ||
3882 | #ifdef POINTERS_EXTEND_UNSIGNED | |
3883 | /* If pointers extend unsigned and this is an addition or subtraction | |
3884 | to a pointer in Pmode, all the bits above ptr_mode are known to be | |
3885 | zero. */ | |
3886 | if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode | |
3887 | && (code == PLUS || code == MINUS) | |
f8cfc6aa | 3888 | && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0))) |
2f93eea8 PB |
3889 | nonzero &= GET_MODE_MASK (ptr_mode); |
3890 | #endif | |
3891 | } | |
3892 | break; | |
3893 | ||
3894 | case ZERO_EXTRACT: | |
3895 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
3896 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) | |
3897 | nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1; | |
3898 | break; | |
3899 | ||
3900 | case SUBREG: | |
3901 | /* If this is a SUBREG formed for a promoted variable that has | |
3902 | been zero-extended, we know that at least the high-order bits | |
3903 | are zero, though others might be too. */ | |
3904 | ||
3905 | if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0) | |
3906 | nonzero = GET_MODE_MASK (GET_MODE (x)) | |
3907 | & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x), | |
3908 | known_x, known_mode, known_ret); | |
3909 | ||
3910 | /* If the inner mode is a single word for both the host and target | |
3911 | machines, we can compute this from which bits of the inner | |
3912 | object might be nonzero. */ | |
3913 | if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD | |
3914 | && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) | |
3915 | <= HOST_BITS_PER_WIDE_INT)) | |
3916 | { | |
3917 | nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode, | |
3918 | known_x, known_mode, known_ret); | |
3919 | ||
3920 | #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP) | |
3921 | /* If this is a typical RISC machine, we only have to worry | |
3922 | about the way loads are extended. */ | |
3923 | if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND | |
3924 | ? (((nonzero | |
3925 | & (((unsigned HOST_WIDE_INT) 1 | |
3926 | << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1)))) | |
3927 | != 0)) | |
3928 | : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND) | |
3c0cb5de | 3929 | || !MEM_P (SUBREG_REG (x))) |
2f93eea8 PB |
3930 | #endif |
3931 | { | |
3932 | /* On many CISC machines, accessing an object in a wider mode | |
3933 | causes the high-order bits to become undefined. So they are | |
3934 | not known to be zero. */ | |
3935 | if (GET_MODE_SIZE (GET_MODE (x)) | |
3936 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
3937 | nonzero |= (GET_MODE_MASK (GET_MODE (x)) | |
3938 | & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))); | |
3939 | } | |
3940 | } | |
3941 | break; | |
3942 | ||
3943 | case ASHIFTRT: | |
3944 | case LSHIFTRT: | |
3945 | case ASHIFT: | |
3946 | case ROTATE: | |
3947 | /* The nonzero bits are in two classes: any bits within MODE | |
3948 | that aren't in GET_MODE (x) are always significant. The rest of the | |
3949 | nonzero bits are those that are significant in the operand of | |
3950 | the shift when shifted the appropriate number of bits. This | |
3951 | shows that high-order bits are cleared by the right shift and | |
3952 | low-order bits by left shifts. */ | |
3953 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
3954 | && INTVAL (XEXP (x, 1)) >= 0 | |
3955 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) | |
3956 | { | |
3957 | enum machine_mode inner_mode = GET_MODE (x); | |
3958 | unsigned int width = GET_MODE_BITSIZE (inner_mode); | |
3959 | int count = INTVAL (XEXP (x, 1)); | |
3960 | unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode); | |
3961 | unsigned HOST_WIDE_INT op_nonzero = | |
3962 | cached_nonzero_bits (XEXP (x, 0), mode, | |
3963 | known_x, known_mode, known_ret); | |
3964 | unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask; | |
3965 | unsigned HOST_WIDE_INT outer = 0; | |
3966 | ||
3967 | if (mode_width > width) | |
3968 | outer = (op_nonzero & nonzero & ~mode_mask); | |
3969 | ||
3970 | if (code == LSHIFTRT) | |
3971 | inner >>= count; | |
3972 | else if (code == ASHIFTRT) | |
3973 | { | |
3974 | inner >>= count; | |
3975 | ||
3976 | /* If the sign bit may have been nonzero before the shift, we | |
3977 | need to mark all the places it could have been copied to | |
3978 | by the shift as possibly nonzero. */ | |
3979 | if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count))) | |
3980 | inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count); | |
3981 | } | |
3982 | else if (code == ASHIFT) | |
3983 | inner <<= count; | |
3984 | else | |
3985 | inner = ((inner << (count % width) | |
3986 | | (inner >> (width - (count % width)))) & mode_mask); | |
3987 | ||
3988 | nonzero &= (outer | inner); | |
3989 | } | |
3990 | break; | |
3991 | ||
3992 | case FFS: | |
3993 | case POPCOUNT: | |
3994 | /* This is at most the number of bits in the mode. */ | |
3995 | nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1; | |
3996 | break; | |
3997 | ||
3998 | case CLZ: | |
3999 | /* If CLZ has a known value at zero, then the nonzero bits are | |
4000 | that value, plus the number of bits in the mode minus one. */ | |
4001 | if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero)) | |
4002 | nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1; | |
4003 | else | |
4004 | nonzero = -1; | |
4005 | break; | |
4006 | ||
4007 | case CTZ: | |
4008 | /* If CTZ has a known value at zero, then the nonzero bits are | |
4009 | that value, plus the number of bits in the mode minus one. */ | |
4010 | if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero)) | |
4011 | nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1; | |
4012 | else | |
4013 | nonzero = -1; | |
4014 | break; | |
4015 | ||
4016 | case PARITY: | |
4017 | nonzero = 1; | |
4018 | break; | |
4019 | ||
4020 | case IF_THEN_ELSE: | |
4021 | { | |
4022 | unsigned HOST_WIDE_INT nonzero_true = | |
4023 | cached_nonzero_bits (XEXP (x, 1), mode, | |
4024 | known_x, known_mode, known_ret); | |
4025 | ||
4026 | /* Don't call nonzero_bits for the second time if it cannot change | |
4027 | anything. */ | |
4028 | if ((nonzero & nonzero_true) != nonzero) | |
4029 | nonzero &= nonzero_true | |
4030 | | cached_nonzero_bits (XEXP (x, 2), mode, | |
4031 | known_x, known_mode, known_ret); | |
4032 | } | |
4033 | break; | |
4034 | ||
4035 | default: | |
4036 | break; | |
4037 | } | |
4038 | ||
4039 | return nonzero; | |
4040 | } | |
4041 | ||
4042 | /* See the macro definition above. */ | |
4043 | #undef cached_num_sign_bit_copies | |
4044 | ||
4045 | \f | |
4046 | /* The function cached_num_sign_bit_copies is a wrapper around | |
4047 | num_sign_bit_copies1. It avoids exponential behavior in | |
4048 | num_sign_bit_copies1 when X has identical subexpressions on the | |
4049 | first or the second level. */ | |
4050 | ||
4051 | static unsigned int | |
fa233e34 | 4052 | cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x, |
2f93eea8 PB |
4053 | enum machine_mode known_mode, |
4054 | unsigned int known_ret) | |
4055 | { | |
4056 | if (x == known_x && mode == known_mode) | |
4057 | return known_ret; | |
4058 | ||
4059 | /* Try to find identical subexpressions. If found call | |
4060 | num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and | |
4061 | the precomputed value for the subexpression as KNOWN_RET. */ | |
4062 | ||
4063 | if (ARITHMETIC_P (x)) | |
4064 | { | |
4065 | rtx x0 = XEXP (x, 0); | |
4066 | rtx x1 = XEXP (x, 1); | |
4067 | ||
4068 | /* Check the first level. */ | |
4069 | if (x0 == x1) | |
4070 | return | |
4071 | num_sign_bit_copies1 (x, mode, x0, mode, | |
4072 | cached_num_sign_bit_copies (x0, mode, known_x, | |
4073 | known_mode, | |
4074 | known_ret)); | |
4075 | ||
4076 | /* Check the second level. */ | |
4077 | if (ARITHMETIC_P (x0) | |
4078 | && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1))) | |
4079 | return | |
4080 | num_sign_bit_copies1 (x, mode, x1, mode, | |
4081 | cached_num_sign_bit_copies (x1, mode, known_x, | |
4082 | known_mode, | |
4083 | known_ret)); | |
4084 | ||
4085 | if (ARITHMETIC_P (x1) | |
4086 | && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1))) | |
4087 | return | |
4088 | num_sign_bit_copies1 (x, mode, x0, mode, | |
4089 | cached_num_sign_bit_copies (x0, mode, known_x, | |
4090 | known_mode, | |
4091 | known_ret)); | |
4092 | } | |
4093 | ||
4094 | return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret); | |
4095 | } | |
4096 | ||
4097 | /* Return the number of bits at the high-order end of X that are known to | |
4098 | be equal to the sign bit. X will be used in mode MODE; if MODE is | |
4099 | VOIDmode, X will be used in its own mode. The returned value will always | |
4100 | be between 1 and the number of bits in MODE. */ | |
4101 | ||
4102 | static unsigned int | |
fa233e34 | 4103 | num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, |
2f93eea8 PB |
4104 | enum machine_mode known_mode, |
4105 | unsigned int known_ret) | |
4106 | { | |
4107 | enum rtx_code code = GET_CODE (x); | |
4108 | unsigned int bitwidth = GET_MODE_BITSIZE (mode); | |
4109 | int num0, num1, result; | |
4110 | unsigned HOST_WIDE_INT nonzero; | |
4111 | ||
4112 | /* If we weren't given a mode, use the mode of X. If the mode is still | |
4113 | VOIDmode, we don't know anything. Likewise if one of the modes is | |
4114 | floating-point. */ | |
4115 | ||
4116 | if (mode == VOIDmode) | |
4117 | mode = GET_MODE (x); | |
4118 | ||
4119 | if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))) | |
4120 | return 1; | |
4121 | ||
4122 | /* For a smaller object, just ignore the high bits. */ | |
4123 | if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x))) | |
4124 | { | |
4125 | num0 = cached_num_sign_bit_copies (x, GET_MODE (x), | |
4126 | known_x, known_mode, known_ret); | |
4127 | return MAX (1, | |
4128 | num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)); | |
4129 | } | |
4130 | ||
4131 | if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x))) | |
4132 | { | |
4133 | #ifndef WORD_REGISTER_OPERATIONS | |
4134 | /* If this machine does not do all register operations on the entire | |
4135 | register and MODE is wider than the mode of X, we can say nothing | |
4136 | at all about the high-order bits. */ | |
4137 | return 1; | |
4138 | #else | |
4139 | /* Likewise on machines that do, if the mode of the object is smaller | |
4140 | than a word and loads of that size don't sign extend, we can say | |
4141 | nothing about the high order bits. */ | |
4142 | if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD | |
4143 | #ifdef LOAD_EXTEND_OP | |
4144 | && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND | |
4145 | #endif | |
4146 | ) | |
4147 | return 1; | |
4148 | #endif | |
4149 | } | |
4150 | ||
4151 | switch (code) | |
4152 | { | |
4153 | case REG: | |
4154 | ||
4155 | #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) | |
4156 | /* If pointers extend signed and this is a pointer in Pmode, say that | |
4157 | all the bits above ptr_mode are known to be sign bit copies. */ | |
4158 | if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode | |
4159 | && REG_POINTER (x)) | |
4160 | return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1; | |
4161 | #endif | |
4162 | ||
4163 | { | |
4164 | unsigned int copies_for_hook = 1, copies = 1; | |
4165 | rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x, | |
4166 | known_mode, known_ret, | |
4167 | &copies_for_hook); | |
4168 | ||
4169 | if (new) | |
4170 | copies = cached_num_sign_bit_copies (new, mode, known_x, | |
4171 | known_mode, known_ret); | |
4172 | ||
4173 | if (copies > 1 || copies_for_hook > 1) | |
4174 | return MAX (copies, copies_for_hook); | |
4175 | ||
4176 | /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */ | |
4177 | } | |
4178 | break; | |
4179 | ||
4180 | case MEM: | |
4181 | #ifdef LOAD_EXTEND_OP | |
4182 | /* Some RISC machines sign-extend all loads of smaller than a word. */ | |
4183 | if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND) | |
4184 | return MAX (1, ((int) bitwidth | |
4185 | - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1)); | |
4186 | #endif | |
4187 | break; | |
4188 | ||
4189 | case CONST_INT: | |
4190 | /* If the constant is negative, take its 1's complement and remask. | |
4191 | Then see how many zero bits we have. */ | |
4192 | nonzero = INTVAL (x) & GET_MODE_MASK (mode); | |
4193 | if (bitwidth <= HOST_BITS_PER_WIDE_INT | |
4194 | && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) | |
4195 | nonzero = (~nonzero) & GET_MODE_MASK (mode); | |
4196 | ||
4197 | return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1); | |
4198 | ||
4199 | case SUBREG: | |
4200 | /* If this is a SUBREG for a promoted object that is sign-extended | |
4201 | and we are looking at it in a wider mode, we know that at least the | |
4202 | high-order bits are known to be sign bit copies. */ | |
4203 | ||
4204 | if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x)) | |
4205 | { | |
4206 | num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode, | |
4207 | known_x, known_mode, known_ret); | |
4208 | return MAX ((int) bitwidth | |
4209 | - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1, | |
4210 | num0); | |
4211 | } | |
4212 | ||
4213 | /* For a smaller object, just ignore the high bits. */ | |
4214 | if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))) | |
4215 | { | |
4216 | num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode, | |
4217 | known_x, known_mode, known_ret); | |
4218 | return MAX (1, (num0 | |
4219 | - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) | |
4220 | - bitwidth))); | |
4221 | } | |
4222 | ||
4223 | #ifdef WORD_REGISTER_OPERATIONS | |
4224 | #ifdef LOAD_EXTEND_OP | |
4225 | /* For paradoxical SUBREGs on machines where all register operations | |
4226 | affect the entire register, just look inside. Note that we are | |
4227 | passing MODE to the recursive call, so the number of sign bit copies | |
4228 | will remain relative to that mode, not the inner mode. */ | |
4229 | ||
4230 | /* This works only if loads sign extend. Otherwise, if we get a | |
4231 | reload for the inner part, it may be loaded from the stack, and | |
4232 | then we lose all sign bit copies that existed before the store | |
4233 | to the stack. */ | |
4234 | ||
4235 | if ((GET_MODE_SIZE (GET_MODE (x)) | |
4236 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
4237 | && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND | |
3c0cb5de | 4238 | && MEM_P (SUBREG_REG (x))) |
2f93eea8 PB |
4239 | return cached_num_sign_bit_copies (SUBREG_REG (x), mode, |
4240 | known_x, known_mode, known_ret); | |
4241 | #endif | |
4242 | #endif | |
4243 | break; | |
4244 | ||
4245 | case SIGN_EXTRACT: | |
4246 | if (GET_CODE (XEXP (x, 1)) == CONST_INT) | |
4247 | return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1))); | |
4248 | break; | |
4249 | ||
4250 | case SIGN_EXTEND: | |
4251 | return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) | |
4252 | + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode, | |
4253 | known_x, known_mode, known_ret)); | |
4254 | ||
4255 | case TRUNCATE: | |
4256 | /* For a smaller object, just ignore the high bits. */ | |
4257 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode, | |
4258 | known_x, known_mode, known_ret); | |
4259 | return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) | |
4260 | - bitwidth))); | |
4261 | ||
4262 | case NOT: | |
4263 | return cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4264 | known_x, known_mode, known_ret); | |
4265 | ||
4266 | case ROTATE: case ROTATERT: | |
4267 | /* If we are rotating left by a number of bits less than the number | |
4268 | of sign bit copies, we can just subtract that amount from the | |
4269 | number. */ | |
4270 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
4271 | && INTVAL (XEXP (x, 1)) >= 0 | |
4272 | && INTVAL (XEXP (x, 1)) < (int) bitwidth) | |
4273 | { | |
4274 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4275 | known_x, known_mode, known_ret); | |
4276 | return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1)) | |
4277 | : (int) bitwidth - INTVAL (XEXP (x, 1)))); | |
4278 | } | |
4279 | break; | |
4280 | ||
4281 | case NEG: | |
4282 | /* In general, this subtracts one sign bit copy. But if the value | |
4283 | is known to be positive, the number of sign bit copies is the | |
4284 | same as that of the input. Finally, if the input has just one bit | |
4285 | that might be nonzero, all the bits are copies of the sign bit. */ | |
4286 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4287 | known_x, known_mode, known_ret); | |
4288 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
4289 | return num0 > 1 ? num0 - 1 : 1; | |
4290 | ||
4291 | nonzero = nonzero_bits (XEXP (x, 0), mode); | |
4292 | if (nonzero == 1) | |
4293 | return bitwidth; | |
4294 | ||
4295 | if (num0 > 1 | |
4296 | && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero)) | |
4297 | num0--; | |
4298 | ||
4299 | return num0; | |
4300 | ||
4301 | case IOR: case AND: case XOR: | |
4302 | case SMIN: case SMAX: case UMIN: case UMAX: | |
4303 | /* Logical operations will preserve the number of sign-bit copies. | |
4304 | MIN and MAX operations always return one of the operands. */ | |
4305 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4306 | known_x, known_mode, known_ret); | |
4307 | num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4308 | known_x, known_mode, known_ret); | |
22761ec3 AN |
4309 | |
4310 | /* If num1 is clearing some of the top bits then regardless of | |
4311 | the other term, we are guaranteed to have at least that many | |
4312 | high-order zero bits. */ | |
4313 | if (code == AND | |
4314 | && num1 > 1 | |
4315 | && bitwidth <= HOST_BITS_PER_WIDE_INT | |
4316 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
4317 | && !(INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1)))) | |
4318 | return num1; | |
4319 | ||
4320 | /* Similarly for IOR when setting high-order bits. */ | |
4321 | if (code == IOR | |
4322 | && num1 > 1 | |
4323 | && bitwidth <= HOST_BITS_PER_WIDE_INT | |
4324 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
4325 | && (INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1)))) | |
4326 | return num1; | |
4327 | ||
2f93eea8 PB |
4328 | return MIN (num0, num1); |
4329 | ||
4330 | case PLUS: case MINUS: | |
4331 | /* For addition and subtraction, we can have a 1-bit carry. However, | |
4332 | if we are subtracting 1 from a positive number, there will not | |
4333 | be such a carry. Furthermore, if the positive number is known to | |
4334 | be 0 or 1, we know the result is either -1 or 0. */ | |
4335 | ||
4336 | if (code == PLUS && XEXP (x, 1) == constm1_rtx | |
4337 | && bitwidth <= HOST_BITS_PER_WIDE_INT) | |
4338 | { | |
4339 | nonzero = nonzero_bits (XEXP (x, 0), mode); | |
4340 | if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0) | |
4341 | return (nonzero == 1 || nonzero == 0 ? bitwidth | |
4342 | : bitwidth - floor_log2 (nonzero) - 1); | |
4343 | } | |
4344 | ||
4345 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4346 | known_x, known_mode, known_ret); | |
4347 | num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4348 | known_x, known_mode, known_ret); | |
4349 | result = MAX (1, MIN (num0, num1) - 1); | |
4350 | ||
4351 | #ifdef POINTERS_EXTEND_UNSIGNED | |
4352 | /* If pointers extend signed and this is an addition or subtraction | |
4353 | to a pointer in Pmode, all the bits above ptr_mode are known to be | |
4354 | sign bit copies. */ | |
4355 | if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode | |
4356 | && (code == PLUS || code == MINUS) | |
f8cfc6aa | 4357 | && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0))) |
2f93eea8 PB |
4358 | result = MAX ((int) (GET_MODE_BITSIZE (Pmode) |
4359 | - GET_MODE_BITSIZE (ptr_mode) + 1), | |
4360 | result); | |
4361 | #endif | |
4362 | return result; | |
4363 | ||
4364 | case MULT: | |
4365 | /* The number of bits of the product is the sum of the number of | |
4366 | bits of both terms. However, unless one of the terms if known | |
4367 | to be positive, we must allow for an additional bit since negating | |
4368 | a negative number can remove one sign bit copy. */ | |
4369 | ||
4370 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4371 | known_x, known_mode, known_ret); | |
4372 | num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4373 | known_x, known_mode, known_ret); | |
4374 | ||
4375 | result = bitwidth - (bitwidth - num0) - (bitwidth - num1); | |
4376 | if (result > 0 | |
4377 | && (bitwidth > HOST_BITS_PER_WIDE_INT | |
4378 | || (((nonzero_bits (XEXP (x, 0), mode) | |
4379 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) | |
4380 | && ((nonzero_bits (XEXP (x, 1), mode) | |
4381 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)))) | |
4382 | result--; | |
4383 | ||
4384 | return MAX (1, result); | |
4385 | ||
4386 | case UDIV: | |
4387 | /* The result must be <= the first operand. If the first operand | |
4388 | has the high bit set, we know nothing about the number of sign | |
4389 | bit copies. */ | |
4390 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
4391 | return 1; | |
4392 | else if ((nonzero_bits (XEXP (x, 0), mode) | |
4393 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) | |
4394 | return 1; | |
4395 | else | |
4396 | return cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4397 | known_x, known_mode, known_ret); | |
4398 | ||
4399 | case UMOD: | |
4400 | /* The result must be <= the second operand. */ | |
4401 | return cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4402 | known_x, known_mode, known_ret); | |
4403 | ||
4404 | case DIV: | |
4405 | /* Similar to unsigned division, except that we have to worry about | |
4406 | the case where the divisor is negative, in which case we have | |
4407 | to add 1. */ | |
4408 | result = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4409 | known_x, known_mode, known_ret); | |
4410 | if (result > 1 | |
4411 | && (bitwidth > HOST_BITS_PER_WIDE_INT | |
4412 | || (nonzero_bits (XEXP (x, 1), mode) | |
4413 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) | |
4414 | result--; | |
4415 | ||
4416 | return result; | |
4417 | ||
4418 | case MOD: | |
4419 | result = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4420 | known_x, known_mode, known_ret); | |
4421 | if (result > 1 | |
4422 | && (bitwidth > HOST_BITS_PER_WIDE_INT | |
4423 | || (nonzero_bits (XEXP (x, 1), mode) | |
4424 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) | |
4425 | result--; | |
4426 | ||
4427 | return result; | |
4428 | ||
4429 | case ASHIFTRT: | |
4430 | /* Shifts by a constant add to the number of bits equal to the | |
4431 | sign bit. */ | |
4432 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4433 | known_x, known_mode, known_ret); | |
4434 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
4435 | && INTVAL (XEXP (x, 1)) > 0) | |
4436 | num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1))); | |
4437 | ||
4438 | return num0; | |
4439 | ||
4440 | case ASHIFT: | |
4441 | /* Left shifts destroy copies. */ | |
4442 | if (GET_CODE (XEXP (x, 1)) != CONST_INT | |
4443 | || INTVAL (XEXP (x, 1)) < 0 | |
4444 | || INTVAL (XEXP (x, 1)) >= (int) bitwidth) | |
4445 | return 1; | |
4446 | ||
4447 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4448 | known_x, known_mode, known_ret); | |
4449 | return MAX (1, num0 - INTVAL (XEXP (x, 1))); | |
4450 | ||
4451 | case IF_THEN_ELSE: | |
4452 | num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4453 | known_x, known_mode, known_ret); | |
4454 | num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode, | |
4455 | known_x, known_mode, known_ret); | |
4456 | return MIN (num0, num1); | |
4457 | ||
4458 | case EQ: case NE: case GE: case GT: case LE: case LT: | |
4459 | case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT: | |
4460 | case GEU: case GTU: case LEU: case LTU: | |
4461 | case UNORDERED: case ORDERED: | |
4462 | /* If the constant is negative, take its 1's complement and remask. | |
4463 | Then see how many zero bits we have. */ | |
4464 | nonzero = STORE_FLAG_VALUE; | |
4465 | if (bitwidth <= HOST_BITS_PER_WIDE_INT | |
4466 | && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) | |
4467 | nonzero = (~nonzero) & GET_MODE_MASK (mode); | |
4468 | ||
4469 | return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1); | |
4470 | ||
4471 | default: | |
4472 | break; | |
4473 | } | |
4474 | ||
4475 | /* If we haven't been able to figure it out by one of the above rules, | |
4476 | see if some of the high-order bits are known to be zero. If so, | |
4477 | count those bits and return one less than that amount. If we can't | |
4478 | safely compute the mask for this mode, always return BITWIDTH. */ | |
4479 | ||
4480 | bitwidth = GET_MODE_BITSIZE (mode); | |
4481 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
4482 | return 1; | |
4483 | ||
4484 | nonzero = nonzero_bits (x, mode); | |
4485 | return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1)) | |
4486 | ? 1 : bitwidth - floor_log2 (nonzero) - 1; | |
4487 | } | |
6fd21094 RS |
4488 | |
4489 | /* Calculate the rtx_cost of a single instruction. A return value of | |
4490 | zero indicates an instruction pattern without a known cost. */ | |
4491 | ||
4492 | int | |
4493 | insn_rtx_cost (rtx pat) | |
4494 | { | |
4495 | int i, cost; | |
4496 | rtx set; | |
4497 | ||
4498 | /* Extract the single set rtx from the instruction pattern. | |
4499 | We can't use single_set since we only have the pattern. */ | |
4500 | if (GET_CODE (pat) == SET) | |
4501 | set = pat; | |
4502 | else if (GET_CODE (pat) == PARALLEL) | |
4503 | { | |
4504 | set = NULL_RTX; | |
4505 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
4506 | { | |
4507 | rtx x = XVECEXP (pat, 0, i); | |
4508 | if (GET_CODE (x) == SET) | |
4509 | { | |
4510 | if (set) | |
4511 | return 0; | |
4512 | set = x; | |
4513 | } | |
4514 | } | |
4515 | if (!set) | |
4516 | return 0; | |
4517 | } | |
4518 | else | |
4519 | return 0; | |
4520 | ||
4521 | cost = rtx_cost (SET_SRC (set), SET); | |
4522 | return cost > 0 ? cost : COSTS_N_INSNS (1); | |
4523 | } | |
75473b02 SB |
4524 | |
4525 | /* Given an insn INSN and condition COND, return the condition in a | |
4526 | canonical form to simplify testing by callers. Specifically: | |
4527 | ||
4528 | (1) The code will always be a comparison operation (EQ, NE, GT, etc.). | |
4529 | (2) Both operands will be machine operands; (cc0) will have been replaced. | |
4530 | (3) If an operand is a constant, it will be the second operand. | |
4531 | (4) (LE x const) will be replaced with (LT x <const+1>) and similarly | |
4532 | for GE, GEU, and LEU. | |
4533 | ||
4534 | If the condition cannot be understood, or is an inequality floating-point | |
4535 | comparison which needs to be reversed, 0 will be returned. | |
4536 | ||
4537 | If REVERSE is nonzero, then reverse the condition prior to canonizing it. | |
4538 | ||
4539 | If EARLIEST is nonzero, it is a pointer to a place where the earliest | |
4540 | insn used in locating the condition was found. If a replacement test | |
4541 | of the condition is desired, it should be placed in front of that | |
4542 | insn and we will be sure that the inputs are still valid. | |
4543 | ||
4544 | If WANT_REG is nonzero, we wish the condition to be relative to that | |
4545 | register, if possible. Therefore, do not canonicalize the condition | |
4546 | further. If ALLOW_CC_MODE is nonzero, allow the condition returned | |
4547 | to be a compare to a CC mode register. | |
4548 | ||
4549 | If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST | |
4550 | and at INSN. */ | |
4551 | ||
4552 | rtx | |
4553 | canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest, | |
4554 | rtx want_reg, int allow_cc_mode, int valid_at_insn_p) | |
4555 | { | |
4556 | enum rtx_code code; | |
4557 | rtx prev = insn; | |
f7d504c2 | 4558 | const_rtx set; |
75473b02 SB |
4559 | rtx tem; |
4560 | rtx op0, op1; | |
4561 | int reverse_code = 0; | |
4562 | enum machine_mode mode; | |
569f8d98 | 4563 | basic_block bb = BLOCK_FOR_INSN (insn); |
75473b02 SB |
4564 | |
4565 | code = GET_CODE (cond); | |
4566 | mode = GET_MODE (cond); | |
4567 | op0 = XEXP (cond, 0); | |
4568 | op1 = XEXP (cond, 1); | |
4569 | ||
4570 | if (reverse) | |
4571 | code = reversed_comparison_code (cond, insn); | |
4572 | if (code == UNKNOWN) | |
4573 | return 0; | |
4574 | ||
4575 | if (earliest) | |
4576 | *earliest = insn; | |
4577 | ||
4578 | /* If we are comparing a register with zero, see if the register is set | |
4579 | in the previous insn to a COMPARE or a comparison operation. Perform | |
4580 | the same tests as a function of STORE_FLAG_VALUE as find_comparison_args | |
4581 | in cse.c */ | |
4582 | ||
4583 | while ((GET_RTX_CLASS (code) == RTX_COMPARE | |
4584 | || GET_RTX_CLASS (code) == RTX_COMM_COMPARE) | |
4585 | && op1 == CONST0_RTX (GET_MODE (op0)) | |
4586 | && op0 != want_reg) | |
4587 | { | |
4588 | /* Set nonzero when we find something of interest. */ | |
4589 | rtx x = 0; | |
4590 | ||
4591 | #ifdef HAVE_cc0 | |
4592 | /* If comparison with cc0, import actual comparison from compare | |
4593 | insn. */ | |
4594 | if (op0 == cc0_rtx) | |
4595 | { | |
4596 | if ((prev = prev_nonnote_insn (prev)) == 0 | |
4597 | || !NONJUMP_INSN_P (prev) | |
4598 | || (set = single_set (prev)) == 0 | |
4599 | || SET_DEST (set) != cc0_rtx) | |
4600 | return 0; | |
4601 | ||
4602 | op0 = SET_SRC (set); | |
4603 | op1 = CONST0_RTX (GET_MODE (op0)); | |
4604 | if (earliest) | |
4605 | *earliest = prev; | |
4606 | } | |
4607 | #endif | |
4608 | ||
4609 | /* If this is a COMPARE, pick up the two things being compared. */ | |
4610 | if (GET_CODE (op0) == COMPARE) | |
4611 | { | |
4612 | op1 = XEXP (op0, 1); | |
4613 | op0 = XEXP (op0, 0); | |
4614 | continue; | |
4615 | } | |
4616 | else if (!REG_P (op0)) | |
4617 | break; | |
4618 | ||
4619 | /* Go back to the previous insn. Stop if it is not an INSN. We also | |
4620 | stop if it isn't a single set or if it has a REG_INC note because | |
4621 | we don't want to bother dealing with it. */ | |
4622 | ||
4623 | if ((prev = prev_nonnote_insn (prev)) == 0 | |
4624 | || !NONJUMP_INSN_P (prev) | |
569f8d98 ZD |
4625 | || FIND_REG_INC_NOTE (prev, NULL_RTX) |
4626 | /* In cfglayout mode, there do not have to be labels at the | |
4627 | beginning of a block, or jumps at the end, so the previous | |
4628 | conditions would not stop us when we reach bb boundary. */ | |
4629 | || BLOCK_FOR_INSN (prev) != bb) | |
75473b02 SB |
4630 | break; |
4631 | ||
4632 | set = set_of (op0, prev); | |
4633 | ||
4634 | if (set | |
4635 | && (GET_CODE (set) != SET | |
4636 | || !rtx_equal_p (SET_DEST (set), op0))) | |
4637 | break; | |
4638 | ||
4639 | /* If this is setting OP0, get what it sets it to if it looks | |
4640 | relevant. */ | |
4641 | if (set) | |
4642 | { | |
4643 | enum machine_mode inner_mode = GET_MODE (SET_DEST (set)); | |
4644 | #ifdef FLOAT_STORE_FLAG_VALUE | |
4645 | REAL_VALUE_TYPE fsfv; | |
4646 | #endif | |
4647 | ||
4648 | /* ??? We may not combine comparisons done in a CCmode with | |
4649 | comparisons not done in a CCmode. This is to aid targets | |
4650 | like Alpha that have an IEEE compliant EQ instruction, and | |
4651 | a non-IEEE compliant BEQ instruction. The use of CCmode is | |
4652 | actually artificial, simply to prevent the combination, but | |
4653 | should not affect other platforms. | |
4654 | ||
4655 | However, we must allow VOIDmode comparisons to match either | |
4656 | CCmode or non-CCmode comparison, because some ports have | |
4657 | modeless comparisons inside branch patterns. | |
4658 | ||
4659 | ??? This mode check should perhaps look more like the mode check | |
4660 | in simplify_comparison in combine. */ | |
4661 | ||
4662 | if ((GET_CODE (SET_SRC (set)) == COMPARE | |
4663 | || (((code == NE | |
4664 | || (code == LT | |
4665 | && GET_MODE_CLASS (inner_mode) == MODE_INT | |
4666 | && (GET_MODE_BITSIZE (inner_mode) | |
4667 | <= HOST_BITS_PER_WIDE_INT) | |
4668 | && (STORE_FLAG_VALUE | |
4669 | & ((HOST_WIDE_INT) 1 | |
4670 | << (GET_MODE_BITSIZE (inner_mode) - 1)))) | |
4671 | #ifdef FLOAT_STORE_FLAG_VALUE | |
4672 | || (code == LT | |
3d8bf70f | 4673 | && SCALAR_FLOAT_MODE_P (inner_mode) |
75473b02 SB |
4674 | && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode), |
4675 | REAL_VALUE_NEGATIVE (fsfv))) | |
4676 | #endif | |
4677 | )) | |
4678 | && COMPARISON_P (SET_SRC (set)))) | |
4679 | && (((GET_MODE_CLASS (mode) == MODE_CC) | |
4680 | == (GET_MODE_CLASS (inner_mode) == MODE_CC)) | |
4681 | || mode == VOIDmode || inner_mode == VOIDmode)) | |
4682 | x = SET_SRC (set); | |
4683 | else if (((code == EQ | |
4684 | || (code == GE | |
4685 | && (GET_MODE_BITSIZE (inner_mode) | |
4686 | <= HOST_BITS_PER_WIDE_INT) | |
4687 | && GET_MODE_CLASS (inner_mode) == MODE_INT | |
4688 | && (STORE_FLAG_VALUE | |
4689 | & ((HOST_WIDE_INT) 1 | |
4690 | << (GET_MODE_BITSIZE (inner_mode) - 1)))) | |
4691 | #ifdef FLOAT_STORE_FLAG_VALUE | |
4692 | || (code == GE | |
3d8bf70f | 4693 | && SCALAR_FLOAT_MODE_P (inner_mode) |
75473b02 SB |
4694 | && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode), |
4695 | REAL_VALUE_NEGATIVE (fsfv))) | |
4696 | #endif | |
4697 | )) | |
4698 | && COMPARISON_P (SET_SRC (set)) | |
4699 | && (((GET_MODE_CLASS (mode) == MODE_CC) | |
4700 | == (GET_MODE_CLASS (inner_mode) == MODE_CC)) | |
4701 | || mode == VOIDmode || inner_mode == VOIDmode)) | |
4702 | ||
4703 | { | |
4704 | reverse_code = 1; | |
4705 | x = SET_SRC (set); | |
4706 | } | |
4707 | else | |
4708 | break; | |
4709 | } | |
4710 | ||
4711 | else if (reg_set_p (op0, prev)) | |
4712 | /* If this sets OP0, but not directly, we have to give up. */ | |
4713 | break; | |
4714 | ||
4715 | if (x) | |
4716 | { | |
4717 | /* If the caller is expecting the condition to be valid at INSN, | |
4718 | make sure X doesn't change before INSN. */ | |
4719 | if (valid_at_insn_p) | |
4720 | if (modified_in_p (x, prev) || modified_between_p (x, prev, insn)) | |
4721 | break; | |
4722 | if (COMPARISON_P (x)) | |
4723 | code = GET_CODE (x); | |
4724 | if (reverse_code) | |
4725 | { | |
4726 | code = reversed_comparison_code (x, prev); | |
4727 | if (code == UNKNOWN) | |
4728 | return 0; | |
4729 | reverse_code = 0; | |
4730 | } | |
4731 | ||
4732 | op0 = XEXP (x, 0), op1 = XEXP (x, 1); | |
4733 | if (earliest) | |
4734 | *earliest = prev; | |
4735 | } | |
4736 | } | |
4737 | ||
4738 | /* If constant is first, put it last. */ | |
4739 | if (CONSTANT_P (op0)) | |
4740 | code = swap_condition (code), tem = op0, op0 = op1, op1 = tem; | |
4741 | ||
4742 | /* If OP0 is the result of a comparison, we weren't able to find what | |
4743 | was really being compared, so fail. */ | |
4744 | if (!allow_cc_mode | |
4745 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC) | |
4746 | return 0; | |
4747 | ||
4748 | /* Canonicalize any ordered comparison with integers involving equality | |
4749 | if we can do computations in the relevant mode and we do not | |
4750 | overflow. */ | |
4751 | ||
4752 | if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC | |
4753 | && GET_CODE (op1) == CONST_INT | |
4754 | && GET_MODE (op0) != VOIDmode | |
4755 | && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) | |
4756 | { | |
4757 | HOST_WIDE_INT const_val = INTVAL (op1); | |
4758 | unsigned HOST_WIDE_INT uconst_val = const_val; | |
4759 | unsigned HOST_WIDE_INT max_val | |
4760 | = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0)); | |
4761 | ||
4762 | switch (code) | |
4763 | { | |
4764 | case LE: | |
4765 | if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1) | |
4766 | code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0)); | |
4767 | break; | |
4768 | ||
4769 | /* When cross-compiling, const_val might be sign-extended from | |
4770 | BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */ | |
4771 | case GE: | |
4772 | if ((HOST_WIDE_INT) (const_val & max_val) | |
4773 | != (((HOST_WIDE_INT) 1 | |
4774 | << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1)))) | |
4775 | code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0)); | |
4776 | break; | |
4777 | ||
4778 | case LEU: | |
4779 | if (uconst_val < max_val) | |
4780 | code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0)); | |
4781 | break; | |
4782 | ||
4783 | case GEU: | |
4784 | if (uconst_val != 0) | |
4785 | code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0)); | |
4786 | break; | |
4787 | ||
4788 | default: | |
4789 | break; | |
4790 | } | |
4791 | } | |
4792 | ||
4793 | /* Never return CC0; return zero instead. */ | |
4794 | if (CC0_P (op0)) | |
4795 | return 0; | |
4796 | ||
4797 | return gen_rtx_fmt_ee (code, VOIDmode, op0, op1); | |
4798 | } | |
4799 | ||
4800 | /* Given a jump insn JUMP, return the condition that will cause it to branch | |
4801 | to its JUMP_LABEL. If the condition cannot be understood, or is an | |
4802 | inequality floating-point comparison which needs to be reversed, 0 will | |
4803 | be returned. | |
4804 | ||
4805 | If EARLIEST is nonzero, it is a pointer to a place where the earliest | |
4806 | insn used in locating the condition was found. If a replacement test | |
4807 | of the condition is desired, it should be placed in front of that | |
4808 | insn and we will be sure that the inputs are still valid. If EARLIEST | |
4809 | is null, the returned condition will be valid at INSN. | |
4810 | ||
4811 | If ALLOW_CC_MODE is nonzero, allow the condition returned to be a | |
4812 | compare CC mode register. | |
4813 | ||
4814 | VALID_AT_INSN_P is the same as for canonicalize_condition. */ | |
4815 | ||
4816 | rtx | |
4817 | get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p) | |
4818 | { | |
4819 | rtx cond; | |
4820 | int reverse; | |
4821 | rtx set; | |
4822 | ||
4823 | /* If this is not a standard conditional jump, we can't parse it. */ | |
4824 | if (!JUMP_P (jump) | |
4825 | || ! any_condjump_p (jump)) | |
4826 | return 0; | |
4827 | set = pc_set (jump); | |
4828 | ||
4829 | cond = XEXP (SET_SRC (set), 0); | |
4830 | ||
4831 | /* If this branches to JUMP_LABEL when the condition is false, reverse | |
4832 | the condition. */ | |
4833 | reverse | |
4834 | = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF | |
4835 | && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump); | |
4836 | ||
4837 | return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX, | |
4838 | allow_cc_mode, valid_at_insn_p); | |
4839 | } | |
4840 | ||
b12cbf2c AN |
4841 | /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on |
4842 | TARGET_MODE_REP_EXTENDED. | |
4843 | ||
4844 | Note that we assume that the property of | |
4845 | TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes | |
4846 | narrower than mode B. I.e., if A is a mode narrower than B then in | |
4847 | order to be able to operate on it in mode B, mode A needs to | |
4848 | satisfy the requirements set by the representation of mode B. */ | |
4849 | ||
4850 | static void | |
4851 | init_num_sign_bit_copies_in_rep (void) | |
4852 | { | |
4853 | enum machine_mode mode, in_mode; | |
4854 | ||
4855 | for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode; | |
4856 | in_mode = GET_MODE_WIDER_MODE (mode)) | |
4857 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode; | |
4858 | mode = GET_MODE_WIDER_MODE (mode)) | |
4859 | { | |
4860 | enum machine_mode i; | |
4861 | ||
4862 | /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED | |
4863 | extends to the next widest mode. */ | |
4864 | gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN | |
4865 | || GET_MODE_WIDER_MODE (mode) == in_mode); | |
4866 | ||
4867 | /* We are in in_mode. Count how many bits outside of mode | |
4868 | have to be copies of the sign-bit. */ | |
4869 | for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i)) | |
4870 | { | |
4871 | enum machine_mode wider = GET_MODE_WIDER_MODE (i); | |
4872 | ||
4873 | if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND | |
4874 | /* We can only check sign-bit copies starting from the | |
4875 | top-bit. In order to be able to check the bits we | |
4876 | have already seen we pretend that subsequent bits | |
4877 | have to be sign-bit copies too. */ | |
4878 | || num_sign_bit_copies_in_rep [in_mode][mode]) | |
4879 | num_sign_bit_copies_in_rep [in_mode][mode] | |
4880 | += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i); | |
4881 | } | |
4882 | } | |
4883 | } | |
4884 | ||
d3b72690 PB |
4885 | /* Suppose that truncation from the machine mode of X to MODE is not a |
4886 | no-op. See if there is anything special about X so that we can | |
4887 | assume it already contains a truncated value of MODE. */ | |
4888 | ||
4889 | bool | |
fa233e34 | 4890 | truncated_to_mode (enum machine_mode mode, const_rtx x) |
d3b72690 | 4891 | { |
b12cbf2c AN |
4892 | /* This register has already been used in MODE without explicit |
4893 | truncation. */ | |
4894 | if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x)) | |
4895 | return true; | |
4896 | ||
4897 | /* See if we already satisfy the requirements of MODE. If yes we | |
4898 | can just switch to MODE. */ | |
4899 | if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode] | |
4900 | && (num_sign_bit_copies (x, GET_MODE (x)) | |
4901 | >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1)) | |
4902 | return true; | |
d3b72690 | 4903 | |
b12cbf2c AN |
4904 | return false; |
4905 | } | |
cf94b0fc PB |
4906 | \f |
4907 | /* Initialize non_rtx_starting_operands, which is used to speed up | |
4908 | for_each_rtx. */ | |
4909 | void | |
4910 | init_rtlanal (void) | |
4911 | { | |
4912 | int i; | |
4913 | for (i = 0; i < NUM_RTX_CODE; i++) | |
4914 | { | |
4915 | const char *format = GET_RTX_FORMAT (i); | |
4916 | const char *first = strpbrk (format, "eEV"); | |
4917 | non_rtx_starting_operands[i] = first ? first - format : -1; | |
4918 | } | |
b12cbf2c AN |
4919 | |
4920 | init_num_sign_bit_copies_in_rep (); | |
cf94b0fc | 4921 | } |
3d8504ac RS |
4922 | \f |
4923 | /* Check whether this is a constant pool constant. */ | |
4924 | bool | |
4925 | constant_pool_constant_p (rtx x) | |
4926 | { | |
4927 | x = avoid_constant_pool_reference (x); | |
4928 | return GET_CODE (x) == CONST_DOUBLE; | |
4929 | } | |
4930 |