Commit | Line | Data |
---|---|---|
af082de3 | 1 | /* Analyze RTL for GNU compiler. |
af841dbd | 2 | Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, |
66647d44 JJ |
3 | 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 |
4 | Free Software Foundation, Inc. | |
2c88418c | 5 | |
1322177d | 6 | This file is part of GCC. |
2c88418c | 7 | |
1322177d LB |
8 | GCC is free software; you can redistribute it and/or modify it under |
9 | the terms of the GNU General Public License as published by the Free | |
9dcd6f09 | 10 | Software Foundation; either version 3, or (at your option) any later |
1322177d | 11 | version. |
2c88418c | 12 | |
1322177d LB |
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 | for more details. | |
2c88418c RS |
17 | |
18 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
19 | along with GCC; see the file COPYING3. If not see |
20 | <http://www.gnu.org/licenses/>. */ | |
2c88418c RS |
21 | |
22 | ||
23 | #include "config.h" | |
670ee920 | 24 | #include "system.h" |
4977bab6 ZW |
25 | #include "coretypes.h" |
26 | #include "tm.h" | |
e35b9579 | 27 | #include "toplev.h" |
2c88418c | 28 | #include "rtl.h" |
3335f1d9 | 29 | #include "hard-reg-set.h" |
bc204393 RH |
30 | #include "insn-config.h" |
31 | #include "recog.h" | |
f894b69b PB |
32 | #include "target.h" |
33 | #include "output.h" | |
91ea4f8d | 34 | #include "tm_p.h" |
f5eb5fd0 | 35 | #include "flags.h" |
52bfebf0 | 36 | #include "real.h" |
66fd46b6 | 37 | #include "regs.h" |
2f93eea8 | 38 | #include "function.h" |
6fb5fa3c | 39 | #include "df.h" |
7ffb5e78 | 40 | #include "tree.h" |
2c88418c | 41 | |
e2373f95 | 42 | /* Forward declarations */ |
7bc980e1 | 43 | static void set_of_1 (rtx, const_rtx, void *); |
f7d504c2 KG |
44 | static bool covers_regno_p (const_rtx, unsigned int); |
45 | static bool covers_regno_no_parallel_p (const_rtx, unsigned int); | |
0c20a65f | 46 | static int rtx_referenced_p_1 (rtx *, void *); |
f7d504c2 | 47 | static int computed_jump_p_1 (const_rtx); |
7bc980e1 | 48 | static void parms_set (rtx, const_rtx, void *); |
2a1777af | 49 | |
fa233e34 KG |
50 | static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode, |
51 | const_rtx, enum machine_mode, | |
2f93eea8 | 52 | unsigned HOST_WIDE_INT); |
fa233e34 KG |
53 | static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode, |
54 | const_rtx, enum machine_mode, | |
2f93eea8 | 55 | unsigned HOST_WIDE_INT); |
fa233e34 | 56 | static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx, |
2f93eea8 PB |
57 | enum machine_mode, |
58 | unsigned int); | |
fa233e34 | 59 | static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx, |
2f93eea8 PB |
60 | enum machine_mode, unsigned int); |
61 | ||
cf94b0fc PB |
62 | /* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or |
63 | -1 if a code has no such operand. */ | |
64 | static int non_rtx_starting_operands[NUM_RTX_CODE]; | |
65 | ||
2c88418c RS |
66 | /* Bit flags that specify the machine subtype we are compiling for. |
67 | Bits are tested using macros TARGET_... defined in the tm.h file | |
68 | and set by `-m...' switches. Must be defined in rtlanal.c. */ | |
69 | ||
70 | int target_flags; | |
b12cbf2c AN |
71 | |
72 | /* Truncation narrows the mode from SOURCE mode to DESTINATION mode. | |
73 | If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is | |
74 | SIGN_EXTEND then while narrowing we also have to enforce the | |
75 | representation and sign-extend the value to mode DESTINATION_REP. | |
76 | ||
77 | If the value is already sign-extended to DESTINATION_REP mode we | |
78 | can just switch to DESTINATION mode on it. For each pair of | |
79 | integral modes SOURCE and DESTINATION, when truncating from SOURCE | |
80 | to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION] | |
81 | contains the number of high-order bits in SOURCE that have to be | |
82 | copies of the sign-bit so that we can do this mode-switch to | |
83 | DESTINATION. */ | |
84 | ||
85 | static unsigned int | |
86 | num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1]; | |
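/* Illustrative sketch only -- not part of the original file.  Assuming the
   table above has been filled in, a caller could ask whether truncating VAL
   from SRC_MODE down to DEST_MODE (both integral modes no wider than
   MAX_MODE_INT) is just a mode switch, i.e. VAL already carries enough
   copies of the sign bit.  The helper name is hypothetical; the real
   consumer of this table lives further down in this file.  */

static bool
sketch_truncation_is_mode_switch_p (const_rtx val,
                                    enum machine_mode dest_mode,
                                    enum machine_mode src_mode)
{
  /* num_sign_bit_copies counts how many high-order bits of VAL are known
     to equal its sign bit in SRC_MODE.  */
  return (num_sign_bit_copies (val, src_mode)
          >= num_sign_bit_copies_in_rep[src_mode][dest_mode]);
}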
2c88418c RS |
87 | \f |
88 | /* Return 1 if the value of X is unstable | |
89 | (would be different at a different point in the program). | |
90 | The frame pointer, arg pointer, etc. are considered stable | |
91 | (within one function) and so is anything marked `unchanging'. */ | |
92 | ||
93 | int | |
f7d504c2 | 94 | rtx_unstable_p (const_rtx x) |
2c88418c | 95 | { |
f7d504c2 | 96 | const RTX_CODE code = GET_CODE (x); |
b3694847 SS |
97 | int i; |
98 | const char *fmt; | |
2c88418c | 99 | |
ae0fb1b9 JW |
100 | switch (code) |
101 | { | |
102 | case MEM: | |
389fdba0 | 103 | return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0)); |
2c88418c | 104 | |
ae0fb1b9 JW |
105 | case CONST: |
106 | case CONST_INT: | |
107 | case CONST_DOUBLE: | |
091a3ac7 | 108 | case CONST_FIXED: |
69ef87e2 | 109 | case CONST_VECTOR: |
ae0fb1b9 JW |
110 | case SYMBOL_REF: |
111 | case LABEL_REF: | |
112 | return 0; | |
2c88418c | 113 | |
ae0fb1b9 JW |
114 | case REG: |
115 | /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */ | |
c0fc376b | 116 | if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx |
3335f1d9 | 117 | /* The arg pointer varies if it is not a fixed register. */ |
389fdba0 | 118 | || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) |
c0fc376b RH |
119 | return 0; |
120 | #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED | |
121 | /* ??? When call-clobbered, the value is stable modulo the restore | |
122 | that must happen after a call. This currently screws up local-alloc | |
123 | into believing that the restore is not needed. */ | |
124 | if (x == pic_offset_table_rtx) | |
125 | return 0; | |
126 | #endif | |
127 | return 1; | |
ae0fb1b9 JW |
128 | |
129 | case ASM_OPERANDS: | |
130 | if (MEM_VOLATILE_P (x)) | |
131 | return 1; | |
132 | ||
5d3cc252 | 133 | /* Fall through. */ |
ae0fb1b9 JW |
134 | |
135 | default: | |
136 | break; | |
137 | } | |
2c88418c RS |
138 | |
139 | fmt = GET_RTX_FORMAT (code); | |
140 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
141 | if (fmt[i] == 'e') | |
9c82ac6b JW |
142 | { |
143 | if (rtx_unstable_p (XEXP (x, i))) | |
144 | return 1; | |
145 | } | |
146 | else if (fmt[i] == 'E') | |
147 | { | |
148 | int j; | |
149 | for (j = 0; j < XVECLEN (x, i); j++) | |
150 | if (rtx_unstable_p (XVECEXP (x, i, j))) | |
151 | return 1; | |
152 | } | |
153 | ||
2c88418c RS |
154 | return 0; |
155 | } | |
156 | ||
157 | /* Return 1 if X has a value that can vary even between two | |
158 | executions of the program. 0 means X can be compared reliably | |
159 | against certain constants or near-constants. | |
e38fe8e0 BS |
160 | FOR_ALIAS is nonzero if we are called from alias analysis; if it is |
161 | zero, we are slightly more conservative. | |
2c88418c RS |
162 | The frame pointer and the arg pointer are considered constant. */ |
163 | ||
4f588890 KG |
164 | bool |
165 | rtx_varies_p (const_rtx x, bool for_alias) | |
2c88418c | 166 | { |
e978d62e | 167 | RTX_CODE code; |
b3694847 SS |
168 | int i; |
169 | const char *fmt; | |
2c88418c | 170 | |
e978d62e PB |
171 | if (!x) |
172 | return 0; | |
173 | ||
174 | code = GET_CODE (x); | |
2c88418c RS |
175 | switch (code) |
176 | { | |
177 | case MEM: | |
389fdba0 | 178 | return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias); |
55efb413 | 179 | |
2c88418c RS |
180 | case CONST: |
181 | case CONST_INT: | |
182 | case CONST_DOUBLE: | |
091a3ac7 | 183 | case CONST_FIXED: |
69ef87e2 | 184 | case CONST_VECTOR: |
2c88418c RS |
185 | case SYMBOL_REF: |
186 | case LABEL_REF: | |
187 | return 0; | |
188 | ||
189 | case REG: | |
190 | /* Note that we have to test for the actual rtx used for the frame | |
191 | and arg pointers and not just the register number in case we have | |
192 | eliminated the frame and/or arg pointer and are using it | |
193 | for pseudos. */ | |
c0fc376b | 194 | if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx |
3335f1d9 JL |
195 | /* The arg pointer varies if it is not a fixed register. */ |
196 | || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) | |
c0fc376b | 197 | return 0; |
e38fe8e0 BS |
198 | if (x == pic_offset_table_rtx |
199 | #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED | |
200 | /* ??? When call-clobbered, the value is stable modulo the restore | |
201 | that must happen after a call. This currently screws up | |
202 | local-alloc into believing that the restore is not needed, so we | |
203 | must return 0 only if we are called from alias analysis. */ | |
204 | && for_alias | |
c0fc376b | 205 | #endif |
e38fe8e0 BS |
206 | ) |
207 | return 0; | |
c0fc376b | 208 | return 1; |
2c88418c RS |
209 | |
210 | case LO_SUM: | |
211 | /* The operand 0 of a LO_SUM is considered constant | |
e7d96a83 JW |
212 | (in fact it is related specifically to operand 1) |
213 | during alias analysis. */ | |
214 | return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias)) | |
215 | || rtx_varies_p (XEXP (x, 1), for_alias); | |
a6a2274a | 216 | |
ae0fb1b9 JW |
217 | case ASM_OPERANDS: |
218 | if (MEM_VOLATILE_P (x)) | |
219 | return 1; | |
220 | ||
5d3cc252 | 221 | /* Fall through. */ |
ae0fb1b9 | 222 | |
e9a25f70 JL |
223 | default: |
224 | break; | |
2c88418c RS |
225 | } |
226 | ||
227 | fmt = GET_RTX_FORMAT (code); | |
228 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
229 | if (fmt[i] == 'e') | |
9c82ac6b | 230 | { |
e38fe8e0 | 231 | if (rtx_varies_p (XEXP (x, i), for_alias)) |
9c82ac6b JW |
232 | return 1; |
233 | } | |
234 | else if (fmt[i] == 'E') | |
235 | { | |
236 | int j; | |
237 | for (j = 0; j < XVECLEN (x, i); j++) | |
e38fe8e0 | 238 | if (rtx_varies_p (XVECEXP (x, i, j), for_alias)) |
9c82ac6b JW |
239 | return 1; |
240 | } | |
241 | ||
2c88418c RS |
242 | return 0; |
243 | } | |
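/* Illustrative sketch only -- not part of the original file, and the helper
   name is hypothetical.  A typical caller only trusts an address comparison
   when neither address can change between two executions of the code;
   passing false for FOR_ALIAS keeps the stricter, non-alias-analysis
   behavior described above.  */

static bool
sketch_addresses_comparable_p (const_rtx x, const_rtx y)
{
  return !rtx_varies_p (x, false) && !rtx_varies_p (y, false);
}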
244 | ||
2358ff91 EB |
245 | /* Return nonzero if the use of X as an address in a MEM can cause a trap. |
246 | MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls | |
247 | whether nonzero is returned for unaligned memory accesses on strict | |
248 | alignment machines. */ | |
2c88418c | 249 | |
2358ff91 | 250 | static int |
48e8382e PB |
251 | rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size, |
252 | enum machine_mode mode, bool unaligned_mems) | |
2c88418c | 253 | { |
b3694847 | 254 | enum rtx_code code = GET_CODE (x); |
2c88418c | 255 | |
48e8382e PB |
256 | if (STRICT_ALIGNMENT |
257 | && unaligned_mems | |
258 | && GET_MODE_SIZE (mode) != 0) | |
259 | { | |
260 | HOST_WIDE_INT actual_offset = offset; | |
261 | #ifdef SPARC_STACK_BOUNDARY_HACK | |
262 | /* ??? The SPARC port may claim a STACK_BOUNDARY higher than | |
263 | the real alignment of %sp. However, when it does this, the | |
264 | alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */ | |
265 | if (SPARC_STACK_BOUNDARY_HACK | |
266 | && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx)) | |
267 | actual_offset -= STACK_POINTER_OFFSET; | |
268 | #endif | |
269 | ||
65a74b5d PB |
270 | if (actual_offset % GET_MODE_SIZE (mode) != 0) |
271 | return 1; | |
48e8382e PB |
272 | } |
273 | ||
2c88418c RS |
274 | switch (code) |
275 | { | |
276 | case SYMBOL_REF: | |
48e8382e PB |
277 | if (SYMBOL_REF_WEAK (x)) |
278 | return 1; | |
279 | if (!CONSTANT_POOL_ADDRESS_P (x)) | |
280 | { | |
281 | tree decl; | |
282 | HOST_WIDE_INT decl_size; | |
283 | ||
284 | if (offset < 0) | |
285 | return 1; | |
286 | if (size == 0) | |
287 | size = GET_MODE_SIZE (mode); | |
288 | if (size == 0) | |
289 | return offset != 0; | |
290 | ||
291 | /* If the size of the access or of the symbol is unknown, | |
292 | assume the worst. */ | |
293 | decl = SYMBOL_REF_DECL (x); | |
294 | ||
295 | /* Else check that the access is in bounds. TODO: restructure | |
71c00b5c | 296 | expr_size/tree_expr_size/int_expr_size and just use the latter. */ |
48e8382e PB |
297 | if (!decl) |
298 | decl_size = -1; | |
299 | else if (DECL_P (decl) && DECL_SIZE_UNIT (decl)) | |
300 | decl_size = (host_integerp (DECL_SIZE_UNIT (decl), 0) | |
301 | ? tree_low_cst (DECL_SIZE_UNIT (decl), 0) | |
302 | : -1); | |
303 | else if (TREE_CODE (decl) == STRING_CST) | |
304 | decl_size = TREE_STRING_LENGTH (decl); | |
305 | else if (TYPE_SIZE_UNIT (TREE_TYPE (decl))) | |
306 | decl_size = int_size_in_bytes (TREE_TYPE (decl)); | |
307 | else | |
308 | decl_size = -1; | |
309 | ||
310 | return (decl_size <= 0 ? offset != 0 : offset + size > decl_size); | |
311 | } | |
312 | ||
313 | return 0; | |
ff0b6b99 | 314 | |
2c88418c | 315 | case LABEL_REF: |
2c88418c RS |
316 | return 0; |
317 | ||
318 | case REG: | |
319 | /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */ | |
4f73495e RH |
320 | if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx |
321 | || x == stack_pointer_rtx | |
322 | /* The arg pointer varies if it is not a fixed register. */ | |
323 | || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) | |
324 | return 0; | |
325 | /* All of the virtual frame registers are stack references. */ | |
326 | if (REGNO (x) >= FIRST_VIRTUAL_REGISTER | |
327 | && REGNO (x) <= LAST_VIRTUAL_REGISTER) | |
328 | return 0; | |
329 | return 1; | |
2c88418c RS |
330 | |
331 | case CONST: | |
48e8382e PB |
332 | return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size, |
333 | mode, unaligned_mems); | |
2c88418c RS |
334 | |
335 | case PLUS: | |
2358ff91 | 336 | /* An address is assumed not to trap if: |
48e8382e PB |
337 | - it is the pic register plus a constant. */ |
338 | if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1))) | |
339 | return 0; | |
340 | ||
341 | /* - or it is an address that can't trap plus a constant integer, | |
2358ff91 EB |
342 | with the proper remainder modulo the mode size if we are |
343 | considering unaligned memory references. */ | |
481683e1 | 344 | if (CONST_INT_P (XEXP (x, 1)) |
48e8382e PB |
345 | && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)), |
346 | size, mode, unaligned_mems)) | |
2358ff91 EB |
347 | return 0; |
348 | ||
349 | return 1; | |
2c88418c RS |
350 | |
351 | case LO_SUM: | |
4f73495e | 352 | case PRE_MODIFY: |
48e8382e PB |
353 | return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size, |
354 | mode, unaligned_mems); | |
4f73495e RH |
355 | |
356 | case PRE_DEC: | |
357 | case PRE_INC: | |
358 | case POST_DEC: | |
359 | case POST_INC: | |
360 | case POST_MODIFY: | |
48e8382e PB |
361 | return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size, |
362 | mode, unaligned_mems); | |
4f73495e | 363 | |
e9a25f70 JL |
364 | default: |
365 | break; | |
2c88418c RS |
366 | } |
367 | ||
368 | /* If it isn't one of the cases above, it can cause a trap. */
369 | return 1; | |
370 | } | |
371 | ||
2358ff91 EB |
372 | /* Return nonzero if the use of X as an address in a MEM can cause a trap. */ |
373 | ||
374 | int | |
f7d504c2 | 375 | rtx_addr_can_trap_p (const_rtx x) |
2358ff91 | 376 | { |
48e8382e | 377 | return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false); |
2358ff91 EB |
378 | } |
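/* Illustrative sketch only -- not part of the original file; the helper
   name is hypothetical.  The usual question answered with this predicate is
   whether dereferencing a MEM may fault, e.g. before speculatively hoisting
   a load.  Note that the wrapper above deliberately passes no size or mode,
   so it is the conservative form of the check.  */

static bool
sketch_load_may_fault_p (const_rtx mem)
{
  return MEM_P (mem) && rtx_addr_can_trap_p (XEXP (mem, 0));
}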
379 | ||
4977bab6 ZW |
380 | /* Return true if X is an address that is known to not be zero. */ |
381 | ||
382 | bool | |
f7d504c2 | 383 | nonzero_address_p (const_rtx x) |
4977bab6 | 384 | { |
f7d504c2 | 385 | const enum rtx_code code = GET_CODE (x); |
4977bab6 ZW |
386 | |
387 | switch (code) | |
388 | { | |
389 | case SYMBOL_REF: | |
390 | return !SYMBOL_REF_WEAK (x); | |
391 | ||
392 | case LABEL_REF: | |
393 | return true; | |
394 | ||
4977bab6 ZW |
395 | case REG: |
396 | /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */ | |
397 | if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx | |
398 | || x == stack_pointer_rtx | |
399 | || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) | |
400 | return true; | |
401 | /* All of the virtual frame registers are stack references. */ | |
402 | if (REGNO (x) >= FIRST_VIRTUAL_REGISTER | |
403 | && REGNO (x) <= LAST_VIRTUAL_REGISTER) | |
404 | return true; | |
405 | return false; | |
406 | ||
407 | case CONST: | |
408 | return nonzero_address_p (XEXP (x, 0)); | |
409 | ||
410 | case PLUS: | |
481683e1 | 411 | if (CONST_INT_P (XEXP (x, 1))) |
942d7821 | 412 | return nonzero_address_p (XEXP (x, 0)); |
4977bab6 ZW |
413 | /* Handle PIC references. */ |
414 | else if (XEXP (x, 0) == pic_offset_table_rtx | |
415 | && CONSTANT_P (XEXP (x, 1))) | |
416 | return true; | |
417 | return false; | |
418 | ||
419 | case PRE_MODIFY: | |
420 | /* Similar to the above; allow positive offsets. Further, since | |
421 | auto-inc is only allowed in memories, the register must be a | |
422 | pointer. */ | |
481683e1 | 423 | if (CONST_INT_P (XEXP (x, 1)) |
4977bab6 ZW |
424 | && INTVAL (XEXP (x, 1)) > 0) |
425 | return true; | |
426 | return nonzero_address_p (XEXP (x, 0)); | |
427 | ||
428 | case PRE_INC: | |
429 | /* Similarly. Further, the offset is always positive. */ | |
430 | return true; | |
431 | ||
432 | case PRE_DEC: | |
433 | case POST_DEC: | |
434 | case POST_INC: | |
435 | case POST_MODIFY: | |
436 | return nonzero_address_p (XEXP (x, 0)); | |
437 | ||
438 | case LO_SUM: | |
439 | return nonzero_address_p (XEXP (x, 1)); | |
440 | ||
441 | default: | |
442 | break; | |
443 | } | |
444 | ||
445 | /* If it isn't one of the cases above, it might be zero. */
446 | return false; | |
447 | } | |
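/* Illustrative sketch only -- not part of the original file; the helper
   name is hypothetical.  A comparison of ADDR against zero can be folded
   when the address is known to be nonzero.  */

static rtx
sketch_fold_eq_zero (rtx addr)
{
  if (nonzero_address_p (addr))
    return const0_rtx;   /* (eq ADDR 0) is known to be false.  */
  return NULL_RTX;       /* Not known; leave the comparison alone.  */
}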
448 | ||
a6a2274a | 449 | /* Return 1 if X refers to a memory location whose address |
2c88418c | 450 | cannot be compared reliably with constant addresses, |
a6a2274a | 451 | or if X refers to a BLKmode memory object. |
e38fe8e0 BS |
452 | FOR_ALIAS is nonzero if we are called from alias analysis; if it is |
453 | zero, we are slightly more conservative. */ | |
2c88418c | 454 | |
4f588890 KG |
455 | bool |
456 | rtx_addr_varies_p (const_rtx x, bool for_alias) | |
2c88418c | 457 | { |
b3694847 SS |
458 | enum rtx_code code; |
459 | int i; | |
460 | const char *fmt; | |
2c88418c RS |
461 | |
462 | if (x == 0) | |
463 | return 0; | |
464 | ||
465 | code = GET_CODE (x); | |
466 | if (code == MEM) | |
e38fe8e0 | 467 | return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias); |
2c88418c RS |
468 | |
469 | fmt = GET_RTX_FORMAT (code); | |
470 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
471 | if (fmt[i] == 'e') | |
833c0b26 | 472 | { |
e38fe8e0 | 473 | if (rtx_addr_varies_p (XEXP (x, i), for_alias)) |
833c0b26 RK |
474 | return 1; |
475 | } | |
476 | else if (fmt[i] == 'E') | |
477 | { | |
478 | int j; | |
479 | for (j = 0; j < XVECLEN (x, i); j++) | |
e38fe8e0 | 480 | if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias)) |
833c0b26 RK |
481 | return 1; |
482 | } | |
2c88418c RS |
483 | return 0; |
484 | } | |
485 | \f | |
486 | /* Return the value of the integer term in X, if one is apparent; | |
487 | otherwise return 0. | |
488 | Only obvious integer terms are detected. | |
3ef42a0c | 489 | This is used in cse.c with the `related_value' field. */ |
2c88418c | 490 | |
c166a311 | 491 | HOST_WIDE_INT |
f7d504c2 | 492 | get_integer_term (const_rtx x) |
2c88418c RS |
493 | { |
494 | if (GET_CODE (x) == CONST) | |
495 | x = XEXP (x, 0); | |
496 | ||
497 | if (GET_CODE (x) == MINUS | |
481683e1 | 498 | && CONST_INT_P (XEXP (x, 1))) |
2c88418c RS |
499 | return - INTVAL (XEXP (x, 1)); |
500 | if (GET_CODE (x) == PLUS | |
481683e1 | 501 | && CONST_INT_P (XEXP (x, 1))) |
2c88418c RS |
502 | return INTVAL (XEXP (x, 1)); |
503 | return 0; | |
504 | } | |
505 | ||
506 | /* If X is a constant, return the value sans apparent integer term; | |
507 | otherwise return 0. | |
508 | Only obvious integer terms are detected. */ | |
509 | ||
510 | rtx | |
f7d504c2 | 511 | get_related_value (const_rtx x) |
2c88418c RS |
512 | { |
513 | if (GET_CODE (x) != CONST) | |
514 | return 0; | |
515 | x = XEXP (x, 0); | |
516 | if (GET_CODE (x) == PLUS | |
481683e1 | 517 | && CONST_INT_P (XEXP (x, 1))) |
2c88418c RS |
518 | return XEXP (x, 0); |
519 | else if (GET_CODE (x) == MINUS | |
481683e1 | 520 | && CONST_INT_P (XEXP (x, 1))) |
2c88418c RS |
521 | return XEXP (x, 0); |
522 | return 0; | |
523 | } | |
524 | \f | |
7ffb5e78 RS |
525 | /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points |
526 | to somewhere in the same object or object_block as SYMBOL. */ | |
527 | ||
528 | bool | |
f7d504c2 | 529 | offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset) |
7ffb5e78 RS |
530 | { |
531 | tree decl; | |
532 | ||
533 | if (GET_CODE (symbol) != SYMBOL_REF) | |
534 | return false; | |
535 | ||
536 | if (offset == 0) | |
537 | return true; | |
538 | ||
539 | if (offset > 0) | |
540 | { | |
541 | if (CONSTANT_POOL_ADDRESS_P (symbol) | |
542 | && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol))) | |
543 | return true; | |
544 | ||
545 | decl = SYMBOL_REF_DECL (symbol); | |
546 | if (decl && offset < int_size_in_bytes (TREE_TYPE (decl))) | |
547 | return true; | |
548 | } | |
549 | ||
550 | if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) | |
551 | && SYMBOL_REF_BLOCK (symbol) | |
552 | && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0 | |
553 | && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol) | |
554 | < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size)) | |
555 | return true; | |
556 | ||
557 | return false; | |
558 | } | |
559 | ||
560 | /* Split X into a base and a constant offset, storing them in *BASE_OUT | |
561 | and *OFFSET_OUT respectively. */ | |
562 | ||
563 | void | |
564 | split_const (rtx x, rtx *base_out, rtx *offset_out) | |
565 | { | |
566 | if (GET_CODE (x) == CONST) | |
567 | { | |
568 | x = XEXP (x, 0); | |
481683e1 | 569 | if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1))) |
7ffb5e78 RS |
570 | { |
571 | *base_out = XEXP (x, 0); | |
572 | *offset_out = XEXP (x, 1); | |
573 | return; | |
574 | } | |
575 | } | |
576 | *base_out = x; | |
577 | *offset_out = const0_rtx; | |
578 | } | |
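/* Illustrative sketch only -- not part of the original file; the helper
   name is hypothetical.  split_const never fails: if X has no CONST/PLUS
   wrapper it simply hands back X itself and a zero offset, so callers can
   look at both halves unconditionally.  */

static bool
sketch_symbol_plus_offset_p (rtx x)
{
  rtx base, offset;

  split_const (x, &base, &offset);
  return GET_CODE (base) == SYMBOL_REF && INTVAL (offset) != 0;
}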
579 | \f | |
4b983fdc RH |
580 | /* Return the number of places FIND appears within X. If COUNT_DEST is |
581 | zero, we do not count occurrences inside the destination of a SET. */ | |
582 | ||
583 | int | |
f7d504c2 | 584 | count_occurrences (const_rtx x, const_rtx find, int count_dest) |
4b983fdc RH |
585 | { |
586 | int i, j; | |
587 | enum rtx_code code; | |
588 | const char *format_ptr; | |
589 | int count; | |
590 | ||
591 | if (x == find) | |
592 | return 1; | |
593 | ||
594 | code = GET_CODE (x); | |
595 | ||
596 | switch (code) | |
597 | { | |
598 | case REG: | |
599 | case CONST_INT: | |
600 | case CONST_DOUBLE: | |
091a3ac7 | 601 | case CONST_FIXED: |
69ef87e2 | 602 | case CONST_VECTOR: |
4b983fdc RH |
603 | case SYMBOL_REF: |
604 | case CODE_LABEL: | |
605 | case PC: | |
606 | case CC0: | |
607 | return 0; | |
608 | ||
2372a062 BS |
609 | case EXPR_LIST: |
610 | count = count_occurrences (XEXP (x, 0), find, count_dest); | |
611 | if (XEXP (x, 1)) | |
612 | count += count_occurrences (XEXP (x, 1), find, count_dest); | |
613 | return count; | |
614 | ||
4b983fdc | 615 | case MEM: |
3c0cb5de | 616 | if (MEM_P (find) && rtx_equal_p (x, find)) |
4b983fdc RH |
617 | return 1; |
618 | break; | |
619 | ||
620 | case SET: | |
621 | if (SET_DEST (x) == find && ! count_dest) | |
622 | return count_occurrences (SET_SRC (x), find, count_dest); | |
623 | break; | |
624 | ||
625 | default: | |
626 | break; | |
627 | } | |
628 | ||
629 | format_ptr = GET_RTX_FORMAT (code); | |
630 | count = 0; | |
631 | ||
632 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
633 | { | |
634 | switch (*format_ptr++) | |
635 | { | |
636 | case 'e': | |
637 | count += count_occurrences (XEXP (x, i), find, count_dest); | |
638 | break; | |
639 | ||
640 | case 'E': | |
641 | for (j = 0; j < XVECLEN (x, i); j++) | |
642 | count += count_occurrences (XVECEXP (x, i, j), find, count_dest); | |
643 | break; | |
644 | } | |
645 | } | |
646 | return count; | |
647 | } | |
6fb5fa3c | 648 | |
4b983fdc | 649 | \f |
2c88418c RS |
650 | /* Nonzero if register REG appears somewhere within IN. |
651 | Also works if REG is not a register; in this case it checks | |
652 | for a subexpression of IN that is Lisp "equal" to REG. */ | |
653 | ||
654 | int | |
f7d504c2 | 655 | reg_mentioned_p (const_rtx reg, const_rtx in) |
2c88418c | 656 | { |
b3694847 SS |
657 | const char *fmt; |
658 | int i; | |
659 | enum rtx_code code; | |
2c88418c RS |
660 | |
661 | if (in == 0) | |
662 | return 0; | |
663 | ||
664 | if (reg == in) | |
665 | return 1; | |
666 | ||
667 | if (GET_CODE (in) == LABEL_REF) | |
668 | return reg == XEXP (in, 0); | |
669 | ||
670 | code = GET_CODE (in); | |
671 | ||
672 | switch (code) | |
673 | { | |
674 | /* Compare registers by number. */ | |
675 | case REG: | |
f8cfc6aa | 676 | return REG_P (reg) && REGNO (in) == REGNO (reg); |
2c88418c RS |
677 | |
678 | /* These codes have no constituent expressions | |
679 | and are unique. */ | |
680 | case SCRATCH: | |
681 | case CC0: | |
682 | case PC: | |
683 | return 0; | |
684 | ||
685 | case CONST_INT: | |
69ef87e2 | 686 | case CONST_VECTOR: |
2c88418c | 687 | case CONST_DOUBLE: |
091a3ac7 | 688 | case CONST_FIXED: |
2c88418c RS |
689 | /* These are kept unique for a given value. */ |
690 | return 0; | |
a6a2274a | 691 | |
e9a25f70 JL |
692 | default: |
693 | break; | |
2c88418c RS |
694 | } |
695 | ||
696 | if (GET_CODE (reg) == code && rtx_equal_p (reg, in)) | |
697 | return 1; | |
698 | ||
699 | fmt = GET_RTX_FORMAT (code); | |
700 | ||
701 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
702 | { | |
703 | if (fmt[i] == 'E') | |
704 | { | |
b3694847 | 705 | int j; |
2c88418c RS |
706 | for (j = XVECLEN (in, i) - 1; j >= 0; j--) |
707 | if (reg_mentioned_p (reg, XVECEXP (in, i, j))) | |
708 | return 1; | |
709 | } | |
710 | else if (fmt[i] == 'e' | |
711 | && reg_mentioned_p (reg, XEXP (in, i))) | |
712 | return 1; | |
713 | } | |
714 | return 0; | |
715 | } | |
716 | \f | |
717 | /* Return 1 if in between BEG and END, exclusive of BEG and END, there is | |
718 | no CODE_LABEL insn. */ | |
719 | ||
720 | int | |
f7d504c2 | 721 | no_labels_between_p (const_rtx beg, const_rtx end) |
2c88418c | 722 | { |
b3694847 | 723 | rtx p; |
978f547f JH |
724 | if (beg == end) |
725 | return 0; | |
2c88418c | 726 | for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p)) |
4b4bf941 | 727 | if (LABEL_P (p)) |
2c88418c RS |
728 | return 0; |
729 | return 1; | |
730 | } | |
731 | ||
732 | /* Nonzero if register REG is used in an insn between | |
733 | FROM_INSN and TO_INSN (exclusive of those two). */ | |
734 | ||
735 | int | |
f7d504c2 | 736 | reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn) |
2c88418c | 737 | { |
b3694847 | 738 | rtx insn; |
2c88418c RS |
739 | |
740 | if (from_insn == to_insn) | |
741 | return 0; | |
742 | ||
743 | for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn)) | |
b5b8b0ac | 744 | if (NONDEBUG_INSN_P (insn) |
8f3e7a26 | 745 | && (reg_overlap_mentioned_p (reg, PATTERN (insn)) |
76dd5923 | 746 | || (CALL_P (insn) && find_reg_fusage (insn, USE, reg)))) |
2c88418c RS |
747 | return 1; |
748 | return 0; | |
749 | } | |
750 | \f | |
751 | /* Nonzero if the old value of X, a register, is referenced in BODY. If X | |
752 | is entirely replaced by a new value and the only use is as a SET_DEST, | |
753 | we do not consider it a reference. */ | |
754 | ||
755 | int | |
f7d504c2 | 756 | reg_referenced_p (const_rtx x, const_rtx body) |
2c88418c RS |
757 | { |
758 | int i; | |
759 | ||
760 | switch (GET_CODE (body)) | |
761 | { | |
762 | case SET: | |
763 | if (reg_overlap_mentioned_p (x, SET_SRC (body))) | |
764 | return 1; | |
765 | ||
766 | /* If the destination is anything other than CC0, PC, a REG or a SUBREG | |
767 | of a REG that occupies all of the REG, the insn references X if | |
768 | it is mentioned in the destination. */ | |
769 | if (GET_CODE (SET_DEST (body)) != CC0 | |
770 | && GET_CODE (SET_DEST (body)) != PC | |
f8cfc6aa | 771 | && !REG_P (SET_DEST (body)) |
2c88418c | 772 | && ! (GET_CODE (SET_DEST (body)) == SUBREG |
f8cfc6aa | 773 | && REG_P (SUBREG_REG (SET_DEST (body))) |
2c88418c RS |
774 | && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body)))) |
775 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD) | |
776 | == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body))) | |
777 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))) | |
778 | && reg_overlap_mentioned_p (x, SET_DEST (body))) | |
779 | return 1; | |
e9a25f70 | 780 | return 0; |
2c88418c RS |
781 | |
782 | case ASM_OPERANDS: | |
783 | for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--) | |
784 | if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i))) | |
785 | return 1; | |
e9a25f70 | 786 | return 0; |
2c88418c RS |
787 | |
788 | case CALL: | |
789 | case USE: | |
14a774a9 | 790 | case IF_THEN_ELSE: |
2c88418c RS |
791 | return reg_overlap_mentioned_p (x, body); |
792 | ||
793 | case TRAP_IF: | |
794 | return reg_overlap_mentioned_p (x, TRAP_CONDITION (body)); | |
795 | ||
21b8482a JJ |
796 | case PREFETCH: |
797 | return reg_overlap_mentioned_p (x, XEXP (body, 0)); | |
798 | ||
2ac4fed0 RK |
799 | case UNSPEC: |
800 | case UNSPEC_VOLATILE: | |
2f9fb4c2 R |
801 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) |
802 | if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i))) | |
803 | return 1; | |
804 | return 0; | |
805 | ||
2c88418c RS |
806 | case PARALLEL: |
807 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
808 | if (reg_referenced_p (x, XVECEXP (body, 0, i))) | |
809 | return 1; | |
e9a25f70 | 810 | return 0; |
a6a2274a | 811 | |
0d3ffb5a | 812 | case CLOBBER: |
3c0cb5de | 813 | if (MEM_P (XEXP (body, 0))) |
0d3ffb5a GK |
814 | if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0))) |
815 | return 1; | |
816 | return 0; | |
817 | ||
0c99ec5c RH |
818 | case COND_EXEC: |
819 | if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body))) | |
820 | return 1; | |
821 | return reg_referenced_p (x, COND_EXEC_CODE (body)); | |
822 | ||
e9a25f70 JL |
823 | default: |
824 | return 0; | |
2c88418c | 825 | } |
2c88418c | 826 | } |
2c88418c RS |
827 | \f |
828 | /* Nonzero if register REG is set or clobbered in an insn between | |
829 | FROM_INSN and TO_INSN (exclusive of those two). */ | |
830 | ||
831 | int | |
ed7a4b4b | 832 | reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn) |
2c88418c | 833 | { |
ed7a4b4b | 834 | const_rtx insn; |
2c88418c RS |
835 | |
836 | if (from_insn == to_insn) | |
837 | return 0; | |
838 | ||
839 | for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn)) | |
2c3c49de | 840 | if (INSN_P (insn) && reg_set_p (reg, insn)) |
2c88418c RS |
841 | return 1; |
842 | return 0; | |
843 | } | |
844 | ||
845 | /* Internals of reg_set_between_p. */ | |
2c88418c | 846 | int |
ed7a4b4b | 847 | reg_set_p (const_rtx reg, const_rtx insn) |
2c88418c | 848 | { |
2c88418c RS |
849 | /* We can be passed an insn or part of one. If we are passed an insn, |
850 | check if a side-effect of the insn clobbers REG. */ | |
4977bab6 ZW |
851 | if (INSN_P (insn) |
852 | && (FIND_REG_INC_NOTE (insn, reg) | |
4b4bf941 | 853 | || (CALL_P (insn) |
f8cfc6aa | 854 | && ((REG_P (reg) |
4f1605d2 | 855 | && REGNO (reg) < FIRST_PSEUDO_REGISTER |
5da20cfe RS |
856 | && overlaps_hard_reg_set_p (regs_invalidated_by_call, |
857 | GET_MODE (reg), REGNO (reg))) | |
3c0cb5de | 858 | || MEM_P (reg) |
4977bab6 ZW |
859 | || find_reg_fusage (insn, CLOBBER, reg))))) |
860 | return 1; | |
2c88418c | 861 | |
91b2d119 | 862 | return set_of (reg, insn) != NULL_RTX; |
2c88418c RS |
863 | } |
864 | ||
865 | /* Similar to reg_set_between_p, but check all registers in X. Return 0 | |
866 | only if none of them are modified between START and END. Return 1 if | |
fa10beec | 867 | X contains a MEM; this routine does use memory aliasing. */ |
2c88418c RS |
868 | |
869 | int | |
9678086d | 870 | modified_between_p (const_rtx x, const_rtx start, const_rtx end) |
2c88418c | 871 | { |
9678086d | 872 | const enum rtx_code code = GET_CODE (x); |
6f7d635c | 873 | const char *fmt; |
f8163c92 | 874 | int i, j; |
7b52eede JH |
875 | rtx insn; |
876 | ||
877 | if (start == end) | |
878 | return 0; | |
2c88418c RS |
879 | |
880 | switch (code) | |
881 | { | |
882 | case CONST_INT: | |
883 | case CONST_DOUBLE: | |
091a3ac7 | 884 | case CONST_FIXED: |
69ef87e2 | 885 | case CONST_VECTOR: |
2c88418c RS |
886 | case CONST: |
887 | case SYMBOL_REF: | |
888 | case LABEL_REF: | |
889 | return 0; | |
890 | ||
891 | case PC: | |
892 | case CC0: | |
893 | return 1; | |
894 | ||
895 | case MEM: | |
7b52eede | 896 | if (modified_between_p (XEXP (x, 0), start, end)) |
2c88418c | 897 | return 1; |
550b7784 KK |
898 | if (MEM_READONLY_P (x)) |
899 | return 0; | |
7b52eede JH |
900 | for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn)) |
901 | if (memory_modified_in_insn_p (x, insn)) | |
902 | return 1; | |
903 | return 0; | |
2c88418c RS |
904 | break; |
905 | ||
906 | case REG: | |
907 | return reg_set_between_p (x, start, end); | |
a6a2274a | 908 | |
e9a25f70 JL |
909 | default: |
910 | break; | |
2c88418c RS |
911 | } |
912 | ||
913 | fmt = GET_RTX_FORMAT (code); | |
914 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
f8163c92 RK |
915 | { |
916 | if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end)) | |
917 | return 1; | |
918 | ||
d4757e6a | 919 | else if (fmt[i] == 'E') |
f8163c92 RK |
920 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
921 | if (modified_between_p (XVECEXP (x, i, j), start, end)) | |
922 | return 1; | |
923 | } | |
924 | ||
925 | return 0; | |
926 | } | |
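/* Illustrative sketch only -- not part of the original file; the helper
   name is hypothetical.  A pass might use modified_between_p to decide
   whether INSN can be moved down to just before LATER without its operands
   taking on different values (both insns assumed to be INSN_P).  */

static bool
sketch_can_move_down_p (rtx insn, rtx later)
{
  rtx set = single_set (insn);

  return (set != NULL_RTX
          && !modified_between_p (SET_SRC (set), insn, later)
          && !modified_between_p (SET_DEST (set), insn, later));
}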
927 | ||
928 | /* Similar to reg_set_p, but check all registers in X. Return 0 only if none | |
929 | of them are modified in INSN. Return 1 if X contains a MEM; this routine | |
7b52eede | 930 | does use memory aliasing. */ |
f8163c92 RK |
931 | |
932 | int | |
9678086d | 933 | modified_in_p (const_rtx x, const_rtx insn) |
f8163c92 | 934 | { |
9678086d | 935 | const enum rtx_code code = GET_CODE (x); |
6f7d635c | 936 | const char *fmt; |
f8163c92 RK |
937 | int i, j; |
938 | ||
939 | switch (code) | |
940 | { | |
941 | case CONST_INT: | |
942 | case CONST_DOUBLE: | |
091a3ac7 | 943 | case CONST_FIXED: |
69ef87e2 | 944 | case CONST_VECTOR: |
f8163c92 RK |
945 | case CONST: |
946 | case SYMBOL_REF: | |
947 | case LABEL_REF: | |
948 | return 0; | |
949 | ||
950 | case PC: | |
951 | case CC0: | |
2c88418c RS |
952 | return 1; |
953 | ||
f8163c92 | 954 | case MEM: |
7b52eede | 955 | if (modified_in_p (XEXP (x, 0), insn)) |
f8163c92 | 956 | return 1; |
550b7784 KK |
957 | if (MEM_READONLY_P (x)) |
958 | return 0; | |
7b52eede JH |
959 | if (memory_modified_in_insn_p (x, insn)) |
960 | return 1; | |
961 | return 0; | |
f8163c92 RK |
962 | break; |
963 | ||
964 | case REG: | |
965 | return reg_set_p (x, insn); | |
e9a25f70 JL |
966 | |
967 | default: | |
968 | break; | |
f8163c92 RK |
969 | } |
970 | ||
971 | fmt = GET_RTX_FORMAT (code); | |
972 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
973 | { | |
974 | if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn)) | |
975 | return 1; | |
976 | ||
d4757e6a | 977 | else if (fmt[i] == 'E') |
f8163c92 RK |
978 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
979 | if (modified_in_p (XVECEXP (x, i, j), insn)) | |
980 | return 1; | |
981 | } | |
982 | ||
2c88418c RS |
983 | return 0; |
984 | } | |
985 | \f | |
91b2d119 JH |
986 | /* Helper function for set_of. */ |
987 | struct set_of_data | |
988 | { | |
7bc980e1 KG |
989 | const_rtx found; |
990 | const_rtx pat; | |
91b2d119 JH |
991 | }; |
992 | ||
993 | static void | |
7bc980e1 | 994 | set_of_1 (rtx x, const_rtx pat, void *data1) |
91b2d119 | 995 | { |
7bc980e1 KG |
996 | struct set_of_data *const data = (struct set_of_data *) (data1); |
997 | if (rtx_equal_p (x, data->pat) | |
998 | || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x))) | |
999 | data->found = pat; | |
91b2d119 JH |
1000 | } |
1001 | ||
1002 | /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
eaec9b3d | 1003 | (either directly or via STRICT_LOW_PART and similar modifiers). */ |
7bc980e1 KG |
1004 | const_rtx |
1005 | set_of (const_rtx pat, const_rtx insn) | |
91b2d119 JH |
1006 | { |
1007 | struct set_of_data data; | |
1008 | data.found = NULL_RTX; | |
1009 | data.pat = pat; | |
1010 | note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data); | |
1011 | return data.found; | |
1012 | } | |
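/* set_of above is the canonical note_stores idiom: a small data structure,
   a callback of type void (rtx, const_rtx, void *), and one call per insn
   pattern.  The sketch below is illustrative only (not part of the original
   file, names hypothetical) and uses the same idiom to ask whether an insn
   stores into any hard register.  */

struct sketch_hard_reg_store
{
  bool found;
};

static void
sketch_note_hard_reg_store (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
                            void *data)
{
  struct sketch_hard_reg_store *info = (struct sketch_hard_reg_store *) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    info->found = true;
}

static bool
sketch_insn_stores_hard_reg_p (const_rtx insn)
{
  struct sketch_hard_reg_store info;

  info.found = false;
  note_stores (PATTERN (insn), sketch_note_hard_reg_store, &info);
  return info.found;
}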
1013 | \f | |
2c88418c RS |
1014 | /* Given an INSN, return a SET expression if this insn has only a single SET. |
1015 | It may also have CLOBBERs, USEs, or SET whose output | |
1016 | will not be used, which we ignore. */ | |
1017 | ||
1018 | rtx | |
f7d504c2 | 1019 | single_set_2 (const_rtx insn, const_rtx pat) |
2c88418c | 1020 | { |
c9b89a21 JH |
1021 | rtx set = NULL; |
1022 | int set_verified = 1; | |
2c88418c | 1023 | int i; |
c9b89a21 | 1024 | |
b1cdafbb | 1025 | if (GET_CODE (pat) == PARALLEL) |
2c88418c | 1026 | { |
c9b89a21 | 1027 | for (i = 0; i < XVECLEN (pat, 0); i++) |
b1cdafbb | 1028 | { |
c9b89a21 JH |
1029 | rtx sub = XVECEXP (pat, 0, i); |
1030 | switch (GET_CODE (sub)) | |
1031 | { | |
1032 | case USE: | |
1033 | case CLOBBER: | |
1034 | break; | |
1035 | ||
1036 | case SET: | |
1037 | /* We can consider insns having multiple sets, where all
1038 | but one are dead, as single-set insns. In the common case
1039 | only a single set is present in the pattern, so we want
f63d1bf7 | 1040 | to avoid checking for REG_UNUSED notes unless necessary. |
c9b89a21 JH |
1041 | |
1042 | When we reach a set the first time, we just assume it is
1043 | the single set we are looking for; only when more
1044 | sets are found in the insn do we check them. */
1045 | if (!set_verified) | |
1046 | { | |
1047 | if (find_reg_note (insn, REG_UNUSED, SET_DEST (set)) | |
1048 | && !side_effects_p (set)) | |
1049 | set = NULL; | |
1050 | else | |
1051 | set_verified = 1; | |
1052 | } | |
1053 | if (!set) | |
1054 | set = sub, set_verified = 0; | |
1055 | else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub)) | |
1056 | || side_effects_p (sub)) | |
1057 | return NULL_RTX; | |
1058 | break; | |
1059 | ||
1060 | default: | |
1061 | return NULL_RTX; | |
1062 | } | |
787ccee0 | 1063 | } |
2c88418c | 1064 | } |
c9b89a21 | 1065 | return set; |
2c88418c | 1066 | } |
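/* Illustrative sketch only -- not part of the original file; the helper
   name is hypothetical.  Most callers reach single_set_2 through the
   single_set macro/inline from rtl.h, e.g. to recognize a plain
   register-to-register copy.  */

static bool
sketch_simple_reg_copy_p (rtx insn)
{
  rtx set = single_set (insn);

  return (set != NULL_RTX
          && REG_P (SET_DEST (set))
          && REG_P (SET_SRC (set)));
}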
941c63ac JL |
1067 | |
1068 | /* Given an INSN, return nonzero if it has more than one SET, else return | |
1069 | zero. */ | |
1070 | ||
5f7d3786 | 1071 | int |
f7d504c2 | 1072 | multiple_sets (const_rtx insn) |
941c63ac | 1073 | { |
cae8acdd | 1074 | int found; |
941c63ac | 1075 | int i; |
a6a2274a | 1076 | |
941c63ac | 1077 | /* INSN must be an insn. */ |
2c3c49de | 1078 | if (! INSN_P (insn)) |
941c63ac JL |
1079 | return 0; |
1080 | ||
1081 | /* Only a PARALLEL can have multiple SETs. */ | |
1082 | if (GET_CODE (PATTERN (insn)) == PARALLEL) | |
1083 | { | |
1084 | for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++) | |
1085 | if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET) | |
1086 | { | |
1087 | /* If we have already found a SET, then return now. */ | |
1088 | if (found) | |
1089 | return 1; | |
1090 | else | |
1091 | found = 1; | |
1092 | } | |
1093 | } | |
a6a2274a | 1094 | |
941c63ac JL |
1095 | /* Either zero or one SET. */ |
1096 | return 0; | |
1097 | } | |
2c88418c | 1098 | \f |
7142e318 JW |
1099 | /* Return nonzero if the destination of SET equals the source |
1100 | and there are no side effects. */ | |
1101 | ||
1102 | int | |
f7d504c2 | 1103 | set_noop_p (const_rtx set) |
7142e318 JW |
1104 | { |
1105 | rtx src = SET_SRC (set); | |
1106 | rtx dst = SET_DEST (set); | |
1107 | ||
371b8fc0 JH |
1108 | if (dst == pc_rtx && src == pc_rtx) |
1109 | return 1; | |
1110 | ||
3c0cb5de | 1111 | if (MEM_P (dst) && MEM_P (src)) |
cd648cec JH |
1112 | return rtx_equal_p (dst, src) && !side_effects_p (dst); |
1113 | ||
46d096a3 | 1114 | if (GET_CODE (dst) == ZERO_EXTRACT) |
7142e318 | 1115 | return rtx_equal_p (XEXP (dst, 0), src) |
cd648cec JH |
1116 | && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx |
1117 | && !side_effects_p (src); | |
7142e318 JW |
1118 | |
1119 | if (GET_CODE (dst) == STRICT_LOW_PART) | |
1120 | dst = XEXP (dst, 0); | |
1121 | ||
1122 | if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG) | |
1123 | { | |
1124 | if (SUBREG_BYTE (src) != SUBREG_BYTE (dst)) | |
1125 | return 0; | |
1126 | src = SUBREG_REG (src); | |
1127 | dst = SUBREG_REG (dst); | |
1128 | } | |
1129 | ||
f8cfc6aa | 1130 | return (REG_P (src) && REG_P (dst) |
7142e318 JW |
1131 | && REGNO (src) == REGNO (dst)); |
1132 | } | |
0005550b JH |
1133 | \f |
1134 | /* Return nonzero if an insn consists only of SETs, each of which only sets a | |
1135 | value to itself. */ | |
1136 | ||
1137 | int | |
fa233e34 | 1138 | noop_move_p (const_rtx insn) |
0005550b JH |
1139 | { |
1140 | rtx pat = PATTERN (insn); | |
1141 | ||
b5832b43 JH |
1142 | if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE) |
1143 | return 1; | |
1144 | ||
0005550b JH |
1145 | /* Insns carrying these notes are useful later on. */ |
1146 | if (find_reg_note (insn, REG_EQUAL, NULL_RTX)) | |
1147 | return 0; | |
1148 | ||
1149 | if (GET_CODE (pat) == SET && set_noop_p (pat)) | |
1150 | return 1; | |
1151 | ||
1152 | if (GET_CODE (pat) == PARALLEL) | |
1153 | { | |
1154 | int i; | |
1155 | /* If nothing but SETs of registers to themselves, | |
1156 | this insn can also be deleted. */ | |
1157 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
1158 | { | |
1159 | rtx tem = XVECEXP (pat, 0, i); | |
1160 | ||
1161 | if (GET_CODE (tem) == USE | |
1162 | || GET_CODE (tem) == CLOBBER) | |
1163 | continue; | |
1164 | ||
1165 | if (GET_CODE (tem) != SET || ! set_noop_p (tem)) | |
1166 | return 0; | |
1167 | } | |
1168 | ||
1169 | return 1; | |
1170 | } | |
1171 | return 0; | |
1172 | } | |
1173 | \f | |
7142e318 | 1174 | |
63be01fb JW |
1175 | /* Return the last thing that X was assigned from before *PINSN. If VALID_TO |
1176 | is not NULL_RTX then verify that the object is not modified up to VALID_TO. | |
1177 | If the object was modified, if we hit a partial assignment to X, or hit a | |
1178 | CODE_LABEL first, return X. If we found an assignment, update *PINSN to | |
1179 | point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to | |
1180 | be the src. */ | |
2c88418c RS |
1181 | |
1182 | rtx | |
0c20a65f | 1183 | find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg) |
2c88418c RS |
1184 | { |
1185 | rtx p; | |
1186 | ||
4b4bf941 | 1187 | for (p = PREV_INSN (*pinsn); p && !LABEL_P (p); |
2c88418c | 1188 | p = PREV_INSN (p)) |
2c3c49de | 1189 | if (INSN_P (p)) |
2c88418c RS |
1190 | { |
1191 | rtx set = single_set (p); | |
c166a311 | 1192 | rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX); |
2c88418c RS |
1193 | |
1194 | if (set && rtx_equal_p (x, SET_DEST (set))) | |
1195 | { | |
1196 | rtx src = SET_SRC (set); | |
1197 | ||
1198 | if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST) | |
1199 | src = XEXP (note, 0); | |
1200 | ||
63be01fb JW |
1201 | if ((valid_to == NULL_RTX |
1202 | || ! modified_between_p (src, PREV_INSN (p), valid_to)) | |
2c88418c RS |
1203 | /* Reject hard registers because we don't usually want |
1204 | to use them; we'd rather use a pseudo. */ | |
f8cfc6aa | 1205 | && (! (REG_P (src) |
89d3d442 | 1206 | && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg)) |
2c88418c RS |
1207 | { |
1208 | *pinsn = p; | |
1209 | return src; | |
1210 | } | |
1211 | } | |
a6a2274a | 1212 | |
2c88418c RS |
1213 | /* If set in non-simple way, we don't have a value. */ |
1214 | if (reg_set_p (x, p)) | |
1215 | break; | |
1216 | } | |
1217 | ||
1218 | return x; | |
a6a2274a | 1219 | } |
2c88418c RS |
1220 | \f |
1221 | /* Return nonzero if register in range [REGNO, ENDREGNO) | |
1222 | appears either explicitly or implicitly in X | |
1223 | other than being stored into. | |
1224 | ||
1225 | References contained within the substructure at LOC do not count. | |
1226 | LOC may be zero, meaning don't ignore anything. */ | |
1227 | ||
1228 | int | |
f7d504c2 | 1229 | refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x, |
0c20a65f | 1230 | rtx *loc) |
2c88418c | 1231 | { |
770ae6cc RK |
1232 | int i; |
1233 | unsigned int x_regno; | |
1234 | RTX_CODE code; | |
1235 | const char *fmt; | |
2c88418c RS |
1236 | |
1237 | repeat: | |
1238 | /* The contents of a REG_NONNEG note is always zero, so we must come here | |
1239 | upon repeat in case the last REG_NOTE is a REG_NONNEG note. */ | |
1240 | if (x == 0) | |
1241 | return 0; | |
1242 | ||
1243 | code = GET_CODE (x); | |
1244 | ||
1245 | switch (code) | |
1246 | { | |
1247 | case REG: | |
770ae6cc | 1248 | x_regno = REGNO (x); |
f8163c92 RK |
1249 | |
1250 | /* If we are modifying the stack, frame, or argument pointer, it will
1251 | clobber a virtual register. In fact, we could be more precise, | |
1252 | but it isn't worth it. */ | |
770ae6cc | 1253 | if ((x_regno == STACK_POINTER_REGNUM |
f8163c92 | 1254 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM |
770ae6cc | 1255 | || x_regno == ARG_POINTER_REGNUM |
f8163c92 | 1256 | #endif |
770ae6cc | 1257 | || x_regno == FRAME_POINTER_REGNUM) |
f8163c92 RK |
1258 | && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER) |
1259 | return 1; | |
1260 | ||
09e18274 | 1261 | return endregno > x_regno && regno < END_REGNO (x); |
2c88418c RS |
1262 | |
1263 | case SUBREG: | |
1264 | /* If this is a SUBREG of a hard reg, we can see exactly which | |
1265 | registers are being modified. Otherwise, handle normally. */ | |
f8cfc6aa | 1266 | if (REG_P (SUBREG_REG (x)) |
2c88418c RS |
1267 | && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER) |
1268 | { | |
ddef6bc7 | 1269 | unsigned int inner_regno = subreg_regno (x); |
770ae6cc | 1270 | unsigned int inner_endregno |
403c659c | 1271 | = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER |
f1f4e530 | 1272 | ? subreg_nregs (x) : 1); |
2c88418c RS |
1273 | |
1274 | return endregno > inner_regno && regno < inner_endregno; | |
1275 | } | |
1276 | break; | |
1277 | ||
1278 | case CLOBBER: | |
1279 | case SET: | |
1280 | if (&SET_DEST (x) != loc | |
1281 | /* Note setting a SUBREG counts as referring to the REG it is in for | |
1282 | a pseudo but not for hard registers since we can | |
1283 | treat each word individually. */ | |
1284 | && ((GET_CODE (SET_DEST (x)) == SUBREG | |
1285 | && loc != &SUBREG_REG (SET_DEST (x)) | |
f8cfc6aa | 1286 | && REG_P (SUBREG_REG (SET_DEST (x))) |
2c88418c RS |
1287 | && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER |
1288 | && refers_to_regno_p (regno, endregno, | |
1289 | SUBREG_REG (SET_DEST (x)), loc)) | |
f8cfc6aa | 1290 | || (!REG_P (SET_DEST (x)) |
2c88418c RS |
1291 | && refers_to_regno_p (regno, endregno, SET_DEST (x), loc)))) |
1292 | return 1; | |
1293 | ||
1294 | if (code == CLOBBER || loc == &SET_SRC (x)) | |
1295 | return 0; | |
1296 | x = SET_SRC (x); | |
1297 | goto repeat; | |
e9a25f70 JL |
1298 | |
1299 | default: | |
1300 | break; | |
2c88418c RS |
1301 | } |
1302 | ||
1303 | /* X does not match, so try its subexpressions. */ | |
1304 | ||
1305 | fmt = GET_RTX_FORMAT (code); | |
1306 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
1307 | { | |
1308 | if (fmt[i] == 'e' && loc != &XEXP (x, i)) | |
1309 | { | |
1310 | if (i == 0) | |
1311 | { | |
1312 | x = XEXP (x, 0); | |
1313 | goto repeat; | |
1314 | } | |
1315 | else | |
1316 | if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc)) | |
1317 | return 1; | |
1318 | } | |
1319 | else if (fmt[i] == 'E') | |
1320 | { | |
b3694847 | 1321 | int j; |
6a87d634 | 1322 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
2c88418c RS |
1323 | if (loc != &XVECEXP (x, i, j) |
1324 | && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc)) | |
1325 | return 1; | |
1326 | } | |
1327 | } | |
1328 | return 0; | |
1329 | } | |
1330 | ||
1331 | /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG, | |
1332 | we check if any register number in X conflicts with the relevant register | |
1333 | numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN | |
1334 | contains a MEM (we don't bother checking for memory addresses that can't | |
1335 | conflict because we expect this to be a rare case). */
1336 | ||
1337 | int | |
f7d504c2 | 1338 | reg_overlap_mentioned_p (const_rtx x, const_rtx in) |
2c88418c | 1339 | { |
770ae6cc | 1340 | unsigned int regno, endregno; |
2c88418c | 1341 | |
6f626d1b PB |
1342 | /* If either argument is a constant, then modifying X cannot
1343 | affect IN. Here we look at IN; we can profitably combine
1344 | CONSTANT_P (x) with the switch statement below. */
1345 | if (CONSTANT_P (in)) | |
b98b49ac | 1346 | return 0; |
0c99ec5c | 1347 | |
6f626d1b | 1348 | recurse: |
0c99ec5c | 1349 | switch (GET_CODE (x)) |
2c88418c | 1350 | { |
6f626d1b PB |
1351 | case STRICT_LOW_PART: |
1352 | case ZERO_EXTRACT: | |
1353 | case SIGN_EXTRACT: | |
1354 | /* Overly conservative. */ | |
1355 | x = XEXP (x, 0); | |
1356 | goto recurse; | |
1357 | ||
0c99ec5c | 1358 | case SUBREG: |
2c88418c RS |
1359 | regno = REGNO (SUBREG_REG (x)); |
1360 | if (regno < FIRST_PSEUDO_REGISTER) | |
ddef6bc7 | 1361 | regno = subreg_regno (x); |
f1f4e530 JM |
1362 | endregno = regno + (regno < FIRST_PSEUDO_REGISTER |
1363 | ? subreg_nregs (x) : 1); | |
0c99ec5c | 1364 | goto do_reg; |
2c88418c | 1365 | |
0c99ec5c RH |
1366 | case REG: |
1367 | regno = REGNO (x); | |
09e18274 | 1368 | endregno = END_REGNO (x); |
f1f4e530 | 1369 | do_reg: |
8e2e89f7 | 1370 | return refers_to_regno_p (regno, endregno, in, (rtx*) 0); |
2c88418c | 1371 | |
0c99ec5c RH |
1372 | case MEM: |
1373 | { | |
1374 | const char *fmt; | |
1375 | int i; | |
2c88418c | 1376 | |
3c0cb5de | 1377 | if (MEM_P (in)) |
2c88418c RS |
1378 | return 1; |
1379 | ||
0c99ec5c RH |
1380 | fmt = GET_RTX_FORMAT (GET_CODE (in)); |
1381 | for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--) | |
3b009185 RH |
1382 | if (fmt[i] == 'e') |
1383 | { | |
1384 | if (reg_overlap_mentioned_p (x, XEXP (in, i))) | |
1385 | return 1; | |
1386 | } | |
1387 | else if (fmt[i] == 'E') | |
1388 | { | |
1389 | int j; | |
1390 | for (j = XVECLEN (in, i) - 1; j >= 0; --j) | |
1391 | if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j))) | |
1392 | return 1; | |
1393 | } | |
c0222c21 | 1394 | |
0c99ec5c RH |
1395 | return 0; |
1396 | } | |
1397 | ||
1398 | case SCRATCH: | |
1399 | case PC: | |
1400 | case CC0: | |
1401 | return reg_mentioned_p (x, in); | |
1402 | ||
1403 | case PARALLEL: | |
37ceff9d | 1404 | { |
90d036a0 | 1405 | int i; |
37ceff9d RH |
1406 | |
1407 | /* If any register in here refers to it we return true. */ | |
7193d1dc RK |
1408 | for (i = XVECLEN (x, 0) - 1; i >= 0; i--) |
1409 | if (XEXP (XVECEXP (x, 0, i), 0) != 0 | |
1410 | && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in)) | |
6f626d1b | 1411 | return 1; |
7193d1dc | 1412 | return 0; |
37ceff9d | 1413 | } |
2c88418c | 1414 | |
0c99ec5c | 1415 | default: |
41374e13 | 1416 | gcc_assert (CONSTANT_P (x)); |
6f626d1b PB |
1417 | return 0; |
1418 | } | |
2c88418c RS |
1419 | } |
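/* Illustrative sketch only -- not part of the original file; the helper
   name is hypothetical.  Unlike reg_mentioned_p, this predicate also
   catches partial overlaps (e.g. a multi-word hard register sharing only
   one word with X), which is the right test when deciding whether one
   insn's output feeds another insn (both assumed INSN_P here).  */

static bool
sketch_output_feeds_insn_p (rtx producer, rtx consumer)
{
  rtx set = single_set (producer);

  return (set != NULL_RTX
          && reg_overlap_mentioned_p (SET_DEST (set), PATTERN (consumer)));
}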
1420 | \f | |
2c88418c | 1421 | /* Call FUN on each register or MEM that is stored into or clobbered by X. |
c3a1ef9d MM |
1422 | (X would be the pattern of an insn). DATA is an arbitrary pointer, |
1423 | ignored by note_stores, but passed to FUN. | |
1424 | ||
1425 | FUN receives three arguments: | |
1426 | 1. the REG, MEM, CC0 or PC being stored in or clobbered, | |
1427 | 2. the SET or CLOBBER rtx that does the store, | |
1428 | 3. the pointer DATA provided to note_stores. | |
2c88418c RS |
1429 | |
1430 | If the item being stored in or clobbered is a SUBREG of a hard register, | |
1431 | the SUBREG will be passed. */ | |
a6a2274a | 1432 | |
2c88418c | 1433 | void |
7bc980e1 | 1434 | note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data) |
2c88418c | 1435 | { |
aa317c97 | 1436 | int i; |
90d036a0 | 1437 | |
aa317c97 KG |
1438 | if (GET_CODE (x) == COND_EXEC) |
1439 | x = COND_EXEC_CODE (x); | |
90d036a0 | 1440 | |
aa317c97 KG |
1441 | if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER) |
1442 | { | |
1443 | rtx dest = SET_DEST (x); | |
1444 | ||
1445 | while ((GET_CODE (dest) == SUBREG | |
1446 | && (!REG_P (SUBREG_REG (dest)) | |
1447 | || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER)) | |
1448 | || GET_CODE (dest) == ZERO_EXTRACT | |
1449 | || GET_CODE (dest) == STRICT_LOW_PART) | |
1450 | dest = XEXP (dest, 0); | |
1451 | ||
1452 | /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions, | |
1453 | each of whose first operand is a register. */ | |
1454 | if (GET_CODE (dest) == PARALLEL) | |
1455 | { | |
1456 | for (i = XVECLEN (dest, 0) - 1; i >= 0; i--) | |
1457 | if (XEXP (XVECEXP (dest, 0, i), 0) != 0) | |
1458 | (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data); | |
1459 | } | |
1460 | else | |
1461 | (*fun) (dest, x, data); | |
1462 | } | |
770ae6cc | 1463 | |
aa317c97 KG |
1464 | else if (GET_CODE (x) == PARALLEL) |
1465 | for (i = XVECLEN (x, 0) - 1; i >= 0; i--) | |
1466 | note_stores (XVECEXP (x, 0, i), fun, data); | |
1467 | } | |
2c88418c | 1468 | \f |
e2373f95 RK |
1469 | /* Like notes_stores, but call FUN for each expression that is being |
1470 | referenced in PBODY, a pointer to the PATTERN of an insn. We only call | |
1471 | FUN for each expression, not any interior subexpressions. FUN receives a | |
1472 | pointer to the expression and the DATA passed to this function. | |
1473 | ||
1474 | Note that this is not quite the same test as that done in reg_referenced_p | |
1475 | since that considers something as being referenced if it is being | |
1476 | partially set, while we do not. */ | |
1477 | ||
1478 | void | |
0c20a65f | 1479 | note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data) |
e2373f95 RK |
1480 | { |
1481 | rtx body = *pbody; | |
1482 | int i; | |
1483 | ||
1484 | switch (GET_CODE (body)) | |
1485 | { | |
1486 | case COND_EXEC: | |
1487 | (*fun) (&COND_EXEC_TEST (body), data); | |
1488 | note_uses (&COND_EXEC_CODE (body), fun, data); | |
1489 | return; | |
1490 | ||
1491 | case PARALLEL: | |
1492 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
1493 | note_uses (&XVECEXP (body, 0, i), fun, data); | |
1494 | return; | |
1495 | ||
bbbc206e BS |
1496 | case SEQUENCE: |
1497 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
1498 | note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data); | |
1499 | return; | |
1500 | ||
e2373f95 RK |
1501 | case USE: |
1502 | (*fun) (&XEXP (body, 0), data); | |
1503 | return; | |
1504 | ||
1505 | case ASM_OPERANDS: | |
1506 | for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--) | |
1507 | (*fun) (&ASM_OPERANDS_INPUT (body, i), data); | |
1508 | return; | |
1509 | ||
1510 | case TRAP_IF: | |
1511 | (*fun) (&TRAP_CONDITION (body), data); | |
1512 | return; | |
1513 | ||
21b8482a JJ |
1514 | case PREFETCH: |
1515 | (*fun) (&XEXP (body, 0), data); | |
1516 | return; | |
1517 | ||
e2373f95 RK |
1518 | case UNSPEC: |
1519 | case UNSPEC_VOLATILE: | |
1520 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
1521 | (*fun) (&XVECEXP (body, 0, i), data); | |
1522 | return; | |
1523 | ||
1524 | case CLOBBER: | |
3c0cb5de | 1525 | if (MEM_P (XEXP (body, 0))) |
e2373f95 RK |
1526 | (*fun) (&XEXP (XEXP (body, 0), 0), data); |
1527 | return; | |
1528 | ||
1529 | case SET: | |
1530 | { | |
1531 | rtx dest = SET_DEST (body); | |
1532 | ||
1533 | /* For sets we replace everything in source plus registers in memory | |
1534 | expression in store and operands of a ZERO_EXTRACT. */ | |
1535 | (*fun) (&SET_SRC (body), data); | |
1536 | ||
1537 | if (GET_CODE (dest) == ZERO_EXTRACT) | |
1538 | { | |
1539 | (*fun) (&XEXP (dest, 1), data); | |
1540 | (*fun) (&XEXP (dest, 2), data); | |
1541 | } | |
1542 | ||
1543 | while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART) | |
1544 | dest = XEXP (dest, 0); | |
1545 | ||
3c0cb5de | 1546 | if (MEM_P (dest)) |
e2373f95 RK |
1547 | (*fun) (&XEXP (dest, 0), data); |
1548 | } | |
1549 | return; | |
1550 | ||
1551 | default: | |
1552 | /* All the other possibilities never store. */ | |
1553 | (*fun) (pbody, data); | |
1554 | return; | |
1555 | } | |
1556 | } | |
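/* Illustrative sketch only -- not part of the original file; the callback
   and structure are hypothetical.  Because note_uses hands the callback a
   pointer to each used expression, a pass can rewrite uses in place, for
   example replacing one register with another throughout a pattern:

     struct sketch_use_replacement repl = { old_reg, new_reg };
     note_uses (&PATTERN (insn), sketch_replace_one_use, &repl);

   followed by re-recognizing the insn.  */

struct sketch_use_replacement
{
  rtx from, to;
};

static void
sketch_replace_one_use (rtx *loc, void *data)
{
  struct sketch_use_replacement *r = (struct sketch_use_replacement *) data;

  if (reg_mentioned_p (r->from, *loc))
    *loc = simplify_replace_rtx (*loc, r->from, r->to);
}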
1557 | \f | |
2c88418c RS |
1558 | /* Return nonzero if X's old contents don't survive after INSN. |
1559 | This will be true if X is (cc0) or if X is a register and | |
1560 | X dies in INSN or because INSN entirely sets X. | |
1561 | ||
46d096a3 SB |
1562 | "Entirely set" means set directly and not through a SUBREG, or |
1563 | ZERO_EXTRACT, so no trace of the old contents remains. | |
2c88418c RS |
1564 | Likewise, REG_INC does not count. |
1565 | ||
1566 | REG may be a hard or pseudo reg. Renumbering is not taken into account, | |
1567 | but for this use that makes no difference, since regs don't overlap | |
1568 | during their lifetimes. Therefore, this function may be used | |
6fb5fa3c | 1569 | at any time after deaths have been computed. |
2c88418c RS |
1570 | |
1571 | If REG is a hard reg that occupies multiple machine registers, this | |
1572 | function will only return 1 if each of those registers will be replaced | |
1573 | by INSN. */ | |
1574 | ||
1575 | int | |
f7d504c2 | 1576 | dead_or_set_p (const_rtx insn, const_rtx x) |
2c88418c | 1577 | { |
09e18274 | 1578 | unsigned int regno, end_regno; |
770ae6cc | 1579 | unsigned int i; |
2c88418c RS |
1580 | |
1581 | /* Can't use cc0_rtx below since this file is used by genattrtab.c. */ | |
1582 | if (GET_CODE (x) == CC0) | |
1583 | return 1; | |
1584 | ||
41374e13 | 1585 | gcc_assert (REG_P (x)); |
2c88418c RS |
1586 | |
1587 | regno = REGNO (x); | |
09e18274 RS |
1588 | end_regno = END_REGNO (x); |
1589 | for (i = regno; i < end_regno; i++) | |
2c88418c RS |
1590 | if (! dead_or_set_regno_p (insn, i)) |
1591 | return 0; | |
1592 | ||
1593 | return 1; | |
1594 | } | |
1595 | ||
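A small illustrative sketch (the wrapper name is an assumption, not rtlanal.c code): a pass that wants to reuse REG as scratch storage starting at INSN can ask dead_or_set_p whether the old contents survive.

static bool
reg_value_dead_after_p (rtx insn, rtx reg)
{
  gcc_assert (REG_P (reg));

  /* Nonzero when REG dies in INSN or INSN sets every hard register
     that REG occupies, so the old value need not be preserved.  */
  return dead_or_set_p (insn, reg) != 0;
}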
194acded HPN |
1596 | /* Return TRUE iff DEST is a register or subreg of a register and |
1597 | doesn't change the number of words of the inner register, and any | |
1598 | part of the register is TEST_REGNO. */ | |
1599 | ||
1600 | static bool | |
f7d504c2 | 1601 | covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno) |
194acded HPN |
1602 | { |
1603 | unsigned int regno, endregno; | |
1604 | ||
1605 | if (GET_CODE (dest) == SUBREG | |
1606 | && (((GET_MODE_SIZE (GET_MODE (dest)) | |
1607 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD) | |
1608 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) | |
1609 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD))) | |
1610 | dest = SUBREG_REG (dest); | |
1611 | ||
1612 | if (!REG_P (dest)) | |
1613 | return false; | |
1614 | ||
1615 | regno = REGNO (dest); | |
09e18274 | 1616 | endregno = END_REGNO (dest); |
194acded HPN |
1617 | return (test_regno >= regno && test_regno < endregno); |
1618 | } | |
1619 | ||
1620 | /* Like covers_regno_no_parallel_p, but also handles PARALLELs where | |
1621 | any member matches the covers_regno_no_parallel_p criteria. */ | |
1622 | ||
1623 | static bool | |
f7d504c2 | 1624 | covers_regno_p (const_rtx dest, unsigned int test_regno) |
194acded HPN |
1625 | { |
1626 | if (GET_CODE (dest) == PARALLEL) | |
1627 | { | |
1628 | /* Some targets place small structures in registers for return | |
1629 | values of functions, and those registers are wrapped in | |
1630 | PARALLELs that we may see as the destination of a SET. */ | |
1631 | int i; | |
1632 | ||
1633 | for (i = XVECLEN (dest, 0) - 1; i >= 0; i--) | |
1634 | { | |
1635 | rtx inner = XEXP (XVECEXP (dest, 0, i), 0); | |
1636 | if (inner != NULL_RTX | |
1637 | && covers_regno_no_parallel_p (inner, test_regno)) | |
1638 | return true; | |
1639 | } | |
1640 | ||
1641 | return false; | |
1642 | } | |
1643 | else | |
1644 | return covers_regno_no_parallel_p (dest, test_regno); | |
1645 | } | |
1646 | ||
6fb5fa3c | 1647 | /* Utility function for dead_or_set_p to check an individual register. */ |
2c88418c RS |
1648 | |
1649 | int | |
f7d504c2 | 1650 | dead_or_set_regno_p (const_rtx insn, unsigned int test_regno) |
2c88418c | 1651 | { |
f7d504c2 | 1652 | const_rtx pattern; |
2c88418c | 1653 | |
0a2287bf RH |
1654 | /* See if there is a death note for something that includes TEST_REGNO. */ |
1655 | if (find_regno_note (insn, REG_DEAD, test_regno)) | |
1656 | return 1; | |
2c88418c | 1657 | |
4b4bf941 | 1658 | if (CALL_P (insn) |
8f3e7a26 RK |
1659 | && find_regno_fusage (insn, CLOBBER, test_regno)) |
1660 | return 1; | |
1661 | ||
0c99ec5c RH |
1662 | pattern = PATTERN (insn); |
1663 | ||
1664 | if (GET_CODE (pattern) == COND_EXEC) | |
1665 | pattern = COND_EXEC_CODE (pattern); | |
1666 | ||
1667 | if (GET_CODE (pattern) == SET) | |
194acded | 1668 | return covers_regno_p (SET_DEST (pattern), test_regno); |
0c99ec5c | 1669 | else if (GET_CODE (pattern) == PARALLEL) |
2c88418c | 1670 | { |
b3694847 | 1671 | int i; |
2c88418c | 1672 | |
0c99ec5c | 1673 | for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--) |
2c88418c | 1674 | { |
0c99ec5c RH |
1675 | rtx body = XVECEXP (pattern, 0, i); |
1676 | ||
1677 | if (GET_CODE (body) == COND_EXEC) | |
1678 | body = COND_EXEC_CODE (body); | |
2c88418c | 1679 | |
194acded HPN |
1680 | if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER) |
1681 | && covers_regno_p (SET_DEST (body), test_regno)) | |
1682 | return 1; | |
2c88418c RS |
1683 | } |
1684 | } | |
1685 | ||
1686 | return 0; | |
1687 | } | |
1688 | ||
1689 | /* Return the reg-note of kind KIND in insn INSN, if there is one. | |
1690 | If DATUM is nonzero, look for one whose datum is DATUM. */ | |
1691 | ||
1692 | rtx | |
f7d504c2 | 1693 | find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum) |
2c88418c | 1694 | { |
b3694847 | 1695 | rtx link; |
2c88418c | 1696 | |
af082de3 BE |
1697 | gcc_assert (insn); |
1698 | ||
ae78d276 | 1699 | /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */ |
2c3c49de | 1700 | if (! INSN_P (insn)) |
ae78d276 | 1701 | return 0; |
cd798543 AP |
1702 | if (datum == 0) |
1703 | { | |
1704 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
1705 | if (REG_NOTE_KIND (link) == kind) | |
1706 | return link; | |
1707 | return 0; | |
1708 | } | |
ae78d276 | 1709 | |
2c88418c | 1710 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
cd798543 | 1711 | if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0)) |
2c88418c RS |
1712 | return link; |
1713 | return 0; | |
1714 | } | |
1715 | ||
1716 | /* Return the reg-note of kind KIND in insn INSN which applies to register | |
99309f3b RK |
1717 | number REGNO, if any. Return 0 if there is no such reg-note. Note that |
1718 | the REGNO of this NOTE need not be REGNO if REGNO is a hard register; | |
1719 | it might be the case that the note overlaps REGNO. */ | |
2c88418c RS |
1720 | |
1721 | rtx | |
f7d504c2 | 1722 | find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno) |
2c88418c | 1723 | { |
b3694847 | 1724 | rtx link; |
2c88418c | 1725 | |
ae78d276 | 1726 | /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */ |
2c3c49de | 1727 | if (! INSN_P (insn)) |
ae78d276 MM |
1728 | return 0; |
1729 | ||
2c88418c RS |
1730 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
1731 | if (REG_NOTE_KIND (link) == kind | |
1732 | /* Verify that it is a register, so that scratch and MEM won't cause a | |
1733 | problem here. */ | |
f8cfc6aa | 1734 | && REG_P (XEXP (link, 0)) |
99309f3b | 1735 | && REGNO (XEXP (link, 0)) <= regno |
09e18274 | 1736 | && END_REGNO (XEXP (link, 0)) > regno) |
2c88418c RS |
1737 | return link; |
1738 | return 0; | |
1739 | } | |
8f3e7a26 | 1740 | |
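An illustrative sketch of the usual calling pattern (the helper name is an assumption): look up a note that covers a particular hard register with find_regno_note and, if one exists, delete it with remove_note, which is defined later in this file.

static void
drop_reg_unused_note (rtx insn, unsigned int regno)
{
  rtx note = find_regno_note (insn, REG_UNUSED, regno);
  if (note != NULL_RTX)
    remove_note (insn, note);
}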
d9c695ff RK |
1741 | /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and |
1742 | has such a note. */ | |
1743 | ||
1744 | rtx | |
f7d504c2 | 1745 | find_reg_equal_equiv_note (const_rtx insn) |
d9c695ff | 1746 | { |
cd648cec | 1747 | rtx link; |
d9c695ff | 1748 | |
cd648cec | 1749 | if (!INSN_P (insn)) |
d9c695ff | 1750 | return 0; |
ea8f106d | 1751 | |
cd648cec JH |
1752 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
1753 | if (REG_NOTE_KIND (link) == REG_EQUAL | |
1754 | || REG_NOTE_KIND (link) == REG_EQUIV) | |
1755 | { | |
ea8f106d SB |
1756 | /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on |
1757 | insns that have multiple sets. Checking single_set to | |
1758 | make sure of this is not the proper check, as explained | |
1759 | in the comment in set_unique_reg_note. | |
1760 | ||
1761 | This should be changed into an assert. */ | |
1762 | if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn)) | |
cd648cec JH |
1763 | return 0; |
1764 | return link; | |
1765 | } | |
1766 | return NULL; | |
d9c695ff RK |
1767 | } |
1768 | ||
2a450639 RS |
1769 | /* Check whether INSN is a single_set whose source is known to be |
1770 | equivalent to a constant. Return that constant if so, otherwise | |
1771 | return null. */ | |
1772 | ||
1773 | rtx | |
f7d504c2 | 1774 | find_constant_src (const_rtx insn) |
2a450639 RS |
1775 | { |
1776 | rtx note, set, x; | |
1777 | ||
1778 | set = single_set (insn); | |
1779 | if (set) | |
1780 | { | |
1781 | x = avoid_constant_pool_reference (SET_SRC (set)); | |
1782 | if (CONSTANT_P (x)) | |
1783 | return x; | |
1784 | } | |
1785 | ||
1786 | note = find_reg_equal_equiv_note (insn); | |
1787 | if (note && CONSTANT_P (XEXP (note, 0))) | |
1788 | return XEXP (note, 0); | |
1789 | ||
1790 | return NULL_RTX; | |
1791 | } | |
1792 | ||
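A sketch of how a caller might use find_constant_src (the helper name and out-parameter convention are assumptions): detect insns that load a known constant into a register, whether the constant is visible in SET_SRC or only recorded in a REG_EQUAL/REG_EQUIV note.

static bool
loads_known_constant_p (rtx insn, rtx *constant_out)
{
  rtx set = single_set (insn);
  rtx cst;

  if (set == NULL_RTX || !REG_P (SET_DEST (set)))
    return false;

  cst = find_constant_src (insn);
  if (cst == NULL_RTX)
    return false;

  if (constant_out)
    *constant_out = cst;
  return true;
}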
8f3e7a26 RK |
1793 | /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found |
1794 | in the CALL_INSN_FUNCTION_USAGE information of INSN. */ | |
1795 | ||
1796 | int | |
f7d504c2 | 1797 | find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum) |
8f3e7a26 RK |
1798 | { |
1799 | /* If it's not a CALL_INSN, it can't possibly have a | |
1800 | CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */ | |
4b4bf941 | 1801 | if (!CALL_P (insn)) |
8f3e7a26 RK |
1802 | return 0; |
1803 | ||
41374e13 | 1804 | gcc_assert (datum); |
8f3e7a26 | 1805 | |
f8cfc6aa | 1806 | if (!REG_P (datum)) |
8f3e7a26 | 1807 | { |
b3694847 | 1808 | rtx link; |
8f3e7a26 RK |
1809 | |
1810 | for (link = CALL_INSN_FUNCTION_USAGE (insn); | |
a6a2274a | 1811 | link; |
8f3e7a26 | 1812 | link = XEXP (link, 1)) |
a6a2274a | 1813 | if (GET_CODE (XEXP (link, 0)) == code |
cc863bea | 1814 | && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0))) |
a6a2274a | 1815 | return 1; |
8f3e7a26 RK |
1816 | } |
1817 | else | |
1818 | { | |
770ae6cc | 1819 | unsigned int regno = REGNO (datum); |
8f3e7a26 RK |
1820 | |
1821 | /* CALL_INSN_FUNCTION_USAGE information cannot contain references | |
1822 | to pseudo registers, so don't bother checking. */ | |
1823 | ||
1824 | if (regno < FIRST_PSEUDO_REGISTER) | |
a6a2274a | 1825 | { |
09e18274 | 1826 | unsigned int end_regno = END_HARD_REGNO (datum); |
770ae6cc | 1827 | unsigned int i; |
8f3e7a26 RK |
1828 | |
1829 | for (i = regno; i < end_regno; i++) | |
1830 | if (find_regno_fusage (insn, code, i)) | |
1831 | return 1; | |
a6a2274a | 1832 | } |
8f3e7a26 RK |
1833 | } |
1834 | ||
1835 | return 0; | |
1836 | } | |
1837 | ||
1838 | /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found | |
1839 | in the CALL_INSN_FUNCTION_USAGE information of INSN. */ | |
1840 | ||
1841 | int | |
f7d504c2 | 1842 | find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno) |
8f3e7a26 | 1843 | { |
b3694847 | 1844 | rtx link; |
8f3e7a26 RK |
1845 | |
1846 | /* CALL_INSN_FUNCTION_USAGE information cannot contain references | |
1847 | to pseudo registers, so don't bother checking. */ | |
1848 | ||
1849 | if (regno >= FIRST_PSEUDO_REGISTER | |
4b4bf941 | 1850 | || !CALL_P (insn) ) |
8f3e7a26 RK |
1851 | return 0; |
1852 | ||
1853 | for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1)) | |
83ab3839 | 1854 | { |
770ae6cc | 1855 | rtx op, reg; |
83ab3839 RH |
1856 | |
1857 | if (GET_CODE (op = XEXP (link, 0)) == code | |
f8cfc6aa | 1858 | && REG_P (reg = XEXP (op, 0)) |
09e18274 RS |
1859 | && REGNO (reg) <= regno |
1860 | && END_HARD_REGNO (reg) > regno) | |
83ab3839 RH |
1861 | return 1; |
1862 | } | |
8f3e7a26 RK |
1863 | |
1864 | return 0; | |
1865 | } | |
a6a063b8 | 1866 | |
2c88418c | 1867 | \f |
efc0b2bd ILT |
1868 | /* Allocate a register note with kind KIND and datum DATUM. LIST is |
1869 | stored as the pointer to the next register note. */ | |
65c5f2a6 | 1870 | |
efc0b2bd ILT |
1871 | rtx |
1872 | alloc_reg_note (enum reg_note kind, rtx datum, rtx list) | |
65c5f2a6 ILT |
1873 | { |
1874 | rtx note; | |
1875 | ||
1876 | switch (kind) | |
1877 | { | |
1878 | case REG_CC_SETTER: | |
1879 | case REG_CC_USER: | |
1880 | case REG_LABEL_TARGET: | |
1881 | case REG_LABEL_OPERAND: | |
1882 | /* These types of register notes use an INSN_LIST rather than an | |
1883 | EXPR_LIST, so that copying is done right and dumps look | |
1884 | better. */ | |
efc0b2bd | 1885 | note = alloc_INSN_LIST (datum, list); |
65c5f2a6 ILT |
1886 | PUT_REG_NOTE_KIND (note, kind); |
1887 | break; | |
1888 | ||
1889 | default: | |
efc0b2bd | 1890 | note = alloc_EXPR_LIST (kind, datum, list); |
65c5f2a6 ILT |
1891 | break; |
1892 | } | |
1893 | ||
efc0b2bd ILT |
1894 | return note; |
1895 | } | |
1896 | ||
1897 | /* Add register note with kind KIND and datum DATUM to INSN. */ | |
1898 | ||
1899 | void | |
1900 | add_reg_note (rtx insn, enum reg_note kind, rtx datum) | |
1901 | { | |
1902 | REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn)); | |
65c5f2a6 ILT |
1903 | } |
1904 | ||
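A usage sketch only (in real passes set_unique_reg_note in emit-rtl.c is usually the preferred entry point; the helper name here is an assumption): record that INSN computes the value VAL by attaching a REG_EQUAL note.

static void
record_insn_value (rtx insn, rtx val)
{
  add_reg_note (insn, REG_EQUAL, copy_rtx (val));
}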
2c88418c RS |
1905 | /* Remove register note NOTE from the REG_NOTES of INSN. */ |
1906 | ||
1907 | void | |
f7d504c2 | 1908 | remove_note (rtx insn, const_rtx note) |
2c88418c | 1909 | { |
b3694847 | 1910 | rtx link; |
2c88418c | 1911 | |
49c3bb12 RH |
1912 | if (note == NULL_RTX) |
1913 | return; | |
1914 | ||
2c88418c | 1915 | if (REG_NOTES (insn) == note) |
6fb5fa3c DB |
1916 | REG_NOTES (insn) = XEXP (note, 1); |
1917 | else | |
1918 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
1919 | if (XEXP (link, 1) == note) | |
1920 | { | |
1921 | XEXP (link, 1) = XEXP (note, 1); | |
1922 | break; | |
1923 | } | |
1924 | ||
1925 | switch (REG_NOTE_KIND (note)) | |
2c88418c | 1926 | { |
6fb5fa3c DB |
1927 | case REG_EQUAL: |
1928 | case REG_EQUIV: | |
1929 | df_notes_rescan (insn); | |
1930 | break; | |
1931 | default: | |
1932 | break; | |
2c88418c | 1933 | } |
2c88418c | 1934 | } |
55a98783 | 1935 | |
7cd689bc SB |
1936 | /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. */ |
1937 | ||
1938 | void | |
1939 | remove_reg_equal_equiv_notes (rtx insn) | |
1940 | { | |
1941 | rtx *loc; | |
1942 | ||
1943 | loc = ®_NOTES (insn); | |
1944 | while (*loc) | |
1945 | { | |
1946 | enum reg_note kind = REG_NOTE_KIND (*loc); | |
1947 | if (kind == REG_EQUAL || kind == REG_EQUIV) | |
1948 | *loc = XEXP (*loc, 1); | |
1949 | else | |
1950 | loc = &XEXP (*loc, 1); | |
1951 | } | |
1952 | } | |
1953 | ||
5f0d2358 RK |
1954 | /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and |
1955 | return 1 if it is found. A simple equality test is used to determine if | |
1956 | NODE matches. */ | |
1957 | ||
1958 | int | |
f7d504c2 | 1959 | in_expr_list_p (const_rtx listp, const_rtx node) |
5f0d2358 | 1960 | { |
f7d504c2 | 1961 | const_rtx x; |
5f0d2358 RK |
1962 | |
1963 | for (x = listp; x; x = XEXP (x, 1)) | |
1964 | if (node == XEXP (x, 0)) | |
1965 | return 1; | |
1966 | ||
1967 | return 0; | |
1968 | } | |
1969 | ||
dd248abd RK |
1970 | /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and |
1971 | remove that entry from the list if it is found. | |
55a98783 | 1972 | |
dd248abd | 1973 | A simple equality test is used to determine if NODE matches. */ |
55a98783 JL |
1974 | |
1975 | void | |
f7d504c2 | 1976 | remove_node_from_expr_list (const_rtx node, rtx *listp) |
55a98783 JL |
1977 | { |
1978 | rtx temp = *listp; | |
1979 | rtx prev = NULL_RTX; | |
1980 | ||
1981 | while (temp) | |
1982 | { | |
1983 | if (node == XEXP (temp, 0)) | |
1984 | { | |
1985 | /* Splice the node out of the list. */ | |
1986 | if (prev) | |
1987 | XEXP (prev, 1) = XEXP (temp, 1); | |
1988 | else | |
1989 | *listp = XEXP (temp, 1); | |
1990 | ||
1991 | return; | |
1992 | } | |
dd248abd RK |
1993 | |
1994 | prev = temp; | |
55a98783 JL |
1995 | temp = XEXP (temp, 1); |
1996 | } | |
1997 | } | |
2c88418c | 1998 | \f |
2b067faf RS |
1999 | /* Nonzero if X contains any volatile instructions. These are instructions |
2000 | which may cause unpredictable machine state, and thus no | |
2001 | instructions should be moved or combined across them. This includes | |
2002 | only volatile asms and UNSPEC_VOLATILE instructions. */ | |
2003 | ||
2004 | int | |
f7d504c2 | 2005 | volatile_insn_p (const_rtx x) |
2b067faf | 2006 | { |
f7d504c2 | 2007 | const RTX_CODE code = GET_CODE (x); |
2b067faf RS |
2008 | switch (code) |
2009 | { | |
2010 | case LABEL_REF: | |
2011 | case SYMBOL_REF: | |
2012 | case CONST_INT: | |
2013 | case CONST: | |
2014 | case CONST_DOUBLE: | |
091a3ac7 | 2015 | case CONST_FIXED: |
69ef87e2 | 2016 | case CONST_VECTOR: |
2b067faf RS |
2017 | case CC0: |
2018 | case PC: | |
2019 | case REG: | |
2020 | case SCRATCH: | |
2021 | case CLOBBER: | |
2b067faf RS |
2022 | case ADDR_VEC: |
2023 | case ADDR_DIFF_VEC: | |
2024 | case CALL: | |
2025 | case MEM: | |
2026 | return 0; | |
2027 | ||
2028 | case UNSPEC_VOLATILE: | |
2029 | /* case TRAP_IF: This isn't clear yet. */ | |
2030 | return 1; | |
2031 | ||
4c46ea23 | 2032 | case ASM_INPUT: |
2b067faf RS |
2033 | case ASM_OPERANDS: |
2034 | if (MEM_VOLATILE_P (x)) | |
2035 | return 1; | |
e9a25f70 JL |
2036 | |
2037 | default: | |
2038 | break; | |
2b067faf RS |
2039 | } |
2040 | ||
2041 | /* Recursively scan the operands of this expression. */ | |
2042 | ||
2043 | { | |
f7d504c2 | 2044 | const char *const fmt = GET_RTX_FORMAT (code); |
b3694847 | 2045 | int i; |
a6a2274a | 2046 | |
2b067faf RS |
2047 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
2048 | { | |
2049 | if (fmt[i] == 'e') | |
2050 | { | |
31001f72 | 2051 | if (volatile_insn_p (XEXP (x, i))) |
2b067faf RS |
2052 | return 1; |
2053 | } | |
d4757e6a | 2054 | else if (fmt[i] == 'E') |
2b067faf | 2055 | { |
b3694847 | 2056 | int j; |
2b067faf | 2057 | for (j = 0; j < XVECLEN (x, i); j++) |
31001f72 | 2058 | if (volatile_insn_p (XVECEXP (x, i, j))) |
2b067faf RS |
2059 | return 1; |
2060 | } | |
2061 | } | |
2062 | } | |
2063 | return 0; | |
2064 | } | |
2065 | ||
2c88418c | 2066 | /* Nonzero if X contains any volatile memory references |
2ac4fed0 | 2067 | UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */ |
2c88418c RS |
2068 | |
2069 | int | |
f7d504c2 | 2070 | volatile_refs_p (const_rtx x) |
2c88418c | 2071 | { |
f7d504c2 | 2072 | const RTX_CODE code = GET_CODE (x); |
2c88418c RS |
2073 | switch (code) |
2074 | { | |
2075 | case LABEL_REF: | |
2076 | case SYMBOL_REF: | |
2077 | case CONST_INT: | |
2078 | case CONST: | |
2079 | case CONST_DOUBLE: | |
091a3ac7 | 2080 | case CONST_FIXED: |
69ef87e2 | 2081 | case CONST_VECTOR: |
2c88418c RS |
2082 | case CC0: |
2083 | case PC: | |
2084 | case REG: | |
2085 | case SCRATCH: | |
2086 | case CLOBBER: | |
2c88418c RS |
2087 | case ADDR_VEC: |
2088 | case ADDR_DIFF_VEC: | |
2089 | return 0; | |
2090 | ||
2ac4fed0 | 2091 | case UNSPEC_VOLATILE: |
2c88418c RS |
2092 | return 1; |
2093 | ||
2094 | case MEM: | |
4c46ea23 | 2095 | case ASM_INPUT: |
2c88418c RS |
2096 | case ASM_OPERANDS: |
2097 | if (MEM_VOLATILE_P (x)) | |
2098 | return 1; | |
e9a25f70 JL |
2099 | |
2100 | default: | |
2101 | break; | |
2c88418c RS |
2102 | } |
2103 | ||
2104 | /* Recursively scan the operands of this expression. */ | |
2105 | ||
2106 | { | |
f7d504c2 | 2107 | const char *const fmt = GET_RTX_FORMAT (code); |
b3694847 | 2108 | int i; |
a6a2274a | 2109 | |
2c88418c RS |
2110 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
2111 | { | |
2112 | if (fmt[i] == 'e') | |
2113 | { | |
2114 | if (volatile_refs_p (XEXP (x, i))) | |
2115 | return 1; | |
2116 | } | |
d4757e6a | 2117 | else if (fmt[i] == 'E') |
2c88418c | 2118 | { |
b3694847 | 2119 | int j; |
2c88418c RS |
2120 | for (j = 0; j < XVECLEN (x, i); j++) |
2121 | if (volatile_refs_p (XVECEXP (x, i, j))) | |
2122 | return 1; | |
2123 | } | |
2124 | } | |
2125 | } | |
2126 | return 0; | |
2127 | } | |
2128 | ||
2129 | /* Similar to above, except that it also rejects register pre- and post- | |
2130 | incrementing. */ | |
2131 | ||
2132 | int | |
f7d504c2 | 2133 | side_effects_p (const_rtx x) |
2c88418c | 2134 | { |
f7d504c2 | 2135 | const RTX_CODE code = GET_CODE (x); |
2c88418c RS |
2136 | switch (code) |
2137 | { | |
2138 | case LABEL_REF: | |
2139 | case SYMBOL_REF: | |
2140 | case CONST_INT: | |
2141 | case CONST: | |
2142 | case CONST_DOUBLE: | |
091a3ac7 | 2143 | case CONST_FIXED: |
69ef87e2 | 2144 | case CONST_VECTOR: |
2c88418c RS |
2145 | case CC0: |
2146 | case PC: | |
2147 | case REG: | |
2148 | case SCRATCH: | |
2c88418c RS |
2149 | case ADDR_VEC: |
2150 | case ADDR_DIFF_VEC: | |
b5b8b0ac | 2151 | case VAR_LOCATION: |
2c88418c RS |
2152 | return 0; |
2153 | ||
2154 | case CLOBBER: | |
2155 | /* Reject CLOBBER with a non-VOID mode. These are made by combine.c | |
2156 | when some combination can't be done. If we see one, don't think | |
2157 | that we can simplify the expression. */ | |
2158 | return (GET_MODE (x) != VOIDmode); | |
2159 | ||
2160 | case PRE_INC: | |
2161 | case PRE_DEC: | |
2162 | case POST_INC: | |
2163 | case POST_DEC: | |
1fb9c5cd MH |
2164 | case PRE_MODIFY: |
2165 | case POST_MODIFY: | |
2c88418c | 2166 | case CALL: |
2ac4fed0 | 2167 | case UNSPEC_VOLATILE: |
2c88418c RS |
2168 | /* case TRAP_IF: This isn't clear yet. */ |
2169 | return 1; | |
2170 | ||
2171 | case MEM: | |
4c46ea23 | 2172 | case ASM_INPUT: |
2c88418c RS |
2173 | case ASM_OPERANDS: |
2174 | if (MEM_VOLATILE_P (x)) | |
2175 | return 1; | |
e9a25f70 JL |
2176 | |
2177 | default: | |
2178 | break; | |
2c88418c RS |
2179 | } |
2180 | ||
2181 | /* Recursively scan the operands of this expression. */ | |
2182 | ||
2183 | { | |
b3694847 SS |
2184 | const char *fmt = GET_RTX_FORMAT (code); |
2185 | int i; | |
a6a2274a | 2186 | |
2c88418c RS |
2187 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
2188 | { | |
2189 | if (fmt[i] == 'e') | |
2190 | { | |
2191 | if (side_effects_p (XEXP (x, i))) | |
2192 | return 1; | |
2193 | } | |
d4757e6a | 2194 | else if (fmt[i] == 'E') |
2c88418c | 2195 | { |
b3694847 | 2196 | int j; |
2c88418c RS |
2197 | for (j = 0; j < XVECLEN (x, i); j++) |
2198 | if (side_effects_p (XVECEXP (x, i, j))) | |
2199 | return 1; | |
2200 | } | |
2201 | } | |
2202 | } | |
2203 | return 0; | |
2204 | } | |
2205 | \f | |
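A sketch of the classic guard these predicates support (the helper name is an assumption): a single_set whose destination is an unused register may be removed only when its source has no side effects.

static bool
dead_set_removable_p (rtx insn)
{
  rtx set = single_set (insn);

  /* Only plain register sets are considered here.  */
  if (set == NULL_RTX || !REG_P (SET_DEST (set)))
    return false;

  /* Volatile asms, UNSPEC_VOLATILEs, calls, auto-increments and
     volatile memory references must survive even if the result is
     never read.  */
  return !side_effects_p (SET_SRC (set));
}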
e755fcf5 | 2206 | /* Return nonzero if evaluating rtx X might cause a trap. |
48e8382e PB |
2207 | FLAGS controls how to consider MEMs. A nonzero value means the context | |
2208 | of the access may have changed from the original, such that the | |
2209 | address may have become invalid. */ | |
2c88418c | 2210 | |
215b063c | 2211 | int |
f7d504c2 | 2212 | may_trap_p_1 (const_rtx x, unsigned flags) |
2c88418c RS |
2213 | { |
2214 | int i; | |
2215 | enum rtx_code code; | |
6f7d635c | 2216 | const char *fmt; |
48e8382e PB |
2217 | |
2218 | /* We make no distinction currently, but this function is part of | |
2219 | the internal target-hooks ABI so we keep the parameter as | |
2220 | "unsigned flags". */ | |
2221 | bool code_changed = flags != 0; | |
2c88418c RS |
2222 | |
2223 | if (x == 0) | |
2224 | return 0; | |
2225 | code = GET_CODE (x); | |
2226 | switch (code) | |
2227 | { | |
2228 | /* Handle these cases quickly. */ | |
2229 | case CONST_INT: | |
2230 | case CONST_DOUBLE: | |
091a3ac7 | 2231 | case CONST_FIXED: |
69ef87e2 | 2232 | case CONST_VECTOR: |
2c88418c RS |
2233 | case SYMBOL_REF: |
2234 | case LABEL_REF: | |
2235 | case CONST: | |
2236 | case PC: | |
2237 | case CC0: | |
2238 | case REG: | |
2239 | case SCRATCH: | |
2240 | return 0; | |
2241 | ||
215b063c | 2242 | case UNSPEC: |
2ac4fed0 | 2243 | case UNSPEC_VOLATILE: |
215b063c PB |
2244 | return targetm.unspec_may_trap_p (x, flags); |
2245 | ||
2246 | case ASM_INPUT: | |
2c88418c RS |
2247 | case TRAP_IF: |
2248 | return 1; | |
2249 | ||
22aa60a1 RH |
2250 | case ASM_OPERANDS: |
2251 | return MEM_VOLATILE_P (x); | |
2252 | ||
2c88418c RS |
2253 | /* Memory ref can trap unless it's a static var or a stack slot. */ |
2254 | case MEM: | |
e755fcf5 | 2255 | if (/* MEM_NOTRAP_P only relates to the actual position of the memory |
48e8382e PB |
2256 | reference; moving it out of context such as when moving code |
2257 | when optimizing, might cause its address to become invalid. */ | |
2258 | code_changed | |
2259 | || !MEM_NOTRAP_P (x)) | |
2260 | { | |
2261 | HOST_WIDE_INT size = MEM_SIZE (x) ? INTVAL (MEM_SIZE (x)) : 0; | |
2262 | return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size, | |
2263 | GET_MODE (x), code_changed); | |
2264 | } | |
2265 | ||
2266 | return 0; | |
2c88418c RS |
2267 | |
2268 | /* Division by a non-constant might trap. */ | |
2269 | case DIV: | |
2270 | case MOD: | |
2271 | case UDIV: | |
2272 | case UMOD: | |
52bfebf0 RS |
2273 | if (HONOR_SNANS (GET_MODE (x))) |
2274 | return 1; | |
3d8bf70f | 2275 | if (SCALAR_FLOAT_MODE_P (GET_MODE (x))) |
f9013075 DE |
2276 | return flag_trapping_math; |
2277 | if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx)) | |
2c88418c | 2278 | return 1; |
e9a25f70 JL |
2279 | break; |
2280 | ||
b278301b RK |
2281 | case EXPR_LIST: |
2282 | /* An EXPR_LIST is used to represent a function call. This | |
2283 | certainly may trap. */ | |
2284 | return 1; | |
e9a25f70 | 2285 | |
734508ea JW |
2286 | case GE: |
2287 | case GT: | |
2288 | case LE: | |
2289 | case LT: | |
19aec195 | 2290 | case LTGT: |
55143861 | 2291 | case COMPARE: |
734508ea | 2292 | /* Some floating point comparisons may trap. */ |
f5eb5fd0 JH |
2293 | if (!flag_trapping_math) |
2294 | break; | |
734508ea JW |
2295 | /* ??? There is no machine independent way to check for tests that trap |
2296 | when COMPARE is used, though many targets do make this distinction. | |
2297 | For instance, sparc uses CCFPE for compares which generate exceptions | |
2298 | and CCFP for compares which do not generate exceptions. */ | |
52bfebf0 | 2299 | if (HONOR_NANS (GET_MODE (x))) |
55143861 JJ |
2300 | return 1; |
2301 | /* But often the compare has some CC mode, so check operand | |
2302 | modes as well. */ | |
52bfebf0 RS |
2303 | if (HONOR_NANS (GET_MODE (XEXP (x, 0))) |
2304 | || HONOR_NANS (GET_MODE (XEXP (x, 1)))) | |
2305 | return 1; | |
2306 | break; | |
2307 | ||
2308 | case EQ: | |
2309 | case NE: | |
2310 | if (HONOR_SNANS (GET_MODE (x))) | |
2311 | return 1; | |
2312 | /* Often comparison is CC mode, so check operand modes. */ | |
2313 | if (HONOR_SNANS (GET_MODE (XEXP (x, 0))) | |
2314 | || HONOR_SNANS (GET_MODE (XEXP (x, 1)))) | |
55143861 JJ |
2315 | return 1; |
2316 | break; | |
2317 | ||
22fd5743 FH |
2318 | case FIX: |
2319 | /* Conversion of floating point might trap. */ | |
2320 | if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0)))) | |
2321 | return 1; | |
2322 | break; | |
2323 | ||
05cc23e8 RH |
2324 | case NEG: |
2325 | case ABS: | |
e3947b34 | 2326 | case SUBREG: |
05cc23e8 RH |
2327 | /* These operations don't trap even with floating point. */ |
2328 | break; | |
2329 | ||
2c88418c RS |
2330 | default: |
2331 | /* Any floating arithmetic may trap. */ | |
3d8bf70f | 2332 | if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) |
f5eb5fd0 | 2333 | && flag_trapping_math) |
2c88418c RS |
2334 | return 1; |
2335 | } | |
2336 | ||
2337 | fmt = GET_RTX_FORMAT (code); | |
2338 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2339 | { | |
2340 | if (fmt[i] == 'e') | |
2341 | { | |
e755fcf5 | 2342 | if (may_trap_p_1 (XEXP (x, i), flags)) |
2c88418c RS |
2343 | return 1; |
2344 | } | |
2345 | else if (fmt[i] == 'E') | |
2346 | { | |
b3694847 | 2347 | int j; |
2c88418c | 2348 | for (j = 0; j < XVECLEN (x, i); j++) |
e755fcf5 | 2349 | if (may_trap_p_1 (XVECEXP (x, i, j), flags)) |
2c88418c RS |
2350 | return 1; |
2351 | } | |
2352 | } | |
2353 | return 0; | |
2354 | } | |
2358ff91 EB |
2355 | |
2356 | /* Return nonzero if evaluating rtx X might cause a trap. */ | |
2357 | ||
2358 | int | |
f7d504c2 | 2359 | may_trap_p (const_rtx x) |
2358ff91 | 2360 | { |
e755fcf5 ZD |
2361 | return may_trap_p_1 (x, 0); |
2362 | } | |
2363 | ||
c0220ea4 | 2364 | /* Same as above, but additionally return nonzero if evaluating rtx X might |
2358ff91 EB |
2365 | cause a fault. We define a fault for the purpose of this function as an | |
2366 | erroneous execution condition that cannot be encountered during the normal | |
2367 | execution of a valid program; the typical example is an unaligned memory | |
2368 | access on a strict alignment machine. The compiler guarantees that it | |
2369 | doesn't generate code that will fault from a valid program, but this | |
2370 | guarantee doesn't mean anything for individual instructions. Consider | |
2371 | the following example: | |
2372 | ||
2373 | struct S { int d; union { char *cp; int *ip; }; }; | |
2374 | ||
2375 | int foo(struct S *s) | |
2376 | { | |
2377 | if (s->d == 1) | |
2378 | return *s->ip; | |
2379 | else | |
2380 | return *s->cp; | |
2381 | } | |
2382 | ||
2383 | on a strict alignment machine. In a valid program, foo will never be | |
2384 | invoked on a structure for which d is equal to 1 and the underlying | |
2385 | unique field of the union not aligned on a 4-byte boundary, but the | |
2386 | expression *s->ip might cause a fault if considered individually. | |
2387 | ||
2388 | At the RTL level, potentially problematic expressions will almost always | |
2389 | satisfy may_trap_p; for example, the above dereference can be emitted as | |
2390 | (mem:SI (reg:P)) and this expression is may_trap_p for a generic register. | |
2391 | However, suppose that foo is inlined in a caller that causes s->cp to | |
2392 | point to a local character variable and guarantees that s->d is not set | |
2393 | to 1; foo may have been effectively translated into pseudo-RTL as: | |
2394 | ||
2395 | if ((reg:SI) == 1) | |
2396 | (set (reg:SI) (mem:SI (%fp - 7))) | |
2397 | else | |
2398 | (set (reg:QI) (mem:QI (%fp - 7))) | |
2399 | ||
2400 | Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a | |
2401 | memory reference to a stack slot, but it will certainly cause a fault | |
2402 | on a strict alignment machine. */ | |
2403 | ||
2404 | int | |
f7d504c2 | 2405 | may_trap_or_fault_p (const_rtx x) |
2358ff91 | 2406 | { |
48e8382e | 2407 | return may_trap_p_1 (x, 1); |
2358ff91 | 2408 | } |
2c88418c RS |
2409 | \f |
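A sketch of the distinction in practice (the helper name is an assumption): code motion that hoists an expression above a branch changes the context of its memory accesses, so the stricter may_trap_or_fault_p is the appropriate test; side_effects_p additionally rejects expressions that must not be duplicated or reordered.

static bool
safe_to_speculate_p (rtx x)
{
  return !may_trap_or_fault_p (x) && !side_effects_p (x);
}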
2410 | /* Return nonzero if X contains a comparison that is not either EQ or NE, | |
2411 | i.e., an inequality. */ | |
2412 | ||
2413 | int | |
f7d504c2 | 2414 | inequality_comparisons_p (const_rtx x) |
2c88418c | 2415 | { |
b3694847 SS |
2416 | const char *fmt; |
2417 | int len, i; | |
f7d504c2 | 2418 | const enum rtx_code code = GET_CODE (x); |
2c88418c RS |
2419 | |
2420 | switch (code) | |
2421 | { | |
2422 | case REG: | |
2423 | case SCRATCH: | |
2424 | case PC: | |
2425 | case CC0: | |
2426 | case CONST_INT: | |
2427 | case CONST_DOUBLE: | |
091a3ac7 | 2428 | case CONST_FIXED: |
69ef87e2 | 2429 | case CONST_VECTOR: |
2c88418c RS |
2430 | case CONST: |
2431 | case LABEL_REF: | |
2432 | case SYMBOL_REF: | |
2433 | return 0; | |
2434 | ||
2435 | case LT: | |
2436 | case LTU: | |
2437 | case GT: | |
2438 | case GTU: | |
2439 | case LE: | |
2440 | case LEU: | |
2441 | case GE: | |
2442 | case GEU: | |
2443 | return 1; | |
a6a2274a | 2444 | |
e9a25f70 JL |
2445 | default: |
2446 | break; | |
2c88418c RS |
2447 | } |
2448 | ||
2449 | len = GET_RTX_LENGTH (code); | |
2450 | fmt = GET_RTX_FORMAT (code); | |
2451 | ||
2452 | for (i = 0; i < len; i++) | |
2453 | { | |
2454 | if (fmt[i] == 'e') | |
2455 | { | |
2456 | if (inequality_comparisons_p (XEXP (x, i))) | |
2457 | return 1; | |
2458 | } | |
2459 | else if (fmt[i] == 'E') | |
2460 | { | |
b3694847 | 2461 | int j; |
2c88418c RS |
2462 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
2463 | if (inequality_comparisons_p (XVECEXP (x, i, j))) | |
2464 | return 1; | |
2465 | } | |
2466 | } | |
a6a2274a | 2467 | |
2c88418c RS |
2468 | return 0; |
2469 | } | |
2470 | \f | |
1ed0205e VM |
2471 | /* Replace any occurrence of FROM in X with TO. The function does |
2472 | not recurse into CONST_DOUBLE expressions when replacing. | |
2c88418c RS |
2473 | |
2474 | Note that copying is not done so X must not be shared unless all copies | |
2475 | are to be modified. */ | |
2476 | ||
2477 | rtx | |
0c20a65f | 2478 | replace_rtx (rtx x, rtx from, rtx to) |
2c88418c | 2479 | { |
b3694847 SS |
2480 | int i, j; |
2481 | const char *fmt; | |
2c88418c | 2482 | |
1ed0205e | 2483 | /* The following prevents infinite loops when we change a MEM in | |
dc297297 | 2484 | CONST_DOUBLE into the same CONST_DOUBLE. */ | |
1ed0205e VM |
2485 | if (x != 0 && GET_CODE (x) == CONST_DOUBLE) |
2486 | return x; | |
2487 | ||
2c88418c RS |
2488 | if (x == from) |
2489 | return to; | |
2490 | ||
2491 | /* Allow this function to make replacements in EXPR_LISTs. */ | |
2492 | if (x == 0) | |
2493 | return 0; | |
2494 | ||
9dd791c8 AO |
2495 | if (GET_CODE (x) == SUBREG) |
2496 | { | |
55d796da | 2497 | rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to); |
9dd791c8 | 2498 | |
481683e1 | 2499 | if (CONST_INT_P (new_rtx)) |
9dd791c8 | 2500 | { |
55d796da | 2501 | x = simplify_subreg (GET_MODE (x), new_rtx, |
9dd791c8 AO |
2502 | GET_MODE (SUBREG_REG (x)), |
2503 | SUBREG_BYTE (x)); | |
41374e13 | 2504 | gcc_assert (x); |
9dd791c8 AO |
2505 | } |
2506 | else | |
55d796da | 2507 | SUBREG_REG (x) = new_rtx; |
9dd791c8 AO |
2508 | |
2509 | return x; | |
2510 | } | |
2511 | else if (GET_CODE (x) == ZERO_EXTEND) | |
2512 | { | |
55d796da | 2513 | rtx new_rtx = replace_rtx (XEXP (x, 0), from, to); |
9dd791c8 | 2514 | |
481683e1 | 2515 | if (CONST_INT_P (new_rtx)) |
9dd791c8 AO |
2516 | { |
2517 | x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x), | |
55d796da | 2518 | new_rtx, GET_MODE (XEXP (x, 0))); |
41374e13 | 2519 | gcc_assert (x); |
9dd791c8 AO |
2520 | } |
2521 | else | |
55d796da | 2522 | XEXP (x, 0) = new_rtx; |
9dd791c8 AO |
2523 | |
2524 | return x; | |
2525 | } | |
2526 | ||
2c88418c RS |
2527 | fmt = GET_RTX_FORMAT (GET_CODE (x)); |
2528 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--) | |
2529 | { | |
2530 | if (fmt[i] == 'e') | |
2531 | XEXP (x, i) = replace_rtx (XEXP (x, i), from, to); | |
2532 | else if (fmt[i] == 'E') | |
2533 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
2534 | XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to); | |
2535 | } | |
2536 | ||
2537 | return x; | |
a6a2274a | 2538 | } |
2c88418c | 2539 | \f |
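A sketch of a typical call (the helper name is an assumption): because replace_rtx rewrites its argument in place, a caller that must preserve INSN copies the pattern first.

static rtx
copy_pattern_replacing_reg (rtx insn, rtx old_reg, rtx new_reg)
{
  rtx pat = copy_rtx (PATTERN (insn));
  return replace_rtx (pat, old_reg, new_reg);
}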
39811184 | 2540 | /* Replace occurrences of the old label in *X with the new one. |
4af16369 | 2541 | DATA is a REPLACE_LABEL_DATA containing the old and new labels. */ |
39811184 JZ |
2542 | |
2543 | int | |
0c20a65f | 2544 | replace_label (rtx *x, void *data) |
39811184 JZ |
2545 | { |
2546 | rtx l = *x; | |
4af16369 JZ |
2547 | rtx old_label = ((replace_label_data *) data)->r1; |
2548 | rtx new_label = ((replace_label_data *) data)->r2; | |
2549 | bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses; | |
39811184 JZ |
2550 | |
2551 | if (l == NULL_RTX) | |
2552 | return 0; | |
2553 | ||
173cd571 JZ |
2554 | if (GET_CODE (l) == SYMBOL_REF |
2555 | && CONSTANT_POOL_ADDRESS_P (l)) | |
4af16369 | 2556 | { |
173cd571 | 2557 | rtx c = get_pool_constant (l); |
4af16369 JZ |
2558 | if (rtx_referenced_p (old_label, c)) |
2559 | { | |
2560 | rtx new_c, new_l; | |
2561 | replace_label_data *d = (replace_label_data *) data; | |
0c20a65f | 2562 | |
4af16369 JZ |
2563 | /* Create a copy of constant C; replace the label inside |
2564 | but do not update LABEL_NUSES because uses in constant pool | |
2565 | are not counted. */ | |
2566 | new_c = copy_rtx (c); | |
2567 | d->update_label_nuses = false; | |
2568 | for_each_rtx (&new_c, replace_label, data); | |
2569 | d->update_label_nuses = update_label_nuses; | |
2570 | ||
2571 | /* Add the new constant NEW_C to constant pool and replace | |
2572 | the old reference to constant by new reference. */ | |
173cd571 | 2573 | new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0); |
4af16369 JZ |
2574 | *x = replace_rtx (l, l, new_l); |
2575 | } | |
2576 | return 0; | |
2577 | } | |
2578 | ||
39811184 JZ |
2579 | /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL |
2580 | field. This is not handled by for_each_rtx because it doesn't | |
2581 | handle unprinted ('0') fields. */ | |
4b4bf941 | 2582 | if (JUMP_P (l) && JUMP_LABEL (l) == old_label) |
39811184 | 2583 | JUMP_LABEL (l) = new_label; |
39811184 | 2584 | |
4af16369 JZ |
2585 | if ((GET_CODE (l) == LABEL_REF |
2586 | || GET_CODE (l) == INSN_LIST) | |
2587 | && XEXP (l, 0) == old_label) | |
2588 | { | |
2589 | XEXP (l, 0) = new_label; | |
2590 | if (update_label_nuses) | |
2591 | { | |
2592 | ++LABEL_NUSES (new_label); | |
2593 | --LABEL_NUSES (old_label); | |
2594 | } | |
2595 | return 0; | |
2596 | } | |
39811184 JZ |
2597 | |
2598 | return 0; | |
2599 | } | |
2600 | ||
4af16369 JZ |
2601 | /* When *BODY is equal to X or X is directly referenced by *BODY |
2602 | return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero | |
2603 | too, otherwise FOR_EACH_RTX continues traversing *BODY. */ | |
39811184 JZ |
2604 | |
2605 | static int | |
0c20a65f | 2606 | rtx_referenced_p_1 (rtx *body, void *x) |
39811184 | 2607 | { |
4af16369 JZ |
2608 | rtx y = (rtx) x; |
2609 | ||
2610 | if (*body == NULL_RTX) | |
2611 | return y == NULL_RTX; | |
2612 | ||
2613 | /* Return true if a label_ref *BODY refers to label Y. */ | |
4b4bf941 | 2614 | if (GET_CODE (*body) == LABEL_REF && LABEL_P (y)) |
4af16369 JZ |
2615 | return XEXP (*body, 0) == y; |
2616 | ||
2617 | /* If *BODY is a reference to pool constant traverse the constant. */ | |
2618 | if (GET_CODE (*body) == SYMBOL_REF | |
2619 | && CONSTANT_POOL_ADDRESS_P (*body)) | |
2620 | return rtx_referenced_p (y, get_pool_constant (*body)); | |
2621 | ||
2622 | /* By default, compare the RTL expressions. */ | |
2623 | return rtx_equal_p (*body, y); | |
39811184 JZ |
2624 | } |
2625 | ||
4af16369 | 2626 | /* Return true if X is referenced in BODY. */ |
39811184 JZ |
2627 | |
2628 | int | |
0c20a65f | 2629 | rtx_referenced_p (rtx x, rtx body) |
39811184 | 2630 | { |
4af16369 | 2631 | return for_each_rtx (&body, rtx_referenced_p_1, x); |
39811184 JZ |
2632 | } |
2633 | ||
ee735eef JZ |
2634 | /* If INSN is a tablejump return true and store the label (before jump table) to |
2635 | *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */ | |
39811184 JZ |
2636 | |
2637 | bool | |
f7d504c2 | 2638 | tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep) |
39811184 | 2639 | { |
ee735eef JZ |
2640 | rtx label, table; |
2641 | ||
4b4bf941 | 2642 | if (JUMP_P (insn) |
ee735eef JZ |
2643 | && (label = JUMP_LABEL (insn)) != NULL_RTX |
2644 | && (table = next_active_insn (label)) != NULL_RTX | |
481683e1 | 2645 | && JUMP_TABLE_DATA_P (table)) |
39811184 | 2646 | { |
ee735eef JZ |
2647 | if (labelp) |
2648 | *labelp = label; | |
2649 | if (tablep) | |
2650 | *tablep = table; | |
39811184 JZ |
2651 | return true; |
2652 | } | |
2653 | return false; | |
2654 | } | |
2655 | ||
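An illustrative sketch (the helper name is an assumption): tablejump_p makes it easy to inspect the dispatch table behind a jump; an ADDR_DIFF_VEC keeps its label vector in operand 1, an ADDR_VEC in operand 0.

static int
tablejump_entry_count (rtx insn)
{
  rtx table;

  if (!tablejump_p (insn, NULL, &table))
    return -1;

  if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC)
    return XVECLEN (PATTERN (table), 1);
  return XVECLEN (PATTERN (table), 0);
}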
fce7e199 RH |
2656 | /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or |
2657 | constant that is not in the constant pool and not in the condition | |
2658 | of an IF_THEN_ELSE. */ | |
2a1777af JL |
2659 | |
2660 | static int | |
f7d504c2 | 2661 | computed_jump_p_1 (const_rtx x) |
2a1777af | 2662 | { |
f7d504c2 | 2663 | const enum rtx_code code = GET_CODE (x); |
2a1777af | 2664 | int i, j; |
6f7d635c | 2665 | const char *fmt; |
2a1777af JL |
2666 | |
2667 | switch (code) | |
2668 | { | |
2a1777af JL |
2669 | case LABEL_REF: |
2670 | case PC: | |
2671 | return 0; | |
2672 | ||
fce7e199 RH |
2673 | case CONST: |
2674 | case CONST_INT: | |
2675 | case CONST_DOUBLE: | |
091a3ac7 | 2676 | case CONST_FIXED: |
69ef87e2 | 2677 | case CONST_VECTOR: |
fce7e199 | 2678 | case SYMBOL_REF: |
2a1777af JL |
2679 | case REG: |
2680 | return 1; | |
2681 | ||
2682 | case MEM: | |
2683 | return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF | |
2684 | && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0))); | |
2685 | ||
2686 | case IF_THEN_ELSE: | |
fce7e199 RH |
2687 | return (computed_jump_p_1 (XEXP (x, 1)) |
2688 | || computed_jump_p_1 (XEXP (x, 2))); | |
1d300e19 KG |
2689 | |
2690 | default: | |
2691 | break; | |
2a1777af JL |
2692 | } |
2693 | ||
2694 | fmt = GET_RTX_FORMAT (code); | |
2695 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2696 | { | |
2697 | if (fmt[i] == 'e' | |
fce7e199 | 2698 | && computed_jump_p_1 (XEXP (x, i))) |
2a1777af JL |
2699 | return 1; |
2700 | ||
d4757e6a | 2701 | else if (fmt[i] == 'E') |
2a1777af | 2702 | for (j = 0; j < XVECLEN (x, i); j++) |
fce7e199 | 2703 | if (computed_jump_p_1 (XVECEXP (x, i, j))) |
2a1777af JL |
2704 | return 1; |
2705 | } | |
2706 | ||
2707 | return 0; | |
2708 | } | |
2709 | ||
2710 | /* Return nonzero if INSN is an indirect jump (aka computed jump). | |
2711 | ||
2712 | Tablejumps and casesi insns are not considered indirect jumps; | |
4eb00163 | 2713 | we can recognize them by a (use (label_ref)). */ |
2a1777af JL |
2714 | |
2715 | int | |
f7d504c2 | 2716 | computed_jump_p (const_rtx insn) |
2a1777af JL |
2717 | { |
2718 | int i; | |
4b4bf941 | 2719 | if (JUMP_P (insn)) |
2a1777af JL |
2720 | { |
2721 | rtx pat = PATTERN (insn); | |
2a1777af | 2722 | |
cf7c4aa6 HPN |
2723 | /* If we have a JUMP_LABEL set, we're not a computed jump. */ |
2724 | if (JUMP_LABEL (insn) != NULL) | |
f759eb8b | 2725 | return 0; |
cf7c4aa6 HPN |
2726 | |
2727 | if (GET_CODE (pat) == PARALLEL) | |
2a1777af JL |
2728 | { |
2729 | int len = XVECLEN (pat, 0); | |
2730 | int has_use_labelref = 0; | |
2731 | ||
2732 | for (i = len - 1; i >= 0; i--) | |
2733 | if (GET_CODE (XVECEXP (pat, 0, i)) == USE | |
2734 | && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) | |
2735 | == LABEL_REF)) | |
2736 | has_use_labelref = 1; | |
2737 | ||
2738 | if (! has_use_labelref) | |
2739 | for (i = len - 1; i >= 0; i--) | |
2740 | if (GET_CODE (XVECEXP (pat, 0, i)) == SET | |
2741 | && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx | |
fce7e199 | 2742 | && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i)))) |
2a1777af JL |
2743 | return 1; |
2744 | } | |
2745 | else if (GET_CODE (pat) == SET | |
2746 | && SET_DEST (pat) == pc_rtx | |
fce7e199 | 2747 | && computed_jump_p_1 (SET_SRC (pat))) |
2a1777af JL |
2748 | return 1; |
2749 | } | |
2750 | return 0; | |
2751 | } | |
ccc2d6d0 | 2752 | |
cf94b0fc PB |
2753 | /* Optimized loop of for_each_rtx, trying to avoid useless recursive |
2754 | calls. Processes the subexpressions of EXP and passes them to F. */ | |
2755 | static int | |
2756 | for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data) | |
2757 | { | |
2758 | int result, i, j; | |
2759 | const char *format = GET_RTX_FORMAT (GET_CODE (exp)); | |
2760 | rtx *x; | |
2761 | ||
2762 | for (; format[n] != '\0'; n++) | |
2763 | { | |
2764 | switch (format[n]) | |
2765 | { | |
2766 | case 'e': | |
2767 | /* Call F on X. */ | |
2768 | x = &XEXP (exp, n); | |
2769 | result = (*f) (x, data); | |
2770 | if (result == -1) | |
2771 | /* Do not traverse sub-expressions. */ | |
2772 | continue; | |
2773 | else if (result != 0) | |
2774 | /* Stop the traversal. */ | |
2775 | return result; | |
2776 | ||
2777 | if (*x == NULL_RTX) | |
2778 | /* There are no sub-expressions. */ | |
2779 | continue; | |
2780 | ||
2781 | i = non_rtx_starting_operands[GET_CODE (*x)]; | |
2782 | if (i >= 0) | |
2783 | { | |
2784 | result = for_each_rtx_1 (*x, i, f, data); | |
2785 | if (result != 0) | |
2786 | return result; | |
2787 | } | |
2788 | break; | |
2789 | ||
2790 | case 'V': | |
2791 | case 'E': | |
2792 | if (XVEC (exp, n) == 0) | |
2793 | continue; | |
2794 | for (j = 0; j < XVECLEN (exp, n); ++j) | |
2795 | { | |
2796 | /* Call F on X. */ | |
2797 | x = &XVECEXP (exp, n, j); | |
2798 | result = (*f) (x, data); | |
2799 | if (result == -1) | |
2800 | /* Do not traverse sub-expressions. */ | |
2801 | continue; | |
2802 | else if (result != 0) | |
2803 | /* Stop the traversal. */ | |
2804 | return result; | |
2805 | ||
2806 | if (*x == NULL_RTX) | |
2807 | /* There are no sub-expressions. */ | |
2808 | continue; | |
2809 | ||
2810 | i = non_rtx_starting_operands[GET_CODE (*x)]; | |
2811 | if (i >= 0) | |
2812 | { | |
2813 | result = for_each_rtx_1 (*x, i, f, data); | |
2814 | if (result != 0) | |
2815 | return result; | |
2816 | } | |
2817 | } | |
2818 | break; | |
2819 | ||
2820 | default: | |
2821 | /* Nothing to do. */ | |
2822 | break; | |
2823 | } | |
2824 | } | |
2825 | ||
2826 | return 0; | |
2827 | } | |
2828 | ||
ccc2d6d0 MM |
2829 | /* Traverse X via depth-first search, calling F for each |
2830 | sub-expression (including X itself). F is also passed the DATA. | |
2831 | If F returns -1, do not traverse sub-expressions, but continue | |
2832 | traversing the rest of the tree. If F ever returns any other | |
40f03658 | 2833 | nonzero value, stop the traversal, and return the value returned |
ccc2d6d0 MM |
2834 | by F. Otherwise, return 0. This function does not traverse inside |
2835 | tree structure that contains RTX_EXPRs, or into sub-expressions | |
2836 | whose format code is `0' since it is not known whether or not those | |
2837 | codes are actually RTL. | |
2838 | ||
2839 | This routine is very general, and could (should?) be used to | |
2840 | implement many of the other routines in this file. */ | |
2841 | ||
ae0b51ef | 2842 | int |
0c20a65f | 2843 | for_each_rtx (rtx *x, rtx_function f, void *data) |
ccc2d6d0 MM |
2844 | { |
2845 | int result; | |
ccc2d6d0 MM |
2846 | int i; |
2847 | ||
2848 | /* Call F on X. */ | |
b987f237 | 2849 | result = (*f) (x, data); |
ccc2d6d0 MM |
2850 | if (result == -1) |
2851 | /* Do not traverse sub-expressions. */ | |
2852 | return 0; | |
2853 | else if (result != 0) | |
2854 | /* Stop the traversal. */ | |
2855 | return result; | |
2856 | ||
2857 | if (*x == NULL_RTX) | |
2858 | /* There are no sub-expressions. */ | |
2859 | return 0; | |
2860 | ||
cf94b0fc PB |
2861 | i = non_rtx_starting_operands[GET_CODE (*x)]; |
2862 | if (i < 0) | |
2863 | return 0; | |
ccc2d6d0 | 2864 | |
cf94b0fc | 2865 | return for_each_rtx_1 (*x, i, f, data); |
ccc2d6d0 | 2866 | } |
3ec2b590 | 2867 | |
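A sketch showing the callback protocol (the helper names are assumptions): returning a nonzero value from the callback stops the walk, so finding the first MEM inside an expression looks like this; returning -1 instead would skip the current subexpression but keep walking.

static int
find_mem_cb (rtx *loc, void *data)
{
  if (MEM_P (*loc))
    {
      *(rtx *) data = *loc;
      return 1;		/* Stop the traversal.  */
    }
  return 0;		/* Keep walking.  */
}

static rtx
find_first_mem (rtx *x)
{
  rtx mem = NULL_RTX;
  for_each_rtx (x, find_mem_cb, &mem);
  return mem;
}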
cf94b0fc | 2868 | |
777b1b71 RH |
2869 | /* Searches X for any reference to REGNO, returning the rtx of the |
2870 | reference found if any. Otherwise, returns NULL_RTX. */ | |
2871 | ||
2872 | rtx | |
0c20a65f | 2873 | regno_use_in (unsigned int regno, rtx x) |
777b1b71 | 2874 | { |
b3694847 | 2875 | const char *fmt; |
777b1b71 RH |
2876 | int i, j; |
2877 | rtx tem; | |
2878 | ||
f8cfc6aa | 2879 | if (REG_P (x) && REGNO (x) == regno) |
777b1b71 RH |
2880 | return x; |
2881 | ||
2882 | fmt = GET_RTX_FORMAT (GET_CODE (x)); | |
2883 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--) | |
2884 | { | |
2885 | if (fmt[i] == 'e') | |
2886 | { | |
2887 | if ((tem = regno_use_in (regno, XEXP (x, i)))) | |
2888 | return tem; | |
2889 | } | |
2890 | else if (fmt[i] == 'E') | |
2891 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
2892 | if ((tem = regno_use_in (regno , XVECEXP (x, i, j)))) | |
2893 | return tem; | |
2894 | } | |
2895 | ||
2896 | return NULL_RTX; | |
2897 | } | |
2dfa9a87 | 2898 | |
e5c56fd9 JH |
2899 | /* Return a value indicating whether OP, an operand of a commutative |
2900 | operation, is preferred as the first or second operand. The higher | |
2901 | the value, the stronger the preference for being the first operand. | |
2902 | We use negative values to indicate a preference for the first operand | |
2903 | and positive values for the second operand. */ | |
2904 | ||
9b3bd424 | 2905 | int |
0c20a65f | 2906 | commutative_operand_precedence (rtx op) |
e5c56fd9 | 2907 | { |
e3d6e740 | 2908 | enum rtx_code code = GET_CODE (op); |
e3d6e740 | 2909 | |
e5c56fd9 | 2910 | /* Constants always become the second operand. Prefer "nice" constants. */ | |
e3d6e740 | 2911 | if (code == CONST_INT) |
7e0b4eae | 2912 | return -8; |
e3d6e740 | 2913 | if (code == CONST_DOUBLE) |
7e0b4eae | 2914 | return -7; |
091a3ac7 CF |
2915 | if (code == CONST_FIXED) |
2916 | return -7; | |
9ce79a7a | 2917 | op = avoid_constant_pool_reference (op); |
79b82df3 | 2918 | code = GET_CODE (op); |
ec8e098d PB |
2919 | |
2920 | switch (GET_RTX_CLASS (code)) | |
2921 | { | |
2922 | case RTX_CONST_OBJ: | |
2923 | if (code == CONST_INT) | |
7e0b4eae | 2924 | return -6; |
ec8e098d | 2925 | if (code == CONST_DOUBLE) |
7e0b4eae | 2926 | return -5; |
091a3ac7 CF |
2927 | if (code == CONST_FIXED) |
2928 | return -5; | |
7e0b4eae | 2929 | return -4; |
ec8e098d PB |
2930 | |
2931 | case RTX_EXTRA: | |
2932 | /* SUBREGs of objects should come second. */ | |
2933 | if (code == SUBREG && OBJECT_P (SUBREG_REG (op))) | |
7e0b4eae | 2934 | return -3; |
6fb5fa3c | 2935 | return 0; |
ec8e098d PB |
2936 | |
2937 | case RTX_OBJ: | |
2938 | /* Complex expressions should come first, so decrease the priority | |
7e0b4eae PB |
2939 | of objects. Prefer pointer objects over non-pointer objects. */ | |
2940 | if ((REG_P (op) && REG_POINTER (op)) | |
2941 | || (MEM_P (op) && MEM_POINTER (op))) | |
2942 | return -1; | |
2943 | return -2; | |
ec8e098d PB |
2944 | |
2945 | case RTX_COMM_ARITH: | |
2946 | /* Prefer operands that are themselves commutative to be first. | |
2947 | This helps to make things linear. In particular, | |
2948 | (and (and (reg) (reg)) (not (reg))) is canonical. */ | |
2949 | return 4; | |
2950 | ||
2951 | case RTX_BIN_ARITH: | |
2952 | /* If only one operand is a binary expression, it will be the first | |
2953 | operand. In particular, (plus (minus (reg) (reg)) (neg (reg))) | |
2954 | is canonical, although it will usually be further simplified. */ | |
2955 | return 2; | |
e3d6e740 | 2956 | |
ec8e098d PB |
2957 | case RTX_UNARY: |
2958 | /* Then prefer NEG and NOT. */ | |
2959 | if (code == NEG || code == NOT) | |
2960 | return 1; | |
e5c56fd9 | 2961 | |
ec8e098d PB |
2962 | default: |
2963 | return 0; | |
2964 | } | |
e5c56fd9 JH |
2965 | } |
2966 | ||
f63d1bf7 | 2967 | /* Return 1 iff it is necessary to swap operands of a commutative operation | |
e5c56fd9 JH |
2968 | in order to canonicalize the expression. */ | |
2969 | ||
7e0b4eae | 2970 | bool |
0c20a65f | 2971 | swap_commutative_operands_p (rtx x, rtx y) |
e5c56fd9 | 2972 | { |
9b3bd424 RH |
2973 | return (commutative_operand_precedence (x) |
2974 | < commutative_operand_precedence (y)); | |
e5c56fd9 | 2975 | } |
2dfa9a87 MH |
2976 | |
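A sketch of how the predicate is normally consumed (the helper name is an assumption; PLUS is just one example of a commutative code): swap the operands before building the rtx so the more "complex" operand ends up first.

static rtx
gen_canonical_plus (enum machine_mode mode, rtx op0, rtx op1)
{
  if (swap_commutative_operands_p (op0, op1))
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
    }
  return gen_rtx_PLUS (mode, op0, op1);
}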
2977 | /* Return 1 if X is an autoincrement side effect and the register is | |
2978 | not the stack pointer. */ | |
2979 | int | |
f7d504c2 | 2980 | auto_inc_p (const_rtx x) |
2dfa9a87 MH |
2981 | { |
2982 | switch (GET_CODE (x)) | |
2983 | { | |
2984 | case PRE_INC: | |
2985 | case POST_INC: | |
2986 | case PRE_DEC: | |
2987 | case POST_DEC: | |
2988 | case PRE_MODIFY: | |
2989 | case POST_MODIFY: | |
2990 | /* There are no REG_INC notes for SP. */ | |
2991 | if (XEXP (x, 0) != stack_pointer_rtx) | |
2992 | return 1; | |
2993 | default: | |
2994 | break; | |
2995 | } | |
2996 | return 0; | |
2997 | } | |
3b10cf4b | 2998 | |
f9da5064 | 2999 | /* Return nonzero if IN contains a piece of rtl that has the address LOC. */ |
db7ba742 | 3000 | int |
f7d504c2 | 3001 | loc_mentioned_in_p (rtx *loc, const_rtx in) |
db7ba742 | 3002 | { |
a52b023a PB |
3003 | enum rtx_code code; |
3004 | const char *fmt; | |
db7ba742 R |
3005 | int i, j; |
3006 | ||
a52b023a PB |
3007 | if (!in) |
3008 | return 0; | |
3009 | ||
3010 | code = GET_CODE (in); | |
3011 | fmt = GET_RTX_FORMAT (code); | |
db7ba742 R |
3012 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
3013 | { | |
db7ba742 R |
3014 | if (fmt[i] == 'e') |
3015 | { | |
e0651058 | 3016 | if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i))) |
db7ba742 R |
3017 | return 1; |
3018 | } | |
3019 | else if (fmt[i] == 'E') | |
3020 | for (j = XVECLEN (in, i) - 1; j >= 0; j--) | |
e0651058 AO |
3021 | if (loc == &XVECEXP (in, i, j) |
3022 | || loc_mentioned_in_p (loc, XVECEXP (in, i, j))) | |
db7ba742 R |
3023 | return 1; |
3024 | } | |
3025 | return 0; | |
3026 | } | |
ddef6bc7 | 3027 | |
bb51e270 RS |
3028 | /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE, |
3029 | and SUBREG_BYTE, return the bit offset where the subreg begins | |
3030 | (counting from the least significant bit of the operand). */ | |
33aceff2 JW |
3031 | |
3032 | unsigned int | |
bb51e270 RS |
3033 | subreg_lsb_1 (enum machine_mode outer_mode, |
3034 | enum machine_mode inner_mode, | |
3035 | unsigned int subreg_byte) | |
33aceff2 | 3036 | { |
33aceff2 JW |
3037 | unsigned int bitpos; |
3038 | unsigned int byte; | |
3039 | unsigned int word; | |
3040 | ||
3041 | /* A paradoxical subreg begins at bit position 0. */ | |
bb51e270 | 3042 | if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode)) |
33aceff2 JW |
3043 | return 0; |
3044 | ||
3045 | if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN) | |
3046 | /* If the subreg crosses a word boundary ensure that | |
3047 | it also begins and ends on a word boundary. */ | |
41374e13 NS |
3048 | gcc_assert (!((subreg_byte % UNITS_PER_WORD |
3049 | + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD | |
3050 | && (subreg_byte % UNITS_PER_WORD | |
3051 | || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD))); | |
33aceff2 JW |
3052 | |
3053 | if (WORDS_BIG_ENDIAN) | |
3054 | word = (GET_MODE_SIZE (inner_mode) | |
bb51e270 | 3055 | - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD; |
33aceff2 | 3056 | else |
bb51e270 | 3057 | word = subreg_byte / UNITS_PER_WORD; |
33aceff2 JW |
3058 | bitpos = word * BITS_PER_WORD; |
3059 | ||
3060 | if (BYTES_BIG_ENDIAN) | |
3061 | byte = (GET_MODE_SIZE (inner_mode) | |
bb51e270 | 3062 | - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD; |
33aceff2 | 3063 | else |
bb51e270 | 3064 | byte = subreg_byte % UNITS_PER_WORD; |
33aceff2 JW |
3065 | bitpos += byte * BITS_PER_UNIT; |
3066 | ||
3067 | return bitpos; | |
3068 | } | |
3069 | ||
bb51e270 RS |
3070 | /* Given a subreg X, return the bit offset where the subreg begins |
3071 | (counting from the least significant bit of the reg). */ | |
3072 | ||
3073 | unsigned int | |
f7d504c2 | 3074 | subreg_lsb (const_rtx x) |
bb51e270 RS |
3075 | { |
3076 | return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)), | |
3077 | SUBREG_BYTE (x)); | |
3078 | } | |
3079 | ||
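A worked example under stated assumptions (a little-endian target, so neither BYTES_BIG_ENDIAN nor WORDS_BIG_ENDIAN): the SImode subreg of a DImode value at SUBREG_BYTE 4 begins at bit 32 whatever UNITS_PER_WORD is. With 4-byte words, word = 4/4 = 1 giving bitpos = 32 and byte = 0; with 8-byte words, word = 0 and byte = 4, contributing 4 * BITS_PER_UNIT = 32.

static void
check_subreg_lsb_example (void)
{
  /* Holds only on a little-endian target; see the reasoning above.  */
  gcc_assert (subreg_lsb_1 (SImode, DImode, 4) == 32);
}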
f1f4e530 | 3080 | /* Fill in information about a subreg of a hard register. |
ddef6bc7 JJ |
3081 | xregno - A regno of an inner hard subreg_reg (or what will become one). |
3082 | xmode - The mode of xregno. | |
3083 | offset - The byte offset. | |
3084 | ymode - The mode of a top level SUBREG (or what may become one). | |
f1f4e530 | 3085 | info - Pointer to structure to fill in. */ |
c619e982 | 3086 | void |
f1f4e530 JM |
3087 | subreg_get_info (unsigned int xregno, enum machine_mode xmode, |
3088 | unsigned int offset, enum machine_mode ymode, | |
3089 | struct subreg_info *info) | |
04c5580f | 3090 | { |
8521c414 | 3091 | int nregs_xmode, nregs_ymode; |
04c5580f | 3092 | int mode_multiple, nregs_multiple; |
f1f4e530 | 3093 | int offset_adj, y_offset, y_offset_adj; |
8521c414 | 3094 | int regsize_xmode, regsize_ymode; |
f1f4e530 | 3095 | bool rknown; |
04c5580f | 3096 | |
41374e13 | 3097 | gcc_assert (xregno < FIRST_PSEUDO_REGISTER); |
04c5580f | 3098 | |
f1f4e530 JM |
3099 | rknown = false; |
3100 | ||
dd79bb7e GK |
3101 | /* If there are holes in a non-scalar mode in registers, we expect |
3102 | that it is made up of its units concatenated together. */ | |
8521c414 | 3103 | if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)) |
dd79bb7e | 3104 | { |
8521c414 JM |
3105 | enum machine_mode xmode_unit; |
3106 | ||
3107 | nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode); | |
3108 | if (GET_MODE_INNER (xmode) == VOIDmode) | |
3109 | xmode_unit = xmode; | |
3110 | else | |
3111 | xmode_unit = GET_MODE_INNER (xmode); | |
3112 | gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit)); | |
3113 | gcc_assert (nregs_xmode | |
3114 | == (GET_MODE_NUNITS (xmode) | |
3115 | * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit))); | |
3116 | gcc_assert (hard_regno_nregs[xregno][xmode] | |
3117 | == (hard_regno_nregs[xregno][xmode_unit] | |
3118 | * GET_MODE_NUNITS (xmode))); | |
dd79bb7e GK |
3119 | |
3120 | /* You can only ask for a SUBREG of a value with holes in the middle | |
3121 | if you don't cross the holes. (Such a SUBREG should be done by | |
3122 | picking a different register class, or doing it in memory if | |
3123 | necessary.) An example of a value with holes is XCmode on 32-bit | |
3124 | x86 with -m128bit-long-double; it's represented in 6 32-bit registers, | |
3125 | 3 for each part, but in memory it's two 128-bit parts. | |
3126 | Padding is assumed to be at the end (not necessarily the 'high part') | |
3127 | of each unit. */ | |
8521c414 JM |
3128 | if ((offset / GET_MODE_SIZE (xmode_unit) + 1 |
3129 | < GET_MODE_NUNITS (xmode)) | |
3130 | && (offset / GET_MODE_SIZE (xmode_unit) | |
dd79bb7e | 3131 | != ((offset + GET_MODE_SIZE (ymode) - 1) |
8521c414 | 3132 | / GET_MODE_SIZE (xmode_unit)))) |
f1f4e530 JM |
3133 | { |
3134 | info->representable_p = false; | |
3135 | rknown = true; | |
3136 | } | |
dd79bb7e GK |
3137 | } |
3138 | else | |
3139 | nregs_xmode = hard_regno_nregs[xregno][xmode]; | |
3140 | ||
66fd46b6 | 3141 | nregs_ymode = hard_regno_nregs[xregno][ymode]; |
04c5580f | 3142 | |
dd79bb7e | 3143 | /* Paradoxical subregs are otherwise valid. */ |
f1f4e530 JM |
3144 | if (!rknown |
3145 | && offset == 0 | |
3146 | && GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode)) | |
3147 | { | |
3148 | info->representable_p = true; | |
3149 | /* If this is a big endian paradoxical subreg, which uses more | |
3150 | actual hard registers than the original register, we must | |
3151 | return a negative offset so that we find the proper highpart | |
3152 | of the register. */ | |
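	/* Editor's illustration (hypothetical 32-bit target where a DImode
	   value needs two hard registers, WORDS_BIG_ENDIAN): for
	   (subreg:DI (reg:SI XREGNO) 0) we get nregs_xmode == 1 and
	   nregs_ymode == 2, so info->offset becomes 1 - 2 == -1 and the
	   DImode view starts one hard register below XREGNO; on a
	   little-endian target the offset stays 0.  */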
3153 | if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD | |
3154 | ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN) | |
3155 | info->offset = nregs_xmode - nregs_ymode; | |
3156 | else | |
3157 | info->offset = 0; | |
3158 | info->nregs = nregs_ymode; | |
3159 | return; | |
3160 | } | |
04c5580f | 3161 | |
8521c414 JM |
3162 | /* If registers store different numbers of bits in the different |
3163 | modes, we cannot generally form this subreg. */ | |
f1f4e530 | 3164 | if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode) |
5f7fc2b8 JM |
3165 | && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode) |
3166 | && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0 | |
3167 | && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0) | |
f1f4e530 JM |
3168 | { |
3169 | regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode; | |
f1f4e530 | 3170 | regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode; |
f1f4e530 JM |
3171 | if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1) |
3172 | { | |
3173 | info->representable_p = false; | |
3174 | info->nregs | |
3175 | = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode; | |
3176 | info->offset = offset / regsize_xmode; | |
3177 | return; | |
3178 | } | |
3179 | if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1) | |
3180 | { | |
3181 | info->representable_p = false; | |
3182 | info->nregs | |
3183 | = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode; | |
3184 | info->offset = offset / regsize_xmode; | |
3185 | return; | |
3186 | } | |
3187 | } | |
8521c414 | 3188 | |
dd79bb7e | 3189 | /* Lowpart subregs are otherwise valid. */ |
f1f4e530 JM |
3190 | if (!rknown && offset == subreg_lowpart_offset (ymode, xmode)) |
3191 | { | |
3192 | info->representable_p = true; | |
3193 | rknown = true; | |
a446b4e8 JM |
3194 | |
3195 | if (offset == 0 || nregs_xmode == nregs_ymode) | |
3196 | { | |
3197 | info->offset = 0; | |
3198 | info->nregs = nregs_ymode; | |
3199 | return; | |
3200 | } | |
f1f4e530 | 3201 | } |
04c5580f | 3202 | |
dd79bb7e GK |
3203 | /* This should always pass, otherwise we don't know how to verify |
3204 | the constraint. These conditions may be relaxed but | |
3205 | subreg_regno_offset would need to be redesigned. */ | |
41374e13 | 3206 | gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0); |
41374e13 | 3207 | gcc_assert ((nregs_xmode % nregs_ymode) == 0); |
04c5580f | 3208 | |
b20b352b | 3209 | /* The XMODE value can be seen as a vector of NREGS_XMODE |
dcc24678 | 3210 | values. The subreg must represent a lowpart of a given field. |
04c5580f | 3211 | Compute what field it is. */ |
f1f4e530 JM |
3212 | offset_adj = offset; |
3213 | offset_adj -= subreg_lowpart_offset (ymode, | |
3214 | mode_for_size (GET_MODE_BITSIZE (xmode) | |
3215 | / nregs_xmode, | |
3216 | MODE_INT, 0)); | |
04c5580f | 3217 | |
dd79bb7e | 3218 | /* Size of ymode must not be greater than the size of xmode. */ |
04c5580f | 3219 | mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode); |
41374e13 | 3220 | gcc_assert (mode_multiple != 0); |
04c5580f JH |
3221 | |
3222 | y_offset = offset / GET_MODE_SIZE (ymode); | |
f1f4e530 JM |
3223 | y_offset_adj = offset_adj / GET_MODE_SIZE (ymode); |
3224 | nregs_multiple = nregs_xmode / nregs_ymode; | |
41374e13 | 3225 | |
f1f4e530 | 3226 | gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0); |
41374e13 NS |
3227 | gcc_assert ((mode_multiple % nregs_multiple) == 0); |
3228 | ||
f1f4e530 JM |
3229 | if (!rknown) |
3230 | { | |
3231 | info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple))); | |
3232 | rknown = true; | |
3233 | } | |
3234 | info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode; | |
3235 | info->nregs = nregs_ymode; | |
3236 | } | |
3237 | ||
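/* Editor's sketch (not part of the original sources): a worked use of
   subreg_get_info on a hypothetical little-endian target where
   UNITS_PER_WORD == 4, SImode needs one hard register and DImode two.  */
#if 0
static void
subreg_get_info_example (void)
{
  struct subreg_info info;

  /* Describe (subreg:SI (reg:DI 2) 4), i.e. the high word of a DImode
     value living in hard registers 2 and 3.  */
  subreg_get_info (2, DImode, 4, SImode, &info);

  /* On the assumed target this yields:
       info.representable_p == true
       info.offset == 1   (the subreg is hard register 2 + 1 == 3)
       info.nregs == 1.  */
}
#endif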
3238 | /* This function returns the regno offset of a subreg expression. | |
3239 | xregno - A regno of an inner hard subreg_reg (or what will become one). | |
3240 | xmode - The mode of xregno. | |
3241 | offset - The byte offset. | |
3242 | ymode - The mode of a top level SUBREG (or what may become one). | |
3243 | RETURN - The regno offset which would be used. */ | |
3244 | unsigned int | |
3245 | subreg_regno_offset (unsigned int xregno, enum machine_mode xmode, | |
3246 | unsigned int offset, enum machine_mode ymode) | |
3247 | { | |
3248 | struct subreg_info info; | |
3249 | subreg_get_info (xregno, xmode, offset, ymode, &info); | |
3250 | return info.offset; | |
3251 | } | |
3252 | ||
3253 | /* This function returns true when the offset is representable via | |
3254 | subreg_regno_offset for the given regno. |
3255 | xregno - A regno of an inner hard subreg_reg (or what will become one). | |
3256 | xmode - The mode of xregno. | |
3257 | offset - The byte offset. | |
3258 | ymode - The mode of a top level SUBREG (or what may become one). | |
3259 | RETURN - Whether the offset is representable. */ | |
3260 | bool | |
3261 | subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode, | |
3262 | unsigned int offset, enum machine_mode ymode) | |
3263 | { | |
3264 | struct subreg_info info; | |
3265 | subreg_get_info (xregno, xmode, offset, ymode, &info); | |
05cee290 | 3266 | return info.representable_p; |
04c5580f JH |
3267 | } |
3268 | ||
eef302d2 RS |
3269 | /* Return the number of a YMODE register to which |
3270 | ||
3271 | (subreg:YMODE (reg:XMODE XREGNO) OFFSET) | |
3272 | ||
3273 | can be simplified. Return -1 if the subreg can't be simplified. | |
3274 | ||
3275 | XREGNO is a hard register number. */ | |
3276 | ||
3277 | int | |
3278 | simplify_subreg_regno (unsigned int xregno, enum machine_mode xmode, | |
3279 | unsigned int offset, enum machine_mode ymode) | |
3280 | { | |
3281 | struct subreg_info info; | |
3282 | unsigned int yregno; | |
3283 | ||
3284 | #ifdef CANNOT_CHANGE_MODE_CLASS | |
3285 | /* Give the backend a chance to disallow the mode change. */ | |
3286 | if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT | |
3287 | && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT | |
3288 | && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode)) | |
3289 | return -1; | |
3290 | #endif | |
3291 | ||
3292 | /* We shouldn't simplify stack-related registers. */ | |
3293 | if ((!reload_completed || frame_pointer_needed) | |
3294 | && (xregno == FRAME_POINTER_REGNUM | |
3295 | || xregno == HARD_FRAME_POINTER_REGNUM)) | |
3296 | return -1; | |
3297 | ||
3298 | if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
3299 | && xregno == ARG_POINTER_REGNUM) | |
3300 | return -1; | |
3301 | ||
3302 | if (xregno == STACK_POINTER_REGNUM) | |
3303 | return -1; | |
3304 | ||
3305 | /* Try to get the register offset. */ | |
3306 | subreg_get_info (xregno, xmode, offset, ymode, &info); | |
3307 | if (!info.representable_p) | |
3308 | return -1; | |
3309 | ||
3310 | /* Make sure that the offsetted register value is in range. */ | |
3311 | yregno = xregno + info.offset; | |
3312 | if (!HARD_REGISTER_NUM_P (yregno)) | |
3313 | return -1; | |
3314 | ||
3315 | /* See whether (reg:YMODE YREGNO) is valid. | |
3316 | ||
3317 | ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid. | |
3318 | This is a kludge to work around how float/complex arguments are passed | |
3319 | on 32-bit SPARC and should be fixed. */ | |
3320 | if (!HARD_REGNO_MODE_OK (yregno, ymode) | |
3321 | && HARD_REGNO_MODE_OK (xregno, xmode)) | |
3322 | return -1; | |
3323 | ||
3324 | return (int) yregno; | |
3325 | } | |
3326 | ||
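/* Editor's note: as the early exits above show, asking for a subreg of
   the stack pointer always fails, e.g.
   simplify_subreg_regno (STACK_POINTER_REGNUM, Pmode, 0, SImode) == -1,
   while for an ordinary hard register the result is XREGNO plus the
   offset computed by subreg_get_info, provided that offset is
   representable and (reg:YMODE YREGNO) is valid for the target.  */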
dc297297 | 3327 | /* Return the final regno that a subreg expression refers to. */ |
a6a2274a | 3328 | unsigned int |
f7d504c2 | 3329 | subreg_regno (const_rtx x) |
ddef6bc7 JJ |
3330 | { |
3331 | unsigned int ret; | |
3332 | rtx subreg = SUBREG_REG (x); | |
3333 | int regno = REGNO (subreg); | |
3334 | ||
a6a2274a KH |
3335 | ret = regno + subreg_regno_offset (regno, |
3336 | GET_MODE (subreg), | |
ddef6bc7 JJ |
3337 | SUBREG_BYTE (x), |
3338 | GET_MODE (x)); | |
3339 | return ret; | |
3340 | ||
3341 | } | |
f1f4e530 JM |
3342 | |
3343 | /* Return the number of registers that a subreg expression refers | |
3344 | to. */ | |
3345 | unsigned int | |
f7d504c2 | 3346 | subreg_nregs (const_rtx x) |
ba49cb7b KZ |
3347 | { |
3348 | return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x); | |
3349 | } | |
3350 | ||
3351 | /* Return the number of registers that a subreg REG with REGNO | |
3352 | expression refers to. This is a copy of rtlanal.c:subreg_nregs, |
3353 | changed so that the regno can be passed in. */ | |
3354 | ||
3355 | unsigned int | |
3356 | subreg_nregs_with_regno (unsigned int regno, const_rtx x) | |
f1f4e530 JM |
3357 | { |
3358 | struct subreg_info info; | |
3359 | rtx subreg = SUBREG_REG (x); | |
f1f4e530 JM |
3360 | |
3361 | subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x), | |
3362 | &info); | |
3363 | return info.nregs; | |
3364 | } | |
3365 | ||
ba49cb7b | 3366 | |
833366d6 JH |
3367 | struct parms_set_data |
3368 | { | |
3369 | int nregs; | |
3370 | HARD_REG_SET regs; | |
3371 | }; | |
3372 | ||
3373 | /* Helper function for noticing stores to parameter registers. */ | |
3374 | static void | |
7bc980e1 | 3375 | parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data) |
833366d6 | 3376 | { |
1634b18f | 3377 | struct parms_set_data *const d = (struct parms_set_data *) data; |
833366d6 JH |
3378 | if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER |
3379 | && TEST_HARD_REG_BIT (d->regs, REGNO (x))) | |
3380 | { | |
3381 | CLEAR_HARD_REG_BIT (d->regs, REGNO (x)); | |
3382 | d->nregs--; | |
3383 | } | |
3384 | } | |
3385 | ||
a6a2274a | 3386 | /* Look backward for the first parameter to be loaded. |
b2df20b4 DJ |
3387 | Note that loads of all parameters will not necessarily be |
3388 | found if CSE has eliminated some of them (e.g., an argument | |
3389 | to the outer function is passed down as a parameter). | |
833366d6 JH |
3390 | Do not skip BOUNDARY. */ |
3391 | rtx | |
0c20a65f | 3392 | find_first_parameter_load (rtx call_insn, rtx boundary) |
833366d6 JH |
3393 | { |
3394 | struct parms_set_data parm; | |
b2df20b4 | 3395 | rtx p, before, first_set; |
833366d6 JH |
3396 | |
3397 | /* Since different machines initialize their parameter registers | |
3398 | in different orders, assume nothing. Collect the set of all | |
3399 | parameter registers. */ | |
3400 | CLEAR_HARD_REG_SET (parm.regs); | |
3401 | parm.nregs = 0; | |
3402 | for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1)) | |
3403 | if (GET_CODE (XEXP (p, 0)) == USE | |
f8cfc6aa | 3404 | && REG_P (XEXP (XEXP (p, 0), 0))) |
833366d6 | 3405 | { |
41374e13 | 3406 | gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER); |
833366d6 JH |
3407 | |
3408 | /* We only care about registers which can hold function | |
3409 | arguments. */ | |
3410 | if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0)))) | |
3411 | continue; | |
3412 | ||
3413 | SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0))); | |
3414 | parm.nregs++; | |
3415 | } | |
3416 | before = call_insn; | |
b2df20b4 | 3417 | first_set = call_insn; |
833366d6 JH |
3418 | |
3419 | /* Search backward for the first set of a register in this set. */ | |
3420 | while (parm.nregs && before != boundary) | |
3421 | { | |
3422 | before = PREV_INSN (before); | |
3423 | ||
3424 | /* It is possible that some loads got CSEed from one call to | |
3425 | another. Stop in that case. */ | |
4b4bf941 | 3426 | if (CALL_P (before)) |
833366d6 JH |
3427 | break; |
3428 | ||
dbc1a163 | 3429 | /* Our caller must either ensure that we will find all sets |
833366d6 | 3430 | (in case the code has not been optimized yet), or take care |
eaec9b3d | 3431 | of possible labels by setting BOUNDARY to a preceding |
833366d6 | 3432 | CODE_LABEL. */ |
4b4bf941 | 3433 | if (LABEL_P (before)) |
dbc1a163 | 3434 | { |
41374e13 | 3435 | gcc_assert (before == boundary); |
dbc1a163 RH |
3436 | break; |
3437 | } | |
833366d6 | 3438 | |
0d025d43 | 3439 | if (INSN_P (before)) |
b2df20b4 DJ |
3440 | { |
3441 | int nregs_old = parm.nregs; | |
3442 | note_stores (PATTERN (before), parms_set, &parm); | |
3443 | /* If we found something that did not set a parameter reg, | |
3444 | we're done. Do not keep going, as that might result | |
3445 | in hoisting an insn before the setting of a pseudo | |
3446 | that is used by the hoisted insn. */ | |
3447 | if (nregs_old != parm.nregs) | |
3448 | first_set = before; | |
3449 | else | |
3450 | break; | |
3451 | } | |
833366d6 | 3452 | } |
b2df20b4 | 3453 | return first_set; |
833366d6 | 3454 | } |
3dec4024 | 3455 | |
14b493d6 | 3456 | /* Return true if we should avoid inserting code between INSN and the preceding |
3dec4024 JH |
3457 | call instruction. */ |
3458 | ||
3459 | bool | |
9678086d | 3460 | keep_with_call_p (const_rtx insn) |
3dec4024 JH |
3461 | { |
3462 | rtx set; | |
3463 | ||
3464 | if (INSN_P (insn) && (set = single_set (insn)) != NULL) | |
3465 | { | |
f8cfc6aa | 3466 | if (REG_P (SET_DEST (set)) |
5df533b3 | 3467 | && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER |
3dec4024 JH |
3468 | && fixed_regs[REGNO (SET_DEST (set))] |
3469 | && general_operand (SET_SRC (set), VOIDmode)) | |
3470 | return true; | |
f8cfc6aa | 3471 | if (REG_P (SET_SRC (set)) |
3dec4024 | 3472 | && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set))) |
f8cfc6aa | 3473 | && REG_P (SET_DEST (set)) |
3dec4024 JH |
3474 | && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER) |
3475 | return true; | |
bc204393 RH |
3476 | /* There may be a stack pop just after the call and before the store |
3477 | of the return register. Search for the actual store when deciding | |
3478 | if we can break or not. */ | |
3dec4024 JH |
3479 | if (SET_DEST (set) == stack_pointer_rtx) |
3480 | { | |
75547801 | 3481 | /* This CONST_CAST is okay because next_nonnote_insn just |
4e9b57fa | 3482 | returns its argument and we assign it to a const_rtx |
75547801 | 3483 | variable. */ |
b1d5455a | 3484 | const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX(insn)); |
bc204393 | 3485 | if (i2 && keep_with_call_p (i2)) |
3dec4024 JH |
3486 | return true; |
3487 | } | |
3488 | } | |
3489 | return false; | |
3490 | } | |
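/* Editor's illustration (hypothetical target whose function value is
   returned in hard register 0): directly after a call, the insn

     (set (reg:SI <pseudo>) (reg:SI 0))

   matches the second test above, so the copy of the return value is
   kept with the call; a stack-pointer adjustment between the call and
   that copy is kept as well, via the check on SET_DEST ==
   stack_pointer_rtx, which looks at the following insn.  */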
71d2c5bd | 3491 | |
432f982f JH |
3492 | /* Return true if LABEL is a target of JUMP_INSN. This applies only |
3493 | to non-complex jumps. That is, direct unconditional, conditional, | |
3494 | and tablejumps, but not computed jumps or returns. It also does | |
3495 | not apply to the fallthru case of a conditional jump. */ | |
3496 | ||
3497 | bool | |
f7d504c2 | 3498 | label_is_jump_target_p (const_rtx label, const_rtx jump_insn) |
432f982f JH |
3499 | { |
3500 | rtx tmp = JUMP_LABEL (jump_insn); | |
3501 | ||
3502 | if (label == tmp) | |
3503 | return true; | |
3504 | ||
3505 | if (tablejump_p (jump_insn, NULL, &tmp)) | |
3506 | { | |
3507 | rtvec vec = XVEC (PATTERN (tmp), | |
3508 | GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC); | |
3509 | int i, veclen = GET_NUM_ELEM (vec); | |
3510 | ||
3511 | for (i = 0; i < veclen; ++i) | |
3512 | if (XEXP (RTVEC_ELT (vec, i), 0) == label) | |
3513 | return true; | |
3514 | } | |
3515 | ||
cb2f563b HPN |
3516 | if (find_reg_note (jump_insn, REG_LABEL_TARGET, label)) |
3517 | return true; | |
3518 | ||
432f982f JH |
3519 | return false; |
3520 | } | |
3521 | ||
f894b69b PB |
3522 | \f |
3523 | /* Return an estimate of the cost of computing rtx X. | |
3524 | One use is in cse, to decide which expression to keep in the hash table. | |
3525 | Another is in rtl generation, to pick the cheapest way to multiply. | |
f40751dd JH |
3526 | Other uses like the latter are expected in the future. |
3527 | ||
3528 | The SPEED parameter specifies whether costs optimized for speed or size |
3529 | should be returned. */ |
f894b69b PB |
3530 | |
3531 | int | |
f40751dd | 3532 | rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED, bool speed) |
f894b69b PB |
3533 | { |
3534 | int i, j; | |
3535 | enum rtx_code code; | |
3536 | const char *fmt; | |
3537 | int total; | |
3538 | ||
3539 | if (x == 0) | |
3540 | return 0; | |
3541 | ||
3542 | /* Compute the default costs of certain things. | |
3543 | Note that targetm.rtx_costs can override the defaults. */ | |
3544 | ||
3545 | code = GET_CODE (x); | |
3546 | switch (code) | |
3547 | { | |
3548 | case MULT: | |
3549 | total = COSTS_N_INSNS (5); | |
3550 | break; | |
3551 | case DIV: | |
3552 | case UDIV: | |
3553 | case MOD: | |
3554 | case UMOD: | |
3555 | total = COSTS_N_INSNS (7); | |
3556 | break; | |
3557 | case USE: | |
db3edc20 | 3558 | /* Used in combine.c as a marker. */ |
f894b69b PB |
3559 | total = 0; |
3560 | break; | |
3561 | default: | |
3562 | total = COSTS_N_INSNS (1); | |
3563 | } | |
3564 | ||
3565 | switch (code) | |
3566 | { | |
3567 | case REG: | |
3568 | return 0; | |
3569 | ||
3570 | case SUBREG: | |
edb81165 | 3571 | total = 0; |
f894b69b PB |
3572 | /* If we can't tie these modes, make this expensive. The larger |
3573 | the mode, the more expensive it is. */ | |
3574 | if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x)))) | |
3575 | return COSTS_N_INSNS (2 | |
3576 | + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD); | |
3577 | break; | |
3578 | ||
3579 | default: | |
f40751dd | 3580 | if (targetm.rtx_costs (x, code, outer_code, &total, speed)) |
f894b69b PB |
3581 | return total; |
3582 | break; | |
3583 | } | |
3584 | ||
3585 | /* Sum the costs of the sub-rtx's, plus cost of this operation, | |
3586 | which is already in total. */ | |
3587 | ||
3588 | fmt = GET_RTX_FORMAT (code); | |
3589 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
3590 | if (fmt[i] == 'e') | |
f40751dd | 3591 | total += rtx_cost (XEXP (x, i), code, speed); |
f894b69b PB |
3592 | else if (fmt[i] == 'E') |
3593 | for (j = 0; j < XVECLEN (x, i); j++) | |
f40751dd | 3594 | total += rtx_cost (XVECEXP (x, i, j), code, speed); |
f894b69b PB |
3595 | |
3596 | return total; | |
3597 | } | |
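/* Editor's worked example of the default costs above (assuming the
   target's rtx_costs hook does not override them):

     rtx_cost ((plus:SI (mult:SI (reg) (reg)) (reg)), SET, true)

   sums COSTS_N_INSNS (1) for the PLUS, COSTS_N_INSNS (5) for the MULT
   operand and 0 for each REG, i.e. COSTS_N_INSNS (6) in total.  */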
3598 | \f | |
3599 | /* Return cost of address expression X. | |
f40751dd JH |
3600 | Expect that X is a properly formed address reference. |
3601 | ||
3602 | The SPEED parameter specifies whether costs optimized for speed or size |
3603 | should be returned. */ |
f894b69b PB |
3604 | |
3605 | int | |
09e881c9 | 3606 | address_cost (rtx x, enum machine_mode mode, addr_space_t as, bool speed) |
f894b69b | 3607 | { |
f894b69b PB |
3608 | /* We may be asked for the cost of various unusual addresses, such as the |
3609 | operands of a push instruction. It is not worthwhile to complicate |
3610 | the target hook to handle such cases. |
3611 | ||
09e881c9 | 3612 | if (!memory_address_addr_space_p (mode, x, as)) |
f894b69b PB |
3613 | return 1000; |
3614 | ||
f40751dd | 3615 | return targetm.address_cost (x, speed); |
f894b69b PB |
3616 | } |
3617 | ||
3618 | /* If the target doesn't override, compute the cost as with arithmetic. */ | |
3619 | ||
3620 | int | |
f40751dd | 3621 | default_address_cost (rtx x, bool speed) |
f894b69b | 3622 | { |
f40751dd | 3623 | return rtx_cost (x, MEM, speed); |
f894b69b | 3624 | } |
2f93eea8 PB |
3625 | \f |
3626 | ||
3627 | unsigned HOST_WIDE_INT | |
fa233e34 | 3628 | nonzero_bits (const_rtx x, enum machine_mode mode) |
2f93eea8 PB |
3629 | { |
3630 | return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0); | |
3631 | } | |
3632 | ||
3633 | unsigned int | |
fa233e34 | 3634 | num_sign_bit_copies (const_rtx x, enum machine_mode mode) |
2f93eea8 PB |
3635 | { |
3636 | return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0); | |
3637 | } | |
3638 | ||
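/* Editor's note -- two small illustrative examples of the interfaces above:

     nonzero_bits ((and:SI (reg:SI r) (const_int 255)), SImode)

   is at most 0xff, because the AND case below intersects the masks of
   both operands; and

     num_sign_bit_copies ((sign_extend:SI (reg:QI r)), SImode)

   is at least 25, because sign extension from 8 to 32 bits contributes
   32 - 8 + 1 copies of the sign bit.  */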
3639 | /* The function cached_nonzero_bits is a wrapper around nonzero_bits1. | |
3640 | It avoids exponential behavior in nonzero_bits1 when X has | |
3641 | identical subexpressions on the first or the second level. */ | |
3642 | ||
3643 | static unsigned HOST_WIDE_INT | |
fa233e34 | 3644 | cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x, |
2f93eea8 PB |
3645 | enum machine_mode known_mode, |
3646 | unsigned HOST_WIDE_INT known_ret) | |
3647 | { | |
3648 | if (x == known_x && mode == known_mode) | |
3649 | return known_ret; | |
3650 | ||
3651 | /* Try to find identical subexpressions. If found call | |
3652 | nonzero_bits1 on X with the subexpressions as KNOWN_X and the | |
3653 | precomputed value for the subexpression as KNOWN_RET. */ | |
3654 | ||
3655 | if (ARITHMETIC_P (x)) | |
3656 | { | |
3657 | rtx x0 = XEXP (x, 0); | |
3658 | rtx x1 = XEXP (x, 1); | |
3659 | ||
3660 | /* Check the first level. */ | |
3661 | if (x0 == x1) | |
3662 | return nonzero_bits1 (x, mode, x0, mode, | |
3663 | cached_nonzero_bits (x0, mode, known_x, | |
3664 | known_mode, known_ret)); | |
3665 | ||
3666 | /* Check the second level. */ | |
3667 | if (ARITHMETIC_P (x0) | |
3668 | && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1))) | |
3669 | return nonzero_bits1 (x, mode, x1, mode, | |
3670 | cached_nonzero_bits (x1, mode, known_x, | |
3671 | known_mode, known_ret)); | |
3672 | ||
3673 | if (ARITHMETIC_P (x1) | |
3674 | && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1))) | |
3675 | return nonzero_bits1 (x, mode, x0, mode, | |
3676 | cached_nonzero_bits (x0, mode, known_x, | |
3677 | known_mode, known_ret)); | |
3678 | } | |
3679 | ||
3680 | return nonzero_bits1 (x, mode, known_x, known_mode, known_ret); | |
3681 | } | |
3682 | ||
3683 | /* We let num_sign_bit_copies recur into nonzero_bits as that is useful. | |
3684 | We don't let nonzero_bits recur into num_sign_bit_copies, because that | |
3685 | is less useful. We can't allow both, because that results in exponential | |
3686 | run time recursion. There is a nullstone testcase that triggered | |
3687 | this. This macro avoids accidental uses of num_sign_bit_copies. */ | |
3688 | #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior | |
3689 | ||
3690 | /* Given an expression, X, compute which bits in X can be nonzero. | |
3691 | We don't care about bits outside of those defined in MODE. | |
3692 | ||
3693 | For most X this is simply GET_MODE_MASK (MODE), but if X is |
3694 | an arithmetic operation, we can do better. */ | |
3695 | ||
3696 | static unsigned HOST_WIDE_INT | |
fa233e34 | 3697 | nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, |
2f93eea8 PB |
3698 | enum machine_mode known_mode, |
3699 | unsigned HOST_WIDE_INT known_ret) | |
3700 | { | |
3701 | unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode); | |
3702 | unsigned HOST_WIDE_INT inner_nz; | |
3703 | enum rtx_code code; | |
3704 | unsigned int mode_width = GET_MODE_BITSIZE (mode); | |
3705 | ||
ff596cd2 RL |
3706 | /* For floating-point and vector values, assume all bits are needed. */ |
3707 | if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode) | |
3708 | || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode)) | |
2f93eea8 PB |
3709 | return nonzero; |
3710 | ||
3711 | /* If X is wider than MODE, use its mode instead. */ | |
3712 | if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width) | |
3713 | { | |
3714 | mode = GET_MODE (x); | |
3715 | nonzero = GET_MODE_MASK (mode); | |
3716 | mode_width = GET_MODE_BITSIZE (mode); | |
3717 | } | |
3718 | ||
3719 | if (mode_width > HOST_BITS_PER_WIDE_INT) | |
3720 | /* Our only callers in this case look for single bit values. So | |
3721 | just return the mode mask. Those tests will then be false. */ | |
3722 | return nonzero; | |
3723 | ||
3724 | #ifndef WORD_REGISTER_OPERATIONS | |
3725 | /* If MODE is wider than X, but both are a single word for both the host | |
3726 | and target machines, we can compute this from which bits of the | |
3727 | object might be nonzero in its own mode, taking into account the fact | |
3728 | that on many CISC machines, accessing an object in a wider mode | |
3729 | causes the high-order bits to become undefined. So they are | |
3730 | not known to be zero. */ | |
3731 | ||
3732 | if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode | |
3733 | && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD | |
3734 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
3735 | && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x))) | |
3736 | { | |
3737 | nonzero &= cached_nonzero_bits (x, GET_MODE (x), | |
3738 | known_x, known_mode, known_ret); | |
3739 | nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)); | |
3740 | return nonzero; | |
3741 | } | |
3742 | #endif | |
3743 | ||
3744 | code = GET_CODE (x); | |
3745 | switch (code) | |
3746 | { | |
3747 | case REG: | |
3748 | #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) | |
3749 | /* If pointers extend unsigned and this is a pointer in Pmode, say that | |
3750 | all the bits above ptr_mode are known to be zero. */ | |
d4ebfa65 BE |
3751 | /* As we do not know which address space the pointer is referring to, |
3752 | we can do this only if the target does not support different pointer | |
3753 | or address modes depending on the address space. */ | |
3754 | if (target_default_pointer_address_modes_p () | |
3755 | && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode | |
2f93eea8 PB |
3756 | && REG_POINTER (x)) |
3757 | nonzero &= GET_MODE_MASK (ptr_mode); | |
3758 | #endif | |
3759 | ||
3760 | /* Include declared information about alignment of pointers. */ | |
3761 | /* ??? We don't properly preserve REG_POINTER changes across | |
3762 | pointer-to-integer casts, so we can't trust it except for | |
3763 | things that we know must be pointers. See execute/960116-1.c. */ | |
3764 | if ((x == stack_pointer_rtx | |
3765 | || x == frame_pointer_rtx | |
3766 | || x == arg_pointer_rtx) | |
3767 | && REGNO_POINTER_ALIGN (REGNO (x))) | |
3768 | { | |
3769 | unsigned HOST_WIDE_INT alignment | |
3770 | = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT; | |
3771 | ||
3772 | #ifdef PUSH_ROUNDING | |
3773 | /* If PUSH_ROUNDING is defined, it is possible for the | |
3774 | stack to be momentarily aligned only to that amount, | |
3775 | so we pick the least alignment. */ | |
3776 | if (x == stack_pointer_rtx && PUSH_ARGS) | |
3777 | alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1), | |
3778 | alignment); | |
3779 | #endif | |
3780 | ||
3781 | nonzero &= ~(alignment - 1); | |
3782 | } | |
3783 | ||
3784 | { | |
3785 | unsigned HOST_WIDE_INT nonzero_for_hook = nonzero; | |
55d796da | 3786 | rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x, |
2f93eea8 PB |
3787 | known_mode, known_ret, |
3788 | &nonzero_for_hook); | |
3789 | ||
55d796da KG |
3790 | if (new_rtx) |
3791 | nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x, | |
2f93eea8 PB |
3792 | known_mode, known_ret); |
3793 | ||
3794 | return nonzero_for_hook; | |
3795 | } | |
3796 | ||
3797 | case CONST_INT: | |
3798 | #ifdef SHORT_IMMEDIATES_SIGN_EXTEND | |
3799 | /* If X is negative in MODE, sign-extend the value. */ | |
3800 | if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD | |
3801 | && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1)))) | |
3802 | return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width)); | |
3803 | #endif | |
3804 | ||
3805 | return INTVAL (x); | |
3806 | ||
3807 | case MEM: | |
3808 | #ifdef LOAD_EXTEND_OP | |
3809 | /* On many, if not most, RISC machines, reading a byte from memory |
3810 | zeros the rest of the register. Noticing that fact saves a lot | |
3811 | of extra zero-extends. */ | |
3812 | if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND) | |
3813 | nonzero &= GET_MODE_MASK (GET_MODE (x)); | |
3814 | #endif | |
3815 | break; | |
3816 | ||
3817 | case EQ: case NE: | |
3818 | case UNEQ: case LTGT: | |
3819 | case GT: case GTU: case UNGT: | |
3820 | case LT: case LTU: case UNLT: | |
3821 | case GE: case GEU: case UNGE: | |
3822 | case LE: case LEU: case UNLE: | |
3823 | case UNORDERED: case ORDERED: | |
2f93eea8 PB |
3824 | /* If this produces an integer result, we know which bits are set. |
3825 | Code here used to clear bits outside the mode of X, but that is | |
3826 | now done above. */ | |
505ac507 RH |
3827 | /* Mind that MODE is the mode the caller wants to look at this |
3828 | operation in, and not the actual operation mode. We can wind | |
3829 | up with (subreg:DI (gt:V4HI x y)), and we don't have anything | |
3830 | that describes the results of a vector compare. */ | |
3831 | if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT | |
2f93eea8 PB |
3832 | && mode_width <= HOST_BITS_PER_WIDE_INT) |
3833 | nonzero = STORE_FLAG_VALUE; | |
3834 | break; | |
3835 | ||
3836 | case NEG: | |
3837 | #if 0 | |
3838 | /* Disabled to avoid exponential mutual recursion between nonzero_bits | |
3839 | and num_sign_bit_copies. */ | |
3840 | if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) | |
3841 | == GET_MODE_BITSIZE (GET_MODE (x))) | |
3842 | nonzero = 1; | |
3843 | #endif | |
3844 | ||
3845 | if (GET_MODE_SIZE (GET_MODE (x)) < mode_width) | |
3846 | nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x))); | |
3847 | break; | |
3848 | ||
3849 | case ABS: | |
3850 | #if 0 | |
3851 | /* Disabled to avoid exponential mutual recursion between nonzero_bits | |
3852 | and num_sign_bit_copies. */ | |
3853 | if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) | |
3854 | == GET_MODE_BITSIZE (GET_MODE (x))) | |
3855 | nonzero = 1; | |
3856 | #endif | |
3857 | break; | |
3858 | ||
3859 | case TRUNCATE: | |
3860 | nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode, | |
3861 | known_x, known_mode, known_ret) | |
3862 | & GET_MODE_MASK (mode)); | |
3863 | break; | |
3864 | ||
3865 | case ZERO_EXTEND: | |
3866 | nonzero &= cached_nonzero_bits (XEXP (x, 0), mode, | |
3867 | known_x, known_mode, known_ret); | |
3868 | if (GET_MODE (XEXP (x, 0)) != VOIDmode) | |
3869 | nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); | |
3870 | break; | |
3871 | ||
3872 | case SIGN_EXTEND: | |
3873 | /* If the sign bit is known clear, this is the same as ZERO_EXTEND. | |
3874 | Otherwise, show all the bits in the outer mode but not the inner | |
3875 | may be nonzero. */ | |
3876 | inner_nz = cached_nonzero_bits (XEXP (x, 0), mode, | |
3877 | known_x, known_mode, known_ret); | |
3878 | if (GET_MODE (XEXP (x, 0)) != VOIDmode) | |
3879 | { | |
3880 | inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); | |
3881 | if (inner_nz | |
3882 | & (((HOST_WIDE_INT) 1 | |
3883 | << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))) | |
3884 | inner_nz |= (GET_MODE_MASK (mode) | |
3885 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))); | |
3886 | } | |
3887 | ||
3888 | nonzero &= inner_nz; | |
3889 | break; | |
3890 | ||
3891 | case AND: | |
3892 | nonzero &= cached_nonzero_bits (XEXP (x, 0), mode, | |
3893 | known_x, known_mode, known_ret) | |
3894 | & cached_nonzero_bits (XEXP (x, 1), mode, | |
3895 | known_x, known_mode, known_ret); | |
3896 | break; | |
3897 | ||
3898 | case XOR: case IOR: | |
3899 | case UMIN: case UMAX: case SMIN: case SMAX: | |
3900 | { | |
3901 | unsigned HOST_WIDE_INT nonzero0 = | |
3902 | cached_nonzero_bits (XEXP (x, 0), mode, | |
3903 | known_x, known_mode, known_ret); | |
3904 | ||
3905 | /* Don't call nonzero_bits for the second time if it cannot change | |
3906 | anything. */ | |
3907 | if ((nonzero & nonzero0) != nonzero) | |
3908 | nonzero &= nonzero0 | |
3909 | | cached_nonzero_bits (XEXP (x, 1), mode, | |
3910 | known_x, known_mode, known_ret); | |
3911 | } | |
3912 | break; | |
3913 | ||
3914 | case PLUS: case MINUS: | |
3915 | case MULT: | |
3916 | case DIV: case UDIV: | |
3917 | case MOD: case UMOD: | |
3918 | /* We can apply the rules of arithmetic to compute the number of | |
3919 | high- and low-order zero bits of these operations. We start by | |
3920 | computing the width (position of the highest-order nonzero bit) | |
3921 | and the number of low-order zero bits for each value. */ | |
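	/* Editor's example: if nz0 == 0xff (width 8, no trailing zeros)
	   and nz1 == 0xf (width 4), then for PLUS the result can need at
	   most MAX (8, 4) + 1 == 9 bits, so NONZERO is narrowed to 0x1ff,
	   while for MULT the width is 8 + 4 == 12, giving a mask of 0xfff.  */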
3922 | { | |
3923 | unsigned HOST_WIDE_INT nz0 = | |
3924 | cached_nonzero_bits (XEXP (x, 0), mode, | |
3925 | known_x, known_mode, known_ret); | |
3926 | unsigned HOST_WIDE_INT nz1 = | |
3927 | cached_nonzero_bits (XEXP (x, 1), mode, | |
3928 | known_x, known_mode, known_ret); | |
3929 | int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1; | |
3930 | int width0 = floor_log2 (nz0) + 1; | |
3931 | int width1 = floor_log2 (nz1) + 1; | |
3932 | int low0 = floor_log2 (nz0 & -nz0); | |
3933 | int low1 = floor_log2 (nz1 & -nz1); | |
3934 | HOST_WIDE_INT op0_maybe_minusp | |
3935 | = (nz0 & ((HOST_WIDE_INT) 1 << sign_index)); | |
3936 | HOST_WIDE_INT op1_maybe_minusp | |
3937 | = (nz1 & ((HOST_WIDE_INT) 1 << sign_index)); | |
3938 | unsigned int result_width = mode_width; | |
3939 | int result_low = 0; | |
3940 | ||
3941 | switch (code) | |
3942 | { | |
3943 | case PLUS: | |
3944 | result_width = MAX (width0, width1) + 1; | |
3945 | result_low = MIN (low0, low1); | |
3946 | break; | |
3947 | case MINUS: | |
3948 | result_low = MIN (low0, low1); | |
3949 | break; | |
3950 | case MULT: | |
3951 | result_width = width0 + width1; | |
3952 | result_low = low0 + low1; | |
3953 | break; | |
3954 | case DIV: | |
3955 | if (width1 == 0) | |
3956 | break; | |
3957 | if (! op0_maybe_minusp && ! op1_maybe_minusp) | |
3958 | result_width = width0; | |
3959 | break; | |
3960 | case UDIV: | |
3961 | if (width1 == 0) | |
3962 | break; | |
3963 | result_width = width0; | |
3964 | break; | |
3965 | case MOD: | |
3966 | if (width1 == 0) | |
3967 | break; | |
3968 | if (! op0_maybe_minusp && ! op1_maybe_minusp) | |
3969 | result_width = MIN (width0, width1); | |
3970 | result_low = MIN (low0, low1); | |
3971 | break; | |
3972 | case UMOD: | |
3973 | if (width1 == 0) | |
3974 | break; | |
3975 | result_width = MIN (width0, width1); | |
3976 | result_low = MIN (low0, low1); | |
3977 | break; | |
3978 | default: | |
41374e13 | 3979 | gcc_unreachable (); |
2f93eea8 PB |
3980 | } |
3981 | ||
3982 | if (result_width < mode_width) | |
3983 | nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1; | |
3984 | ||
3985 | if (result_low > 0) | |
3986 | nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1); | |
3987 | ||
3988 | #ifdef POINTERS_EXTEND_UNSIGNED | |
3989 | /* If pointers extend unsigned and this is an addition or subtraction | |
3990 | to a pointer in Pmode, all the bits above ptr_mode are known to be | |
3991 | zero. */ | |
d4ebfa65 BE |
3992 | /* As we do not know which address space the pointer is referring to, |
3993 | we can do this only if the target does not support different pointer | |
3994 | or address modes depending on the address space. */ | |
3995 | if (target_default_pointer_address_modes_p () | |
3996 | && POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode | |
2f93eea8 | 3997 | && (code == PLUS || code == MINUS) |
f8cfc6aa | 3998 | && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0))) |
2f93eea8 PB |
3999 | nonzero &= GET_MODE_MASK (ptr_mode); |
4000 | #endif | |
4001 | } | |
4002 | break; | |
4003 | ||
4004 | case ZERO_EXTRACT: | |
481683e1 | 4005 | if (CONST_INT_P (XEXP (x, 1)) |
2f93eea8 PB |
4006 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) |
4007 | nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1; | |
4008 | break; | |
4009 | ||
4010 | case SUBREG: | |
4011 | /* If this is a SUBREG formed for a promoted variable that has | |
4012 | been zero-extended, we know that at least the high-order bits | |
4013 | are zero, though others might be too. */ | |
4014 | ||
4015 | if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0) | |
4016 | nonzero = GET_MODE_MASK (GET_MODE (x)) | |
4017 | & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x), | |
4018 | known_x, known_mode, known_ret); | |
4019 | ||
4020 | /* If the inner mode is a single word for both the host and target | |
4021 | machines, we can compute this from which bits of the inner | |
4022 | object might be nonzero. */ | |
4023 | if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD | |
4024 | && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) | |
4025 | <= HOST_BITS_PER_WIDE_INT)) | |
4026 | { | |
4027 | nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode, | |
4028 | known_x, known_mode, known_ret); | |
4029 | ||
4030 | #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP) | |
4031 | /* If this is a typical RISC machine, we only have to worry | |
4032 | about the way loads are extended. */ | |
4033 | if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND | |
4034 | ? (((nonzero | |
4035 | & (((unsigned HOST_WIDE_INT) 1 | |
4036 | << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1)))) | |
4037 | != 0)) | |
4038 | : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND) | |
3c0cb5de | 4039 | || !MEM_P (SUBREG_REG (x))) |
2f93eea8 PB |
4040 | #endif |
4041 | { | |
4042 | /* On many CISC machines, accessing an object in a wider mode | |
4043 | causes the high-order bits to become undefined. So they are | |
4044 | not known to be zero. */ | |
4045 | if (GET_MODE_SIZE (GET_MODE (x)) | |
4046 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
4047 | nonzero |= (GET_MODE_MASK (GET_MODE (x)) | |
4048 | & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))); | |
4049 | } | |
4050 | } | |
4051 | break; | |
4052 | ||
4053 | case ASHIFTRT: | |
4054 | case LSHIFTRT: | |
4055 | case ASHIFT: | |
4056 | case ROTATE: | |
4057 | /* The nonzero bits are in two classes: any bits within MODE | |
4058 | that aren't in GET_MODE (x) are always significant. The rest of the | |
4059 | nonzero bits are those that are significant in the operand of | |
4060 | the shift when shifted the appropriate number of bits. This | |
4061 | shows that high-order bits are cleared by the right shift and | |
4062 | low-order bits by left shifts. */ | |
481683e1 | 4063 | if (CONST_INT_P (XEXP (x, 1)) |
2f93eea8 | 4064 | && INTVAL (XEXP (x, 1)) >= 0 |
39b2ac74 JJ |
4065 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT |
4066 | && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x))) | |
2f93eea8 PB |
4067 | { |
4068 | enum machine_mode inner_mode = GET_MODE (x); | |
4069 | unsigned int width = GET_MODE_BITSIZE (inner_mode); | |
4070 | int count = INTVAL (XEXP (x, 1)); | |
4071 | unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode); | |
4072 | unsigned HOST_WIDE_INT op_nonzero = | |
4073 | cached_nonzero_bits (XEXP (x, 0), mode, | |
4074 | known_x, known_mode, known_ret); | |
4075 | unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask; | |
4076 | unsigned HOST_WIDE_INT outer = 0; | |
4077 | ||
4078 | if (mode_width > width) | |
4079 | outer = (op_nonzero & nonzero & ~mode_mask); | |
4080 | ||
4081 | if (code == LSHIFTRT) | |
4082 | inner >>= count; | |
4083 | else if (code == ASHIFTRT) | |
4084 | { | |
4085 | inner >>= count; | |
4086 | ||
4087 | /* If the sign bit may have been nonzero before the shift, we | |
4088 | need to mark all the places it could have been copied to | |
4089 | by the shift as possibly nonzero. */ | |
4090 | if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count))) | |
4091 | inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count); | |
4092 | } | |
4093 | else if (code == ASHIFT) | |
4094 | inner <<= count; | |
4095 | else | |
4096 | inner = ((inner << (count % width) | |
4097 | | (inner >> (width - (count % width)))) & mode_mask); | |
4098 | ||
4099 | nonzero &= (outer | inner); | |
4100 | } | |
4101 | break; | |
4102 | ||
4103 | case FFS: | |
4104 | case POPCOUNT: | |
4105 | /* This is at most the number of bits in the mode. */ | |
4106 | nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1; | |
4107 | break; | |
4108 | ||
4109 | case CLZ: | |
4110 | /* If CLZ has a known value at zero, then the nonzero bits are | |
4111 | that value, plus the number of bits in the mode minus one. */ | |
4112 | if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero)) | |
4113 | nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1; | |
4114 | else | |
4115 | nonzero = -1; | |
4116 | break; | |
4117 | ||
4118 | case CTZ: | |
4119 | /* If CTZ has a known value at zero, then the nonzero bits are | |
4120 | that value, plus the number of bits in the mode minus one. */ | |
4121 | if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero)) | |
4122 | nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1; | |
4123 | else | |
4124 | nonzero = -1; | |
4125 | break; | |
4126 | ||
4127 | case PARITY: | |
4128 | nonzero = 1; | |
4129 | break; | |
4130 | ||
4131 | case IF_THEN_ELSE: | |
4132 | { | |
4133 | unsigned HOST_WIDE_INT nonzero_true = | |
4134 | cached_nonzero_bits (XEXP (x, 1), mode, | |
4135 | known_x, known_mode, known_ret); | |
4136 | ||
4137 | /* Don't call nonzero_bits for the second time if it cannot change | |
4138 | anything. */ | |
4139 | if ((nonzero & nonzero_true) != nonzero) | |
4140 | nonzero &= nonzero_true | |
4141 | | cached_nonzero_bits (XEXP (x, 2), mode, | |
4142 | known_x, known_mode, known_ret); | |
4143 | } | |
4144 | break; | |
4145 | ||
4146 | default: | |
4147 | break; | |
4148 | } | |
4149 | ||
4150 | return nonzero; | |
4151 | } | |
4152 | ||
4153 | /* See the macro definition above. */ | |
4154 | #undef cached_num_sign_bit_copies | |
4155 | ||
4156 | \f | |
4157 | /* The function cached_num_sign_bit_copies is a wrapper around | |
4158 | num_sign_bit_copies1. It avoids exponential behavior in | |
4159 | num_sign_bit_copies1 when X has identical subexpressions on the | |
4160 | first or the second level. */ | |
4161 | ||
4162 | static unsigned int | |
fa233e34 | 4163 | cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x, |
2f93eea8 PB |
4164 | enum machine_mode known_mode, |
4165 | unsigned int known_ret) | |
4166 | { | |
4167 | if (x == known_x && mode == known_mode) | |
4168 | return known_ret; | |
4169 | ||
4170 | /* Try to find identical subexpressions. If found call | |
4171 | num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and | |
4172 | the precomputed value for the subexpression as KNOWN_RET. */ | |
4173 | ||
4174 | if (ARITHMETIC_P (x)) | |
4175 | { | |
4176 | rtx x0 = XEXP (x, 0); | |
4177 | rtx x1 = XEXP (x, 1); | |
4178 | ||
4179 | /* Check the first level. */ | |
4180 | if (x0 == x1) | |
4181 | return | |
4182 | num_sign_bit_copies1 (x, mode, x0, mode, | |
4183 | cached_num_sign_bit_copies (x0, mode, known_x, | |
4184 | known_mode, | |
4185 | known_ret)); | |
4186 | ||
4187 | /* Check the second level. */ | |
4188 | if (ARITHMETIC_P (x0) | |
4189 | && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1))) | |
4190 | return | |
4191 | num_sign_bit_copies1 (x, mode, x1, mode, | |
4192 | cached_num_sign_bit_copies (x1, mode, known_x, | |
4193 | known_mode, | |
4194 | known_ret)); | |
4195 | ||
4196 | if (ARITHMETIC_P (x1) | |
4197 | && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1))) | |
4198 | return | |
4199 | num_sign_bit_copies1 (x, mode, x0, mode, | |
4200 | cached_num_sign_bit_copies (x0, mode, known_x, | |
4201 | known_mode, | |
4202 | known_ret)); | |
4203 | } | |
4204 | ||
4205 | return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret); | |
4206 | } | |
4207 | ||
4208 | /* Return the number of bits at the high-order end of X that are known to | |
4209 | be equal to the sign bit. X will be used in mode MODE; if MODE is | |
4210 | VOIDmode, X will be used in its own mode. The returned value will always | |
4211 | be between 1 and the number of bits in MODE. */ | |
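/* Editor's examples for the CONST_INT case below (assuming SImode and
   bitwidth <= HOST_BITS_PER_WIDE_INT): (const_int -1) has every bit
   equal to the sign bit, so the result is 32; (const_int 3) leaves
   nonzero == 3, so the result is 32 - floor_log2 (3) - 1 == 30.  */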
4212 | ||
4213 | static unsigned int | |
fa233e34 | 4214 | num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, |
2f93eea8 PB |
4215 | enum machine_mode known_mode, |
4216 | unsigned int known_ret) | |
4217 | { | |
4218 | enum rtx_code code = GET_CODE (x); | |
4219 | unsigned int bitwidth = GET_MODE_BITSIZE (mode); | |
4220 | int num0, num1, result; | |
4221 | unsigned HOST_WIDE_INT nonzero; | |
4222 | ||
4223 | /* If we weren't given a mode, use the mode of X. If the mode is still | |
4224 | VOIDmode, we don't know anything. Likewise if one of the modes is | |
4225 | floating-point. */ | |
4226 | ||
4227 | if (mode == VOIDmode) | |
4228 | mode = GET_MODE (x); | |
4229 | ||
ff596cd2 RL |
4230 | if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)) |
4231 | || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode)) | |
2f93eea8 PB |
4232 | return 1; |
4233 | ||
4234 | /* For a smaller object, just ignore the high bits. */ | |
4235 | if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x))) | |
4236 | { | |
4237 | num0 = cached_num_sign_bit_copies (x, GET_MODE (x), | |
4238 | known_x, known_mode, known_ret); | |
4239 | return MAX (1, | |
4240 | num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)); | |
4241 | } | |
4242 | ||
4243 | if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x))) | |
4244 | { | |
4245 | #ifndef WORD_REGISTER_OPERATIONS | |
4246 | /* If this machine does not do all register operations on the entire | |
4247 | register and MODE is wider than the mode of X, we can say nothing | |
4248 | at all about the high-order bits. */ | |
4249 | return 1; | |
4250 | #else | |
4251 | /* Likewise on machines that do, if the mode of the object is smaller | |
4252 | than a word and loads of that size don't sign extend, we can say | |
4253 | nothing about the high order bits. */ | |
4254 | if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD | |
4255 | #ifdef LOAD_EXTEND_OP | |
4256 | && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND | |
4257 | #endif | |
4258 | ) | |
4259 | return 1; | |
4260 | #endif | |
4261 | } | |
4262 | ||
4263 | switch (code) | |
4264 | { | |
4265 | case REG: | |
4266 | ||
4267 | #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) | |
4268 | /* If pointers extend signed and this is a pointer in Pmode, say that | |
4269 | all the bits above ptr_mode are known to be sign bit copies. */ | |
d4ebfa65 BE |
4270 | /* As we do not know which address space the pointer is referring to, |
4271 | we can do this only if the target does not support different pointer | |
4272 | or address modes depending on the address space. */ | |
4273 | if (target_default_pointer_address_modes_p () | |
4274 | && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode | |
4275 | && mode == Pmode && REG_POINTER (x)) | |
2f93eea8 PB |
4276 | return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1; |
4277 | #endif | |
4278 | ||
4279 | { | |
4280 | unsigned int copies_for_hook = 1, copies = 1; | |
55d796da | 4281 | rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x, |
2f93eea8 PB |
4282 | known_mode, known_ret, |
4283 | &copies_for_hook); | |
4284 | ||
55d796da KG |
4285 | if (new_rtx) |
4286 | copies = cached_num_sign_bit_copies (new_rtx, mode, known_x, | |
2f93eea8 PB |
4287 | known_mode, known_ret); |
4288 | ||
4289 | if (copies > 1 || copies_for_hook > 1) | |
4290 | return MAX (copies, copies_for_hook); | |
4291 | ||
4292 | /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */ | |
4293 | } | |
4294 | break; | |
4295 | ||
4296 | case MEM: | |
4297 | #ifdef LOAD_EXTEND_OP | |
4298 | /* Some RISC machines sign-extend all loads of smaller than a word. */ | |
4299 | if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND) | |
4300 | return MAX (1, ((int) bitwidth | |
4301 | - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1)); | |
4302 | #endif | |
4303 | break; | |
4304 | ||
4305 | case CONST_INT: | |
4306 | /* If the constant is negative, take its 1's complement and remask. | |
4307 | Then see how many zero bits we have. */ | |
4308 | nonzero = INTVAL (x) & GET_MODE_MASK (mode); | |
4309 | if (bitwidth <= HOST_BITS_PER_WIDE_INT | |
4310 | && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) | |
4311 | nonzero = (~nonzero) & GET_MODE_MASK (mode); | |
4312 | ||
4313 | return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1); | |
4314 | ||
4315 | case SUBREG: | |
4316 | /* If this is a SUBREG for a promoted object that is sign-extended | |
4317 | and we are looking at it in a wider mode, we know that at least the | |
4318 | high-order bits are known to be sign bit copies. */ | |
4319 | ||
4320 | if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x)) | |
4321 | { | |
4322 | num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode, | |
4323 | known_x, known_mode, known_ret); | |
4324 | return MAX ((int) bitwidth | |
4325 | - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1, | |
4326 | num0); | |
4327 | } | |
4328 | ||
4329 | /* For a smaller object, just ignore the high bits. */ | |
4330 | if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))) | |
4331 | { | |
4332 | num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode, | |
4333 | known_x, known_mode, known_ret); | |
4334 | return MAX (1, (num0 | |
4335 | - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) | |
4336 | - bitwidth))); | |
4337 | } | |
4338 | ||
4339 | #ifdef WORD_REGISTER_OPERATIONS | |
4340 | #ifdef LOAD_EXTEND_OP | |
4341 | /* For paradoxical SUBREGs on machines where all register operations | |
4342 | affect the entire register, just look inside. Note that we are | |
4343 | passing MODE to the recursive call, so the number of sign bit copies | |
4344 | will remain relative to that mode, not the inner mode. */ | |
4345 | ||
4346 | /* This works only if loads sign extend. Otherwise, if we get a | |
4347 | reload for the inner part, it may be loaded from the stack, and | |
4348 | then we lose all sign bit copies that existed before the store | |
4349 | to the stack. */ | |
4350 | ||
4351 | if ((GET_MODE_SIZE (GET_MODE (x)) | |
4352 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
4353 | && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND | |
3c0cb5de | 4354 | && MEM_P (SUBREG_REG (x))) |
2f93eea8 PB |
4355 | return cached_num_sign_bit_copies (SUBREG_REG (x), mode, |
4356 | known_x, known_mode, known_ret); | |
4357 | #endif | |
4358 | #endif | |
4359 | break; | |
4360 | ||
4361 | case SIGN_EXTRACT: | |
481683e1 | 4362 | if (CONST_INT_P (XEXP (x, 1))) |
2f93eea8 PB |
4363 | return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1))); |
4364 | break; | |
4365 | ||
4366 | case SIGN_EXTEND: | |
4367 | return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) | |
4368 | + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode, | |
4369 | known_x, known_mode, known_ret)); | |
4370 | ||
4371 | case TRUNCATE: | |
4372 | /* For a smaller object, just ignore the high bits. */ | |
4373 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode, | |
4374 | known_x, known_mode, known_ret); | |
4375 | return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) | |
4376 | - bitwidth))); | |
4377 | ||
4378 | case NOT: | |
4379 | return cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4380 | known_x, known_mode, known_ret); | |
4381 | ||
4382 | case ROTATE: case ROTATERT: | |
4383 | /* If we are rotating left by a number of bits less than the number | |
4384 | of sign bit copies, we can just subtract that amount from the | |
4385 | number. */ | |
481683e1 | 4386 | if (CONST_INT_P (XEXP (x, 1)) |
2f93eea8 PB |
4387 | && INTVAL (XEXP (x, 1)) >= 0 |
4388 | && INTVAL (XEXP (x, 1)) < (int) bitwidth) | |
4389 | { | |
4390 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4391 | known_x, known_mode, known_ret); | |
4392 | return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1)) | |
4393 | : (int) bitwidth - INTVAL (XEXP (x, 1)))); | |
4394 | } | |
4395 | break; | |
4396 | ||
4397 | case NEG: | |
4398 | /* In general, this subtracts one sign bit copy. But if the value | |
4399 | is known to be positive, the number of sign bit copies is the | |
4400 | same as that of the input. Finally, if the input has just one bit | |
4401 | that might be nonzero, all the bits are copies of the sign bit. */ | |
4402 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4403 | known_x, known_mode, known_ret); | |
4404 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
4405 | return num0 > 1 ? num0 - 1 : 1; | |
4406 | ||
4407 | nonzero = nonzero_bits (XEXP (x, 0), mode); | |
4408 | if (nonzero == 1) | |
4409 | return bitwidth; | |
4410 | ||
4411 | if (num0 > 1 | |
4412 | && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero)) | |
4413 | num0--; | |
4414 | ||
4415 | return num0; | |
4416 | ||
4417 | case IOR: case AND: case XOR: | |
4418 | case SMIN: case SMAX: case UMIN: case UMAX: | |
4419 | /* Logical operations will preserve the number of sign-bit copies. | |
4420 | MIN and MAX operations always return one of the operands. */ | |
4421 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4422 | known_x, known_mode, known_ret); | |
4423 | num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4424 | known_x, known_mode, known_ret); | |
22761ec3 AN |
4425 | |
4426 | /* If num1 is clearing some of the top bits then regardless of | |
4427 | the other term, we are guaranteed to have at least that many | |
4428 | high-order zero bits. */ | |
4429 | if (code == AND | |
4430 | && num1 > 1 | |
4431 | && bitwidth <= HOST_BITS_PER_WIDE_INT | |
481683e1 | 4432 | && CONST_INT_P (XEXP (x, 1)) |
22761ec3 AN |
4433 | && !(INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1)))) |
4434 | return num1; | |
4435 | ||
4436 | /* Similarly for IOR when setting high-order bits. */ | |
4437 | if (code == IOR | |
4438 | && num1 > 1 | |
4439 | && bitwidth <= HOST_BITS_PER_WIDE_INT | |
481683e1 | 4440 | && CONST_INT_P (XEXP (x, 1)) |
22761ec3 AN |
4441 | && (INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1)))) |
4442 | return num1; | |
4443 | ||
2f93eea8 PB |
4444 | return MIN (num0, num1); |
4445 | ||
4446 | case PLUS: case MINUS: | |
4447 | /* For addition and subtraction, we can have a 1-bit carry. However, | |
4448 | if we are subtracting 1 from a positive number, there will not | |
4449 | be such a carry. Furthermore, if the positive number is known to | |
4450 | be 0 or 1, we know the result is either -1 or 0. */ | |
4451 | ||
4452 | if (code == PLUS && XEXP (x, 1) == constm1_rtx | |
4453 | && bitwidth <= HOST_BITS_PER_WIDE_INT) | |
4454 | { | |
4455 | nonzero = nonzero_bits (XEXP (x, 0), mode); | |
4456 | if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0) | |
4457 | return (nonzero == 1 || nonzero == 0 ? bitwidth | |
4458 | : bitwidth - floor_log2 (nonzero) - 1); | |
4459 | } | |
4460 | ||
4461 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4462 | known_x, known_mode, known_ret); | |
4463 | num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4464 | known_x, known_mode, known_ret); | |
4465 | result = MAX (1, MIN (num0, num1) - 1); | |
4466 | ||
4467 | #ifdef POINTERS_EXTEND_UNSIGNED | |
4468 | /* If pointers extend signed and this is an addition or subtraction | |
4469 | to a pointer in Pmode, all the bits above ptr_mode are known to be | |
4470 | sign bit copies. */ | |
d4ebfa65 BE |
4471 | /* As we do not know which address space the pointer is referring to, |
4472 | we can do this only if the target does not support different pointer | |
4473 | or address modes depending on the address space. */ | |
4474 | if (target_default_pointer_address_modes_p () | |
4475 | && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode | |
2f93eea8 | 4476 | && (code == PLUS || code == MINUS) |
f8cfc6aa | 4477 | && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0))) |
2f93eea8 PB |
4478 | result = MAX ((int) (GET_MODE_BITSIZE (Pmode) |
4479 | - GET_MODE_BITSIZE (ptr_mode) + 1), | |
4480 | result); | |
4481 | #endif | |
4482 | return result; | |
4483 | ||
4484 | case MULT: | |
4485 | /* The number of bits of the product is the sum of the number of | |
4486 | bits of both terms. However, unless one of the terms is known |
4487 | to be positive, we must allow for an additional bit since negating | |
4488 | a negative number can remove one sign bit copy. */ | |
4489 | ||
4490 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4491 | known_x, known_mode, known_ret); | |
4492 | num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4493 | known_x, known_mode, known_ret); | |
4494 | ||
4495 | result = bitwidth - (bitwidth - num0) - (bitwidth - num1); | |
4496 | if (result > 0 | |
4497 | && (bitwidth > HOST_BITS_PER_WIDE_INT | |
4498 | || (((nonzero_bits (XEXP (x, 0), mode) | |
4499 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) | |
4500 | && ((nonzero_bits (XEXP (x, 1), mode) | |
4501 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)))) | |
4502 | result--; | |
4503 | ||
4504 | return MAX (1, result); | |
4505 | ||
4506 | case UDIV: | |
4507 | /* The result must be <= the first operand. If the first operand | |
4508 | has the high bit set, we know nothing about the number of sign | |
4509 | bit copies. */ | |
4510 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
4511 | return 1; | |
4512 | else if ((nonzero_bits (XEXP (x, 0), mode) | |
4513 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) | |
4514 | return 1; | |
4515 | else | |
4516 | return cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4517 | known_x, known_mode, known_ret); | |
4518 | ||
4519 | case UMOD: | |
4520 | /* The result must be <= the second operand. */ | |
4521 | return cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4522 | known_x, known_mode, known_ret); | |
4523 | ||
4524 | case DIV: | |
4525 | /* Similar to unsigned division, except that we have to worry about | |
4526 | the case where the divisor is negative, in which case the result | |
4527 | may need one extra bit. */ | |
4528 | result = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4529 | known_x, known_mode, known_ret); | |
4530 | if (result > 1 | |
4531 | && (bitwidth > HOST_BITS_PER_WIDE_INT | |
4532 | || (nonzero_bits (XEXP (x, 1), mode) | |
4533 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) | |
4534 | result--; | |
4535 | ||
4536 | return result; | |
4537 | ||
4538 | case MOD: | |
4539 | result = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4540 | known_x, known_mode, known_ret); | |
4541 | if (result > 1 | |
4542 | && (bitwidth > HOST_BITS_PER_WIDE_INT | |
4543 | || (nonzero_bits (XEXP (x, 1), mode) | |
4544 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) | |
4545 | result--; | |
4546 | ||
4547 | return result; | |
4548 | ||
4549 | case ASHIFTRT: | |
4550 | /* An arithmetic right shift by a constant increases the number of | |
4551 | sign-bit copies by the shift count. */ | |
4552 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4553 | known_x, known_mode, known_ret); | |
481683e1 | 4554 | if (CONST_INT_P (XEXP (x, 1)) |
39b2ac74 JJ |
4555 | && INTVAL (XEXP (x, 1)) > 0 |
4556 | && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x))) | |
2f93eea8 PB |
4557 | num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1))); |
4558 | ||
4559 | return num0; | |
4560 | ||
4561 | case ASHIFT: | |
4562 | /* Left shifts destroy copies. */ | |
481683e1 | 4563 | if (!CONST_INT_P (XEXP (x, 1)) |
2f93eea8 | 4564 | || INTVAL (XEXP (x, 1)) < 0 |
39b2ac74 JJ |
4565 | || INTVAL (XEXP (x, 1)) >= (int) bitwidth |
4566 | || INTVAL (XEXP (x, 1)) >= GET_MODE_BITSIZE (GET_MODE (x))) | |
2f93eea8 PB |
4567 | return 1; |
4568 | ||
4569 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4570 | known_x, known_mode, known_ret); | |
4571 | return MAX (1, num0 - INTVAL (XEXP (x, 1))); | |
4572 | ||
4573 | case IF_THEN_ELSE: | |
4574 | num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4575 | known_x, known_mode, known_ret); | |
4576 | num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode, | |
4577 | known_x, known_mode, known_ret); | |
4578 | return MIN (num0, num1); | |
4579 | ||
4580 | case EQ: case NE: case GE: case GT: case LE: case LT: | |
4581 | case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT: | |
4582 | case GEU: case GTU: case LEU: case LTU: | |
4583 | case UNORDERED: case ORDERED: | |
4584 | /* If the constant is negative, take its 1's complement and remask. | |
4585 | Then see how many zero bits we have. */ | |
4586 | nonzero = STORE_FLAG_VALUE; | |
4587 | if (bitwidth <= HOST_BITS_PER_WIDE_INT | |
4588 | && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) | |
4589 | nonzero = (~nonzero) & GET_MODE_MASK (mode); | |
4590 | ||
4591 | return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1); | |
4592 | ||
4593 | default: | |
4594 | break; | |
4595 | } | |
4596 | ||
4597 | /* If we haven't been able to figure it out by one of the above rules, | |
4598 | see if some of the high-order bits are known to be zero. If so, | |
4599 | count those bits and return one less than that amount. If we can't | |
4600 | safely compute the mask for this mode, always return BITWIDTH. */ | |
4601 | ||
4602 | bitwidth = GET_MODE_BITSIZE (mode); | |
4603 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
4604 | return 1; | |
4605 | ||
4606 | nonzero = nonzero_bits (x, mode); | |
4607 | return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1)) | |
4608 | ? 1 : bitwidth - floor_log2 (nonzero) - 1; | |
4609 | } | |
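A minimal illustration of how the result of this analysis is typically consumed (an editor's sketch, not part of rtlanal.c; fits_in_signed_byte_p is a hypothetical helper that assumes only the public num_sign_bit_copies entry point and the usual rtl.h/machmode.h macros):

/* Hypothetical sketch: a value X that behaves like a signed 8-bit
   quantity in SImode has at least 32 - 8 + 1 = 25 copies of the sign
   bit.  In the MULT case above, two such operands yield
   32 - (32 - 25) - (32 - 25) = 18 copies, minus one more unless an
   operand is known to be nonnegative.  */

static bool
fits_in_signed_byte_p (const_rtx x)
{
  return (num_sign_bit_copies (x, SImode)
	  >= GET_MODE_BITSIZE (SImode) - 8 + 1);
}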
6fd21094 RS |
4610 | |
4611 | /* Calculate the rtx_cost of a single instruction. A return value of | |
4612 | zero indicates an instruction pattern without a known cost. */ | |
4613 | ||
4614 | int | |
f40751dd | 4615 | insn_rtx_cost (rtx pat, bool speed) |
6fd21094 RS |
4616 | { |
4617 | int i, cost; | |
4618 | rtx set; | |
4619 | ||
4620 | /* Extract the single set rtx from the instruction pattern. | |
4621 | We can't use single_set since we only have the pattern. */ | |
4622 | if (GET_CODE (pat) == SET) | |
4623 | set = pat; | |
4624 | else if (GET_CODE (pat) == PARALLEL) | |
4625 | { | |
4626 | set = NULL_RTX; | |
4627 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
4628 | { | |
4629 | rtx x = XVECEXP (pat, 0, i); | |
4630 | if (GET_CODE (x) == SET) | |
4631 | { | |
4632 | if (set) | |
4633 | return 0; | |
4634 | set = x; | |
4635 | } | |
4636 | } | |
4637 | if (!set) | |
4638 | return 0; | |
4639 | } | |
4640 | else | |
4641 | return 0; | |
4642 | ||
f40751dd | 4643 | cost = rtx_cost (SET_SRC (set), SET, speed); |
6fd21094 RS |
4644 | return cost > 0 ? cost : COSTS_N_INSNS (1); |
4645 | } | |
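A minimal sketch of a caller, in the style of the if-conversion passes (editor's illustration, not part of rtlanal.c; insn_cheap_enough_p and max_cost are hypothetical names):

static bool
insn_cheap_enough_p (rtx insn, int max_cost, bool speed)
{
  int cost = insn_rtx_cost (PATTERN (insn), speed);

  /* A return value of zero means no single recognizable SET was found;
     treat that conservatively as too expensive.  */
  return cost != 0 && cost <= max_cost;
}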
75473b02 SB |
4646 | |
4647 | /* Given an insn INSN and condition COND, return the condition in a | |
4648 | canonical form to simplify testing by callers. Specifically: | |
4649 | ||
4650 | (1) The code will always be a comparison operation (EQ, NE, GT, etc.). | |
4651 | (2) Both operands will be machine operands; (cc0) will have been replaced. | |
4652 | (3) If an operand is a constant, it will be the second operand. | |
4653 | (4) (LE x const) will be replaced with (LT x <const+1>) and similarly | |
4654 | for GE, GEU, and LEU. | |
4655 | ||
4656 | If the condition cannot be understood, or is an inequality floating-point | |
4657 | comparison which needs to be reversed, 0 will be returned. | |
4658 | ||
4659 | If REVERSE is nonzero, then reverse the condition prior to canonicalizing it. | |
4660 | ||
4661 | If EARLIEST is nonzero, it is a pointer to a place where the earliest | |
4662 | insn used in locating the condition was found. If a replacement test | |
4663 | of the condition is desired, it should be placed in front of that | |
4664 | insn and we will be sure that the inputs are still valid. | |
4665 | ||
4666 | If WANT_REG is nonzero, we wish the condition to be relative to that | |
4667 | register, if possible. Therefore, do not canonicalize the condition | |
4668 | further. If ALLOW_CC_MODE is nonzero, allow the condition returned | |
4669 | to be a compare to a CC mode register. | |
4670 | ||
4671 | If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST | |
4672 | and at INSN. */ | |
4673 | ||
4674 | rtx | |
4675 | canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest, | |
4676 | rtx want_reg, int allow_cc_mode, int valid_at_insn_p) | |
4677 | { | |
4678 | enum rtx_code code; | |
4679 | rtx prev = insn; | |
f7d504c2 | 4680 | const_rtx set; |
75473b02 SB |
4681 | rtx tem; |
4682 | rtx op0, op1; | |
4683 | int reverse_code = 0; | |
4684 | enum machine_mode mode; | |
569f8d98 | 4685 | basic_block bb = BLOCK_FOR_INSN (insn); |
75473b02 SB |
4686 | |
4687 | code = GET_CODE (cond); | |
4688 | mode = GET_MODE (cond); | |
4689 | op0 = XEXP (cond, 0); | |
4690 | op1 = XEXP (cond, 1); | |
4691 | ||
4692 | if (reverse) | |
4693 | code = reversed_comparison_code (cond, insn); | |
4694 | if (code == UNKNOWN) | |
4695 | return 0; | |
4696 | ||
4697 | if (earliest) | |
4698 | *earliest = insn; | |
4699 | ||
4700 | /* If we are comparing a register with zero, see if the register is set | |
4701 | in the previous insn to a COMPARE or a comparison operation. Perform | |
4702 | the same tests as a function of STORE_FLAG_VALUE as find_comparison_args | |
4703 | in cse.c */ | |
4704 | ||
4705 | while ((GET_RTX_CLASS (code) == RTX_COMPARE | |
4706 | || GET_RTX_CLASS (code) == RTX_COMM_COMPARE) | |
4707 | && op1 == CONST0_RTX (GET_MODE (op0)) | |
4708 | && op0 != want_reg) | |
4709 | { | |
4710 | /* Set nonzero when we find something of interest. */ | |
4711 | rtx x = 0; | |
4712 | ||
4713 | #ifdef HAVE_cc0 | |
4714 | /* If comparison with cc0, import actual comparison from compare | |
4715 | insn. */ | |
4716 | if (op0 == cc0_rtx) | |
4717 | { | |
4718 | if ((prev = prev_nonnote_insn (prev)) == 0 | |
4719 | || !NONJUMP_INSN_P (prev) | |
4720 | || (set = single_set (prev)) == 0 | |
4721 | || SET_DEST (set) != cc0_rtx) | |
4722 | return 0; | |
4723 | ||
4724 | op0 = SET_SRC (set); | |
4725 | op1 = CONST0_RTX (GET_MODE (op0)); | |
4726 | if (earliest) | |
4727 | *earliest = prev; | |
4728 | } | |
4729 | #endif | |
4730 | ||
4731 | /* If this is a COMPARE, pick up the two things being compared. */ | |
4732 | if (GET_CODE (op0) == COMPARE) | |
4733 | { | |
4734 | op1 = XEXP (op0, 1); | |
4735 | op0 = XEXP (op0, 0); | |
4736 | continue; | |
4737 | } | |
4738 | else if (!REG_P (op0)) | |
4739 | break; | |
4740 | ||
4741 | /* Go back to the previous insn. Stop if it is not an INSN. We also | |
4742 | stop if it isn't a single set or if it has a REG_INC note because | |
4743 | we don't want to bother dealing with it. */ | |
4744 | ||
b5b8b0ac AO |
4745 | do |
4746 | prev = prev_nonnote_insn (prev); | |
4747 | while (prev && DEBUG_INSN_P (prev)); | |
4748 | ||
4749 | if (prev == 0 | |
75473b02 | 4750 | || !NONJUMP_INSN_P (prev) |
569f8d98 ZD |
4751 | || FIND_REG_INC_NOTE (prev, NULL_RTX) |
4752 | /* In cfglayout mode, there do not have to be labels at the | |
4753 | beginning of a block, or jumps at the end, so the previous | |
4754 | conditions would not stop us when we reach the bb boundary. */ | |
4755 | || BLOCK_FOR_INSN (prev) != bb) | |
75473b02 SB |
4756 | break; |
4757 | ||
4758 | set = set_of (op0, prev); | |
4759 | ||
4760 | if (set | |
4761 | && (GET_CODE (set) != SET | |
4762 | || !rtx_equal_p (SET_DEST (set), op0))) | |
4763 | break; | |
4764 | ||
4765 | /* If this is setting OP0, get what it sets it to if it looks | |
4766 | relevant. */ | |
4767 | if (set) | |
4768 | { | |
4769 | enum machine_mode inner_mode = GET_MODE (SET_DEST (set)); | |
4770 | #ifdef FLOAT_STORE_FLAG_VALUE | |
4771 | REAL_VALUE_TYPE fsfv; | |
4772 | #endif | |
4773 | ||
4774 | /* ??? We may not combine comparisons done in a CCmode with | |
4775 | comparisons not done in a CCmode. This is to aid targets | |
4776 | like Alpha that have an IEEE compliant EQ instruction, and | |
4777 | a non-IEEE compliant BEQ instruction. The use of CCmode is | |
4778 | actually artificial, simply to prevent the combination, but | |
4779 | should not affect other platforms. | |
4780 | ||
4781 | However, we must allow VOIDmode comparisons to match either | |
4782 | CCmode or non-CCmode comparison, because some ports have | |
4783 | modeless comparisons inside branch patterns. | |
4784 | ||
4785 | ??? This mode check should perhaps look more like the mode check | |
4786 | in simplify_comparison in combine. */ | |
4787 | ||
4788 | if ((GET_CODE (SET_SRC (set)) == COMPARE | |
4789 | || (((code == NE | |
4790 | || (code == LT | |
4791 | && GET_MODE_CLASS (inner_mode) == MODE_INT | |
4792 | && (GET_MODE_BITSIZE (inner_mode) | |
4793 | <= HOST_BITS_PER_WIDE_INT) | |
4794 | && (STORE_FLAG_VALUE | |
4795 | & ((HOST_WIDE_INT) 1 | |
4796 | << (GET_MODE_BITSIZE (inner_mode) - 1)))) | |
4797 | #ifdef FLOAT_STORE_FLAG_VALUE | |
4798 | || (code == LT | |
3d8bf70f | 4799 | && SCALAR_FLOAT_MODE_P (inner_mode) |
75473b02 SB |
4800 | && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode), |
4801 | REAL_VALUE_NEGATIVE (fsfv))) | |
4802 | #endif | |
4803 | )) | |
4804 | && COMPARISON_P (SET_SRC (set)))) | |
4805 | && (((GET_MODE_CLASS (mode) == MODE_CC) | |
4806 | == (GET_MODE_CLASS (inner_mode) == MODE_CC)) | |
4807 | || mode == VOIDmode || inner_mode == VOIDmode)) | |
4808 | x = SET_SRC (set); | |
4809 | else if (((code == EQ | |
4810 | || (code == GE | |
4811 | && (GET_MODE_BITSIZE (inner_mode) | |
4812 | <= HOST_BITS_PER_WIDE_INT) | |
4813 | && GET_MODE_CLASS (inner_mode) == MODE_INT | |
4814 | && (STORE_FLAG_VALUE | |
4815 | & ((HOST_WIDE_INT) 1 | |
4816 | << (GET_MODE_BITSIZE (inner_mode) - 1)))) | |
4817 | #ifdef FLOAT_STORE_FLAG_VALUE | |
4818 | || (code == GE | |
3d8bf70f | 4819 | && SCALAR_FLOAT_MODE_P (inner_mode) |
75473b02 SB |
4820 | && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode), |
4821 | REAL_VALUE_NEGATIVE (fsfv))) | |
4822 | #endif | |
4823 | )) | |
4824 | && COMPARISON_P (SET_SRC (set)) | |
4825 | && (((GET_MODE_CLASS (mode) == MODE_CC) | |
4826 | == (GET_MODE_CLASS (inner_mode) == MODE_CC)) | |
4827 | || mode == VOIDmode || inner_mode == VOIDmode)) | |
4828 | ||
4829 | { | |
4830 | reverse_code = 1; | |
4831 | x = SET_SRC (set); | |
4832 | } | |
4833 | else | |
4834 | break; | |
4835 | } | |
4836 | ||
4837 | else if (reg_set_p (op0, prev)) | |
4838 | /* If this sets OP0, but not directly, we have to give up. */ | |
4839 | break; | |
4840 | ||
4841 | if (x) | |
4842 | { | |
4843 | /* If the caller is expecting the condition to be valid at INSN, | |
4844 | make sure X doesn't change before INSN. */ | |
4845 | if (valid_at_insn_p) | |
4846 | if (modified_in_p (x, prev) || modified_between_p (x, prev, insn)) | |
4847 | break; | |
4848 | if (COMPARISON_P (x)) | |
4849 | code = GET_CODE (x); | |
4850 | if (reverse_code) | |
4851 | { | |
4852 | code = reversed_comparison_code (x, prev); | |
4853 | if (code == UNKNOWN) | |
4854 | return 0; | |
4855 | reverse_code = 0; | |
4856 | } | |
4857 | ||
4858 | op0 = XEXP (x, 0), op1 = XEXP (x, 1); | |
4859 | if (earliest) | |
4860 | *earliest = prev; | |
4861 | } | |
4862 | } | |
4863 | ||
4864 | /* If the constant is first, put it last. */ | |
4865 | if (CONSTANT_P (op0)) | |
4866 | code = swap_condition (code), tem = op0, op0 = op1, op1 = tem; | |
4867 | ||
4868 | /* If OP0 is the result of a comparison, we weren't able to find what | |
4869 | was really being compared, so fail. */ | |
4870 | if (!allow_cc_mode | |
4871 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC) | |
4872 | return 0; | |
4873 | ||
4874 | /* Canonicalize any ordered comparison with integers involving equality | |
4875 | if we can do computations in the relevant mode and we do not | |
4876 | overflow. */ | |
4877 | ||
4878 | if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC | |
481683e1 | 4879 | && CONST_INT_P (op1) |
75473b02 SB |
4880 | && GET_MODE (op0) != VOIDmode |
4881 | && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) | |
4882 | { | |
4883 | HOST_WIDE_INT const_val = INTVAL (op1); | |
4884 | unsigned HOST_WIDE_INT uconst_val = const_val; | |
4885 | unsigned HOST_WIDE_INT max_val | |
4886 | = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0)); | |
4887 | ||
4888 | switch (code) | |
4889 | { | |
4890 | case LE: | |
4891 | if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1) | |
4892 | code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0)); | |
4893 | break; | |
4894 | ||
4895 | /* When cross-compiling, const_val might be sign-extended from | |
4896 | BITS_PER_WORD to HOST_BITS_PER_WIDE_INT. */ | |
4897 | case GE: | |
4898 | if ((HOST_WIDE_INT) (const_val & max_val) | |
4899 | != (((HOST_WIDE_INT) 1 | |
4900 | << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1)))) | |
4901 | code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0)); | |
4902 | break; | |
4903 | ||
4904 | case LEU: | |
4905 | if (uconst_val < max_val) | |
4906 | code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0)); | |
4907 | break; | |
4908 | ||
4909 | case GEU: | |
4910 | if (uconst_val != 0) | |
4911 | code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0)); | |
4912 | break; | |
4913 | ||
4914 | default: | |
4915 | break; | |
4916 | } | |
4917 | } | |
4918 | ||
4919 | /* Never return CC0; return zero instead. */ | |
4920 | if (CC0_P (op0)) | |
4921 | return 0; | |
4922 | ||
4923 | return gen_rtx_fmt_ee (code, VOIDmode, op0, op1); | |
4924 | } | |
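Rule (4) of the comment above deserves a worked example, together with a sketch of a typical call (editor's illustration, not part of rtlanal.c; loop_exit_condition is a hypothetical helper that assumes JUMP is already known to be a conditional jump and, unlike get_condition below, ignores branches taken when the condition is false):

/* Worked example of rule (4): (le (reg:SI i) (const_int 9)) comes back
   as (lt (reg:SI i) (const_int 10)), and (leu x (const_int 9)) as
   (ltu x (const_int 10)); the rewrite is skipped when the constant is
   already the largest signed (resp. unsigned) value of the mode, where
   adding 1 would wrap.  */

static rtx
loop_exit_condition (rtx jump)
{
  rtx cond = XEXP (SET_SRC (pc_set (jump)), 0);

  /* No reversal, no preferred register, no bare CC-mode results, and
     the returned condition must still be valid at JUMP itself.  */
  return canonicalize_condition (jump, cond, 0, NULL, NULL_RTX, 0, 1);
}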
4925 | ||
4926 | /* Given a jump insn JUMP, return the condition that will cause it to branch | |
4927 | to its JUMP_LABEL. If the condition cannot be understood, or is an | |
4928 | inequality floating-point comparison which needs to be reversed, 0 will | |
4929 | be returned. | |
4930 | ||
4931 | If EARLIEST is nonzero, it is a pointer to a place where the earliest | |
4932 | insn used in locating the condition was found. If a replacement test | |
4933 | of the condition is desired, it should be placed in front of that | |
4934 | insn and we will be sure that the inputs are still valid. If EARLIEST | |
4935 | is null, the returned condition will be valid at INSN. | |
4936 | ||
4937 | If ALLOW_CC_MODE is nonzero, allow the condition returned to be a | |
4938 | compare CC mode register. | |
4939 | ||
4940 | VALID_AT_INSN_P is the same as for canonicalize_condition. */ | |
4941 | ||
4942 | rtx | |
4943 | get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p) | |
4944 | { | |
4945 | rtx cond; | |
4946 | int reverse; | |
4947 | rtx set; | |
4948 | ||
4949 | /* If this is not a standard conditional jump, we can't parse it. */ | |
4950 | if (!JUMP_P (jump) | |
4951 | || ! any_condjump_p (jump)) | |
4952 | return 0; | |
4953 | set = pc_set (jump); | |
4954 | ||
4955 | cond = XEXP (SET_SRC (set), 0); | |
4956 | ||
4957 | /* If this branches to JUMP_LABEL when the condition is false, reverse | |
4958 | the condition. */ | |
4959 | reverse | |
4960 | = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF | |
4961 | && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump); | |
4962 | ||
4963 | return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX, | |
4964 | allow_cc_mode, valid_at_insn_p); | |
4965 | } | |
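A minimal sketch of how a pass might consume get_condition (editor's illustration, not part of rtlanal.c; branch_tests_register_p is a hypothetical name):

static bool
branch_tests_register_p (rtx jump, rtx reg)
{
  rtx earliest;
  rtx cond = get_condition (jump, &earliest, 0, 0);

  /* Zero means JUMP is not a simple conditional branch or its condition
     could not be canonicalized.  */
  if (cond == 0)
    return false;

  /* Canonicalization guarantees that a constant can only be the second
     operand, so a tested register can only appear as operand 0.  */
  return rtx_equal_p (XEXP (cond, 0), reg);
}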
4966 | ||
b12cbf2c AN |
4967 | /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on |
4968 | TARGET_MODE_REP_EXTENDED. | |
4969 | ||
4970 | Note that we assume that the property of | |
4971 | TARGET_MODE_REP_EXTENDED(B, C) also applies to the integral modes | |
4972 | narrower than mode B. I.e., if A is a mode narrower than B then in | |
4973 | order to be able to operate on it in mode B, mode A needs to | |
4974 | satisfy the requirements set by the representation of mode B. */ | |
4975 | ||
4976 | static void | |
4977 | init_num_sign_bit_copies_in_rep (void) | |
4978 | { | |
4979 | enum machine_mode mode, in_mode; | |
4980 | ||
4981 | for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode; | |
4982 | in_mode = GET_MODE_WIDER_MODE (mode)) | |
4983 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode; | |
4984 | mode = GET_MODE_WIDER_MODE (mode)) | |
4985 | { | |
4986 | enum machine_mode i; | |
4987 | ||
4988 | /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED | |
4989 | extends to the next widest mode. */ | |
4990 | gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN | |
4991 | || GET_MODE_WIDER_MODE (mode) == in_mode); | |
4992 | ||
4993 | /* We are in in_mode. Count how many bits outside of mode | |
4994 | have to be copies of the sign-bit. */ | |
4995 | for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i)) | |
4996 | { | |
4997 | enum machine_mode wider = GET_MODE_WIDER_MODE (i); | |
4998 | ||
4999 | if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND | |
5000 | /* We can only check sign-bit copies starting from the | |
5001 | top-bit. In order to be able to check the bits we | |
5002 | have already seen we pretend that subsequent bits | |
5003 | have to be sign-bit copies too. */ | |
5004 | || num_sign_bit_copies_in_rep [in_mode][mode]) | |
5005 | num_sign_bit_copies_in_rep [in_mode][mode] | |
5006 | += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i); | |
5007 | } | |
5008 | } | |
5009 | } | |
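A worked example of what the table ends up holding (editor's note, not part of the file; the target hook configuration is hypothetical):

/* Hypothetical example: if the target's mode_rep_extended hook returns
   SIGN_EXTEND for (QImode, HImode), the inner loop first adds
   16 - 8 = 8 for the QImode->HImode step and then, because the entry is
   already nonzero, another 32 - 16 = 16 for HImode->SImode, so
   num_sign_bit_copies_in_rep[SImode][QImode] == 24: a QImode value held
   in an SImode register carries 24 redundant copies of its sign bit.  */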
5010 | ||
d3b72690 PB |
5011 | /* Suppose that truncation from the machine mode of X to MODE is not a |
5012 | no-op. See if there is anything special about X so that we can | |
5013 | assume it already contains a truncated value of MODE. */ | |
5014 | ||
5015 | bool | |
fa233e34 | 5016 | truncated_to_mode (enum machine_mode mode, const_rtx x) |
d3b72690 | 5017 | { |
b12cbf2c AN |
5018 | /* This register has already been used in MODE without explicit |
5019 | truncation. */ | |
5020 | if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x)) | |
5021 | return true; | |
5022 | ||
5023 | /* See if we already satisfy the requirements of MODE. If yes we | |
5024 | can just switch to MODE. */ | |
5025 | if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode] | |
5026 | && (num_sign_bit_copies (x, GET_MODE (x)) | |
5027 | >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1)) | |
5028 | return true; | |
d3b72690 | 5029 | |
b12cbf2c AN |
5030 | return false; |
5031 | } | |
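As a sketch of the intended use (editor's illustration, not part of rtlanal.c; the helper name is hypothetical), a caller deciding whether an explicit truncation must be emitted might combine this predicate with TRULY_NOOP_TRUNCATION:

static bool
no_explicit_truncation_needed_p (enum machine_mode mode, rtx x)
{
  /* Truncation is free on this target, e.g. reading the low part of
     the wider register.  */
  if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
			     GET_MODE_BITSIZE (GET_MODE (x))))
    return true;

  /* Otherwise fall back on the analysis above: X may already satisfy
     the representation requirements of MODE.  */
  return truncated_to_mode (mode, x);
}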
cf94b0fc PB |
5032 | \f |
5033 | /* Initialize non_rtx_starting_operands, which is used to speed up | |
5034 | for_each_rtx. */ | |
5035 | void | |
5036 | init_rtlanal (void) | |
5037 | { | |
5038 | int i; | |
5039 | for (i = 0; i < NUM_RTX_CODE; i++) | |
5040 | { | |
5041 | const char *format = GET_RTX_FORMAT (i); | |
5042 | const char *first = strpbrk (format, "eEV"); | |
5043 | non_rtx_starting_operands[i] = first ? first - format : -1; | |
5044 | } | |
b12cbf2c AN |
5045 | |
5046 | init_num_sign_bit_copies_in_rep (); | |
cf94b0fc | 5047 | } |
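A brief worked example of what this initialization produces (editor's note, not part of the file):

/* Worked example: PLUS has format "ee", so strpbrk finds an 'e' at
   offset 0 and non_rtx_starting_operands[PLUS] becomes 0.  CONST_INT
   has format "w", strpbrk returns NULL, and the entry becomes -1,
   which lets for_each_rtx skip the operand scan for such codes.  */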
3d8504ac RS |
5048 | \f |
5049 | /* Check whether this is a constant pool constant. */ | |
5050 | bool | |
5051 | constant_pool_constant_p (rtx x) | |
5052 | { | |
5053 | x = avoid_constant_pool_reference (x); | |
5054 | return GET_CODE (x) == CONST_DOUBLE; | |
5055 | } | |
842e098c AN |
5056 | \f |
5057 | /* If M is a bitmask that selects a field of low-order bits within an item but | |
5058 | not the entire word, return the length of the field. Return -1 otherwise. | |
5059 | M is used in machine mode MODE. */ | |
5060 | ||
5061 | int | |
5062 | low_bitmask_len (enum machine_mode mode, unsigned HOST_WIDE_INT m) | |
5063 | { | |
5064 | if (mode != VOIDmode) | |
5065 | { | |
5066 | if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT) | |
5067 | return -1; | |
5068 | m &= GET_MODE_MASK (mode); | |
5069 | } | |
5070 | ||
5071 | return exact_log2 (m + 1); | |
5072 | } |
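Two worked cases of the arithmetic (editor's note, not part of the file):

/* Worked example: low_bitmask_len (SImode, 0x3f) == exact_log2 (0x40)
   == 6, i.e. the mask selects a 6-bit low-order field, whereas
   low_bitmask_len (SImode, 0x30) == exact_log2 (0x31) == -1 because the
   selected bits do not start at bit 0.  */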