/* Analyze RTL for GNU compiler.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "function.h"
#include "df.h"
#include "tree.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
#include "addresses.h"
#include "rtl-iter.h"

/* Forward declarations */
static void set_of_1 (rtx, const_rtx, void *);
static bool covers_regno_p (const_rtx, unsigned int);
static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
static int computed_jump_p_1 (const_rtx);
static void parms_set (rtx, const_rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
                                                   const_rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
                                             const_rtx, enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
                                          enum machine_mode, unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

rtx_subrtx_bound_info rtx_all_subrtx_bounds[NUM_RTX_CODE];
rtx_subrtx_bound_info rtx_nonconst_subrtx_bounds[NUM_RTX_CODE];

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
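
/* A worked reading of the table (illustrative; the concrete modes and
   widths depend on the target): if TARGET_MODE_REP_EXTENDED (HImode,
   SImode) is SIGN_EXTEND, an SImode value can be narrowed to HImode by
   a plain mode switch only when enough of its high-order bits are
   already copies of the sign bit; num_sign_bit_copies_in_rep[SImode]
   [HImode] says how many such copies are required, and callers compare
   that against num_sign_bit_copies of the value being truncated.  */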
\f
/* Store X into index I of ARRAY.  ARRAY is known to have at least I
   elements.  Return the new base of ARRAY.  */

template <typename T>
typename T::value_type *
generic_subrtx_iterator <T>::add_single_to_queue (array_type &array,
                                                  value_type *base,
                                                  size_t i, value_type x)
{
  if (base == array.stack)
    {
      if (i < LOCAL_ELEMS)
        {
          base[i] = x;
          return base;
        }
      gcc_checking_assert (i == LOCAL_ELEMS);
      vec_safe_grow (array.heap, i + 1);
      base = array.heap->address ();
      memcpy (base, array.stack, sizeof (array.stack));
      base[LOCAL_ELEMS] = x;
      return base;
    }
  unsigned int length = array.heap->length ();
  if (length > i)
    {
      gcc_checking_assert (base == array.heap->address ());
      base[i] = x;
      return base;
    }
  else
    {
      gcc_checking_assert (i == length);
      vec_safe_push (array.heap, x);
      return array.heap->address ();
    }
}

/* Add the subrtxes of X to worklist ARRAY, starting at END.  Return the
   number of elements added to the worklist.  */

template <typename T>
size_t
generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array,
                                                    value_type *base,
                                                    size_t end, rtx_type x)
{
  const char *format = GET_RTX_FORMAT (GET_CODE (x));
  size_t orig_end = end;
  for (int i = 0; format[i]; ++i)
    if (format[i] == 'e')
      {
        value_type subx = T::get_value (x->u.fld[i].rt_rtx);
        if (__builtin_expect (end < LOCAL_ELEMS, true))
          base[end++] = subx;
        else
          base = add_single_to_queue (array, base, end++, subx);
      }
    else if (format[i] == 'E')
      {
        int length = GET_NUM_ELEM (x->u.fld[i].rt_rtvec);
        rtx *vec = x->u.fld[i].rt_rtvec->elem;
        if (__builtin_expect (end + length <= LOCAL_ELEMS, true))
          for (int j = 0; j < length; j++)
            base[end++] = T::get_value (vec[j]);
        else
          for (int j = 0; j < length; j++)
            base = add_single_to_queue (array, base, end++,
                                        T::get_value (vec[j]));
      }
  return end - orig_end;
}

template <typename T>
void
generic_subrtx_iterator <T>::free_array (array_type &array)
{
  vec_free (array.heap);
}

template <typename T>
const size_t generic_subrtx_iterator <T>::LOCAL_ELEMS;

template class generic_subrtx_iterator <const_rtx_accessor>;
template class generic_subrtx_iterator <rtx_var_accessor>;
template class generic_subrtx_iterator <rtx_ptr_accessor>;

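/* A minimal usage sketch for the iterators instantiated above (this is
   the same pattern find_all_hard_regs uses later in this file):

     static int
     count_reg_subrtxes (const_rtx x)
     {
       int n = 0;
       subrtx_iterator::array_type array;
       FOR_EACH_SUBRTX (iter, array, x, NONCONST)
         if (REG_P (*iter))
           n++;
       return n;
     }

   count_reg_subrtxes is a hypothetical example function; NONCONST tells
   the iterator not to descend into subexpressions that are known to be
   constant.  */
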
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (const_rtx x)
{
  const RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
        return 0;
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

bool
rtx_varies_p (const_rtx x, bool for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}
\f
/* Return nonzero if the use of X+OFFSET as an address in a MEM with SIZE
   bytes can cause a trap.  MODE is the mode of the MEM (not that of X) and
   UNALIGNED_MEMS controls whether nonzero is returned for unaligned memory
   references on strict alignment machines.  */

static int
rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
                       enum machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  /* The offset must be a multiple of the mode size if we are considering
     unaligned memory references on strict alignment machines.  */
  if (STRICT_ALIGNMENT && unaligned_mems && GET_MODE_SIZE (mode) != 0)
    {
      HOST_WIDE_INT actual_offset = offset;

#ifdef SPARC_STACK_BOUNDARY_HACK
      /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
         the real alignment of %sp.  However, when it does this, the
         alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
      if (SPARC_STACK_BOUNDARY_HACK
          && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
        actual_offset -= STACK_POINTER_OFFSET;
#endif

      if (actual_offset % GET_MODE_SIZE (mode) != 0)
        return 1;
    }

  switch (code)
    {
    case SYMBOL_REF:
      if (SYMBOL_REF_WEAK (x))
        return 1;
      if (!CONSTANT_POOL_ADDRESS_P (x))
        {
          tree decl;
          HOST_WIDE_INT decl_size;

          if (offset < 0)
            return 1;
          if (size == 0)
            size = GET_MODE_SIZE (mode);
          if (size == 0)
            return offset != 0;

          /* If the size of the access or of the symbol is unknown,
             assume the worst.  */
          decl = SYMBOL_REF_DECL (x);

          /* Else check that the access is in bounds.  TODO: restructure
             expr_size/tree_expr_size/int_expr_size and just use the latter.  */
          if (!decl)
            decl_size = -1;
          else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
            decl_size = (tree_fits_shwi_p (DECL_SIZE_UNIT (decl))
                         ? tree_to_shwi (DECL_SIZE_UNIT (decl))
                         : -1);
          else if (TREE_CODE (decl) == STRING_CST)
            decl_size = TREE_STRING_LENGTH (decl);
          else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
            decl_size = int_size_in_bytes (TREE_TYPE (decl));
          else
            decl_size = -1;

          return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
        }

      return 0;

    case LABEL_REF:
      return 0;

    case REG:
      /* Stack references are assumed not to trap, but we need to deal with
         nonsensical offsets.  */
      if (x == frame_pointer_rtx)
        {
          HOST_WIDE_INT adj_offset = offset - STARTING_FRAME_OFFSET;
          if (size == 0)
            size = GET_MODE_SIZE (mode);
          if (FRAME_GROWS_DOWNWARD)
            {
              if (adj_offset < frame_offset || adj_offset + size - 1 >= 0)
                return 1;
            }
          else
            {
              if (adj_offset < 0 || adj_offset + size - 1 >= frame_offset)
                return 1;
            }
          return 0;
        }
      /* ??? Need to add a similar guard for nonsensical offsets.  */
      if (x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
        return 0;

      /* - or it is an address that can't trap plus a constant integer.  */
      if (CONST_INT_P (XEXP (x, 1))
          && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
                                     size, mode, unaligned_mems))
        return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
                                    mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (const_rtx x)
{
  return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
}

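/* Example (illustrative): for a SImode MEM whose address is
   (plus (symbol_ref "buf") (const_int 4)), rtx_addr_can_trap_p_1 asks
   whether bytes 4..7 fall inside the declared size of "buf"; a weak
   SYMBOL_REF is always assumed to trap, since it may resolve to a null
   address at link time.  */
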
/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (const_rtx x)
{
  const enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      /* Handle PIC references.  */
      if (XEXP (x, 0) == pic_offset_table_rtx
          && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}

/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

bool
rtx_addr_varies_p (const_rtx x, bool for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}
\f
/* Return the CALL in X if there is one.  */

rtx
get_call_rtx_from (rtx x)
{
  if (INSN_P (x))
    x = PATTERN (x);
  if (GET_CODE (x) == PARALLEL)
    x = XVECEXP (x, 0, 0);
  if (GET_CODE (x) == SET)
    x = SET_SRC (x);
  if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
    return x;
  return NULL_RTX;
}
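
/* A sketch of the shapes this unwraps (illustrative): the pattern may
   be a bare (call ...), a (set (reg) (call ...)), or a PARALLEL whose
   first element is one of those two.  For example

     (call_insn (set (reg:SI 0)
                     (call (mem:QI (symbol_ref "foo")) (const_int 0))))

   yields the inner (call ...) rtx; any other shape yields NULL_RTX.  */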
\f
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (const_rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && CONST_INT_P (XEXP (x, 1)))
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (const_rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  return 0;
}
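
/* Worked example: for X = (const (plus (symbol_ref "tbl") (const_int 8))),
   get_integer_term returns 8 and get_related_value returns the
   (symbol_ref "tbl") operand, so together they split a relocatable
   constant into base and displacement.  For a MINUS the term is
   negated: (const (minus (symbol_ref "tbl") (const_int 8))) gives -8.  */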
\f
/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
   to somewhere in the same object or object_block as SYMBOL.  */

bool
offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
{
  tree decl;

  if (GET_CODE (symbol) != SYMBOL_REF)
    return false;

  if (offset == 0)
    return true;

  if (offset > 0)
    {
      if (CONSTANT_POOL_ADDRESS_P (symbol)
          && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
        return true;

      decl = SYMBOL_REF_DECL (symbol);
      if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
        return true;
    }

  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
      && SYMBOL_REF_BLOCK (symbol)
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
    return true;

  return false;
}

/* Split X into a base and a constant offset, storing them in *BASE_OUT
   and *OFFSET_OUT respectively.  */

void
split_const (rtx x, rtx *base_out, rtx *offset_out)
{
  if (GET_CODE (x) == CONST)
    {
      x = XEXP (x, 0);
      if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
        {
          *base_out = XEXP (x, 0);
          *offset_out = XEXP (x, 1);
          return;
        }
    }
  *base_out = x;
  *offset_out = const0_rtx;
}
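
/* Usage sketch (illustrative): a caller comparing two addresses modulo
   a constant displacement might write

     rtx base, offset;
     split_const (x, &base, &offset);

   after which BASE is X itself and OFFSET is const0_rtx unless X had
   the form (const (plus BASE (const_int N))).  Unlike get_related_value,
   this never produces a null base.  */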
\f
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (const_rtx x, const_rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case EXPR_LIST:
      count = count_occurrences (XEXP (x, 0), find, count_dest);
      if (XEXP (x, 1))
        count += count_occurrences (XEXP (x, 1), find, count_dest);
      return count;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}

\f
/* Return TRUE if OP is a register or subreg of a register that
   holds an unsigned quantity.  Otherwise, return FALSE.  */

bool
unsigned_reg_p (rtx op)
{
  if (REG_P (op)
      && REG_EXPR (op)
      && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op))))
    return true;

  if (GET_CODE (op) == SUBREG
      && SUBREG_PROMOTED_SIGN (op))
    return true;

  return false;
}

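/* For example (illustrative): a pseudo created for a C `unsigned int'
   variable has a REG_EXPR of unsigned type, so unsigned_reg_p returns
   true for it, as it does for a SUBREG whose promotion is recorded as
   unsigned.  */
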
\f
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (const_rtx reg, const_rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == LABEL_REF_LABEL (in);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    CASE_CONST_ANY:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }
  return 0;
}
\f
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (const rtx_insn *beg, const rtx_insn *end)
{
  rtx_insn *p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (const_rtx reg, const rtx_insn *from_insn,
                    const rtx_insn *to_insn)
{
  rtx_insn *insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}
\f
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (const_rtx x, const_rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}
\f
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (const_rtx reg, const rtx_insn *from_insn,
                   const rtx_insn *to_insn)
{
  const rtx_insn *insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (const_rtx reg, const_rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && overlaps_hard_reg_set_p (regs_invalidated_by_call,
                                               GET_MODE (reg), REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (const_rtx x, const rtx_insn *start, const rtx_insn *end)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx_insn *insn;

  if (start == end)
    return 0;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;
      break;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (const_rtx x, const_rtx insn)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;
      break;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}
\f
/* Helper function for set_of.  */
struct set_of_data
  {
    const_rtx found;
    const_rtx pat;
  };

static void
set_of_1 (rtx x, const_rtx pat, void *data1)
{
  struct set_of_data *const data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
const_rtx
set_of (const_rtx pat, const_rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}
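
/* Usage sketch (illustrative): set_of answers "which SET or CLOBBER in
   INSN writes any part of PAT?".  A caller might write

     const_rtx setter = set_of (reg, insn);
     if (setter != NULL_RTX && GET_CODE (setter) == CLOBBER)
       ...

   reg_set_p above is essentially set_of plus extra checks for call
   clobbers and REG_INC side effects.  */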

/* Add all hard registers in X to *PSET.  */
void
find_all_hard_regs (const_rtx x, HARD_REG_SET *pset)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
        add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
    }
}

/* This function, called through note_stores, collects sets and
   clobbers of hard registers in a HARD_REG_SET, which is pointed to
   by DATA.  */
void
record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  HARD_REG_SET *pset = (HARD_REG_SET *)data;
  if (REG_P (x) && HARD_REGISTER_P (x))
    add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
}

/* Examine INSN, and compute the set of hard registers written by it.
   Store it in *PSET.  Should only be called after reload.  */
void
find_all_hard_reg_sets (const_rtx insn, HARD_REG_SET *pset, bool implicit)
{
  rtx link;

  CLEAR_HARD_REG_SET (*pset);
  note_stores (PATTERN (insn), record_hard_reg_sets, pset);
  if (CALL_P (insn))
    {
      if (implicit)
        IOR_HARD_REG_SET (*pset, call_used_reg_set);

      for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
        record_hard_reg_sets (XEXP (link, 0), NULL, pset);
    }
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      record_hard_reg_sets (XEXP (link, 0), NULL, pset);
}

/* Like record_hard_reg_sets, but called through note_uses.  */
void
record_hard_reg_uses (rtx *px, void *data)
{
  find_all_hard_regs (*px, (HARD_REG_SET *) data);
}
\f
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (const rtx_insn *insn, const_rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common case
                 only a single set is present in the pattern, so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach a set the first time, we just expect it is
                 the single set we are looking for; only when more
                 sets are found in the insn do we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
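
/* Worked example (illustrative): for an insn whose pattern is

     (parallel [(set (reg:SI 100) (plus:SI (reg:SI 1) (reg:SI 2)))
                (clobber (reg:CC 17))])

   single_set_2 returns the inner SET; the CLOBBER is ignored.  A second
   SET is tolerated only when a REG_UNUSED note proves its destination
   dead and it has no side effects; otherwise NULL_RTX is returned.  */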

/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (const_rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}
\f
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (const_rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  /* It is a NOOP if destination overlaps with selected src vector
     elements.  */
  if (GET_CODE (src) == VEC_SELECT
      && REG_P (XEXP (src, 0)) && REG_P (dst)
      && HARD_REGISTER_P (XEXP (src, 0))
      && HARD_REGISTER_P (dst))
    {
      int i;
      rtx par = XEXP (src, 1);
      rtx src0 = XEXP (src, 0);
      int c0 = INTVAL (XVECEXP (par, 0, 0));
      HOST_WIDE_INT offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0;

      for (i = 1; i < XVECLEN (par, 0); i++)
        if (INTVAL (XVECEXP (par, 0, i)) != c0 + i)
          return 0;
      return
        simplify_subreg_regno (REGNO (src0), GET_MODE (src0),
                               offset, GET_MODE (dst)) == (int) REGNO (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
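
/* For instance (illustrative): (set (reg:SI 3) (reg:SI 3)) is a no-op,
   as is a MEM-to-MEM copy through two rtx_equal_p addresses free of
   volatility and auto-increment; (set (reg:SI 3) (reg:SI 4)) is not.  */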
\f
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (const_rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* Check the code to be executed for COND_EXEC.  */
  if (GET_CODE (pat) == COND_EXEC)
    pat = COND_EXEC_CODE (pat);

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}
\f

/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note are always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return endregno > x_regno && regno < END_REGNO (x);

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? subreg_nregs (x) : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }
  return 0;
}

/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (const_rtx x, const_rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN; we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? subreg_nregs (x) : 1);
      goto do_reg;

    case REG:
      regno = REGNO (x);
      endregno = END_REGNO (x);
    do_reg:
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e')
            {
              if (reg_overlap_mentioned_p (x, XEXP (in, i)))
                return 1;
            }
          else if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
                  return 1;
            }

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}
\f
2c88418c | 1566 | /* Call FUN on each register or MEM that is stored into or clobbered by X. |
c3a1ef9d MM |
1567 | (X would be the pattern of an insn). DATA is an arbitrary pointer, |
1568 | ignored by note_stores, but passed to FUN. | |
1569 | ||
1570 | FUN receives three arguments: | |
1571 | 1. the REG, MEM, CC0 or PC being stored in or clobbered, | |
1572 | 2. the SET or CLOBBER rtx that does the store, | |
1573 | 3. the pointer DATA provided to note_stores. | |
2c88418c RS |
1574 | |
1575 | If the item being stored in or clobbered is a SUBREG of a hard register, | |
1576 | the SUBREG will be passed. */ | |
a6a2274a | 1577 | |
2c88418c | 1578 | void |
7bc980e1 | 1579 | note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data) |
2c88418c | 1580 | { |
aa317c97 | 1581 | int i; |
90d036a0 | 1582 | |
aa317c97 KG |
1583 | if (GET_CODE (x) == COND_EXEC) |
1584 | x = COND_EXEC_CODE (x); | |
90d036a0 | 1585 | |
aa317c97 KG |
1586 | if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER) |
1587 | { | |
1588 | rtx dest = SET_DEST (x); | |
1589 | ||
1590 | while ((GET_CODE (dest) == SUBREG | |
1591 | && (!REG_P (SUBREG_REG (dest)) | |
1592 | || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER)) | |
1593 | || GET_CODE (dest) == ZERO_EXTRACT | |
1594 | || GET_CODE (dest) == STRICT_LOW_PART) | |
1595 | dest = XEXP (dest, 0); | |
1596 | ||
1597 | /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions, | |
1598 | each of whose first operand is a register. */ | |
1599 | if (GET_CODE (dest) == PARALLEL) | |
1600 | { | |
1601 | for (i = XVECLEN (dest, 0) - 1; i >= 0; i--) | |
1602 | if (XEXP (XVECEXP (dest, 0, i), 0) != 0) | |
1603 | (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data); | |
1604 | } | |
1605 | else | |
1606 | (*fun) (dest, x, data); | |
1607 | } | |
770ae6cc | 1608 | |
aa317c97 KG |
1609 | else if (GET_CODE (x) == PARALLEL) |
1610 | for (i = XVECLEN (x, 0) - 1; i >= 0; i--) | |
1611 | note_stores (XVECEXP (x, 0, i), fun, data); | |
1612 | } | |
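
/* Usage sketch (hypothetical helper, not in the original file):
   collect the hard registers set or clobbered by an insn pattern.
   The callback has the signature note_stores expects; DATA points to
   a caller-owned HARD_REG_SET.  */

static void
record_hard_reg_stores (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
                        void *data)
{
  HARD_REG_SET *set = (HARD_REG_SET *) data;

  /* Per the comment above, a SUBREG of a hard register is passed
     unchanged, so strip it here.  */
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set (set, GET_MODE (dest), REGNO (dest));
}

/* A caller would then do:
     note_stores (PATTERN (insn), record_hard_reg_stores, &set);  */
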
2c88418c | 1613 | \f |
e2373f95 RK |
1614 | /* Like note_stores, but call FUN for each expression that is being | |
1615 | referenced in PBODY, a pointer to the PATTERN of an insn. We only call | |
1616 | FUN for each expression, not any interior subexpressions. FUN receives a | |
1617 | pointer to the expression and the DATA passed to this function. | |
1618 | ||
1619 | Note that this is not quite the same test as that done in reg_referenced_p | |
1620 | since that considers something as being referenced if it is being | |
1621 | partially set, while we do not. */ | |
1622 | ||
1623 | void | |
0c20a65f | 1624 | note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data) |
e2373f95 RK |
1625 | { |
1626 | rtx body = *pbody; | |
1627 | int i; | |
1628 | ||
1629 | switch (GET_CODE (body)) | |
1630 | { | |
1631 | case COND_EXEC: | |
1632 | (*fun) (&COND_EXEC_TEST (body), data); | |
1633 | note_uses (&COND_EXEC_CODE (body), fun, data); | |
1634 | return; | |
1635 | ||
1636 | case PARALLEL: | |
1637 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
1638 | note_uses (&XVECEXP (body, 0, i), fun, data); | |
1639 | return; | |
1640 | ||
bbbc206e BS |
1641 | case SEQUENCE: |
1642 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
1643 | note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data); | |
1644 | return; | |
1645 | ||
e2373f95 RK |
1646 | case USE: |
1647 | (*fun) (&XEXP (body, 0), data); | |
1648 | return; | |
1649 | ||
1650 | case ASM_OPERANDS: | |
1651 | for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--) | |
1652 | (*fun) (&ASM_OPERANDS_INPUT (body, i), data); | |
1653 | return; | |
1654 | ||
1655 | case TRAP_IF: | |
1656 | (*fun) (&TRAP_CONDITION (body), data); | |
1657 | return; | |
1658 | ||
21b8482a JJ |
1659 | case PREFETCH: |
1660 | (*fun) (&XEXP (body, 0), data); | |
1661 | return; | |
1662 | ||
e2373f95 RK |
1663 | case UNSPEC: |
1664 | case UNSPEC_VOLATILE: | |
1665 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
1666 | (*fun) (&XVECEXP (body, 0, i), data); | |
1667 | return; | |
1668 | ||
1669 | case CLOBBER: | |
3c0cb5de | 1670 | if (MEM_P (XEXP (body, 0))) |
e2373f95 RK |
1671 | (*fun) (&XEXP (XEXP (body, 0), 0), data); |
1672 | return; | |
1673 | ||
1674 | case SET: | |
1675 | { | |
1676 | rtx dest = SET_DEST (body); | |
1677 | ||
1678 | /* For sets we process everything in the source, plus the registers | |
1679 | in the memory expression of the store and the operands of a ZERO_EXTRACT. */ | |
1680 | (*fun) (&SET_SRC (body), data); | |
1681 | ||
1682 | if (GET_CODE (dest) == ZERO_EXTRACT) | |
1683 | { | |
1684 | (*fun) (&XEXP (dest, 1), data); | |
1685 | (*fun) (&XEXP (dest, 2), data); | |
1686 | } | |
1687 | ||
1688 | while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART) | |
1689 | dest = XEXP (dest, 0); | |
1690 | ||
3c0cb5de | 1691 | if (MEM_P (dest)) |
e2373f95 RK |
1692 | (*fun) (&XEXP (dest, 0), data); |
1693 | } | |
1694 | return; | |
1695 | ||
1696 | default: | |
1697 | /* All the other possibilities never store. */ | |
1698 | (*fun) (pbody, data); | |
1699 | return; | |
1700 | } | |
1701 | } | |
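
/* Sketch of a note_uses client (hypothetical, for illustration):
   count how many of the use expressions in an insn body mention REG.
   note_uses passes each top-level use expression by pointer, so a
   callback may also rewrite *PX in place if it wishes.  */

struct reg_use_count { rtx reg; int n; };

static void
count_reg_uses (rtx *px, void *data)
{
  struct reg_use_count *ruc = (struct reg_use_count *) data;
  if (reg_mentioned_p (ruc->reg, *px))
    ruc->n++;
}

/* Caller:  note_uses (&PATTERN (insn), count_reg_uses, &ruc);  */
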
1702 | \f | |
2c88418c RS |
1703 | /* Return nonzero if X's old contents don't survive after INSN. |
1704 | This will be true if X is (cc0) or if X is a register and | |
1705 | X dies in INSN or because INSN entirely sets X. | |
1706 | ||
46d096a3 SB |
1707 | "Entirely set" means set directly and not through a SUBREG, or |
1708 | ZERO_EXTRACT, so no trace of the old contents remains. | |
2c88418c RS |
1709 | Likewise, REG_INC does not count. |
1710 | ||
1711 | REG may be a hard or pseudo reg. Renumbering is not taken into account, | |
1712 | but for this use that makes no difference, since regs don't overlap | |
1713 | during their lifetimes. Therefore, this function may be used | |
6fb5fa3c | 1714 | at any time after deaths have been computed. |
2c88418c RS |
1715 | |
1716 | If REG is a hard reg that occupies multiple machine registers, this | |
1717 | function will only return 1 if each of those registers will be replaced | |
1718 | by INSN. */ | |
1719 | ||
1720 | int | |
f7d504c2 | 1721 | dead_or_set_p (const_rtx insn, const_rtx x) |
2c88418c | 1722 | { |
09e18274 | 1723 | unsigned int regno, end_regno; |
770ae6cc | 1724 | unsigned int i; |
2c88418c RS |
1725 | |
1726 | /* Can't use cc0_rtx below since this file is used by genattrtab.c. */ | |
1727 | if (GET_CODE (x) == CC0) | |
1728 | return 1; | |
1729 | ||
41374e13 | 1730 | gcc_assert (REG_P (x)); |
2c88418c RS |
1731 | |
1732 | regno = REGNO (x); | |
09e18274 RS |
1733 | end_regno = END_REGNO (x); |
1734 | for (i = regno; i < end_regno; i++) | |
2c88418c RS |
1735 | if (! dead_or_set_regno_p (insn, i)) |
1736 | return 0; | |
1737 | ||
1738 | return 1; | |
1739 | } | |
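
/* Minimal sketch (hypothetical helper): a peephole-style
   transformation may overwrite REG after INSN only if the old value
   is no longer needed, which is exactly what dead_or_set_p tests.  */

static bool
reg_reusable_after_p (const_rtx insn, rtx reg)
{
  return REG_P (reg) && dead_or_set_p (insn, reg);
}
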
1740 | ||
194acded HPN |
1741 | /* Return TRUE iff DEST is a register or subreg of a register and |
1742 | doesn't change the number of words of the inner register, and any | |
1743 | part of the register is TEST_REGNO. */ | |
1744 | ||
1745 | static bool | |
f7d504c2 | 1746 | covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno) |
194acded HPN |
1747 | { |
1748 | unsigned int regno, endregno; | |
1749 | ||
1750 | if (GET_CODE (dest) == SUBREG | |
1751 | && (((GET_MODE_SIZE (GET_MODE (dest)) | |
1752 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD) | |
1753 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) | |
1754 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD))) | |
1755 | dest = SUBREG_REG (dest); | |
1756 | ||
1757 | if (!REG_P (dest)) | |
1758 | return false; | |
1759 | ||
1760 | regno = REGNO (dest); | |
09e18274 | 1761 | endregno = END_REGNO (dest); |
194acded HPN |
1762 | return (test_regno >= regno && test_regno < endregno); |
1763 | } | |
1764 | ||
1765 | /* Like covers_regno_no_parallel_p, but also handles PARALLELs where | |
1766 | any member matches the covers_regno_no_parallel_p criteria. */ | |
1767 | ||
1768 | static bool | |
f7d504c2 | 1769 | covers_regno_p (const_rtx dest, unsigned int test_regno) |
194acded HPN |
1770 | { |
1771 | if (GET_CODE (dest) == PARALLEL) | |
1772 | { | |
1773 | /* Some targets place small structures in registers for return | |
1774 | values of functions, and those registers are wrapped in | |
1775 | PARALLELs that we may see as the destination of a SET. */ | |
1776 | int i; | |
1777 | ||
1778 | for (i = XVECLEN (dest, 0) - 1; i >= 0; i--) | |
1779 | { | |
1780 | rtx inner = XEXP (XVECEXP (dest, 0, i), 0); | |
1781 | if (inner != NULL_RTX | |
1782 | && covers_regno_no_parallel_p (inner, test_regno)) | |
1783 | return true; | |
1784 | } | |
1785 | ||
1786 | return false; | |
1787 | } | |
1788 | else | |
1789 | return covers_regno_no_parallel_p (dest, test_regno); | |
1790 | } | |
1791 | ||
6fb5fa3c | 1792 | /* Utility function for dead_or_set_p to check an individual register. */ |
2c88418c RS |
1793 | |
1794 | int | |
f7d504c2 | 1795 | dead_or_set_regno_p (const_rtx insn, unsigned int test_regno) |
2c88418c | 1796 | { |
f7d504c2 | 1797 | const_rtx pattern; |
2c88418c | 1798 | |
0a2287bf RH |
1799 | /* See if there is a death note for something that includes TEST_REGNO. */ |
1800 | if (find_regno_note (insn, REG_DEAD, test_regno)) | |
1801 | return 1; | |
2c88418c | 1802 | |
4b4bf941 | 1803 | if (CALL_P (insn) |
8f3e7a26 RK |
1804 | && find_regno_fusage (insn, CLOBBER, test_regno)) |
1805 | return 1; | |
1806 | ||
0c99ec5c RH |
1807 | pattern = PATTERN (insn); |
1808 | ||
10439b59 | 1809 | /* If a COND_EXEC is not executed, the value survives. */ |
0c99ec5c | 1810 | if (GET_CODE (pattern) == COND_EXEC) |
10439b59 | 1811 | return 0; |
0c99ec5c RH |
1812 | |
1813 | if (GET_CODE (pattern) == SET) | |
194acded | 1814 | return covers_regno_p (SET_DEST (pattern), test_regno); |
0c99ec5c | 1815 | else if (GET_CODE (pattern) == PARALLEL) |
2c88418c | 1816 | { |
b3694847 | 1817 | int i; |
2c88418c | 1818 | |
0c99ec5c | 1819 | for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--) |
2c88418c | 1820 | { |
0c99ec5c RH |
1821 | rtx body = XVECEXP (pattern, 0, i); |
1822 | ||
1823 | if (GET_CODE (body) == COND_EXEC) | |
1824 | body = COND_EXEC_CODE (body); | |
2c88418c | 1825 | |
194acded HPN |
1826 | if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER) |
1827 | && covers_regno_p (SET_DEST (body), test_regno)) | |
1828 | return 1; | |
2c88418c RS |
1829 | } |
1830 | } | |
1831 | ||
1832 | return 0; | |
1833 | } | |
1834 | ||
1835 | /* Return the reg-note of kind KIND in insn INSN, if there is one. | |
1836 | If DATUM is nonzero, look for one whose datum is DATUM. */ | |
1837 | ||
1838 | rtx | |
f7d504c2 | 1839 | find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum) |
2c88418c | 1840 | { |
b3694847 | 1841 | rtx link; |
2c88418c | 1842 | |
7a40b8b1 | 1843 | gcc_checking_assert (insn); |
af082de3 | 1844 | |
ae78d276 | 1845 | /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */ |
2c3c49de | 1846 | if (! INSN_P (insn)) |
ae78d276 | 1847 | return 0; |
cd798543 AP |
1848 | if (datum == 0) |
1849 | { | |
1850 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
1851 | if (REG_NOTE_KIND (link) == kind) | |
1852 | return link; | |
1853 | return 0; | |
1854 | } | |
ae78d276 | 1855 | |
2c88418c | 1856 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
cd798543 | 1857 | if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0)) |
2c88418c RS |
1858 | return link; |
1859 | return 0; | |
1860 | } | |
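
/* Sketch, assuming the usual note conventions: fetch the value an
   insn is known to compute, preferring the stronger REG_EQUIV note
   over REG_EQUAL.  Passing NULL_RTX as the datum accepts any datum.  */

static rtx
known_equivalent_value (const_rtx insn)
{
  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
  if (note == NULL_RTX)
    note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
  return note ? XEXP (note, 0) : NULL_RTX;
}
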
1861 | ||
1862 | /* Return the reg-note of kind KIND in insn INSN which applies to register | |
99309f3b RK |
1863 | number REGNO, if any. Return 0 if there is no such reg-note. Note that |
1864 | the REGNO of this NOTE need not be REGNO if REGNO is a hard register; | |
1865 | it might be the case that the note overlaps REGNO. */ | |
2c88418c RS |
1866 | |
1867 | rtx | |
f7d504c2 | 1868 | find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno) |
2c88418c | 1869 | { |
b3694847 | 1870 | rtx link; |
2c88418c | 1871 | |
ae78d276 | 1872 | /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */ |
2c3c49de | 1873 | if (! INSN_P (insn)) |
ae78d276 MM |
1874 | return 0; |
1875 | ||
2c88418c RS |
1876 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
1877 | if (REG_NOTE_KIND (link) == kind | |
1878 | /* Verify that it is a register, so that scratch and MEM won't cause a | |
1879 | problem here. */ | |
f8cfc6aa | 1880 | && REG_P (XEXP (link, 0)) |
99309f3b | 1881 | && REGNO (XEXP (link, 0)) <= regno |
09e18274 | 1882 | && END_REGNO (XEXP (link, 0)) > regno) |
2c88418c RS |
1883 | return link; |
1884 | return 0; | |
1885 | } | |
8f3e7a26 | 1886 | |
d9c695ff RK |
1887 | /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and |
1888 | has such a note. */ | |
1889 | ||
1890 | rtx | |
f7d504c2 | 1891 | find_reg_equal_equiv_note (const_rtx insn) |
d9c695ff | 1892 | { |
cd648cec | 1893 | rtx link; |
d9c695ff | 1894 | |
cd648cec | 1895 | if (!INSN_P (insn)) |
d9c695ff | 1896 | return 0; |
ea8f106d | 1897 | |
cd648cec JH |
1898 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
1899 | if (REG_NOTE_KIND (link) == REG_EQUAL | |
1900 | || REG_NOTE_KIND (link) == REG_EQUIV) | |
1901 | { | |
ea8f106d SB |
1902 | /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on |
1903 | insns that have multiple sets. Checking single_set to | |
1904 | make sure of this is not the proper check, as explained | |
1905 | in the comment in set_unique_reg_note. | |
1906 | ||
1907 | This should be changed into an assert. */ | |
1908 | if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn)) | |
cd648cec JH |
1909 | return 0; |
1910 | return link; | |
1911 | } | |
1912 | return NULL; | |
d9c695ff RK |
1913 | } |
1914 | ||
2a450639 RS |
1915 | /* Check whether INSN is a single_set whose source is known to be |
1916 | equivalent to a constant. Return that constant if so, otherwise | |
1917 | return null. */ | |
1918 | ||
1919 | rtx | |
68a1a6c0 | 1920 | find_constant_src (const rtx_insn *insn) |
2a450639 RS |
1921 | { |
1922 | rtx note, set, x; | |
1923 | ||
1924 | set = single_set (insn); | |
1925 | if (set) | |
1926 | { | |
1927 | x = avoid_constant_pool_reference (SET_SRC (set)); | |
1928 | if (CONSTANT_P (x)) | |
1929 | return x; | |
1930 | } | |
1931 | ||
1932 | note = find_reg_equal_equiv_note (insn); | |
1933 | if (note && CONSTANT_P (XEXP (note, 0))) | |
1934 | return XEXP (note, 0); | |
1935 | ||
1936 | return NULL_RTX; | |
1937 | } | |
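
/* Illustrative sketch: a constant-propagation client of
   find_constant_src.  DEF_INSN is assumed to be the single
   definition of some register, found by the caller (e.g. through
   the DF framework).  */

static bool
def_is_const_int_p (const rtx_insn *def_insn, HOST_WIDE_INT *val)
{
  rtx cst = find_constant_src (def_insn);
  if (cst != NULL_RTX && CONST_INT_P (cst))
    {
      *val = INTVAL (cst);
      return true;
    }
  return false;
}
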
1938 | ||
8f3e7a26 RK |
1939 | /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found |
1940 | in the CALL_INSN_FUNCTION_USAGE information of INSN. */ | |
1941 | ||
1942 | int | |
f7d504c2 | 1943 | find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum) |
8f3e7a26 RK |
1944 | { |
1945 | /* If it's not a CALL_INSN, it can't possibly have a | |
1946 | CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */ | |
4b4bf941 | 1947 | if (!CALL_P (insn)) |
8f3e7a26 RK |
1948 | return 0; |
1949 | ||
41374e13 | 1950 | gcc_assert (datum); |
8f3e7a26 | 1951 | |
f8cfc6aa | 1952 | if (!REG_P (datum)) |
8f3e7a26 | 1953 | { |
b3694847 | 1954 | rtx link; |
8f3e7a26 RK |
1955 | |
1956 | for (link = CALL_INSN_FUNCTION_USAGE (insn); | |
a6a2274a | 1957 | link; |
8f3e7a26 | 1958 | link = XEXP (link, 1)) |
a6a2274a | 1959 | if (GET_CODE (XEXP (link, 0)) == code |
cc863bea | 1960 | && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0))) |
a6a2274a | 1961 | return 1; |
8f3e7a26 RK |
1962 | } |
1963 | else | |
1964 | { | |
770ae6cc | 1965 | unsigned int regno = REGNO (datum); |
8f3e7a26 RK |
1966 | |
1967 | /* CALL_INSN_FUNCTION_USAGE information cannot contain references | |
1968 | to pseudo registers, so don't bother checking. */ | |
1969 | ||
1970 | if (regno < FIRST_PSEUDO_REGISTER) | |
a6a2274a | 1971 | { |
09e18274 | 1972 | unsigned int end_regno = END_HARD_REGNO (datum); |
770ae6cc | 1973 | unsigned int i; |
8f3e7a26 RK |
1974 | |
1975 | for (i = regno; i < end_regno; i++) | |
1976 | if (find_regno_fusage (insn, code, i)) | |
1977 | return 1; | |
a6a2274a | 1978 | } |
8f3e7a26 RK |
1979 | } |
1980 | ||
1981 | return 0; | |
1982 | } | |
1983 | ||
1984 | /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found | |
1985 | in the CALL_INSN_FUNCTION_USAGE information of INSN. */ | |
1986 | ||
1987 | int | |
f7d504c2 | 1988 | find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno) |
8f3e7a26 | 1989 | { |
b3694847 | 1990 | rtx link; |
8f3e7a26 RK |
1991 | |
1992 | /* CALL_INSN_FUNCTION_USAGE information cannot contain references | |
1993 | to pseudo registers, so don't bother checking. */ | |
1994 | ||
1995 | if (regno >= FIRST_PSEUDO_REGISTER | |
4b4bf941 | 1996 | || !CALL_P (insn) ) |
8f3e7a26 RK |
1997 | return 0; |
1998 | ||
1999 | for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1)) | |
83ab3839 | 2000 | { |
770ae6cc | 2001 | rtx op, reg; |
83ab3839 RH |
2002 | |
2003 | if (GET_CODE (op = XEXP (link, 0)) == code | |
f8cfc6aa | 2004 | && REG_P (reg = XEXP (op, 0)) |
09e18274 RS |
2005 | && REGNO (reg) <= regno |
2006 | && END_HARD_REGNO (reg) > regno) | |
83ab3839 RH |
2007 | return 1; |
2008 | } | |
8f3e7a26 RK |
2009 | |
2010 | return 0; | |
2011 | } | |
a6a063b8 | 2012 | |
2c88418c | 2013 | \f |
e5af9ddd RS |
2014 | /* Return true if KIND is an integer REG_NOTE. */ |
2015 | ||
2016 | static bool | |
2017 | int_reg_note_p (enum reg_note kind) | |
2018 | { | |
2019 | return kind == REG_BR_PROB; | |
2020 | } | |
2021 | ||
efc0b2bd ILT |
2022 | /* Allocate a register note with kind KIND and datum DATUM. LIST is |
2023 | stored as the pointer to the next register note. */ | |
65c5f2a6 | 2024 | |
efc0b2bd ILT |
2025 | rtx |
2026 | alloc_reg_note (enum reg_note kind, rtx datum, rtx list) | |
65c5f2a6 ILT |
2027 | { |
2028 | rtx note; | |
2029 | ||
e5af9ddd | 2030 | gcc_checking_assert (!int_reg_note_p (kind)); |
65c5f2a6 ILT |
2031 | switch (kind) |
2032 | { | |
2033 | case REG_CC_SETTER: | |
2034 | case REG_CC_USER: | |
2035 | case REG_LABEL_TARGET: | |
2036 | case REG_LABEL_OPERAND: | |
0a35513e | 2037 | case REG_TM: |
65c5f2a6 ILT |
2038 | /* These types of register notes use an INSN_LIST rather than an |
2039 | EXPR_LIST, so that copying is done right and dumps look | |
2040 | better. */ | |
efc0b2bd | 2041 | note = alloc_INSN_LIST (datum, list); |
65c5f2a6 ILT |
2042 | PUT_REG_NOTE_KIND (note, kind); |
2043 | break; | |
2044 | ||
2045 | default: | |
efc0b2bd | 2046 | note = alloc_EXPR_LIST (kind, datum, list); |
65c5f2a6 ILT |
2047 | break; |
2048 | } | |
2049 | ||
efc0b2bd ILT |
2050 | return note; |
2051 | } | |
2052 | ||
2053 | /* Add register note with kind KIND and datum DATUM to INSN. */ | |
2054 | ||
2055 | void | |
2056 | add_reg_note (rtx insn, enum reg_note kind, rtx datum) | |
2057 | { | |
2058 | REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn)); | |
65c5f2a6 ILT |
2059 | } |
2060 | ||
e5af9ddd RS |
2061 | /* Add an integer register note with kind KIND and datum DATUM to INSN. */ |
2062 | ||
2063 | void | |
2064 | add_int_reg_note (rtx insn, enum reg_note kind, int datum) | |
2065 | { | |
2066 | gcc_checking_assert (int_reg_note_p (kind)); | |
2067 | REG_NOTES (insn) = gen_rtx_INT_LIST ((enum machine_mode) kind, | |
2068 | datum, REG_NOTES (insn)); | |
2069 | } | |
2070 | ||
2071 | /* Add a register note like NOTE to INSN. */ | |
2072 | ||
2073 | void | |
2074 | add_shallow_copy_of_reg_note (rtx insn, rtx note) | |
2075 | { | |
2076 | if (GET_CODE (note) == INT_LIST) | |
2077 | add_int_reg_note (insn, REG_NOTE_KIND (note), XINT (note, 0)); | |
2078 | else | |
2079 | add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0)); | |
2080 | } | |
2081 | ||
2c88418c RS |
2082 | /* Remove register note NOTE from the REG_NOTES of INSN. */ |
2083 | ||
2084 | void | |
f7d504c2 | 2085 | remove_note (rtx insn, const_rtx note) |
2c88418c | 2086 | { |
b3694847 | 2087 | rtx link; |
2c88418c | 2088 | |
49c3bb12 RH |
2089 | if (note == NULL_RTX) |
2090 | return; | |
2091 | ||
2c88418c | 2092 | if (REG_NOTES (insn) == note) |
6fb5fa3c DB |
2093 | REG_NOTES (insn) = XEXP (note, 1); |
2094 | else | |
2095 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
2096 | if (XEXP (link, 1) == note) | |
2097 | { | |
2098 | XEXP (link, 1) = XEXP (note, 1); | |
2099 | break; | |
2100 | } | |
2101 | ||
2102 | switch (REG_NOTE_KIND (note)) | |
2c88418c | 2103 | { |
6fb5fa3c DB |
2104 | case REG_EQUAL: |
2105 | case REG_EQUIV: | |
b2908ba6 | 2106 | df_notes_rescan (as_a <rtx_insn *> (insn)); |
6fb5fa3c DB |
2107 | break; |
2108 | default: | |
2109 | break; | |
2c88418c | 2110 | } |
2c88418c | 2111 | } |
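
/* Sketch combining the note primitives above (hypothetical helper):
   install a fresh REG_EQUAL note, discarding any stale one first.
   remove_note accepts a null note, so no separate existence check is
   needed.  */

static void
reset_reg_equal_note (rtx insn, rtx value)
{
  remove_note (insn, find_reg_note (insn, REG_EQUAL, NULL_RTX));
  add_reg_note (insn, REG_EQUAL, value);
}
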
55a98783 | 2112 | |
7cd689bc SB |
2113 | /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. */ |
2114 | ||
2115 | void | |
2116 | remove_reg_equal_equiv_notes (rtx insn) | |
2117 | { | |
2118 | rtx *loc; | |
2119 | ||
2120 | loc = ®_NOTES (insn); | |
2121 | while (*loc) | |
2122 | { | |
2123 | enum reg_note kind = REG_NOTE_KIND (*loc); | |
2124 | if (kind == REG_EQUAL || kind == REG_EQUIV) | |
2125 | *loc = XEXP (*loc, 1); | |
2126 | else | |
2127 | loc = &XEXP (*loc, 1); | |
2128 | } | |
2129 | } | |
885c9b5d EB |
2130 | |
2131 | /* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO. */ | |
2132 | ||
2133 | void | |
2134 | remove_reg_equal_equiv_notes_for_regno (unsigned int regno) | |
2135 | { | |
2136 | df_ref eq_use; | |
2137 | ||
2138 | if (!df) | |
2139 | return; | |
2140 | ||
2141 | /* This loop is a little tricky. We cannot just go down the chain because | |
2142 | it is being modified by some actions in the loop. So we just iterate | |
2143 | over the head. We plan to drain the list anyway. */ | |
2144 | while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL) | |
2145 | { | |
1bbbc4a3 | 2146 | rtx_insn *insn = DF_REF_INSN (eq_use); |
885c9b5d EB |
2147 | rtx note = find_reg_equal_equiv_note (insn); |
2148 | ||
2149 | /* This assert is generally triggered when someone deletes a REG_EQUAL | |
2150 | or REG_EQUIV note by hacking the list manually rather than calling | |
2151 | remove_note. */ | |
2152 | gcc_assert (note); | |
2153 | ||
2154 | remove_note (insn, note); | |
2155 | } | |
2156 | } | |
7cd689bc | 2157 | |
5f0d2358 RK |
2158 | /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and |
2159 | return 1 if it is found. A simple equality test is used to determine if | |
2160 | NODE matches. */ | |
2161 | ||
2162 | int | |
f7d504c2 | 2163 | in_expr_list_p (const_rtx listp, const_rtx node) |
5f0d2358 | 2164 | { |
f7d504c2 | 2165 | const_rtx x; |
5f0d2358 RK |
2166 | |
2167 | for (x = listp; x; x = XEXP (x, 1)) | |
2168 | if (node == XEXP (x, 0)) | |
2169 | return 1; | |
2170 | ||
2171 | return 0; | |
2172 | } | |
2173 | ||
dd248abd RK |
2174 | /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and |
2175 | remove that entry from the list if it is found. | |
55a98783 | 2176 | |
dd248abd | 2177 | A simple equality test is used to determine if NODE matches. */ |
55a98783 JL |
2178 | |
2179 | void | |
2382940b | 2180 | remove_node_from_expr_list (const_rtx node, rtx_expr_list **listp) |
55a98783 | 2181 | { |
2382940b | 2182 | rtx_expr_list *temp = *listp; |
55a98783 JL |
2183 | rtx prev = NULL_RTX; |
2184 | ||
2185 | while (temp) | |
2186 | { | |
2382940b | 2187 | if (node == temp->element ()) |
55a98783 JL |
2188 | { |
2189 | /* Splice the node out of the list. */ | |
2190 | if (prev) | |
2382940b | 2191 | XEXP (prev, 1) = temp->next (); |
55a98783 | 2192 | else |
2382940b | 2193 | *listp = temp->next (); |
55a98783 JL |
2194 | |
2195 | return; | |
2196 | } | |
dd248abd RK |
2197 | |
2198 | prev = temp; | |
2382940b | 2199 | temp = temp->next (); |
55a98783 JL |
2200 | } |
2201 | } | |
b5241a5a DM |
2202 | |
2203 | /* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and | |
2204 | remove that entry from the list if it is found. | |
2205 | ||
2206 | A simple equality test is used to determine if NODE matches. */ | |
2207 | ||
2208 | void | |
2209 | remove_node_from_insn_list (const rtx_insn *node, rtx_insn_list **listp) | |
2210 | { | |
2211 | rtx_insn_list *temp = *listp; | |
2212 | rtx prev = NULL; | |
2213 | ||
2214 | while (temp) | |
2215 | { | |
2216 | if (node == temp->insn ()) | |
2217 | { | |
2218 | /* Splice the node out of the list. */ | |
2219 | if (prev) | |
2220 | XEXP (prev, 1) = temp->next (); | |
2221 | else | |
2222 | *listp = temp->next (); | |
2223 | ||
2224 | return; | |
2225 | } | |
2226 | ||
2227 | prev = temp; | |
2228 | temp = temp->next (); | |
2229 | } | |
2230 | } | |
2c88418c | 2231 | \f |
2b067faf RS |
2232 | /* Nonzero if X contains any volatile instructions. These are instructions |
2233 | which may cause unpredictable machine state, and thus no | |
adddc347 HPN |
2234 | instructions or register uses should be moved or combined across them. |
2235 | This includes only volatile asms and UNSPEC_VOLATILE instructions. */ | |
2b067faf RS |
2236 | |
2237 | int | |
f7d504c2 | 2238 | volatile_insn_p (const_rtx x) |
2b067faf | 2239 | { |
f7d504c2 | 2240 | const RTX_CODE code = GET_CODE (x); |
2b067faf RS |
2241 | switch (code) |
2242 | { | |
2243 | case LABEL_REF: | |
2244 | case SYMBOL_REF: | |
2b067faf | 2245 | case CONST: |
d8116890 | 2246 | CASE_CONST_ANY: |
2b067faf RS |
2247 | case CC0: |
2248 | case PC: | |
2249 | case REG: | |
2250 | case SCRATCH: | |
2251 | case CLOBBER: | |
2b067faf RS |
2252 | case ADDR_VEC: |
2253 | case ADDR_DIFF_VEC: | |
2254 | case CALL: | |
2255 | case MEM: | |
2256 | return 0; | |
2257 | ||
2258 | case UNSPEC_VOLATILE: | |
2b067faf RS |
2259 | return 1; |
2260 | ||
4c46ea23 | 2261 | case ASM_INPUT: |
2b067faf RS |
2262 | case ASM_OPERANDS: |
2263 | if (MEM_VOLATILE_P (x)) | |
2264 | return 1; | |
e9a25f70 JL |
2265 | |
2266 | default: | |
2267 | break; | |
2b067faf RS |
2268 | } |
2269 | ||
2270 | /* Recursively scan the operands of this expression. */ | |
2271 | ||
2272 | { | |
f7d504c2 | 2273 | const char *const fmt = GET_RTX_FORMAT (code); |
b3694847 | 2274 | int i; |
a6a2274a | 2275 | |
2b067faf RS |
2276 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
2277 | { | |
2278 | if (fmt[i] == 'e') | |
2279 | { | |
31001f72 | 2280 | if (volatile_insn_p (XEXP (x, i))) |
2b067faf RS |
2281 | return 1; |
2282 | } | |
d4757e6a | 2283 | else if (fmt[i] == 'E') |
2b067faf | 2284 | { |
b3694847 | 2285 | int j; |
2b067faf | 2286 | for (j = 0; j < XVECLEN (x, i); j++) |
31001f72 | 2287 | if (volatile_insn_p (XVECEXP (x, i, j))) |
2b067faf RS |
2288 | return 1; |
2289 | } | |
2290 | } | |
2291 | } | |
2292 | return 0; | |
2293 | } | |
2294 | ||
2c88418c | 2295 | /* Nonzero if X contains any volatile memory references |
2ac4fed0 | 2296 | UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */ |
2c88418c RS |
2297 | |
2298 | int | |
f7d504c2 | 2299 | volatile_refs_p (const_rtx x) |
2c88418c | 2300 | { |
f7d504c2 | 2301 | const RTX_CODE code = GET_CODE (x); |
2c88418c RS |
2302 | switch (code) |
2303 | { | |
2304 | case LABEL_REF: | |
2305 | case SYMBOL_REF: | |
2c88418c | 2306 | case CONST: |
d8116890 | 2307 | CASE_CONST_ANY: |
2c88418c RS |
2308 | case CC0: |
2309 | case PC: | |
2310 | case REG: | |
2311 | case SCRATCH: | |
2312 | case CLOBBER: | |
2c88418c RS |
2313 | case ADDR_VEC: |
2314 | case ADDR_DIFF_VEC: | |
2315 | return 0; | |
2316 | ||
2ac4fed0 | 2317 | case UNSPEC_VOLATILE: |
2c88418c RS |
2318 | return 1; |
2319 | ||
2320 | case MEM: | |
4c46ea23 | 2321 | case ASM_INPUT: |
2c88418c RS |
2322 | case ASM_OPERANDS: |
2323 | if (MEM_VOLATILE_P (x)) | |
2324 | return 1; | |
e9a25f70 JL |
2325 | |
2326 | default: | |
2327 | break; | |
2c88418c RS |
2328 | } |
2329 | ||
2330 | /* Recursively scan the operands of this expression. */ | |
2331 | ||
2332 | { | |
f7d504c2 | 2333 | const char *const fmt = GET_RTX_FORMAT (code); |
b3694847 | 2334 | int i; |
a6a2274a | 2335 | |
2c88418c RS |
2336 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
2337 | { | |
2338 | if (fmt[i] == 'e') | |
2339 | { | |
2340 | if (volatile_refs_p (XEXP (x, i))) | |
2341 | return 1; | |
2342 | } | |
d4757e6a | 2343 | else if (fmt[i] == 'E') |
2c88418c | 2344 | { |
b3694847 | 2345 | int j; |
2c88418c RS |
2346 | for (j = 0; j < XVECLEN (x, i); j++) |
2347 | if (volatile_refs_p (XVECEXP (x, i, j))) | |
2348 | return 1; | |
2349 | } | |
2350 | } | |
2351 | } | |
2352 | return 0; | |
2353 | } | |
2354 | ||
2355 | /* Similar to above, except that it also rejects register pre- and post- | |
2356 | incrementing. */ | |
2357 | ||
2358 | int | |
f7d504c2 | 2359 | side_effects_p (const_rtx x) |
2c88418c | 2360 | { |
f7d504c2 | 2361 | const RTX_CODE code = GET_CODE (x); |
2c88418c RS |
2362 | switch (code) |
2363 | { | |
2364 | case LABEL_REF: | |
2365 | case SYMBOL_REF: | |
2c88418c | 2366 | case CONST: |
d8116890 | 2367 | CASE_CONST_ANY: |
2c88418c RS |
2368 | case CC0: |
2369 | case PC: | |
2370 | case REG: | |
2371 | case SCRATCH: | |
2c88418c RS |
2372 | case ADDR_VEC: |
2373 | case ADDR_DIFF_VEC: | |
b5b8b0ac | 2374 | case VAR_LOCATION: |
2c88418c RS |
2375 | return 0; |
2376 | ||
2377 | case CLOBBER: | |
2378 | /* Reject CLOBBER with a non-VOID mode. These are made by combine.c | |
2379 | when some combination can't be done. If we see one, don't think | |
2380 | that we can simplify the expression. */ | |
2381 | return (GET_MODE (x) != VOIDmode); | |
2382 | ||
2383 | case PRE_INC: | |
2384 | case PRE_DEC: | |
2385 | case POST_INC: | |
2386 | case POST_DEC: | |
1fb9c5cd MH |
2387 | case PRE_MODIFY: |
2388 | case POST_MODIFY: | |
2c88418c | 2389 | case CALL: |
2ac4fed0 | 2390 | case UNSPEC_VOLATILE: |
2c88418c RS |
2391 | return 1; |
2392 | ||
2393 | case MEM: | |
4c46ea23 | 2394 | case ASM_INPUT: |
2c88418c RS |
2395 | case ASM_OPERANDS: |
2396 | if (MEM_VOLATILE_P (x)) | |
2397 | return 1; | |
e9a25f70 JL |
2398 | |
2399 | default: | |
2400 | break; | |
2c88418c RS |
2401 | } |
2402 | ||
2403 | /* Recursively scan the operands of this expression. */ | |
2404 | ||
2405 | { | |
b3694847 SS |
2406 | const char *fmt = GET_RTX_FORMAT (code); |
2407 | int i; | |
a6a2274a | 2408 | |
2c88418c RS |
2409 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
2410 | { | |
2411 | if (fmt[i] == 'e') | |
2412 | { | |
2413 | if (side_effects_p (XEXP (x, i))) | |
2414 | return 1; | |
2415 | } | |
d4757e6a | 2416 | else if (fmt[i] == 'E') |
2c88418c | 2417 | { |
b3694847 | 2418 | int j; |
2c88418c RS |
2419 | for (j = 0; j < XVECLEN (x, i); j++) |
2420 | if (side_effects_p (XVECEXP (x, i, j))) | |
2421 | return 1; | |
2422 | } | |
2423 | } | |
2424 | } | |
2425 | return 0; | |
2426 | } | |
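
/* Sketch of the intended division of labor (illustration only):
   volatile_refs_p asks "may this touch something volatile?", while
   side_effects_p additionally rejects autoincrement, calls and
   non-VOID CLOBBERs.  Deleting a dead set therefore wants the
   stronger test on its source.  */

static bool
set_src_removable_p (const_rtx set)
{
  return !side_effects_p (SET_SRC (set));
}
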
2427 | \f | |
e755fcf5 | 2428 | /* Return nonzero if evaluating rtx X might cause a trap. |
48e8382e PB |
2429 | FLAGS controls how to consider MEMs. A nonzero value means the context | |
2430 | of the access may have changed from the original, such that the | |
2431 | address may have become invalid. */ | |
2c88418c | 2432 | |
215b063c | 2433 | int |
f7d504c2 | 2434 | may_trap_p_1 (const_rtx x, unsigned flags) |
2c88418c RS |
2435 | { |
2436 | int i; | |
2437 | enum rtx_code code; | |
6f7d635c | 2438 | const char *fmt; |
48e8382e PB |
2439 | |
2440 | /* We make no distinction currently, but this function is part of | |
2441 | the internal target-hooks ABI so we keep the parameter as | |
2442 | "unsigned flags". */ | |
2443 | bool code_changed = flags != 0; | |
2c88418c RS |
2444 | |
2445 | if (x == 0) | |
2446 | return 0; | |
2447 | code = GET_CODE (x); | |
2448 | switch (code) | |
2449 | { | |
2450 | /* Handle these cases quickly. */ | |
d8116890 | 2451 | CASE_CONST_ANY: |
2c88418c RS |
2452 | case SYMBOL_REF: |
2453 | case LABEL_REF: | |
2454 | case CONST: | |
2455 | case PC: | |
2456 | case CC0: | |
2457 | case REG: | |
2458 | case SCRATCH: | |
2459 | return 0; | |
2460 | ||
215b063c | 2461 | case UNSPEC: |
215b063c PB |
2462 | return targetm.unspec_may_trap_p (x, flags); |
2463 | ||
c84a808e | 2464 | case UNSPEC_VOLATILE: |
215b063c | 2465 | case ASM_INPUT: |
2c88418c RS |
2466 | case TRAP_IF: |
2467 | return 1; | |
2468 | ||
22aa60a1 RH |
2469 | case ASM_OPERANDS: |
2470 | return MEM_VOLATILE_P (x); | |
2471 | ||
2c88418c RS |
2472 | /* Memory ref can trap unless it's a static var or a stack slot. */ |
2473 | case MEM: | |
d809253a EB |
2474 | /* Recognize specific pattern of stack checking probes. */ |
2475 | if (flag_stack_check | |
2476 | && MEM_VOLATILE_P (x) | |
2477 | && XEXP (x, 0) == stack_pointer_rtx) | |
2478 | return 1; | |
e755fcf5 | 2479 | if (/* MEM_NOTRAP_P only relates to the actual position of the memory |
48e8382e PB |
2480 | reference; moving it out of context such as when moving code |
2481 | when optimizing, might cause its address to become invalid. */ | |
2482 | code_changed | |
2483 | || !MEM_NOTRAP_P (x)) | |
2484 | { | |
f5541398 | 2485 | HOST_WIDE_INT size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : 0; |
48e8382e PB |
2486 | return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size, |
2487 | GET_MODE (x), code_changed); | |
2488 | } | |
2489 | ||
2490 | return 0; | |
2c88418c RS |
2491 | |
2492 | /* Division by a non-constant might trap. */ | |
2493 | case DIV: | |
2494 | case MOD: | |
2495 | case UDIV: | |
2496 | case UMOD: | |
52bfebf0 RS |
2497 | if (HONOR_SNANS (GET_MODE (x))) |
2498 | return 1; | |
3d8bf70f | 2499 | if (SCALAR_FLOAT_MODE_P (GET_MODE (x))) |
f9013075 DE |
2500 | return flag_trapping_math; |
2501 | if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx)) | |
2c88418c | 2502 | return 1; |
e9a25f70 JL |
2503 | break; |
2504 | ||
b278301b RK |
2505 | case EXPR_LIST: |
2506 | /* An EXPR_LIST is used to represent a function call. This | |
2507 | certainly may trap. */ | |
2508 | return 1; | |
e9a25f70 | 2509 | |
734508ea JW |
2510 | case GE: |
2511 | case GT: | |
2512 | case LE: | |
2513 | case LT: | |
19aec195 | 2514 | case LTGT: |
55143861 | 2515 | case COMPARE: |
734508ea | 2516 | /* Some floating point comparisons may trap. */ |
f5eb5fd0 JH |
2517 | if (!flag_trapping_math) |
2518 | break; | |
734508ea JW |
2519 | /* ??? There is no machine independent way to check for tests that trap |
2520 | when COMPARE is used, though many targets do make this distinction. | |
2521 | For instance, sparc uses CCFPE for compares which generate exceptions | |
2522 | and CCFP for compares which do not generate exceptions. */ | |
52bfebf0 | 2523 | if (HONOR_NANS (GET_MODE (x))) |
55143861 JJ |
2524 | return 1; |
2525 | /* But often the compare has some CC mode, so check operand | |
2526 | modes as well. */ | |
52bfebf0 RS |
2527 | if (HONOR_NANS (GET_MODE (XEXP (x, 0))) |
2528 | || HONOR_NANS (GET_MODE (XEXP (x, 1)))) | |
2529 | return 1; | |
2530 | break; | |
2531 | ||
2532 | case EQ: | |
2533 | case NE: | |
2534 | if (HONOR_SNANS (GET_MODE (x))) | |
2535 | return 1; | |
2536 | /* Often comparison is CC mode, so check operand modes. */ | |
2537 | if (HONOR_SNANS (GET_MODE (XEXP (x, 0))) | |
2538 | || HONOR_SNANS (GET_MODE (XEXP (x, 1)))) | |
55143861 JJ |
2539 | return 1; |
2540 | break; | |
2541 | ||
22fd5743 FH |
2542 | case FIX: |
2543 | /* Conversion of floating point might trap. */ | |
2544 | if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0)))) | |
2545 | return 1; | |
2546 | break; | |
2547 | ||
05cc23e8 RH |
2548 | case NEG: |
2549 | case ABS: | |
e3947b34 | 2550 | case SUBREG: |
05cc23e8 RH |
2551 | /* These operations don't trap even with floating point. */ |
2552 | break; | |
2553 | ||
2c88418c RS |
2554 | default: |
2555 | /* Any floating arithmetic may trap. */ | |
c84a808e | 2556 | if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math) |
2c88418c RS |
2557 | return 1; |
2558 | } | |
2559 | ||
2560 | fmt = GET_RTX_FORMAT (code); | |
2561 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2562 | { | |
2563 | if (fmt[i] == 'e') | |
2564 | { | |
e755fcf5 | 2565 | if (may_trap_p_1 (XEXP (x, i), flags)) |
2c88418c RS |
2566 | return 1; |
2567 | } | |
2568 | else if (fmt[i] == 'E') | |
2569 | { | |
b3694847 | 2570 | int j; |
2c88418c | 2571 | for (j = 0; j < XVECLEN (x, i); j++) |
e755fcf5 | 2572 | if (may_trap_p_1 (XVECEXP (x, i, j), flags)) |
2c88418c RS |
2573 | return 1; |
2574 | } | |
2575 | } | |
2576 | return 0; | |
2577 | } | |
2358ff91 EB |
2578 | |
2579 | /* Return nonzero if evaluating rtx X might cause a trap. */ | |
2580 | ||
2581 | int | |
f7d504c2 | 2582 | may_trap_p (const_rtx x) |
2358ff91 | 2583 | { |
e755fcf5 ZD |
2584 | return may_trap_p_1 (x, 0); |
2585 | } | |
2586 | ||
c0220ea4 | 2587 | /* Same as above, but additionally return nonzero if evaluating rtx X might |
2358ff91 EB |
2588 | cause a fault. We define a fault for the purpose of this function as an | |
2589 | erroneous execution condition that cannot be encountered during the normal | |
2590 | execution of a valid program; the typical example is an unaligned memory | |
2591 | access on a strict alignment machine. The compiler guarantees that it | |
2592 | doesn't generate code that will fault from a valid program, but this | |
2593 | guarantee doesn't mean anything for individual instructions. Consider | |
2594 | the following example: | |
2595 | ||
2596 | struct S { int d; union { char *cp; int *ip; }; }; | |
2597 | ||
2598 | int foo(struct S *s) | |
2599 | { | |
2600 | if (s->d == 1) | |
2601 | return *s->ip; | |
2602 | else | |
2603 | return *s->cp; | |
2604 | } | |
2605 | ||
2606 | on a strict alignment machine. In a valid program, foo will never be | |
2607 | invoked on a structure for which d is equal to 1 and the underlying | |
2608 | unique field of the union not aligned on a 4-byte boundary, but the | |
2609 | expression *s->ip might cause a fault if considered individually. | |
2610 | ||
2611 | At the RTL level, potentially problematic expressions will almost always | |
2612 | verify may_trap_p; for example, the above dereference can be emitted as | |
2613 | (mem:SI (reg:P)) and this expression is may_trap_p for a generic register. | |
2614 | However, suppose that foo is inlined in a caller that causes s->cp to | |
2615 | point to a local character variable and guarantees that s->d is not set | |
2616 | to 1; foo may have been effectively translated into pseudo-RTL as: | |
2617 | ||
2618 | if ((reg:SI) == 1) | |
2619 | (set (reg:SI) (mem:SI (%fp - 7))) | |
2620 | else | |
2621 | (set (reg:QI) (mem:QI (%fp - 7))) | |
2622 | ||
2623 | Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a | |
2624 | memory reference to a stack slot, but it will certainly cause a fault | |
2625 | on a strict alignment machine. */ | |
2626 | ||
2627 | int | |
f7d504c2 | 2628 | may_trap_or_fault_p (const_rtx x) |
2358ff91 | 2629 | { |
48e8382e | 2630 | return may_trap_p_1 (x, 1); |
2358ff91 | 2631 | } |
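
/* Sketch (hypothetical predicate): code motion out of the original
   context must use may_trap_or_fault_p rather than may_trap_p, for
   the reasons given in the comment above; the foo example shows a
   MEM that cannot trap in place but can fault once hoisted.  */

static bool
safe_to_hoist_p (const_rtx x)
{
  return !may_trap_or_fault_p (x) && !side_effects_p (x);
}
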
2c88418c RS |
2632 | \f |
2633 | /* Return nonzero if X contains a comparison that is not either EQ or NE, | |
2634 | i.e., an inequality. */ | |
2635 | ||
2636 | int | |
f7d504c2 | 2637 | inequality_comparisons_p (const_rtx x) |
2c88418c | 2638 | { |
b3694847 SS |
2639 | const char *fmt; |
2640 | int len, i; | |
f7d504c2 | 2641 | const enum rtx_code code = GET_CODE (x); |
2c88418c RS |
2642 | |
2643 | switch (code) | |
2644 | { | |
2645 | case REG: | |
2646 | case SCRATCH: | |
2647 | case PC: | |
2648 | case CC0: | |
d8116890 | 2649 | CASE_CONST_ANY: |
2c88418c RS |
2650 | case CONST: |
2651 | case LABEL_REF: | |
2652 | case SYMBOL_REF: | |
2653 | return 0; | |
2654 | ||
2655 | case LT: | |
2656 | case LTU: | |
2657 | case GT: | |
2658 | case GTU: | |
2659 | case LE: | |
2660 | case LEU: | |
2661 | case GE: | |
2662 | case GEU: | |
2663 | return 1; | |
a6a2274a | 2664 | |
e9a25f70 JL |
2665 | default: |
2666 | break; | |
2c88418c RS |
2667 | } |
2668 | ||
2669 | len = GET_RTX_LENGTH (code); | |
2670 | fmt = GET_RTX_FORMAT (code); | |
2671 | ||
2672 | for (i = 0; i < len; i++) | |
2673 | { | |
2674 | if (fmt[i] == 'e') | |
2675 | { | |
2676 | if (inequality_comparisons_p (XEXP (x, i))) | |
2677 | return 1; | |
2678 | } | |
2679 | else if (fmt[i] == 'E') | |
2680 | { | |
b3694847 | 2681 | int j; |
2c88418c RS |
2682 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
2683 | if (inequality_comparisons_p (XVECEXP (x, i, j))) | |
2684 | return 1; | |
2685 | } | |
2686 | } | |
a6a2274a | 2687 | |
2c88418c RS |
2688 | return 0; |
2689 | } | |
2690 | \f | |
1ed0205e VM |
2691 | /* Replace any occurrence of FROM in X with TO. The function does |
2692 | not recurse into CONST_DOUBLE for the replacement. | |
2c88418c RS |
2693 | |
2694 | Note that copying is not done so X must not be shared unless all copies | |
2695 | are to be modified. */ | |
2696 | ||
2697 | rtx | |
0c20a65f | 2698 | replace_rtx (rtx x, rtx from, rtx to) |
2c88418c | 2699 | { |
b3694847 SS |
2700 | int i, j; |
2701 | const char *fmt; | |
2c88418c RS |
2702 | |
2703 | if (x == from) | |
2704 | return to; | |
2705 | ||
2706 | /* Allow this function to make replacements in EXPR_LISTs. */ | |
2707 | if (x == 0) | |
2708 | return 0; | |
2709 | ||
9dd791c8 AO |
2710 | if (GET_CODE (x) == SUBREG) |
2711 | { | |
55d796da | 2712 | rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to); |
9dd791c8 | 2713 | |
481683e1 | 2714 | if (CONST_INT_P (new_rtx)) |
9dd791c8 | 2715 | { |
55d796da | 2716 | x = simplify_subreg (GET_MODE (x), new_rtx, |
9dd791c8 AO |
2717 | GET_MODE (SUBREG_REG (x)), |
2718 | SUBREG_BYTE (x)); | |
41374e13 | 2719 | gcc_assert (x); |
9dd791c8 AO |
2720 | } |
2721 | else | |
55d796da | 2722 | SUBREG_REG (x) = new_rtx; |
9dd791c8 AO |
2723 | |
2724 | return x; | |
2725 | } | |
2726 | else if (GET_CODE (x) == ZERO_EXTEND) | |
2727 | { | |
55d796da | 2728 | rtx new_rtx = replace_rtx (XEXP (x, 0), from, to); |
9dd791c8 | 2729 | |
481683e1 | 2730 | if (CONST_INT_P (new_rtx)) |
9dd791c8 AO |
2731 | { |
2732 | x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x), | |
55d796da | 2733 | new_rtx, GET_MODE (XEXP (x, 0))); |
41374e13 | 2734 | gcc_assert (x); |
9dd791c8 AO |
2735 | } |
2736 | else | |
55d796da | 2737 | XEXP (x, 0) = new_rtx; |
9dd791c8 AO |
2738 | |
2739 | return x; | |
2740 | } | |
2741 | ||
2c88418c RS |
2742 | fmt = GET_RTX_FORMAT (GET_CODE (x)); |
2743 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--) | |
2744 | { | |
2745 | if (fmt[i] == 'e') | |
2746 | XEXP (x, i) = replace_rtx (XEXP (x, i), from, to); | |
2747 | else if (fmt[i] == 'E') | |
2748 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
2749 | XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to); | |
2750 | } | |
2751 | ||
2752 | return x; | |
a6a2274a | 2753 | } |
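
/* Usage sketch: because replace_rtx rewrites its argument in place,
   a shared pattern must be copied first.  The helper below
   (hypothetical) builds the pattern INSN would have if REG were
   replaced by VALUE.  */

static rtx
substituted_pattern (rtx_insn *insn, rtx reg, rtx value)
{
  return replace_rtx (copy_rtx (PATTERN (insn)), reg, value);
}
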
2c88418c | 2754 | \f |
a2b7026c RS |
2755 | /* Replace occurrences of the OLD_LABEL in *LOC with NEW_LABEL. Also track |
2756 | the change in LABEL_NUSES if UPDATE_LABEL_NUSES. */ | |
39811184 | 2757 | |
a2b7026c RS |
2758 | void |
2759 | replace_label (rtx *loc, rtx old_label, rtx new_label, bool update_label_nuses) | |
39811184 | 2760 | { |
a2b7026c RS |
2761 | /* Handle jump tables specially, since ADDR_{DIFF_,}VECs can be long. */ |
2762 | rtx x = *loc; | |
2763 | if (JUMP_TABLE_DATA_P (x)) | |
4af16369 | 2764 | { |
a2b7026c RS |
2765 | x = PATTERN (x); |
2766 | rtvec vec = XVEC (x, GET_CODE (x) == ADDR_DIFF_VEC); | |
2767 | int len = GET_NUM_ELEM (vec); | |
2768 | for (int i = 0; i < len; ++i) | |
4af16369 | 2769 | { |
a2b7026c RS |
2770 | rtx ref = RTVEC_ELT (vec, i); |
2771 | if (XEXP (ref, 0) == old_label) | |
2772 | { | |
2773 | XEXP (ref, 0) = new_label; | |
2774 | if (update_label_nuses) | |
2775 | { | |
2776 | ++LABEL_NUSES (new_label); | |
2777 | --LABEL_NUSES (old_label); | |
2778 | } | |
2779 | } | |
4af16369 | 2780 | } |
a2b7026c | 2781 | return; |
4af16369 JZ |
2782 | } |
2783 | ||
39811184 | 2784 | /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL |
a2b7026c | 2785 | field. This is not handled by the iterator because it doesn't |
39811184 | 2786 | handle unprinted ('0') fields. */ |
a2b7026c RS |
2787 | if (JUMP_P (x) && JUMP_LABEL (x) == old_label) |
2788 | JUMP_LABEL (x) = new_label; | |
39811184 | 2789 | |
a2b7026c RS |
2790 | subrtx_ptr_iterator::array_type array; |
2791 | FOR_EACH_SUBRTX_PTR (iter, array, loc, ALL) | |
4af16369 | 2792 | { |
a2b7026c RS |
2793 | rtx *loc = *iter; |
2794 | if (rtx x = *loc) | |
4af16369 | 2795 | { |
a2b7026c RS |
2796 | if (GET_CODE (x) == SYMBOL_REF |
2797 | && CONSTANT_POOL_ADDRESS_P (x)) | |
2798 | { | |
2799 | rtx c = get_pool_constant (x); | |
2800 | if (rtx_referenced_p (old_label, c)) | |
2801 | { | |
2802 | /* Create a copy of constant C; replace the label inside | |
2803 | but do not update LABEL_NUSES because uses in constant pool | |
2804 | are not counted. */ | |
2805 | rtx new_c = copy_rtx (c); | |
2806 | replace_label (&new_c, old_label, new_label, false); | |
2807 | ||
2808 | /* Add the new constant NEW_C to constant pool and replace | |
2809 | the old reference to constant by new reference. */ | |
2810 | rtx new_mem = force_const_mem (get_pool_mode (x), new_c); | |
2811 | *loc = replace_rtx (x, x, XEXP (new_mem, 0)); | |
2812 | } | |
2813 | } | |
2814 | ||
2815 | if ((GET_CODE (x) == LABEL_REF | |
2816 | || GET_CODE (x) == INSN_LIST) | |
2817 | && XEXP (x, 0) == old_label) | |
2818 | { | |
2819 | XEXP (x, 0) = new_label; | |
2820 | if (update_label_nuses) | |
2821 | { | |
2822 | ++LABEL_NUSES (new_label); | |
2823 | --LABEL_NUSES (old_label); | |
2824 | } | |
2825 | } | |
4af16369 | 2826 | } |
4af16369 | 2827 | } |
a2b7026c | 2828 | } |
39811184 | 2829 | |
a2b7026c RS |
2830 | void |
2831 | replace_label_in_insn (rtx_insn *insn, rtx old_label, rtx new_label, | |
2832 | bool update_label_nuses) | |
2833 | { | |
2834 | rtx insn_as_rtx = insn; | |
2835 | replace_label (&insn_as_rtx, old_label, new_label, update_label_nuses); | |
2836 | gcc_checking_assert (insn_as_rtx == insn); | |
39811184 JZ |
2837 | } |
2838 | ||
e08cf836 | 2839 | /* Return true if X is referenced in BODY. */ |
39811184 | 2840 | |
e08cf836 RS |
2841 | bool |
2842 | rtx_referenced_p (const_rtx x, const_rtx body) | |
39811184 | 2843 | { |
e08cf836 RS |
2844 | subrtx_iterator::array_type array; |
2845 | FOR_EACH_SUBRTX (iter, array, body, ALL) | |
2846 | if (const_rtx y = *iter) | |
2847 | { | |
2848 | /* Check if a label_ref Y refers to label X. */ | |
a827d9b1 DM |
2849 | if (GET_CODE (y) == LABEL_REF |
2850 | && LABEL_P (x) | |
2851 | && LABEL_REF_LABEL (y) == x) | |
e08cf836 | 2852 | return true; |
39811184 | 2853 | |
e08cf836 RS |
2854 | if (rtx_equal_p (x, y)) |
2855 | return true; | |
39811184 | 2856 | |
e08cf836 RS |
2857 | /* If Y is a reference to pool constant traverse the constant. */ |
2858 | if (GET_CODE (y) == SYMBOL_REF | |
2859 | && CONSTANT_POOL_ADDRESS_P (y)) | |
2860 | iter.substitute (get_pool_constant (y)); | |
2861 | } | |
2862 | return false; | |
39811184 JZ |
2863 | } |
2864 | ||
ee735eef JZ |
2865 | /* If INSN is a tablejump, return true and store the label (before the jump table) to | |
2866 | *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */ | |
39811184 JZ |
2867 | |
2868 | bool | |
c5241a21 | 2869 | tablejump_p (const rtx_insn *insn, rtx *labelp, rtx_jump_table_data **tablep) |
39811184 | 2870 | { |
ee735eef JZ |
2871 | rtx label, table; |
2872 | ||
dc0ff1c8 BS |
2873 | if (!JUMP_P (insn)) |
2874 | return false; | |
2875 | ||
2876 | label = JUMP_LABEL (insn); | |
2877 | if (label != NULL_RTX && !ANY_RETURN_P (label) | |
b32d5189 | 2878 | && (table = NEXT_INSN (as_a <rtx_insn *> (label))) != NULL_RTX |
481683e1 | 2879 | && JUMP_TABLE_DATA_P (table)) |
39811184 | 2880 | { |
ee735eef JZ |
2881 | if (labelp) |
2882 | *labelp = label; | |
2883 | if (tablep) | |
8942ee0f | 2884 | *tablep = as_a <rtx_jump_table_data *> (table); |
39811184 JZ |
2885 | return true; |
2886 | } | |
2887 | return false; | |
2888 | } | |
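
/* Sketch: count the case targets of a tablejump, extracting the
   dispatch vector the same way replace_label does above.  Passing a
   null LABELP is allowed.  */

static int
tablejump_case_count (const rtx_insn *insn)
{
  rtx_jump_table_data *table;
  rtx pat;

  if (!tablejump_p (insn, NULL, &table))
    return 0;
  pat = PATTERN (table);
  return GET_NUM_ELEM (XVEC (pat, GET_CODE (pat) == ADDR_DIFF_VEC));
}
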
2889 | ||
fce7e199 RH |
2890 | /* A subroutine of computed_jump_p. Return 1 if X contains a REG or MEM or | |
2891 | constant that is not in the constant pool and not in the condition | |
2892 | of an IF_THEN_ELSE. */ | |
2a1777af JL |
2893 | |
2894 | static int | |
f7d504c2 | 2895 | computed_jump_p_1 (const_rtx x) |
2a1777af | 2896 | { |
f7d504c2 | 2897 | const enum rtx_code code = GET_CODE (x); |
2a1777af | 2898 | int i, j; |
6f7d635c | 2899 | const char *fmt; |
2a1777af JL |
2900 | |
2901 | switch (code) | |
2902 | { | |
2a1777af JL |
2903 | case LABEL_REF: |
2904 | case PC: | |
2905 | return 0; | |
2906 | ||
fce7e199 | 2907 | case CONST: |
d8116890 | 2908 | CASE_CONST_ANY: |
fce7e199 | 2909 | case SYMBOL_REF: |
2a1777af JL |
2910 | case REG: |
2911 | return 1; | |
2912 | ||
2913 | case MEM: | |
2914 | return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF | |
2915 | && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0))); | |
2916 | ||
2917 | case IF_THEN_ELSE: | |
fce7e199 RH |
2918 | return (computed_jump_p_1 (XEXP (x, 1)) |
2919 | || computed_jump_p_1 (XEXP (x, 2))); | |
1d300e19 KG |
2920 | |
2921 | default: | |
2922 | break; | |
2a1777af JL |
2923 | } |
2924 | ||
2925 | fmt = GET_RTX_FORMAT (code); | |
2926 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2927 | { | |
2928 | if (fmt[i] == 'e' | |
fce7e199 | 2929 | && computed_jump_p_1 (XEXP (x, i))) |
2a1777af JL |
2930 | return 1; |
2931 | ||
d4757e6a | 2932 | else if (fmt[i] == 'E') |
2a1777af | 2933 | for (j = 0; j < XVECLEN (x, i); j++) |
fce7e199 | 2934 | if (computed_jump_p_1 (XVECEXP (x, i, j))) |
2a1777af JL |
2935 | return 1; |
2936 | } | |
2937 | ||
2938 | return 0; | |
2939 | } | |
2940 | ||
2941 | /* Return nonzero if INSN is an indirect jump (aka computed jump). | |
2942 | ||
2943 | Tablejumps and casesi insns are not considered indirect jumps; | |
4eb00163 | 2944 | we can recognize them by a (use (label_ref)). */ |
2a1777af JL |
2945 | |
2946 | int | |
f7d504c2 | 2947 | computed_jump_p (const_rtx insn) |
2a1777af JL |
2948 | { |
2949 | int i; | |
4b4bf941 | 2950 | if (JUMP_P (insn)) |
2a1777af JL |
2951 | { |
2952 | rtx pat = PATTERN (insn); | |
2a1777af | 2953 | |
cf7c4aa6 HPN |
2954 | /* If we have a JUMP_LABEL set, we're not a computed jump. */ |
2955 | if (JUMP_LABEL (insn) != NULL) | |
f759eb8b | 2956 | return 0; |
cf7c4aa6 HPN |
2957 | |
2958 | if (GET_CODE (pat) == PARALLEL) | |
2a1777af JL |
2959 | { |
2960 | int len = XVECLEN (pat, 0); | |
2961 | int has_use_labelref = 0; | |
2962 | ||
2963 | for (i = len - 1; i >= 0; i--) | |
2964 | if (GET_CODE (XVECEXP (pat, 0, i)) == USE | |
2965 | && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) | |
2966 | == LABEL_REF)) | |
c7b3b99f PCC |
2967 | { |
2968 | has_use_labelref = 1; | |
2969 | break; | |
2970 | } | |
2a1777af JL |
2971 | |
2972 | if (! has_use_labelref) | |
2973 | for (i = len - 1; i >= 0; i--) | |
2974 | if (GET_CODE (XVECEXP (pat, 0, i)) == SET | |
2975 | && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx | |
fce7e199 | 2976 | && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i)))) |
2a1777af JL |
2977 | return 1; |
2978 | } | |
2979 | else if (GET_CODE (pat) == SET | |
2980 | && SET_DEST (pat) == pc_rtx | |
fce7e199 | 2981 | && computed_jump_p_1 (SET_SRC (pat))) |
2a1777af JL |
2982 | return 1; |
2983 | } | |
2984 | return 0; | |
2985 | } | |
ccc2d6d0 | 2986 | |
cf94b0fc PB |
2987 | /* Optimized worker loop for for_each_rtx, trying to avoid useless recursive | |
2988 | calls. Processes the subexpressions of EXP and passes them to F. */ | |
2989 | static int | |
2990 | for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data) | |
2991 | { | |
2992 | int result, i, j; | |
2993 | const char *format = GET_RTX_FORMAT (GET_CODE (exp)); | |
2994 | rtx *x; | |
2995 | ||
2996 | for (; format[n] != '\0'; n++) | |
2997 | { | |
2998 | switch (format[n]) | |
2999 | { | |
3000 | case 'e': | |
3001 | /* Call F on X. */ | |
3002 | x = &XEXP (exp, n); | |
3003 | result = (*f) (x, data); | |
3004 | if (result == -1) | |
3005 | /* Do not traverse sub-expressions. */ | |
3006 | continue; | |
3007 | else if (result != 0) | |
3008 | /* Stop the traversal. */ | |
3009 | return result; | |
b8698a0f | 3010 | |
cf94b0fc PB |
3011 | if (*x == NULL_RTX) |
3012 | /* There are no sub-expressions. */ | |
3013 | continue; | |
b8698a0f | 3014 | |
cf94b0fc PB |
3015 | i = non_rtx_starting_operands[GET_CODE (*x)]; |
3016 | if (i >= 0) | |
3017 | { | |
3018 | result = for_each_rtx_1 (*x, i, f, data); | |
3019 | if (result != 0) | |
3020 | return result; | |
3021 | } | |
3022 | break; | |
3023 | ||
3024 | case 'V': | |
3025 | case 'E': | |
3026 | if (XVEC (exp, n) == 0) | |
3027 | continue; | |
3028 | for (j = 0; j < XVECLEN (exp, n); ++j) | |
3029 | { | |
3030 | /* Call F on X. */ | |
3031 | x = &XVECEXP (exp, n, j); | |
3032 | result = (*f) (x, data); | |
3033 | if (result == -1) | |
3034 | /* Do not traverse sub-expressions. */ | |
3035 | continue; | |
3036 | else if (result != 0) | |
3037 | /* Stop the traversal. */ | |
3038 | return result; | |
b8698a0f | 3039 | |
cf94b0fc PB |
3040 | if (*x == NULL_RTX) |
3041 | /* There are no sub-expressions. */ | |
3042 | continue; | |
b8698a0f | 3043 | |
cf94b0fc PB |
3044 | i = non_rtx_starting_operands[GET_CODE (*x)]; |
3045 | if (i >= 0) | |
3046 | { | |
3047 | result = for_each_rtx_1 (*x, i, f, data); | |
3048 | if (result != 0) | |
3049 | return result; | |
3050 | } | |
3051 | } | |
3052 | break; | |
3053 | ||
3054 | default: | |
3055 | /* Nothing to do. */ | |
3056 | break; | |
3057 | } | |
3058 | } | |
3059 | ||
3060 | return 0; | |
3061 | } | |
3062 | ||
ccc2d6d0 MM |
3063 | /* Traverse X via depth-first search, calling F for each |
3064 | sub-expression (including X itself). F is also passed the DATA. | |
3065 | If F returns -1, do not traverse sub-expressions, but continue | |
3066 | traversing the rest of the tree. If F ever returns any other | |
40f03658 | 3067 | nonzero value, stop the traversal, and return the value returned |
ccc2d6d0 MM |
3068 | by F. Otherwise, return 0. This function does not traverse inside |
3069 | tree structure that contains RTX_EXPRs, or into sub-expressions | |
3070 | whose format code is `0' since it is not known whether or not those | |
3071 | codes are actually RTL. | |
3072 | ||
3073 | This routine is very general, and could (should?) be used to | |
3074 | implement many of the other routines in this file. */ | |
3075 | ||
ae0b51ef | 3076 | int |
0c20a65f | 3077 | for_each_rtx (rtx *x, rtx_function f, void *data) |
ccc2d6d0 MM |
3078 | { |
3079 | int result; | |
ccc2d6d0 MM |
3080 | int i; |
3081 | ||
3082 | /* Call F on X. */ | |
b987f237 | 3083 | result = (*f) (x, data); |
ccc2d6d0 MM |
3084 | if (result == -1) |
3085 | /* Do not traverse sub-expressions. */ | |
3086 | return 0; | |
3087 | else if (result != 0) | |
3088 | /* Stop the traversal. */ | |
3089 | return result; | |
3090 | ||
3091 | if (*x == NULL_RTX) | |
3092 | /* There are no sub-expressions. */ | |
3093 | return 0; | |
3094 | ||
cf94b0fc PB |
3095 | i = non_rtx_starting_operands[GET_CODE (*x)]; |
3096 | if (i < 0) | |
3097 | return 0; | |
ccc2d6d0 | 3098 | |
cf94b0fc | 3099 | return for_each_rtx_1 (*x, i, f, data); |
ccc2d6d0 | 3100 | } |
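
/* Usage sketch for the callback protocol described above
   (hypothetical helpers): count the MEMs in an expression.
   Returning 0 continues the walk; returning -1 from the callback
   would skip the sub-expressions of the current rtx.  */

static int
count_mems_1 (rtx *px, void *data)
{
  if (MEM_P (*px))
    ++*(int *) data;
  return 0;
}

static int
count_mems (rtx x)
{
  int n = 0;
  for_each_rtx (&x, count_mems_1, &n);
  return n;
}
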
3ec2b590 | 3101 | |
70e7f57d DM |
3102 | /* Like "for_each_rtx", but for calling on an rtx_insn **. */ |
3103 | ||
3104 | int | |
3105 | for_each_rtx_in_insn (rtx_insn **insn, rtx_function f, void *data) | |
3106 | { | |
3107 | rtx insn_as_rtx = *insn; | |
3108 | int result; | |
3109 | ||
3110 | result = for_each_rtx (&insn_as_rtx, f, data); | |
3111 | ||
3112 | if (insn_as_rtx != *insn) | |
3113 | *insn = safe_as_a <rtx_insn *> (insn_as_rtx); | |
3114 | ||
3115 | return result; | |
3116 | } | |
3117 | ||
4deef538 AO |
3118 | \f |
3119 | ||
8d8e205b RS |
3120 | /* MEM has a PRE/POST-INC/DEC/MODIFY address X. Extract the operands of |
3121 | the equivalent add insn and pass the result to FN, using DATA as the | |
3122 | final argument. */ | |
4deef538 AO |
3123 | |
3124 | static int | |
8d8e205b | 3125 | for_each_inc_dec_find_inc_dec (rtx mem, for_each_inc_dec_fn fn, void *data) |
4deef538 | 3126 | { |
8d8e205b | 3127 | rtx x = XEXP (mem, 0); |
4deef538 AO |
3128 | switch (GET_CODE (x)) |
3129 | { | |
3130 | case PRE_INC: | |
3131 | case POST_INC: | |
3132 | { | |
8d8e205b | 3133 | int size = GET_MODE_SIZE (GET_MODE (mem)); |
4deef538 AO |
3134 | rtx r1 = XEXP (x, 0); |
3135 | rtx c = gen_int_mode (size, GET_MODE (r1)); | |
8d8e205b | 3136 | return fn (mem, x, r1, r1, c, data); |
4deef538 AO |
3137 | } |
3138 | ||
3139 | case PRE_DEC: | |
3140 | case POST_DEC: | |
3141 | { | |
8d8e205b | 3142 | int size = GET_MODE_SIZE (GET_MODE (mem)); |
4deef538 AO |
3143 | rtx r1 = XEXP (x, 0); |
3144 | rtx c = gen_int_mode (-size, GET_MODE (r1)); | |
8d8e205b | 3145 | return fn (mem, x, r1, r1, c, data); |
4deef538 AO |
3146 | } |
3147 | ||
3148 | case PRE_MODIFY: | |
3149 | case POST_MODIFY: | |
3150 | { | |
3151 | rtx r1 = XEXP (x, 0); | |
3152 | rtx add = XEXP (x, 1); | |
8d8e205b | 3153 | return fn (mem, x, r1, add, NULL, data); |
4deef538 AO |
3154 | } |
3155 | ||
3156 | default: | |
8d8e205b | 3157 | gcc_unreachable (); |
4deef538 AO |
3158 | } |
3159 | } | |
3160 | ||
8d8e205b RS |
3161 | /* Traverse *LOC looking for MEMs that have autoinc addresses. |
3162 | For each such autoinc operation found, call FN, passing it | |
4deef538 AO |
3163 | the innermost enclosing MEM, the operation itself, the RTX modified |
3164 | by the operation, two RTXs (the second may be NULL) that, once | |
3165 | added, represent the value to be held by the modified RTX | |
8d8e205b RS |
3166 | afterwards, and DATA. FN is to return 0 to continue the |
3167 | traversal or any other value to have it returned to the caller of | |
4deef538 AO |
3168 | for_each_inc_dec. */ |
3169 | ||
3170 | int | |
8d8e205b | 3171 | for_each_inc_dec (rtx x, |
4deef538 | 3172 | for_each_inc_dec_fn fn, |
8d8e205b | 3173 | void *data) |
4deef538 | 3174 | { |
8d8e205b RS |
3175 | subrtx_var_iterator::array_type array; |
3176 | FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST) | |
3177 | { | |
3178 | rtx mem = *iter; | |
3179 | if (mem | |
3180 | && MEM_P (mem) | |
3181 | && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC) | |
3182 | { | |
3183 | int res = for_each_inc_dec_find_inc_dec (mem, fn, data); | |
3184 | if (res != 0) | |
3185 | return res; | |
3186 | iter.skip_subrtxes (); | |
3187 | } | |
3188 | } | |
3189 | return 0; | |
4deef538 AO |
3190 | } |
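/* Usage sketch (illustrative, not part of the original source): count
   the autoinc operations in PAT with a callback of the documented
   for_each_inc_dec_fn shape.  The names count_autoinc_1 and
   count_autoinc are hypothetical.  */

static int
count_autoinc_1 (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
                 rtx dest ATTRIBUTE_UNUSED, rtx src ATTRIBUTE_UNUSED,
                 rtx srcoff ATTRIBUTE_UNUSED, void *arg)
{
  ++*(int *) arg;
  return 0;  /* 0 continues the traversal.  */
}

static int
count_autoinc (rtx pat)
{
  int n = 0;
  for_each_inc_dec (pat, count_autoinc_1, &n);
  return n;
}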
3191 | ||
3192 | \f | |
777b1b71 RH |
3193 | /* Searches X for any reference to REGNO, returning the rtx of the |
3194 | reference found if any. Otherwise, returns NULL_RTX. */ | |
3195 | ||
3196 | rtx | |
0c20a65f | 3197 | regno_use_in (unsigned int regno, rtx x) |
777b1b71 | 3198 | { |
b3694847 | 3199 | const char *fmt; |
777b1b71 RH |
3200 | int i, j; |
3201 | rtx tem; | |
3202 | ||
f8cfc6aa | 3203 | if (REG_P (x) && REGNO (x) == regno) |
777b1b71 RH |
3204 | return x; |
3205 | ||
3206 | fmt = GET_RTX_FORMAT (GET_CODE (x)); | |
3207 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--) | |
3208 | { | |
3209 | if (fmt[i] == 'e') | |
3210 | { | |
3211 | if ((tem = regno_use_in (regno, XEXP (x, i)))) | |
3212 | return tem; | |
3213 | } | |
3214 | else if (fmt[i] == 'E') | |
3215 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
3216 | if ((tem = regno_use_in (regno , XVECEXP (x, i, j)))) | |
3217 | return tem; | |
3218 | } | |
3219 | ||
3220 | return NULL_RTX; | |
3221 | } | |
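/* Usage sketch (illustrative): regno_use_in (REGNO (reg), PATTERN (insn))
   yields a reference to REG's number inside INSN's pattern, or
   NULL_RTX if the pattern does not mention it.  */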
2dfa9a87 | 3222 | |
e5c56fd9 JH |
3223 | /* Return a value indicating whether OP, an operand of a commutative |
3224 | operation, is preferred as the first or second operand. The higher | |
3225 | the value, the stronger the preference for being the first operand. | |
3226 | We use negative values to indicate a preference for the second operand
3227 | and positive values for the first operand. */
3228 | ||
9b3bd424 | 3229 | int |
0c20a65f | 3230 | commutative_operand_precedence (rtx op) |
e5c56fd9 | 3231 | { |
e3d6e740 | 3232 | enum rtx_code code = GET_CODE (op); |
b8698a0f | 3233 | |
e5c56fd9 | 3234 | /* Constants always come second; prefer "nice" constants. */
e3d6e740 | 3235 | if (code == CONST_INT) |
7e0b4eae | 3236 | return -8; |
807e902e KZ |
3237 | if (code == CONST_WIDE_INT) |
3238 | return -8; | |
e3d6e740 | 3239 | if (code == CONST_DOUBLE) |
7e0b4eae | 3240 | return -7; |
091a3ac7 CF |
3241 | if (code == CONST_FIXED) |
3242 | return -7; | |
9ce79a7a | 3243 | op = avoid_constant_pool_reference (op); |
79b82df3 | 3244 | code = GET_CODE (op); |
ec8e098d PB |
3245 | |
3246 | switch (GET_RTX_CLASS (code)) | |
3247 | { | |
3248 | case RTX_CONST_OBJ: | |
3249 | if (code == CONST_INT) | |
7e0b4eae | 3250 | return -6; |
807e902e KZ |
3251 | if (code == CONST_WIDE_INT) |
3252 | return -6; | |
ec8e098d | 3253 | if (code == CONST_DOUBLE) |
7e0b4eae | 3254 | return -5; |
091a3ac7 CF |
3255 | if (code == CONST_FIXED) |
3256 | return -5; | |
7e0b4eae | 3257 | return -4; |
ec8e098d PB |
3258 | |
3259 | case RTX_EXTRA: | |
3260 | /* SUBREGs of objects should come second. */ | |
3261 | if (code == SUBREG && OBJECT_P (SUBREG_REG (op))) | |
7e0b4eae | 3262 | return -3; |
6fb5fa3c | 3263 | return 0; |
ec8e098d PB |
3264 | |
3265 | case RTX_OBJ: | |
3266 | /* Complex expressions should come first, so decrease the priority
7e0b4eae PB |
3267 | of objects. Prefer pointer objects over non-pointer objects. */
3268 | if ((REG_P (op) && REG_POINTER (op)) | |
3269 | || (MEM_P (op) && MEM_POINTER (op))) | |
3270 | return -1; | |
3271 | return -2; | |
ec8e098d PB |
3272 | |
3273 | case RTX_COMM_ARITH: | |
3274 | /* Prefer operands that are themselves commutative to be first. | |
3275 | This helps to make things linear. In particular, | |
3276 | (and (and (reg) (reg)) (not (reg))) is canonical. */ | |
3277 | return 4; | |
3278 | ||
3279 | case RTX_BIN_ARITH: | |
3280 | /* If only one operand is a binary expression, it will be the first | |
3281 | operand. In particular, (plus (minus (reg) (reg)) (neg (reg))) | |
3282 | is canonical, although it will usually be further simplified. */ | |
3283 | return 2; | |
b8698a0f | 3284 | |
ec8e098d PB |
3285 | case RTX_UNARY: |
3286 | /* Then prefer NEG and NOT. */ | |
3287 | if (code == NEG || code == NOT) | |
3288 | return 1; | |
e5c56fd9 | 3289 | |
ec8e098d PB |
3290 | default: |
3291 | return 0; | |
3292 | } | |
e5c56fd9 JH |
3293 | } |
3294 | ||
f63d1bf7 | 3295 | /* Return 1 iff it is necessary to swap the operands of a commutative
e5c56fd9 JH |
3296 | operation in order to canonicalize the expression. */
3297 | ||
7e0b4eae | 3298 | bool |
0c20a65f | 3299 | swap_commutative_operands_p (rtx x, rtx y) |
e5c56fd9 | 3300 | { |
9b3bd424 RH |
3301 | return (commutative_operand_precedence (x) |
3302 | < commutative_operand_precedence (y)); | |
e5c56fd9 | 3303 | } |
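/* Usage sketch (illustrative, not part of the original source):
   canonicalize the operand order of a commutative pair in place.
   The helper name canonicalize_comm_pair is hypothetical.  */

static void
canonicalize_comm_pair (rtx *op0, rtx *op1)
{
  if (swap_commutative_operands_p (*op0, *op1))
    {
      rtx tmp = *op0;
      *op0 = *op1;
      *op1 = tmp;
    }
}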
2dfa9a87 MH |
3304 | |
3305 | /* Return 1 if X is an autoincrement side effect and the register is | |
3306 | not the stack pointer. */ | |
3307 | int | |
f7d504c2 | 3308 | auto_inc_p (const_rtx x) |
2dfa9a87 MH |
3309 | { |
3310 | switch (GET_CODE (x)) | |
3311 | { | |
3312 | case PRE_INC: | |
3313 | case POST_INC: | |
3314 | case PRE_DEC: | |
3315 | case POST_DEC: | |
3316 | case PRE_MODIFY: | |
3317 | case POST_MODIFY: | |
3318 | /* There are no REG_INC notes for SP. */ | |
3319 | if (XEXP (x, 0) != stack_pointer_rtx) | |
3320 | return 1; | |
3321 | default: | |
3322 | break; | |
3323 | } | |
3324 | return 0; | |
3325 | } | |
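/* For example (illustrative): auto_inc_p returns 1 for
   (post_inc:SI (reg:SI R)) when R is not the stack pointer, but 0 for
   a plain (reg:SI R) and for (pre_dec:SI (reg:SI sp)), since SP gets
   no REG_INC notes.  */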
3b10cf4b | 3326 | |
f9da5064 | 3327 | /* Return nonzero if IN contains a piece of rtl that has the address LOC. */ |
db7ba742 | 3328 | int |
f7d504c2 | 3329 | loc_mentioned_in_p (rtx *loc, const_rtx in) |
db7ba742 | 3330 | { |
a52b023a PB |
3331 | enum rtx_code code; |
3332 | const char *fmt; | |
db7ba742 R |
3333 | int i, j; |
3334 | ||
a52b023a PB |
3335 | if (!in) |
3336 | return 0; | |
3337 | ||
3338 | code = GET_CODE (in); | |
3339 | fmt = GET_RTX_FORMAT (code); | |
db7ba742 R |
3340 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
3341 | { | |
db7ba742 R |
3342 | if (fmt[i] == 'e') |
3343 | { | |
e0651058 | 3344 | if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i))) |
db7ba742 R |
3345 | return 1; |
3346 | } | |
3347 | else if (fmt[i] == 'E') | |
3348 | for (j = XVECLEN (in, i) - 1; j >= 0; j--) | |
e0651058 AO |
3349 | if (loc == &XVECEXP (in, i, j) |
3350 | || loc_mentioned_in_p (loc, XVECEXP (in, i, j))) | |
db7ba742 R |
3351 | return 1; |
3352 | } | |
3353 | return 0; | |
3354 | } | |
ddef6bc7 | 3355 | |
bb51e270 RS |
3356 | /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE, |
3357 | and SUBREG_BYTE, return the bit offset where the subreg begins | |
3358 | (counting from the least significant bit of the operand). */ | |
33aceff2 JW |
3359 | |
3360 | unsigned int | |
bb51e270 RS |
3361 | subreg_lsb_1 (enum machine_mode outer_mode, |
3362 | enum machine_mode inner_mode, | |
3363 | unsigned int subreg_byte) | |
33aceff2 | 3364 | { |
33aceff2 JW |
3365 | unsigned int bitpos; |
3366 | unsigned int byte; | |
3367 | unsigned int word; | |
3368 | ||
3369 | /* A paradoxical subreg begins at bit position 0. */ | |
5511bc5a | 3370 | if (GET_MODE_PRECISION (outer_mode) > GET_MODE_PRECISION (inner_mode)) |
33aceff2 JW |
3371 | return 0; |
3372 | ||
3373 | if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN) | |
3374 | /* If the subreg crosses a word boundary ensure that | |
3375 | it also begins and ends on a word boundary. */ | |
41374e13 NS |
3376 | gcc_assert (!((subreg_byte % UNITS_PER_WORD |
3377 | + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD | |
3378 | && (subreg_byte % UNITS_PER_WORD | |
3379 | || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD))); | |
33aceff2 JW |
3380 | |
3381 | if (WORDS_BIG_ENDIAN) | |
3382 | word = (GET_MODE_SIZE (inner_mode) | |
bb51e270 | 3383 | - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD; |
33aceff2 | 3384 | else |
bb51e270 | 3385 | word = subreg_byte / UNITS_PER_WORD; |
33aceff2 JW |
3386 | bitpos = word * BITS_PER_WORD; |
3387 | ||
3388 | if (BYTES_BIG_ENDIAN) | |
3389 | byte = (GET_MODE_SIZE (inner_mode) | |
bb51e270 | 3390 | - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD; |
33aceff2 | 3391 | else |
bb51e270 | 3392 | byte = subreg_byte % UNITS_PER_WORD; |
33aceff2 JW |
3393 | bitpos += byte * BITS_PER_UNIT; |
3394 | ||
3395 | return bitpos; | |
3396 | } | |
3397 | ||
bb51e270 RS |
3398 | /* Given a subreg X, return the bit offset where the subreg begins |
3399 | (counting from the least significant bit of the reg). */ | |
3400 | ||
3401 | unsigned int | |
f7d504c2 | 3402 | subreg_lsb (const_rtx x) |
bb51e270 RS |
3403 | { |
3404 | return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)), | |
3405 | SUBREG_BYTE (x)); | |
3406 | } | |
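/* Worked example (illustrative): on a little-endian target with
   32-bit words, (subreg:SI (reg:DI R) 4) names the high half of R, so
   subreg_lsb_1 (SImode, DImode, 4) returns 32, while SUBREG_BYTE 0
   gives 0.  On a fully big-endian target the mapping is reversed:
   SUBREG_BYTE 0 names the most significant half and yields bit 32.  */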
3407 | ||
f1f4e530 | 3408 | /* Fill in information about a subreg of a hard register. |
ddef6bc7 JJ |
3409 | xregno - A regno of an inner hard subreg_reg (or what will become one). |
3410 | xmode - The mode of xregno. | |
3411 | offset - The byte offset. | |
3412 | ymode - The mode of a top level SUBREG (or what may become one). | |
0cb07998 RS |
3413 | info - Pointer to structure to fill in. |
3414 | ||
3415 | Rather than considering one particular inner register (and thus one | |
3416 | particular "outer" register) in isolation, this function really uses | |
3417 | XREGNO as a model for a sequence of isomorphic hard registers. Thus the | |
3418 | function does not check whether adding INFO->offset to XREGNO gives | |
3419 | a valid hard register; even if INFO->offset + XREGNO is out of range, | |
3420 | there might be another register of the same type that is in range. | |
3421 | Likewise it doesn't check whether HARD_REGNO_MODE_OK accepts the new | |
3422 | register, since that can depend on things like whether the final | |
3423 | register number is even or odd. Callers that want to check whether | |
3424 | this particular subreg can be replaced by a simple (reg ...) should | |
3425 | use simplify_subreg_regno. */ | |
3426 | ||
c619e982 | 3427 | void |
f1f4e530 JM |
3428 | subreg_get_info (unsigned int xregno, enum machine_mode xmode, |
3429 | unsigned int offset, enum machine_mode ymode, | |
3430 | struct subreg_info *info) | |
04c5580f | 3431 | { |
8521c414 | 3432 | int nregs_xmode, nregs_ymode; |
04c5580f | 3433 | int mode_multiple, nregs_multiple; |
f1f4e530 | 3434 | int offset_adj, y_offset, y_offset_adj; |
8521c414 | 3435 | int regsize_xmode, regsize_ymode; |
f1f4e530 | 3436 | bool rknown; |
04c5580f | 3437 | |
41374e13 | 3438 | gcc_assert (xregno < FIRST_PSEUDO_REGISTER); |
04c5580f | 3439 | |
f1f4e530 JM |
3440 | rknown = false; |
3441 | ||
dd79bb7e GK |
3442 | /* If a non-scalar mode has holes when stored in registers, we expect
3443 | it to be made up of its units concatenated together. */
8521c414 | 3444 | if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)) |
dd79bb7e | 3445 | { |
8521c414 JM |
3446 | enum machine_mode xmode_unit; |
3447 | ||
3448 | nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode); | |
3449 | if (GET_MODE_INNER (xmode) == VOIDmode) | |
3450 | xmode_unit = xmode; | |
3451 | else | |
3452 | xmode_unit = GET_MODE_INNER (xmode); | |
3453 | gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit)); | |
3454 | gcc_assert (nregs_xmode | |
3455 | == (GET_MODE_NUNITS (xmode) | |
3456 | * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit))); | |
3457 | gcc_assert (hard_regno_nregs[xregno][xmode] | |
3458 | == (hard_regno_nregs[xregno][xmode_unit] | |
3459 | * GET_MODE_NUNITS (xmode))); | |
dd79bb7e GK |
3460 | |
3461 | /* You can only ask for a SUBREG of a value with holes in the middle | |
3462 | if you don't cross the holes. (Such a SUBREG should be done by | |
3463 | picking a different register class, or doing it in memory if | |
3464 | necessary.) An example of a value with holes is XCmode on 32-bit | |
3465 | x86 with -m128bit-long-double; it's represented in 6 32-bit registers, | |
b8698a0f | 3466 | 3 for each part, but in memory it's two 128-bit parts. |
dd79bb7e GK |
3467 | Padding is assumed to be at the end (not necessarily the 'high part') |
3468 | of each unit. */ | |
b8698a0f | 3469 | if ((offset / GET_MODE_SIZE (xmode_unit) + 1 |
8521c414 JM |
3470 | < GET_MODE_NUNITS (xmode)) |
3471 | && (offset / GET_MODE_SIZE (xmode_unit) | |
dd79bb7e | 3472 | != ((offset + GET_MODE_SIZE (ymode) - 1) |
8521c414 | 3473 | / GET_MODE_SIZE (xmode_unit)))) |
f1f4e530 JM |
3474 | { |
3475 | info->representable_p = false; | |
3476 | rknown = true; | |
3477 | } | |
dd79bb7e GK |
3478 | } |
3479 | else | |
3480 | nregs_xmode = hard_regno_nregs[xregno][xmode]; | |
b8698a0f | 3481 | |
66fd46b6 | 3482 | nregs_ymode = hard_regno_nregs[xregno][ymode]; |
04c5580f | 3483 | |
dd79bb7e | 3484 | /* Paradoxical subregs are otherwise valid. */ |
f1f4e530 JM |
3485 | if (!rknown |
3486 | && offset == 0 | |
5511bc5a | 3487 | && GET_MODE_PRECISION (ymode) > GET_MODE_PRECISION (xmode)) |
f1f4e530 JM |
3488 | { |
3489 | info->representable_p = true; | |
3490 | /* If this is a big endian paradoxical subreg, which uses more | |
3491 | actual hard registers than the original register, we must | |
3492 | return a negative offset so that we find the proper highpart | |
3493 | of the register. */ | |
3494 | if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD | |
c0a6a1ef | 3495 | ? REG_WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN) |
f1f4e530 JM |
3496 | info->offset = nregs_xmode - nregs_ymode; |
3497 | else | |
3498 | info->offset = 0; | |
3499 | info->nregs = nregs_ymode; | |
3500 | return; | |
3501 | } | |
04c5580f | 3502 | |
8521c414 JM |
3503 | /* If registers store different numbers of bits in the different |
3504 | modes, we cannot generally form this subreg. */ | |
f1f4e530 | 3505 | if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode) |
5f7fc2b8 JM |
3506 | && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode) |
3507 | && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0 | |
3508 | && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0) | |
f1f4e530 JM |
3509 | { |
3510 | regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode; | |
f1f4e530 | 3511 | regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode; |
f1f4e530 JM |
3512 | if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1) |
3513 | { | |
3514 | info->representable_p = false; | |
3515 | info->nregs | |
3516 | = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode; | |
3517 | info->offset = offset / regsize_xmode; | |
3518 | return; | |
3519 | } | |
3520 | if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1) | |
3521 | { | |
3522 | info->representable_p = false; | |
3523 | info->nregs | |
3524 | = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode; | |
3525 | info->offset = offset / regsize_xmode; | |
3526 | return; | |
3527 | } | |
3528 | } | |
8521c414 | 3529 | |
dd79bb7e | 3530 | /* Lowpart subregs are otherwise valid. */ |
f1f4e530 JM |
3531 | if (!rknown && offset == subreg_lowpart_offset (ymode, xmode)) |
3532 | { | |
3533 | info->representable_p = true; | |
3534 | rknown = true; | |
a446b4e8 JM |
3535 | |
3536 | if (offset == 0 || nregs_xmode == nregs_ymode) | |
3537 | { | |
3538 | info->offset = 0; | |
3539 | info->nregs = nregs_ymode; | |
3540 | return; | |
3541 | } | |
f1f4e530 | 3542 | } |
04c5580f | 3543 | |
dd79bb7e GK |
3544 | /* This should always pass, otherwise we don't know how to verify |
3545 | the constraint. These conditions may be relaxed but | |
3546 | subreg_regno_offset would need to be redesigned. */ | |
41374e13 | 3547 | gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0); |
41374e13 | 3548 | gcc_assert ((nregs_xmode % nregs_ymode) == 0); |
04c5580f | 3549 | |
c0a6a1ef BS |
3550 | if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN |
3551 | && GET_MODE_SIZE (xmode) > UNITS_PER_WORD) | |
3552 | { | |
3553 | HOST_WIDE_INT xsize = GET_MODE_SIZE (xmode); | |
3554 | HOST_WIDE_INT ysize = GET_MODE_SIZE (ymode); | |
3555 | HOST_WIDE_INT off_low = offset & (ysize - 1); | |
3556 | HOST_WIDE_INT off_high = offset & ~(ysize - 1); | |
3557 | offset = (xsize - ysize - off_high) | off_low; | |
3558 | } | |
b20b352b | 3559 | /* The XMODE value can be seen as a vector of NREGS_XMODE
dcc24678 | 3560 | values. The subreg must represent a lowpart of a given field.
04c5580f | 3561 | Compute which field it is. */
f1f4e530 JM |
3562 | offset_adj = offset; |
3563 | offset_adj -= subreg_lowpart_offset (ymode, | |
3564 | mode_for_size (GET_MODE_BITSIZE (xmode) | |
3565 | / nregs_xmode, | |
3566 | MODE_INT, 0)); | |
04c5580f | 3567 | |
dd79bb7e | 3568 | /* Size of ymode must not be greater than the size of xmode. */ |
04c5580f | 3569 | mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode); |
41374e13 | 3570 | gcc_assert (mode_multiple != 0); |
04c5580f JH |
3571 | |
3572 | y_offset = offset / GET_MODE_SIZE (ymode); | |
f1f4e530 JM |
3573 | y_offset_adj = offset_adj / GET_MODE_SIZE (ymode); |
3574 | nregs_multiple = nregs_xmode / nregs_ymode; | |
41374e13 | 3575 | |
f1f4e530 | 3576 | gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0); |
41374e13 NS |
3577 | gcc_assert ((mode_multiple % nregs_multiple) == 0); |
3578 | ||
f1f4e530 JM |
3579 | if (!rknown) |
3580 | { | |
3581 | info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple))); | |
3582 | rknown = true; | |
3583 | } | |
3584 | info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode; | |
3585 | info->nregs = nregs_ymode; | |
3586 | } | |
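/* Worked example (illustrative): on a 32-bit little-endian target
   where DImode occupies two hard registers, a query for
   (subreg:SI (reg:DI 0) 4) has nregs_xmode == 2 and nregs_ymode == 1,
   and the function fills in representable_p == true, offset == 1 and
   nregs == 1: the subreg is simply the second register of the pair.  */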
3587 | ||
3588 | /* This function returns the regno offset of a subreg expression. | |
3589 | xregno - A regno of an inner hard subreg_reg (or what will become one). | |
3590 | xmode - The mode of xregno. | |
3591 | offset - The byte offset. | |
3592 | ymode - The mode of a top level SUBREG (or what may become one). | |
3593 | RETURN - The regno offset which would be used. */ | |
3594 | unsigned int | |
3595 | subreg_regno_offset (unsigned int xregno, enum machine_mode xmode, | |
3596 | unsigned int offset, enum machine_mode ymode) | |
3597 | { | |
3598 | struct subreg_info info; | |
3599 | subreg_get_info (xregno, xmode, offset, ymode, &info); | |
3600 | return info.offset; | |
3601 | } | |
3602 | ||
3603 | /* This function returns true when the offset is representable via | |
3604 | subreg_regno_offset in the given regno.
3605 | xregno - A regno of an inner hard subreg_reg (or what will become one). | |
3606 | xmode - The mode of xregno. | |
3607 | offset - The byte offset. | |
3608 | ymode - The mode of a top level SUBREG (or what may become one). | |
3609 | RETURN - Whether the offset is representable. */ | |
3610 | bool | |
3611 | subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode, | |
3612 | unsigned int offset, enum machine_mode ymode) | |
3613 | { | |
3614 | struct subreg_info info; | |
3615 | subreg_get_info (xregno, xmode, offset, ymode, &info); | |
05cee290 | 3616 | return info.representable_p; |
04c5580f JH |
3617 | } |
3618 | ||
eef302d2 RS |
3619 | /* Return the number of a YMODE register to which |
3620 | ||
3621 | (subreg:YMODE (reg:XMODE XREGNO) OFFSET) | |
3622 | ||
3623 | can be simplified. Return -1 if the subreg can't be simplified. | |
3624 | ||
3625 | XREGNO is a hard register number. */ | |
3626 | ||
3627 | int | |
3628 | simplify_subreg_regno (unsigned int xregno, enum machine_mode xmode, | |
3629 | unsigned int offset, enum machine_mode ymode) | |
3630 | { | |
3631 | struct subreg_info info; | |
3632 | unsigned int yregno; | |
3633 | ||
3634 | #ifdef CANNOT_CHANGE_MODE_CLASS | |
3635 | /* Give the backend a chance to disallow the mode change. */ | |
3636 | if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT | |
3637 | && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT | |
55a2c322 VM |
3638 | && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode) |
3639 | /* We can use mode change in LRA for some transformations. */ | |
3640 | && ! lra_in_progress) | |
eef302d2 RS |
3641 | return -1; |
3642 | #endif | |
3643 | ||
3644 | /* We shouldn't simplify stack-related registers. */ | |
3645 | if ((!reload_completed || frame_pointer_needed) | |
d4e0d036 | 3646 | && xregno == FRAME_POINTER_REGNUM) |
eef302d2 RS |
3647 | return -1; |
3648 | ||
3649 | if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
98072ee5 | 3650 | && xregno == ARG_POINTER_REGNUM) |
eef302d2 RS |
3651 | return -1; |
3652 | ||
55a2c322 VM |
3653 | if (xregno == STACK_POINTER_REGNUM |
3654 | /* We should convert a hard stack register in LRA if it is
3655 | possible. */
3656 | && ! lra_in_progress) | |
eef302d2 RS |
3657 | return -1; |
3658 | ||
3659 | /* Try to get the register offset. */ | |
3660 | subreg_get_info (xregno, xmode, offset, ymode, &info); | |
3661 | if (!info.representable_p) | |
3662 | return -1; | |
3663 | ||
3664 | /* Make sure that the offsetted register value is in range. */ | |
3665 | yregno = xregno + info.offset; | |
3666 | if (!HARD_REGISTER_NUM_P (yregno)) | |
3667 | return -1; | |
3668 | ||
3669 | /* See whether (reg:YMODE YREGNO) is valid. | |
3670 | ||
3671 | ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid. | |
eb93b31f EB |
3672 | This is a kludge to work around how complex FP arguments are passed |
3673 | on IA-64 and should be fixed. See PR target/49226. */ | |
eef302d2 RS |
3674 | if (!HARD_REGNO_MODE_OK (yregno, ymode) |
3675 | && HARD_REGNO_MODE_OK (xregno, xmode)) | |
3676 | return -1; | |
3677 | ||
3678 | return (int) yregno; | |
3679 | } | |
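/* Continuing the example after subreg_get_info (illustrative): on such
   a target, simplify_subreg_regno (0, DImode, 4, SImode) would return 1,
   assuming hard registers 0 and 1 are ordinary allocatable registers
   that are valid in these modes.  */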
3680 | ||
dc297297 | 3681 | /* Return the final regno that a subreg expression refers to. */ |
a6a2274a | 3682 | unsigned int |
f7d504c2 | 3683 | subreg_regno (const_rtx x) |
ddef6bc7 JJ |
3684 | { |
3685 | unsigned int ret; | |
3686 | rtx subreg = SUBREG_REG (x); | |
3687 | int regno = REGNO (subreg); | |
3688 | ||
a6a2274a KH |
3689 | ret = regno + subreg_regno_offset (regno, |
3690 | GET_MODE (subreg), | |
ddef6bc7 JJ |
3691 | SUBREG_BYTE (x), |
3692 | GET_MODE (x)); | |
3693 | return ret; | |
3694 | ||
3695 | } | |
f1f4e530 JM |
3696 | |
3697 | /* Return the number of registers that a subreg expression refers | |
3698 | to. */ | |
3699 | unsigned int | |
f7d504c2 | 3700 | subreg_nregs (const_rtx x) |
ba49cb7b KZ |
3701 | { |
3702 | return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x); | |
3703 | } | |
3704 | ||
3705 | /* Return the number of registers that the subreg expression X refers
3706 | to, taking REGNO as the register number of the inner register. This
3707 | is a variant of subreg_nregs that allows the regno to be passed in. */
3708 | ||
3709 | unsigned int | |
3710 | subreg_nregs_with_regno (unsigned int regno, const_rtx x) | |
f1f4e530 JM |
3711 | { |
3712 | struct subreg_info info; | |
3713 | rtx subreg = SUBREG_REG (x); | |
f1f4e530 JM |
3714 | |
3715 | subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x), | |
3716 | &info); | |
3717 | return info.nregs; | |
3718 | } | |
3719 | ||
ba49cb7b | 3720 | |
833366d6 JH |
3721 | struct parms_set_data |
3722 | { | |
3723 | int nregs; | |
3724 | HARD_REG_SET regs; | |
3725 | }; | |
3726 | ||
3727 | /* Helper function for noticing stores to parameter registers. */ | |
3728 | static void | |
7bc980e1 | 3729 | parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data) |
833366d6 | 3730 | { |
1634b18f | 3731 | struct parms_set_data *const d = (struct parms_set_data *) data; |
833366d6 JH |
3732 | if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER |
3733 | && TEST_HARD_REG_BIT (d->regs, REGNO (x))) | |
3734 | { | |
3735 | CLEAR_HARD_REG_BIT (d->regs, REGNO (x)); | |
3736 | d->nregs--; | |
3737 | } | |
3738 | } | |
3739 | ||
a6a2274a | 3740 | /* Look backward for the first parameter to be loaded.
b2df20b4 DJ |
3741 | Note that loads of all parameters will not necessarily be |
3742 | found if CSE has eliminated some of them (e.g., an argument | |
3743 | to the outer function is passed down as a parameter). | |
833366d6 | 3744 | Do not skip BOUNDARY. */ |
62fc98cc | 3745 | rtx_insn * |
9321cf00 | 3746 | find_first_parameter_load (rtx_insn *call_insn, rtx_insn *boundary) |
833366d6 JH |
3747 | { |
3748 | struct parms_set_data parm; | |
9321cf00 DM |
3749 | rtx p; |
3750 | rtx_insn *before, *first_set; | |
833366d6 JH |
3751 | |
3752 | /* Since different machines initialize their parameter registers | |
3753 | in different orders, assume nothing. Collect the set of all | |
3754 | parameter registers. */ | |
3755 | CLEAR_HARD_REG_SET (parm.regs); | |
3756 | parm.nregs = 0; | |
3757 | for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1)) | |
3758 | if (GET_CODE (XEXP (p, 0)) == USE | |
f8cfc6aa | 3759 | && REG_P (XEXP (XEXP (p, 0), 0))) |
833366d6 | 3760 | { |
41374e13 | 3761 | gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER); |
833366d6 JH |
3762 | |
3763 | /* We only care about registers which can hold function | |
3764 | arguments. */ | |
3765 | if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0)))) | |
3766 | continue; | |
3767 | ||
3768 | SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0))); | |
3769 | parm.nregs++; | |
3770 | } | |
3771 | before = call_insn; | |
b2df20b4 | 3772 | first_set = call_insn; |
833366d6 JH |
3773 | |
3774 | /* Search backward for the first set of a register in this set. */ | |
3775 | while (parm.nregs && before != boundary) | |
3776 | { | |
3777 | before = PREV_INSN (before); | |
3778 | ||
3779 | /* It is possible that some loads got CSEed from one call to | |
3780 | another. Stop in that case. */ | |
4b4bf941 | 3781 | if (CALL_P (before)) |
833366d6 JH |
3782 | break; |
3783 | ||
dbc1a163 | 3784 | /* Our caller must either ensure that we will find all sets
833366d6 | 3785 | (in case the code has not been optimized yet), or take care
eaec9b3d | 3786 | of possible labels by setting BOUNDARY to the preceding
833366d6 | 3787 | CODE_LABEL. */
4b4bf941 | 3788 | if (LABEL_P (before)) |
dbc1a163 | 3789 | { |
41374e13 | 3790 | gcc_assert (before == boundary); |
dbc1a163 RH |
3791 | break; |
3792 | } | |
833366d6 | 3793 | |
0d025d43 | 3794 | if (INSN_P (before)) |
b2df20b4 DJ |
3795 | { |
3796 | int nregs_old = parm.nregs; | |
3797 | note_stores (PATTERN (before), parms_set, &parm); | |
3798 | /* If we found something that did not set a parameter reg, | |
3799 | we're done. Do not keep going, as that might result | |
3800 | in hoisting an insn before the setting of a pseudo | |
3801 | that is used by the hoisted insn. */ | |
3802 | if (nregs_old != parm.nregs) | |
3803 | first_set = before; | |
3804 | else | |
3805 | break; | |
3806 | } | |
833366d6 | 3807 | } |
9321cf00 | 3808 | return first_set; |
833366d6 | 3809 | } |
3dec4024 | 3810 | |
14b493d6 | 3811 | /* Return true if we should avoid inserting code between INSN and the
3dec4024 JH |
3812 | preceding call instruction. */
3813 | ||
3814 | bool | |
e4685bc8 | 3815 | keep_with_call_p (const rtx_insn *insn) |
3dec4024 JH |
3816 | { |
3817 | rtx set; | |
3818 | ||
3819 | if (INSN_P (insn) && (set = single_set (insn)) != NULL) | |
3820 | { | |
f8cfc6aa | 3821 | if (REG_P (SET_DEST (set)) |
5df533b3 | 3822 | && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER |
3dec4024 JH |
3823 | && fixed_regs[REGNO (SET_DEST (set))] |
3824 | && general_operand (SET_SRC (set), VOIDmode)) | |
3825 | return true; | |
f8cfc6aa | 3826 | if (REG_P (SET_SRC (set)) |
82f81f18 | 3827 | && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set))) |
f8cfc6aa | 3828 | && REG_P (SET_DEST (set)) |
3dec4024 JH |
3829 | && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER) |
3830 | return true; | |
bc204393 RH |
3831 | /* There may be a stack pop just after the call and before the store |
3832 | of the return register. Search for the actual store when deciding | |
3833 | if we can break or not. */ | |
3dec4024 JH |
3834 | if (SET_DEST (set) == stack_pointer_rtx) |
3835 | { | |
75547801 | 3836 | /* This CONST_CAST is okay because next_nonnote_insn just |
4e9b57fa | 3837 | returns its argument and we assign it to a const_rtx |
75547801 | 3838 | variable. */ |
e4685bc8 TS |
3839 | const rtx_insn *i2 |
3840 | = next_nonnote_insn (const_cast<rtx_insn *> (insn)); | |
bc204393 | 3841 | if (i2 && keep_with_call_p (i2)) |
3dec4024 JH |
3842 | return true; |
3843 | } | |
3844 | } | |
3845 | return false; | |
3846 | } | |
71d2c5bd | 3847 | |
432f982f JH |
3848 | /* Return true if LABEL is a target of JUMP_INSN. This applies only |
3849 | to non-complex jumps. That is, direct unconditional, conditional, | |
3850 | and tablejumps, but not computed jumps or returns. It also does | |
3851 | not apply to the fallthru case of a conditional jump. */ | |
3852 | ||
3853 | bool | |
c5241a21 | 3854 | label_is_jump_target_p (const_rtx label, const rtx_insn *jump_insn) |
432f982f JH |
3855 | { |
3856 | rtx tmp = JUMP_LABEL (jump_insn); | |
8942ee0f | 3857 | rtx_jump_table_data *table; |
432f982f JH |
3858 | |
3859 | if (label == tmp) | |
3860 | return true; | |
3861 | ||
8942ee0f | 3862 | if (tablejump_p (jump_insn, NULL, &table)) |
432f982f | 3863 | { |
95c43227 | 3864 | rtvec vec = table->get_labels (); |
432f982f JH |
3865 | int i, veclen = GET_NUM_ELEM (vec); |
3866 | ||
3867 | for (i = 0; i < veclen; ++i) | |
3868 | if (XEXP (RTVEC_ELT (vec, i), 0) == label) | |
3869 | return true; | |
3870 | } | |
3871 | ||
cb2f563b HPN |
3872 | if (find_reg_note (jump_insn, REG_LABEL_TARGET, label)) |
3873 | return true; | |
3874 | ||
432f982f JH |
3875 | return false; |
3876 | } | |
3877 | ||
f894b69b PB |
3878 | \f |
3879 | /* Return an estimate of the cost of computing rtx X. | |
3880 | One use is in cse, to decide which expression to keep in the hash table. | |
3881 | Another is in rtl generation, to pick the cheapest way to multiply. | |
b8698a0f | 3882 | Other uses like the latter are expected in the future. |
f40751dd | 3883 | |
68f932c4 RS |
3884 | X appears as operand OPNO in an expression with code OUTER_CODE. |
3885 | SPEED specifies whether costs optimized for speed or size should | |
f40751dd | 3886 | be returned. */ |
f894b69b PB |
3887 | |
3888 | int | |
68f932c4 | 3889 | rtx_cost (rtx x, enum rtx_code outer_code, int opno, bool speed) |
f894b69b PB |
3890 | { |
3891 | int i, j; | |
3892 | enum rtx_code code; | |
3893 | const char *fmt; | |
3894 | int total; | |
e098c169 | 3895 | int factor; |
f894b69b PB |
3896 | |
3897 | if (x == 0) | |
3898 | return 0; | |
3899 | ||
e098c169 HPN |
3900 | /* A size N times larger than UNITS_PER_WORD likely needs N times as |
3901 | many insns, taking N times as long. */ | |
3902 | factor = GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD; | |
3903 | if (factor == 0) | |
3904 | factor = 1; | |
3905 | ||
f894b69b PB |
3906 | /* Compute the default costs of certain things. |
3907 | Note that targetm.rtx_costs can override the defaults. */ | |
3908 | ||
3909 | code = GET_CODE (x); | |
3910 | switch (code) | |
3911 | { | |
3912 | case MULT: | |
e098c169 HPN |
3913 | /* Multiplication has time-complexity O(N*N), where N is the |
3914 | number of units (translated from digits) when using | |
3915 | schoolbook long multiplication. */ | |
3916 | total = factor * factor * COSTS_N_INSNS (5); | |
f894b69b PB |
3917 | break; |
3918 | case DIV: | |
3919 | case UDIV: | |
3920 | case MOD: | |
3921 | case UMOD: | |
e098c169 HPN |
3922 | /* Similarly, complexity for schoolbook long division. */ |
3923 | total = factor * factor * COSTS_N_INSNS (7); | |
f894b69b PB |
3924 | break; |
3925 | case USE: | |
db3edc20 | 3926 | /* Used in combine.c as a marker. */ |
f894b69b PB |
3927 | total = 0; |
3928 | break; | |
e098c169 HPN |
3929 | case SET: |
3930 | /* A SET doesn't have a mode, so let's look at the SET_DEST to get | |
3931 | the mode for the factor. */ | |
3932 | factor = GET_MODE_SIZE (GET_MODE (SET_DEST (x))) / UNITS_PER_WORD; | |
3933 | if (factor == 0) | |
3934 | factor = 1; | |
3935 | /* Fall through. */
f894b69b | 3936 | default: |
e098c169 | 3937 | total = factor * COSTS_N_INSNS (1); |
f894b69b PB |
3938 | } |
3939 | ||
3940 | switch (code) | |
3941 | { | |
3942 | case REG: | |
3943 | return 0; | |
3944 | ||
3945 | case SUBREG: | |
edb81165 | 3946 | total = 0; |
f894b69b PB |
3947 | /* If we can't tie these modes, make this expensive. The larger |
3948 | the mode, the more expensive it is. */ | |
3949 | if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x)))) | |
e098c169 | 3950 | return COSTS_N_INSNS (2 + factor); |
f894b69b PB |
3951 | break; |
3952 | ||
3953 | default: | |
68f932c4 | 3954 | if (targetm.rtx_costs (x, code, outer_code, opno, &total, speed)) |
f894b69b PB |
3955 | return total; |
3956 | break; | |
3957 | } | |
3958 | ||
3959 | /* Sum the costs of the sub-rtxes, plus the cost of this operation,
3960 | which is already in total. */
3961 | ||
3962 | fmt = GET_RTX_FORMAT (code); | |
3963 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
3964 | if (fmt[i] == 'e') | |
68f932c4 | 3965 | total += rtx_cost (XEXP (x, i), code, i, speed); |
f894b69b PB |
3966 | else if (fmt[i] == 'E') |
3967 | for (j = 0; j < XVECLEN (x, i); j++) | |
68f932c4 | 3968 | total += rtx_cost (XVECEXP (x, i, j), code, i, speed); |
f894b69b PB |
3969 | |
3970 | return total; | |
3971 | } | |
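/* Worked example (illustrative): on a 32-bit target a DImode MULT has
   factor == 2, so the default estimate above is
   2 * 2 * COSTS_N_INSNS (5) == COSTS_N_INSNS (20), unless
   targetm.rtx_costs overrides it; the costs of the sub-rtxes are then
   added on top.  */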
22939744 BS |
3972 | |
3973 | /* Fill in the structure C with information about both speed and size rtx | |
68f932c4 | 3974 | costs for X, which is operand OPNO in an expression with code OUTER. */ |
22939744 BS |
3975 | |
3976 | void | |
68f932c4 RS |
3977 | get_full_rtx_cost (rtx x, enum rtx_code outer, int opno, |
3978 | struct full_rtx_costs *c) | |
22939744 | 3979 | { |
68f932c4 RS |
3980 | c->speed = rtx_cost (x, outer, opno, true); |
3981 | c->size = rtx_cost (x, outer, opno, false); | |
22939744 BS |
3982 | } |
3983 | ||
f894b69b PB |
3984 | \f |
3985 | /* Return cost of address expression X. | |
b8698a0f | 3986 | Expect that X is a properly formed address reference.
f40751dd JH |
3987 | |
3988 | The SPEED parameter specifies whether costs optimized for speed or size
3989 | should be returned. */
f894b69b PB |
3990 | |
3991 | int | |
09e881c9 | 3992 | address_cost (rtx x, enum machine_mode mode, addr_space_t as, bool speed) |
f894b69b | 3993 | { |
f894b69b PB |
3994 | /* We may be asked for the cost of various unusual addresses, such as the
3995 | operands of a push instruction. It is not worthwhile to complicate
3996 | the target hook with such cases. */
3997 | ||
09e881c9 | 3998 | if (!memory_address_addr_space_p (mode, x, as)) |
f894b69b PB |
3999 | return 1000; |
4000 | ||
b413068c | 4001 | return targetm.address_cost (x, mode, as, speed); |
f894b69b PB |
4002 | } |
4003 | ||
4004 | /* If the target doesn't override, compute the cost as with arithmetic. */ | |
4005 | ||
4006 | int | |
b413068c | 4007 | default_address_cost (rtx x, enum machine_mode, addr_space_t, bool speed) |
f894b69b | 4008 | { |
68f932c4 | 4009 | return rtx_cost (x, MEM, 0, speed); |
f894b69b | 4010 | } |
2f93eea8 PB |
4011 | \f |
4012 | ||
4013 | unsigned HOST_WIDE_INT | |
fa233e34 | 4014 | nonzero_bits (const_rtx x, enum machine_mode mode) |
2f93eea8 PB |
4015 | { |
4016 | return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0); | |
4017 | } | |
4018 | ||
4019 | unsigned int | |
fa233e34 | 4020 | num_sign_bit_copies (const_rtx x, enum machine_mode mode) |
2f93eea8 PB |
4021 | { |
4022 | return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0); | |
4023 | } | |
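/* Illustrative examples: in SImode, nonzero_bits of
   (and:SI X (const_int 255)) is at most 0xff; num_sign_bit_copies of
   (const_int -1) is 32, of (const_int 1) is 31, and of
   (sign_extend:SI (reg:QI R)) is at least 25 (= 32 - 8 + 1).  */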
4024 | ||
4025 | /* The function cached_nonzero_bits is a wrapper around nonzero_bits1. | |
4026 | It avoids exponential behavior in nonzero_bits1 when X has | |
4027 | identical subexpressions on the first or the second level. */ | |
4028 | ||
4029 | static unsigned HOST_WIDE_INT | |
fa233e34 | 4030 | cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x, |
2f93eea8 PB |
4031 | enum machine_mode known_mode, |
4032 | unsigned HOST_WIDE_INT known_ret) | |
4033 | { | |
4034 | if (x == known_x && mode == known_mode) | |
4035 | return known_ret; | |
4036 | ||
4037 | /* Try to find identical subexpressions. If found call | |
4038 | nonzero_bits1 on X with the subexpressions as KNOWN_X and the | |
4039 | precomputed value for the subexpression as KNOWN_RET. */ | |
4040 | ||
4041 | if (ARITHMETIC_P (x)) | |
4042 | { | |
4043 | rtx x0 = XEXP (x, 0); | |
4044 | rtx x1 = XEXP (x, 1); | |
4045 | ||
4046 | /* Check the first level. */ | |
4047 | if (x0 == x1) | |
4048 | return nonzero_bits1 (x, mode, x0, mode, | |
4049 | cached_nonzero_bits (x0, mode, known_x, | |
4050 | known_mode, known_ret)); | |
4051 | ||
4052 | /* Check the second level. */ | |
4053 | if (ARITHMETIC_P (x0) | |
4054 | && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1))) | |
4055 | return nonzero_bits1 (x, mode, x1, mode, | |
4056 | cached_nonzero_bits (x1, mode, known_x, | |
4057 | known_mode, known_ret)); | |
4058 | ||
4059 | if (ARITHMETIC_P (x1) | |
4060 | && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1))) | |
4061 | return nonzero_bits1 (x, mode, x0, mode, | |
4062 | cached_nonzero_bits (x0, mode, known_x, | |
4063 | known_mode, known_ret)); | |
4064 | } | |
4065 | ||
4066 | return nonzero_bits1 (x, mode, known_x, known_mode, known_ret); | |
4067 | } | |
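/* For instance (illustrative), for (plus:SI (reg:SI A) (reg:SI A)),
   where both operands are the same shared rtx, the first-level check
   above computes the nonzero bits of A once and reuses the result for
   both occurrences instead of recursing twice.  */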
4068 | ||
4069 | /* We let num_sign_bit_copies recur into nonzero_bits as that is useful. | |
4070 | We don't let nonzero_bits recur into num_sign_bit_copies, because that | |
4071 | is less useful. We can't allow both, because that results in exponential | |
4072 | run time recursion. There is a nullstone testcase that triggered | |
4073 | this. This macro avoids accidental uses of num_sign_bit_copies. */ | |
4074 | #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior | |
4075 | ||
4076 | /* Given an expression, X, compute which bits in X can be nonzero. | |
4077 | We don't care about bits outside of those defined in MODE. | |
4078 | ||
4079 | For most X this is simply GET_MODE_MASK (MODE), but if X is
4080 | an arithmetic operation, we can do better. */ | |
4081 | ||
4082 | static unsigned HOST_WIDE_INT | |
fa233e34 | 4083 | nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, |
2f93eea8 PB |
4084 | enum machine_mode known_mode, |
4085 | unsigned HOST_WIDE_INT known_ret) | |
4086 | { | |
4087 | unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode); | |
4088 | unsigned HOST_WIDE_INT inner_nz; | |
4089 | enum rtx_code code; | |
2d0c270f | 4090 | enum machine_mode inner_mode; |
5511bc5a | 4091 | unsigned int mode_width = GET_MODE_PRECISION (mode); |
2f93eea8 | 4092 | |
ff596cd2 RL |
4093 | /* For floating-point and vector values, assume all bits are needed. */ |
4094 | if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode) | |
4095 | || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode)) | |
2f93eea8 PB |
4096 | return nonzero; |
4097 | ||
4098 | /* If X is wider than MODE, use its mode instead. */ | |
5511bc5a | 4099 | if (GET_MODE_PRECISION (GET_MODE (x)) > mode_width) |
2f93eea8 PB |
4100 | { |
4101 | mode = GET_MODE (x); | |
4102 | nonzero = GET_MODE_MASK (mode); | |
5511bc5a | 4103 | mode_width = GET_MODE_PRECISION (mode); |
2f93eea8 PB |
4104 | } |
4105 | ||
4106 | if (mode_width > HOST_BITS_PER_WIDE_INT) | |
4107 | /* Our only callers in this case look for single bit values. So | |
4108 | just return the mode mask. Those tests will then be false. */ | |
4109 | return nonzero; | |
4110 | ||
4111 | #ifndef WORD_REGISTER_OPERATIONS | |
4112 | /* If MODE is wider than X, but both are a single word for both the host | |
4113 | and target machines, we can compute this from which bits of the | |
4114 | object might be nonzero in its own mode, taking into account the fact | |
4115 | that on many CISC machines, accessing an object in a wider mode | |
4116 | causes the high-order bits to become undefined. So they are | |
4117 | not known to be zero. */ | |
4118 | ||
4119 | if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode | |
5511bc5a BS |
4120 | && GET_MODE_PRECISION (GET_MODE (x)) <= BITS_PER_WORD |
4121 | && GET_MODE_PRECISION (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
4122 | && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (GET_MODE (x))) | |
2f93eea8 PB |
4123 | { |
4124 | nonzero &= cached_nonzero_bits (x, GET_MODE (x), | |
4125 | known_x, known_mode, known_ret); | |
4126 | nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)); | |
4127 | return nonzero; | |
4128 | } | |
4129 | #endif | |
4130 | ||
4131 | code = GET_CODE (x); | |
4132 | switch (code) | |
4133 | { | |
4134 | case REG: | |
4135 | #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) | |
4136 | /* If pointers extend unsigned and this is a pointer in Pmode, say that | |
4137 | all the bits above ptr_mode are known to be zero. */ | |
5932a4d4 | 4138 | /* As we do not know which address space the pointer is referring to, |
d4ebfa65 BE |
4139 | we can do this only if the target does not support different pointer |
4140 | or address modes depending on the address space. */ | |
4141 | if (target_default_pointer_address_modes_p () | |
4142 | && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode | |
2f93eea8 PB |
4143 | && REG_POINTER (x)) |
4144 | nonzero &= GET_MODE_MASK (ptr_mode); | |
4145 | #endif | |
4146 | ||
4147 | /* Include declared information about alignment of pointers. */ | |
4148 | /* ??? We don't properly preserve REG_POINTER changes across | |
4149 | pointer-to-integer casts, so we can't trust it except for | |
4150 | things that we know must be pointers. See execute/960116-1.c. */ | |
4151 | if ((x == stack_pointer_rtx | |
4152 | || x == frame_pointer_rtx | |
4153 | || x == arg_pointer_rtx) | |
4154 | && REGNO_POINTER_ALIGN (REGNO (x))) | |
4155 | { | |
4156 | unsigned HOST_WIDE_INT alignment | |
4157 | = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT; | |
4158 | ||
4159 | #ifdef PUSH_ROUNDING | |
4160 | /* If PUSH_ROUNDING is defined, it is possible for the | |
4161 | stack to be momentarily aligned only to that amount, | |
4162 | so we pick the least alignment. */ | |
4163 | if (x == stack_pointer_rtx && PUSH_ARGS) | |
4164 | alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1), | |
4165 | alignment); | |
4166 | #endif | |
4167 | ||
4168 | nonzero &= ~(alignment - 1); | |
4169 | } | |
4170 | ||
4171 | { | |
4172 | unsigned HOST_WIDE_INT nonzero_for_hook = nonzero; | |
55d796da | 4173 | rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x, |
2f93eea8 PB |
4174 | known_mode, known_ret, |
4175 | &nonzero_for_hook); | |
4176 | ||
55d796da KG |
4177 | if (new_rtx) |
4178 | nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x, | |
2f93eea8 PB |
4179 | known_mode, known_ret); |
4180 | ||
4181 | return nonzero_for_hook; | |
4182 | } | |
4183 | ||
4184 | case CONST_INT: | |
4185 | #ifdef SHORT_IMMEDIATES_SIGN_EXTEND | |
4186 | /* If X is negative in MODE, sign-extend the value. */ | |
c04fc4f0 EB |
4187 | if (INTVAL (x) > 0 |
4188 | && mode_width < BITS_PER_WORD | |
4189 | && (UINTVAL (x) & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1))) | |
4190 | != 0) | |
0cadbfaa | 4191 | return UINTVAL (x) | (HOST_WIDE_INT_M1U << mode_width); |
2f93eea8 PB |
4192 | #endif |
4193 | ||
c04fc4f0 | 4194 | return UINTVAL (x); |
2f93eea8 PB |
4195 | |
4196 | case MEM: | |
4197 | #ifdef LOAD_EXTEND_OP | |
4198 | /* In many, if not most, RISC machines, reading a byte from memory | |
4199 | zeros the rest of the register. Noticing that fact saves a lot | |
4200 | of extra zero-extends. */ | |
4201 | if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND) | |
4202 | nonzero &= GET_MODE_MASK (GET_MODE (x)); | |
4203 | #endif | |
4204 | break; | |
4205 | ||
4206 | case EQ: case NE: | |
4207 | case UNEQ: case LTGT: | |
4208 | case GT: case GTU: case UNGT: | |
4209 | case LT: case LTU: case UNLT: | |
4210 | case GE: case GEU: case UNGE: | |
4211 | case LE: case LEU: case UNLE: | |
4212 | case UNORDERED: case ORDERED: | |
2f93eea8 PB |
4213 | /* If this produces an integer result, we know which bits are set. |
4214 | Code here used to clear bits outside the mode of X, but that is | |
4215 | now done above. */ | |
b8698a0f L |
4216 | /* Mind that MODE is the mode the caller wants to look at this |
4217 | operation in, and not the actual operation mode. We can wind | |
505ac507 RH |
4218 | up with (subreg:DI (gt:V4HI x y)), and we don't have anything |
4219 | that describes the results of a vector compare. */ | |
4220 | if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT | |
2f93eea8 PB |
4221 | && mode_width <= HOST_BITS_PER_WIDE_INT) |
4222 | nonzero = STORE_FLAG_VALUE; | |
4223 | break; | |
4224 | ||
4225 | case NEG: | |
4226 | #if 0 | |
4227 | /* Disabled to avoid exponential mutual recursion between nonzero_bits | |
4228 | and num_sign_bit_copies. */ | |
4229 | if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) | |
5511bc5a | 4230 | == GET_MODE_PRECISION (GET_MODE (x))) |
2f93eea8 PB |
4231 | nonzero = 1; |
4232 | #endif | |
4233 | ||
86cdf393 | 4234 | if (GET_MODE_PRECISION (GET_MODE (x)) < mode_width) |
2f93eea8 PB |
4235 | nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x))); |
4236 | break; | |
4237 | ||
4238 | case ABS: | |
4239 | #if 0 | |
4240 | /* Disabled to avoid exponential mutual recursion between nonzero_bits | |
4241 | and num_sign_bit_copies. */ | |
4242 | if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) | |
5511bc5a | 4243 | == GET_MODE_PRECISION (GET_MODE (x))) |
2f93eea8 PB |
4244 | nonzero = 1; |
4245 | #endif | |
4246 | break; | |
4247 | ||
4248 | case TRUNCATE: | |
4249 | nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode, | |
4250 | known_x, known_mode, known_ret) | |
4251 | & GET_MODE_MASK (mode)); | |
4252 | break; | |
4253 | ||
4254 | case ZERO_EXTEND: | |
4255 | nonzero &= cached_nonzero_bits (XEXP (x, 0), mode, | |
4256 | known_x, known_mode, known_ret); | |
4257 | if (GET_MODE (XEXP (x, 0)) != VOIDmode) | |
4258 | nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); | |
4259 | break; | |
4260 | ||
4261 | case SIGN_EXTEND: | |
4262 | /* If the sign bit is known clear, this is the same as ZERO_EXTEND. | |
4263 | Otherwise, show all the bits in the outer mode but not the inner | |
4264 | may be nonzero. */ | |
4265 | inner_nz = cached_nonzero_bits (XEXP (x, 0), mode, | |
4266 | known_x, known_mode, known_ret); | |
4267 | if (GET_MODE (XEXP (x, 0)) != VOIDmode) | |
4268 | { | |
4269 | inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); | |
2d0c270f | 4270 | if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz)) |
2f93eea8 PB |
4271 | inner_nz |= (GET_MODE_MASK (mode) |
4272 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))); | |
4273 | } | |
4274 | ||
4275 | nonzero &= inner_nz; | |
4276 | break; | |
4277 | ||
4278 | case AND: | |
4279 | nonzero &= cached_nonzero_bits (XEXP (x, 0), mode, | |
4280 | known_x, known_mode, known_ret) | |
4281 | & cached_nonzero_bits (XEXP (x, 1), mode, | |
4282 | known_x, known_mode, known_ret); | |
4283 | break; | |
4284 | ||
4285 | case XOR: case IOR: | |
4286 | case UMIN: case UMAX: case SMIN: case SMAX: | |
4287 | { | |
c04fc4f0 EB |
4288 | unsigned HOST_WIDE_INT nonzero0 |
4289 | = cached_nonzero_bits (XEXP (x, 0), mode, | |
4290 | known_x, known_mode, known_ret); | |
2f93eea8 PB |
4291 | |
4292 | /* Don't call nonzero_bits for the second time if it cannot change | |
4293 | anything. */ | |
4294 | if ((nonzero & nonzero0) != nonzero) | |
4295 | nonzero &= nonzero0 | |
4296 | | cached_nonzero_bits (XEXP (x, 1), mode, | |
4297 | known_x, known_mode, known_ret); | |
4298 | } | |
4299 | break; | |
4300 | ||
4301 | case PLUS: case MINUS: | |
4302 | case MULT: | |
4303 | case DIV: case UDIV: | |
4304 | case MOD: case UMOD: | |
4305 | /* We can apply the rules of arithmetic to compute the number of | |
4306 | high- and low-order zero bits of these operations. We start by | |
4307 | computing the width (position of the highest-order nonzero bit) | |
4308 | and the number of low-order zero bits for each value. */ | |
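/* Worked example (illustrative): if nz0 == 0x0c and nz1 == 0x05,
   then width0 == 4, width1 == 3, low0 == 2 and low1 == 0; for PLUS,
   result_width == MAX (4, 3) + 1 == 5 and result_low == 0, so
   nonzero is masked down to 0x1f.  */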
4309 | { | |
c04fc4f0 EB |
4310 | unsigned HOST_WIDE_INT nz0 |
4311 | = cached_nonzero_bits (XEXP (x, 0), mode, | |
4312 | known_x, known_mode, known_ret); | |
4313 | unsigned HOST_WIDE_INT nz1 | |
4314 | = cached_nonzero_bits (XEXP (x, 1), mode, | |
4315 | known_x, known_mode, known_ret); | |
5511bc5a | 4316 | int sign_index = GET_MODE_PRECISION (GET_MODE (x)) - 1; |
2f93eea8 PB |
4317 | int width0 = floor_log2 (nz0) + 1; |
4318 | int width1 = floor_log2 (nz1) + 1; | |
4319 | int low0 = floor_log2 (nz0 & -nz0); | |
4320 | int low1 = floor_log2 (nz1 & -nz1); | |
c04fc4f0 EB |
4321 | unsigned HOST_WIDE_INT op0_maybe_minusp |
4322 | = nz0 & ((unsigned HOST_WIDE_INT) 1 << sign_index); | |
4323 | unsigned HOST_WIDE_INT op1_maybe_minusp | |
4324 | = nz1 & ((unsigned HOST_WIDE_INT) 1 << sign_index); | |
2f93eea8 PB |
4325 | unsigned int result_width = mode_width; |
4326 | int result_low = 0; | |
4327 | ||
4328 | switch (code) | |
4329 | { | |
4330 | case PLUS: | |
4331 | result_width = MAX (width0, width1) + 1; | |
4332 | result_low = MIN (low0, low1); | |
4333 | break; | |
4334 | case MINUS: | |
4335 | result_low = MIN (low0, low1); | |
4336 | break; | |
4337 | case MULT: | |
4338 | result_width = width0 + width1; | |
4339 | result_low = low0 + low1; | |
4340 | break; | |
4341 | case DIV: | |
4342 | if (width1 == 0) | |
4343 | break; | |
c04fc4f0 | 4344 | if (!op0_maybe_minusp && !op1_maybe_minusp) |
2f93eea8 PB |
4345 | result_width = width0; |
4346 | break; | |
4347 | case UDIV: | |
4348 | if (width1 == 0) | |
4349 | break; | |
4350 | result_width = width0; | |
4351 | break; | |
4352 | case MOD: | |
4353 | if (width1 == 0) | |
4354 | break; | |
c04fc4f0 | 4355 | if (!op0_maybe_minusp && !op1_maybe_minusp) |
2f93eea8 PB |
4356 | result_width = MIN (width0, width1); |
4357 | result_low = MIN (low0, low1); | |
4358 | break; | |
4359 | case UMOD: | |
4360 | if (width1 == 0) | |
4361 | break; | |
4362 | result_width = MIN (width0, width1); | |
4363 | result_low = MIN (low0, low1); | |
4364 | break; | |
4365 | default: | |
41374e13 | 4366 | gcc_unreachable (); |
2f93eea8 PB |
4367 | } |
4368 | ||
4369 | if (result_width < mode_width) | |
c04fc4f0 | 4370 | nonzero &= ((unsigned HOST_WIDE_INT) 1 << result_width) - 1; |
2f93eea8 PB |
4371 | |
4372 | if (result_low > 0) | |
c04fc4f0 | 4373 | nonzero &= ~(((unsigned HOST_WIDE_INT) 1 << result_low) - 1); |
2f93eea8 PB |
4374 | } |
4375 | break; | |
4376 | ||
4377 | case ZERO_EXTRACT: | |
481683e1 | 4378 | if (CONST_INT_P (XEXP (x, 1)) |
2f93eea8 | 4379 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) |
c04fc4f0 | 4380 | nonzero &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1; |
2f93eea8 PB |
4381 | break; |
4382 | ||
4383 | case SUBREG: | |
4384 | /* If this is a SUBREG formed for a promoted variable that has | |
4385 | been zero-extended, we know that at least the high-order bits | |
4386 | are zero, though others might be too. */ | |
4387 | ||
362d42dc | 4388 | if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x)) |
2f93eea8 PB |
4389 | nonzero = GET_MODE_MASK (GET_MODE (x)) |
4390 | & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x), | |
4391 | known_x, known_mode, known_ret); | |
4392 | ||
2d0c270f | 4393 | inner_mode = GET_MODE (SUBREG_REG (x)); |
2f93eea8 PB |
4394 | /* If the inner mode is a single word for both the host and target |
4395 | machines, we can compute this from which bits of the inner | |
4396 | object might be nonzero. */ | |
5511bc5a BS |
4397 | if (GET_MODE_PRECISION (inner_mode) <= BITS_PER_WORD |
4398 | && (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT)) | |
2f93eea8 PB |
4399 | { |
4400 | nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode, | |
4401 | known_x, known_mode, known_ret); | |
4402 | ||
4403 | #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP) | |
4404 | /* If this is a typical RISC machine, we only have to worry | |
4405 | about the way loads are extended. */ | |
2d0c270f BS |
4406 | if ((LOAD_EXTEND_OP (inner_mode) == SIGN_EXTEND |
4407 | ? val_signbit_known_set_p (inner_mode, nonzero) | |
4408 | : LOAD_EXTEND_OP (inner_mode) != ZERO_EXTEND) | |
3c0cb5de | 4409 | || !MEM_P (SUBREG_REG (x))) |
2f93eea8 PB |
4410 | #endif |
4411 | { | |
4412 | /* On many CISC machines, accessing an object in a wider mode | |
4413 | causes the high-order bits to become undefined. So they are | |
4414 | not known to be zero. */ | |
5511bc5a BS |
4415 | if (GET_MODE_PRECISION (GET_MODE (x)) |
4416 | > GET_MODE_PRECISION (inner_mode)) | |
2f93eea8 | 4417 | nonzero |= (GET_MODE_MASK (GET_MODE (x)) |
2d0c270f | 4418 | & ~GET_MODE_MASK (inner_mode)); |
2f93eea8 PB |
4419 | } |
4420 | } | |
4421 | break; | |
4422 | ||
4423 | case ASHIFTRT: | |
4424 | case LSHIFTRT: | |
4425 | case ASHIFT: | |
4426 | case ROTATE: | |
4427 | /* The nonzero bits are in two classes: any bits within MODE | |
4428 | that aren't in GET_MODE (x) are always significant. The rest of the | |
4429 | nonzero bits are those that are significant in the operand of | |
4430 | the shift when shifted the appropriate number of bits. This | |
4431 | shows that high-order bits are cleared by the right shift and | |
4432 | low-order bits by left shifts. */ | |
481683e1 | 4433 | if (CONST_INT_P (XEXP (x, 1)) |
2f93eea8 | 4434 | && INTVAL (XEXP (x, 1)) >= 0 |
39b2ac74 | 4435 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT |
5511bc5a | 4436 | && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x))) |
2f93eea8 PB |
4437 | { |
4438 | enum machine_mode inner_mode = GET_MODE (x); | |
5511bc5a | 4439 | unsigned int width = GET_MODE_PRECISION (inner_mode); |
2f93eea8 PB |
4440 | int count = INTVAL (XEXP (x, 1)); |
4441 | unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode); | |
c04fc4f0 EB |
4442 | unsigned HOST_WIDE_INT op_nonzero |
4443 | = cached_nonzero_bits (XEXP (x, 0), mode, | |
4444 | known_x, known_mode, known_ret); | |
2f93eea8 PB |
4445 | unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask; |
4446 | unsigned HOST_WIDE_INT outer = 0; | |
4447 | ||
4448 | if (mode_width > width) | |
4449 | outer = (op_nonzero & nonzero & ~mode_mask); | |
4450 | ||
4451 | if (code == LSHIFTRT) | |
4452 | inner >>= count; | |
4453 | else if (code == ASHIFTRT) | |
4454 | { | |
4455 | inner >>= count; | |
4456 | ||
4457 | /* If the sign bit may have been nonzero before the shift, we | |
4458 | need to mark all the places it could have been copied to | |
4459 | by the shift as possibly nonzero. */ | |
c04fc4f0 EB |
4460 | if (inner & ((unsigned HOST_WIDE_INT) 1 << (width - 1 - count))) |
4461 | inner |= (((unsigned HOST_WIDE_INT) 1 << count) - 1) | |
4462 | << (width - count); | |
2f93eea8 PB |
4463 | } |
4464 | else if (code == ASHIFT) | |
4465 | inner <<= count; | |
4466 | else | |
4467 | inner = ((inner << (count % width) | |
4468 | | (inner >> (width - (count % width)))) & mode_mask); | |
4469 | ||
4470 | nonzero &= (outer | inner); | |
4471 | } | |
4472 | break; | |
4473 | ||
4474 | case FFS: | |
4475 | case POPCOUNT: | |
4476 | /* This is at most the number of bits in the mode. */ | |
c04fc4f0 | 4477 | nonzero = ((unsigned HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1; |
2f93eea8 PB |
4478 | break; |
4479 | ||
4480 | case CLZ: | |
4481 | /* If CLZ has a known value at zero, then the nonzero bits are | |
4482 | that value, plus the number of bits in the mode minus one. */ | |
4483 | if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero)) | |
c04fc4f0 EB |
4484 | nonzero |
4485 | |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1; | |
2f93eea8 PB |
4486 | else |
4487 | nonzero = -1; | |
4488 | break; | |
4489 | ||
4490 | case CTZ: | |
4491 | /* If CTZ has a known value at zero, then the nonzero bits are | |
4492 | that value, plus the number of bits in the mode minus one. */ | |
4493 | if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero)) | |
c04fc4f0 EB |
4494 | nonzero |
4495 | |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1; | |
2f93eea8 PB |
4496 | else |
4497 | nonzero = -1; | |
4498 | break; | |
4499 | ||
8840ae2b JJ |
4500 | case CLRSB: |
4501 | /* This is at most the number of bits in the mode minus 1. */ | |
4502 | nonzero = ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1; | |
4503 | break; | |
4504 | ||
2f93eea8 PB |
4505 | case PARITY: |
4506 | nonzero = 1; | |
4507 | break; | |
4508 | ||
4509 | case IF_THEN_ELSE: | |
4510 | { | |
c04fc4f0 EB |
4511 | unsigned HOST_WIDE_INT nonzero_true |
4512 | = cached_nonzero_bits (XEXP (x, 1), mode, | |
4513 | known_x, known_mode, known_ret); | |
2f93eea8 PB |
4514 | |
4515 | /* Don't call nonzero_bits a second time if it cannot change | |
4516 | anything. */ | |
4517 | if ((nonzero & nonzero_true) != nonzero) | |
4518 | nonzero &= nonzero_true | |
4519 | | cached_nonzero_bits (XEXP (x, 2), mode, | |
4520 | known_x, known_mode, known_ret); | |
4521 | } | |
4522 | break; | |
4523 | ||
4524 | default: | |
4525 | break; | |
4526 | } | |
4527 | ||
4528 | return nonzero; | |
4529 | } | |
4530 | ||
4531 | /* See the macro definition above. */ | |
4532 | #undef cached_num_sign_bit_copies | |
4533 | ||
4534 | \f | |
4535 | /* The function cached_num_sign_bit_copies is a wrapper around | |
4536 | num_sign_bit_copies1. It avoids exponential behavior in | |
4537 | num_sign_bit_copies1 when X has identical subexpressions on the | |
4538 | first or the second level. */ | |
4539 | ||
4540 | static unsigned int | |
fa233e34 | 4541 | cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x, |
2f93eea8 PB |
4542 | enum machine_mode known_mode, |
4543 | unsigned int known_ret) | |
4544 | { | |
4545 | if (x == known_x && mode == known_mode) | |
4546 | return known_ret; | |
4547 | ||
4548 | /* Try to find identical subexpressions. If found call | |
4549 | num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and | |
4550 | the precomputed value for the subexpression as KNOWN_RET. */ | |
4551 | ||
4552 | if (ARITHMETIC_P (x)) | |
4553 | { | |
4554 | rtx x0 = XEXP (x, 0); | |
4555 | rtx x1 = XEXP (x, 1); | |
4556 | ||
4557 | /* Check the first level. */ | |
4558 | if (x0 == x1) | |
4559 | return | |
4560 | num_sign_bit_copies1 (x, mode, x0, mode, | |
4561 | cached_num_sign_bit_copies (x0, mode, known_x, | |
4562 | known_mode, | |
4563 | known_ret)); | |
4564 | ||
4565 | /* Check the second level. */ | |
4566 | if (ARITHMETIC_P (x0) | |
4567 | && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1))) | |
4568 | return | |
4569 | num_sign_bit_copies1 (x, mode, x1, mode, | |
4570 | cached_num_sign_bit_copies (x1, mode, known_x, | |
4571 | known_mode, | |
4572 | known_ret)); | |
4573 | ||
4574 | if (ARITHMETIC_P (x1) | |
4575 | && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1))) | |
4576 | return | |
4577 | num_sign_bit_copies1 (x, mode, x0, mode, | |
4578 | cached_num_sign_bit_copies (x0, mode, known_x, | |
4579 | known_mode, | |
4580 | known_ret)); | |
4581 | } | |
4582 | ||
4583 | return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret); | |
4584 | } | |
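/* For instance, given (plus (reg A) (reg A)), the wrapper above computes
   the count for (reg A) once and passes it down as KNOWN_RET, so
   num_sign_bit_copies1 does not traverse the shared operand twice.  The
   second-level checks handle sharing such as
   (plus (reg A) (ashift (reg A) (const_int 1))) in the same way.  */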
4585 | ||
4586 | /* Return the number of bits at the high-order end of X that are known to | |
4587 | be equal to the sign bit. X will be used in mode MODE; if MODE is | |
4588 | VOIDmode, X will be used in its own mode. The returned value will always | |
4589 | be between 1 and the number of bits in MODE. */ | |
4590 | ||
4591 | static unsigned int | |
fa233e34 | 4592 | num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, |
2f93eea8 PB |
4593 | enum machine_mode known_mode, |
4594 | unsigned int known_ret) | |
4595 | { | |
4596 | enum rtx_code code = GET_CODE (x); | |
5511bc5a | 4597 | unsigned int bitwidth = GET_MODE_PRECISION (mode); |
2f93eea8 PB |
4598 | int num0, num1, result; |
4599 | unsigned HOST_WIDE_INT nonzero; | |
4600 | ||
4601 | /* If we weren't given a mode, use the mode of X. If the mode is still | |
4602 | VOIDmode, we don't know anything. Likewise if one of the modes is | |
4603 | floating-point. */ | |
4604 | ||
4605 | if (mode == VOIDmode) | |
4606 | mode = GET_MODE (x); | |
4607 | ||
ff596cd2 RL |
4608 | if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)) |
4609 | || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode)) | |
2f93eea8 PB |
4610 | return 1; |
4611 | ||
4612 | /* For a smaller object, just ignore the high bits. */ | |
5511bc5a | 4613 | if (bitwidth < GET_MODE_PRECISION (GET_MODE (x))) |
2f93eea8 PB |
4614 | { |
4615 | num0 = cached_num_sign_bit_copies (x, GET_MODE (x), | |
4616 | known_x, known_mode, known_ret); | |
4617 | return MAX (1, | |
5511bc5a | 4618 | num0 - (int) (GET_MODE_PRECISION (GET_MODE (x)) - bitwidth)); |
2f93eea8 PB |
4619 | } |
4620 | ||
5511bc5a | 4621 | if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_PRECISION (GET_MODE (x))) |
2f93eea8 PB |
4622 | { |
4623 | #ifndef WORD_REGISTER_OPERATIONS | |
5511bc5a BS |
4624 | /* If this machine does not do all register operations on the entire |
4625 | register and MODE is wider than the mode of X, we can say nothing | |
4626 | at all about the high-order bits. */ | |
2f93eea8 PB |
4627 | return 1; |
4628 | #else | |
4629 | /* Likewise on machines that do, if the mode of the object is smaller | |
4630 | than a word and loads of that size don't sign extend, we can say | |
4631 | nothing about the high order bits. */ | |
5511bc5a | 4632 | if (GET_MODE_PRECISION (GET_MODE (x)) < BITS_PER_WORD |
2f93eea8 PB |
4633 | #ifdef LOAD_EXTEND_OP |
4634 | && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND | |
4635 | #endif | |
4636 | ) | |
4637 | return 1; | |
4638 | #endif | |
4639 | } | |
4640 | ||
4641 | switch (code) | |
4642 | { | |
4643 | case REG: | |
4644 | ||
4645 | #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) | |
4646 | /* If pointers extend signed and this is a pointer in Pmode, say that | |
4647 | all the bits above ptr_mode are known to be sign bit copies. */ | |
5932a4d4 | 4648 | /* As we do not know which address space the pointer is referring to, |
d4ebfa65 BE |
4649 | we can do this only if the target does not support different pointer |
4650 | or address modes depending on the address space. */ | |
4651 | if (target_default_pointer_address_modes_p () | |
4652 | && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode | |
4653 | && mode == Pmode && REG_POINTER (x)) | |
5511bc5a | 4654 | return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1; |
2f93eea8 PB |
4655 | #endif |
4656 | ||
4657 | { | |
4658 | unsigned int copies_for_hook = 1, copies = 1; | |
55d796da | 4659 | rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x, |
2f93eea8 PB |
4660 | known_mode, known_ret, |
4661 | &copies_for_hook); | |
4662 | ||
55d796da KG |
4663 | if (new_rtx) |
4664 | copies = cached_num_sign_bit_copies (new_rtx, mode, known_x, | |
2f93eea8 PB |
4665 | known_mode, known_ret); |
4666 | ||
4667 | if (copies > 1 || copies_for_hook > 1) | |
4668 | return MAX (copies, copies_for_hook); | |
4669 | ||
4670 | /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */ | |
4671 | } | |
4672 | break; | |
4673 | ||
4674 | case MEM: | |
4675 | #ifdef LOAD_EXTEND_OP | |
4676 | /* Some RISC machines sign-extend all loads of smaller than a word. */ | |
4677 | if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND) | |
4678 | return MAX (1, ((int) bitwidth | |
5511bc5a | 4679 | - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1)); |
2f93eea8 PB |
4680 | #endif |
4681 | break; | |
4682 | ||
4683 | case CONST_INT: | |
4684 | /* If the constant is negative, take its 1's complement and remask. | |
4685 | Then see how many zero bits we have. */ | |
c04fc4f0 | 4686 | nonzero = UINTVAL (x) & GET_MODE_MASK (mode); |
2f93eea8 | 4687 | if (bitwidth <= HOST_BITS_PER_WIDE_INT |
c04fc4f0 | 4688 | && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
2f93eea8 PB |
4689 | nonzero = (~nonzero) & GET_MODE_MASK (mode); |
4690 | ||
4691 | return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1); | |
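      /* E.g. for (const_int -4) in a 32-bit mode: the masked value is
	 0xfffffffc, its complement is 3, floor_log2 (3) == 1, and the
	 result is 32 - 1 - 1 == 30 -- the top 30 bits are sign copies.  */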
4692 | ||
4693 | case SUBREG: | |
4694 | /* If this is a SUBREG for a promoted object that is sign-extended | |
4695 | and we are looking at it in a wider mode, we know that at least the | |
4696 | high-order bits are known to be sign bit copies. */ | |
4697 | ||
362d42dc | 4698 | if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_SIGNED_P (x)) |
2f93eea8 PB |
4699 | { |
4700 | num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode, | |
4701 | known_x, known_mode, known_ret); | |
4702 | return MAX ((int) bitwidth | |
5511bc5a | 4703 | - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1, |
2f93eea8 PB |
4704 | num0); |
4705 | } | |
4706 | ||
4707 | /* For a smaller object, just ignore the high bits. */ | |
5511bc5a | 4708 | if (bitwidth <= GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x)))) |
2f93eea8 PB |
4709 | { |
4710 | num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode, | |
4711 | known_x, known_mode, known_ret); | |
4712 | return MAX (1, (num0 | |
5511bc5a | 4713 | - (int) (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))) |
2f93eea8 PB |
4714 | - bitwidth))); |
4715 | } | |
4716 | ||
4717 | #ifdef WORD_REGISTER_OPERATIONS | |
4718 | #ifdef LOAD_EXTEND_OP | |
4719 | /* For paradoxical SUBREGs on machines where all register operations | |
4720 | affect the entire register, just look inside. Note that we are | |
4721 | passing MODE to the recursive call, so the number of sign bit copies | |
4722 | will remain relative to that mode, not the inner mode. */ | |
4723 | ||
4724 | /* This works only if loads sign extend. Otherwise, if we get a | |
4725 | reload for the inner part, it may be loaded from the stack, and | |
4726 | then we lose all sign bit copies that existed before the store | |
4727 | to the stack. */ | |
4728 | ||
6a4bdc79 | 4729 | if (paradoxical_subreg_p (x) |
2f93eea8 | 4730 | && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND |
3c0cb5de | 4731 | && MEM_P (SUBREG_REG (x))) |
2f93eea8 PB |
4732 | return cached_num_sign_bit_copies (SUBREG_REG (x), mode, |
4733 | known_x, known_mode, known_ret); | |
4734 | #endif | |
4735 | #endif | |
4736 | break; | |
4737 | ||
4738 | case SIGN_EXTRACT: | |
481683e1 | 4739 | if (CONST_INT_P (XEXP (x, 1))) |
2f93eea8 PB |
4740 | return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1))); |
4741 | break; | |
4742 | ||
4743 | case SIGN_EXTEND: | |
5511bc5a | 4744 | return (bitwidth - GET_MODE_PRECISION (GET_MODE (XEXP (x, 0))) |
2f93eea8 PB |
4745 | + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode, |
4746 | known_x, known_mode, known_ret)); | |
4747 | ||
4748 | case TRUNCATE: | |
4749 | /* For a smaller object, just ignore the high bits. */ | |
4750 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode, | |
4751 | known_x, known_mode, known_ret); | |
5511bc5a | 4752 | return MAX (1, (num0 - (int) (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0))) |
2f93eea8 PB |
4753 | - bitwidth))); |
4754 | ||
4755 | case NOT: | |
4756 | return cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4757 | known_x, known_mode, known_ret); | |
4758 | ||
4759 | case ROTATE: case ROTATERT: | |
4760 | /* If we are rotating left by a number of bits less than the number | |
4761 | of sign bit copies, we can just subtract that amount from the | |
4762 | number. */ | |
481683e1 | 4763 | if (CONST_INT_P (XEXP (x, 1)) |
2f93eea8 PB |
4764 | && INTVAL (XEXP (x, 1)) >= 0 |
4765 | && INTVAL (XEXP (x, 1)) < (int) bitwidth) | |
4766 | { | |
4767 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4768 | known_x, known_mode, known_ret); | |
4769 | return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1)) | |
4770 | : (int) bitwidth - INTVAL (XEXP (x, 1)))); | |
4771 | } | |
4772 | break; | |
4773 | ||
4774 | case NEG: | |
4775 | /* In general, this subtracts one sign bit copy. But if the value | |
4776 | is known to be positive, the number of sign bit copies is the | |
4777 | same as that of the input. Finally, if the input has just one bit | |
4778 | that might be nonzero, all the bits are copies of the sign bit. */ | |
4779 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4780 | known_x, known_mode, known_ret); | |
4781 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
4782 | return num0 > 1 ? num0 - 1 : 1; | |
4783 | ||
4784 | nonzero = nonzero_bits (XEXP (x, 0), mode); | |
4785 | if (nonzero == 1) | |
4786 | return bitwidth; | |
4787 | ||
4788 | if (num0 > 1 | |
c04fc4f0 | 4789 | && (((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero)) |
2f93eea8 PB |
4790 | num0--; |
4791 | ||
4792 | return num0; | |
4793 | ||
4794 | case IOR: case AND: case XOR: | |
4795 | case SMIN: case SMAX: case UMIN: case UMAX: | |
4796 | /* Logical operations will preserve the number of sign-bit copies. | |
4797 | MIN and MAX operations always return one of the operands. */ | |
4798 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4799 | known_x, known_mode, known_ret); | |
4800 | num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4801 | known_x, known_mode, known_ret); | |
22761ec3 AN |
4802 | |
4803 | /* If num1 is clearing some of the top bits then regardless of | |
4804 | the other term, we are guaranteed to have at least that many | |
4805 | high-order zero bits. */ | |
4806 | if (code == AND | |
4807 | && num1 > 1 | |
4808 | && bitwidth <= HOST_BITS_PER_WIDE_INT | |
481683e1 | 4809 | && CONST_INT_P (XEXP (x, 1)) |
c04fc4f0 EB |
4810 | && (UINTVAL (XEXP (x, 1)) |
4811 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) == 0) | |
22761ec3 AN |
4812 | return num1; |
4813 | ||
4814 | /* Similarly for IOR when setting high-order bits. */ | |
4815 | if (code == IOR | |
4816 | && num1 > 1 | |
4817 | && bitwidth <= HOST_BITS_PER_WIDE_INT | |
481683e1 | 4818 | && CONST_INT_P (XEXP (x, 1)) |
c04fc4f0 EB |
4819 | && (UINTVAL (XEXP (x, 1)) |
4820 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) | |
22761ec3 AN |
4821 | return num1; |
4822 | ||
2f93eea8 PB |
4823 | return MIN (num0, num1); |
4824 | ||
4825 | case PLUS: case MINUS: | |
4826 | /* For addition and subtraction, we can have a 1-bit carry. However, | |
4827 | if we are subtracting 1 from a positive number, there will not | |
4828 | be such a carry. Furthermore, if the positive number is known to | |
4829 | be 0 or 1, we know the result is either -1 or 0. */ | |
4830 | ||
4831 | if (code == PLUS && XEXP (x, 1) == constm1_rtx | |
4832 | && bitwidth <= HOST_BITS_PER_WIDE_INT) | |
4833 | { | |
4834 | nonzero = nonzero_bits (XEXP (x, 0), mode); | |
c04fc4f0 | 4835 | if ((((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0) |
2f93eea8 PB |
4836 | return (nonzero == 1 || nonzero == 0 ? bitwidth |
4837 | : bitwidth - floor_log2 (nonzero) - 1); | |
4838 | } | |
4839 | ||
4840 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4841 | known_x, known_mode, known_ret); | |
4842 | num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4843 | known_x, known_mode, known_ret); | |
4844 | result = MAX (1, MIN (num0, num1) - 1); | |
4845 | ||
2f93eea8 PB |
4846 | return result; |
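      /* E.g. if XEXP (x, 0) is known to be 0 or 1 (nonzero == 1) and we
	 add -1, the result is -1 or 0, so the code above reports that
	 every bit is a copy of the sign bit.  */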
4847 | ||
4848 | case MULT: | |
4849 | /* The number of bits of the product is the sum of the number of | |
4850 | bits of both terms. However, unless one of the terms is known | |
4851 | to be positive, we must allow for an additional bit since negating | |
4852 | a negative number can remove one sign bit copy. */ | |
4853 | ||
4854 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4855 | known_x, known_mode, known_ret); | |
4856 | num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4857 | known_x, known_mode, known_ret); | |
4858 | ||
4859 | result = bitwidth - (bitwidth - num0) - (bitwidth - num1); | |
4860 | if (result > 0 | |
4861 | && (bitwidth > HOST_BITS_PER_WIDE_INT | |
4862 | || (((nonzero_bits (XEXP (x, 0), mode) | |
c04fc4f0 | 4863 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
2f93eea8 | 4864 | && ((nonzero_bits (XEXP (x, 1), mode) |
c04fc4f0 EB |
4865 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) |
4866 | != 0)))) | |
2f93eea8 PB |
4867 | result--; |
4868 | ||
4869 | return MAX (1, result); | |
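      /* E.g. in a 32-bit mode, multiplying two values that each have 20
	 sign-bit copies (and therefore at most 13 significant bits,
	 counting the sign) gives result == 20 + 20 - 32 == 8, reduced
	 to 7 unless one operand is known to be nonnegative.  */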
4870 | ||
4871 | case UDIV: | |
4872 | /* The result must be <= the first operand. If the first operand | |
4873 | has the high bit set, we know nothing about the number of sign | |
4874 | bit copies. */ | |
4875 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
4876 | return 1; | |
4877 | else if ((nonzero_bits (XEXP (x, 0), mode) | |
c04fc4f0 | 4878 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
2f93eea8 PB |
4879 | return 1; |
4880 | else | |
4881 | return cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4882 | known_x, known_mode, known_ret); | |
4883 | ||
4884 | case UMOD: | |
24d179b4 JJ |
4885 | /* The result must be <= the second operand. If the second operand |
4886 | has (or just might have) the high bit set, we know nothing about | |
4887 | the number of sign bit copies. */ | |
4888 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
4889 | return 1; | |
4890 | else if ((nonzero_bits (XEXP (x, 1), mode) | |
c04fc4f0 | 4891 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
24d179b4 JJ |
4892 | return 1; |
4893 | else | |
4894 | return cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
2f93eea8 PB |
4895 | known_x, known_mode, known_ret); |
4896 | ||
4897 | case DIV: | |
4898 | /* Similar to unsigned division, except that we have to worry about | |
4899 | the case where the divisor is negative, in which case we have | |
4900 | to add 1. */ | |
4901 | result = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4902 | known_x, known_mode, known_ret); | |
4903 | if (result > 1 | |
4904 | && (bitwidth > HOST_BITS_PER_WIDE_INT | |
4905 | || (nonzero_bits (XEXP (x, 1), mode) | |
c04fc4f0 | 4906 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) |
2f93eea8 PB |
4907 | result--; |
4908 | ||
4909 | return result; | |
4910 | ||
4911 | case MOD: | |
4912 | result = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4913 | known_x, known_mode, known_ret); | |
4914 | if (result > 1 | |
4915 | && (bitwidth > HOST_BITS_PER_WIDE_INT | |
4916 | || (nonzero_bits (XEXP (x, 1), mode) | |
c04fc4f0 | 4917 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) |
2f93eea8 PB |
4918 | result--; |
4919 | ||
4920 | return result; | |
4921 | ||
4922 | case ASHIFTRT: | |
4923 | /* Shifts by a constant add to the number of bits equal to the | |
4924 | sign bit. */ | |
4925 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4926 | known_x, known_mode, known_ret); | |
481683e1 | 4927 | if (CONST_INT_P (XEXP (x, 1)) |
39b2ac74 | 4928 | && INTVAL (XEXP (x, 1)) > 0 |
5511bc5a | 4929 | && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x))) |
2f93eea8 PB |
4930 | num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1))); |
4931 | ||
4932 | return num0; | |
4933 | ||
4934 | case ASHIFT: | |
4935 | /* Left shifts destroy copies. */ | |
481683e1 | 4936 | if (!CONST_INT_P (XEXP (x, 1)) |
2f93eea8 | 4937 | || INTVAL (XEXP (x, 1)) < 0 |
39b2ac74 | 4938 | || INTVAL (XEXP (x, 1)) >= (int) bitwidth |
5511bc5a | 4939 | || INTVAL (XEXP (x, 1)) >= GET_MODE_PRECISION (GET_MODE (x))) |
2f93eea8 PB |
4940 | return 1; |
4941 | ||
4942 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4943 | known_x, known_mode, known_ret); | |
4944 | return MAX (1, num0 - INTVAL (XEXP (x, 1))); | |
4945 | ||
4946 | case IF_THEN_ELSE: | |
4947 | num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4948 | known_x, known_mode, known_ret); | |
4949 | num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode, | |
4950 | known_x, known_mode, known_ret); | |
4951 | return MIN (num0, num1); | |
4952 | ||
4953 | case EQ: case NE: case GE: case GT: case LE: case LT: | |
4954 | case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT: | |
4955 | case GEU: case GTU: case LEU: case LTU: | |
4956 | case UNORDERED: case ORDERED: | |
4957 | /* If the constant is negative, take its 1's complement and remask. | |
4958 | Then see how many zero bits we have. */ | |
4959 | nonzero = STORE_FLAG_VALUE; | |
4960 | if (bitwidth <= HOST_BITS_PER_WIDE_INT | |
c04fc4f0 | 4961 | && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
2f93eea8 PB |
4962 | nonzero = (~nonzero) & GET_MODE_MASK (mode); |
4963 | ||
4964 | return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1); | |
4965 | ||
4966 | default: | |
4967 | break; | |
4968 | } | |
4969 | ||
4970 | /* If we haven't been able to figure it out by one of the above rules, | |
4971 | see if some of the high-order bits are known to be zero. If so, | |
4972 | count those bits and return one less than that amount. If we can't | |
4973 | safely compute the mask for this mode, always return BITWIDTH. */ | |
4974 | ||
5511bc5a | 4975 | bitwidth = GET_MODE_PRECISION (mode); |
2f93eea8 PB |
4976 | if (bitwidth > HOST_BITS_PER_WIDE_INT) |
4977 | return 1; | |
4978 | ||
4979 | nonzero = nonzero_bits (x, mode); | |
c04fc4f0 | 4980 | return nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) |
2f93eea8 PB |
4981 | ? 1 : bitwidth - floor_log2 (nonzero) - 1; |
4982 | } | |
6fd21094 RS |
4983 | |
4984 | /* Calculate the rtx_cost of a single instruction. A return value of | |
4985 | zero indicates an instruction pattern without a known cost. */ | |
4986 | ||
4987 | int | |
f40751dd | 4988 | insn_rtx_cost (rtx pat, bool speed) |
6fd21094 RS |
4989 | { |
4990 | int i, cost; | |
4991 | rtx set; | |
4992 | ||
4993 | /* Extract the single set rtx from the instruction pattern. | |
4994 | We can't use single_set since we only have the pattern. */ | |
4995 | if (GET_CODE (pat) == SET) | |
4996 | set = pat; | |
4997 | else if (GET_CODE (pat) == PARALLEL) | |
4998 | { | |
4999 | set = NULL_RTX; | |
5000 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
5001 | { | |
5002 | rtx x = XVECEXP (pat, 0, i); | |
5003 | if (GET_CODE (x) == SET) | |
5004 | { | |
5005 | if (set) | |
5006 | return 0; | |
5007 | set = x; | |
5008 | } | |
5009 | } | |
5010 | if (!set) | |
5011 | return 0; | |
5012 | } | |
5013 | else | |
5014 | return 0; | |
5015 | ||
5e8f01f4 | 5016 | cost = set_src_cost (SET_SRC (set), speed); |
6fd21094 RS |
5017 | return cost > 0 ? cost : COSTS_N_INSNS (1); |
5018 | } | |
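/* For example, given a pattern such as
     (parallel [(set (reg 0) (plus (reg 1) (reg 2)))
		(clobber (reg 17))])
   the clobber is skipped, the single SET is found and the cost of its
   (plus ...) source is returned; a PARALLEL containing two SETs yields
   0, i.e. an unknown cost.  */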
75473b02 SB |
5019 | |
5020 | /* Given an insn INSN and condition COND, return the condition in a | |
5021 | canonical form to simplify testing by callers. Specifically: | |
5022 | ||
5023 | (1) The code will always be a comparison operation (EQ, NE, GT, etc.). | |
5024 | (2) Both operands will be machine operands; (cc0) will have been replaced. | |
5025 | (3) If an operand is a constant, it will be the second operand. | |
5026 | (4) (LE x const) will be replaced with (LT x <const+1>) and similarly | |
5027 | for GE, GEU, and LEU. | |
5028 | ||
5029 | If the condition cannot be understood, or is an inequality floating-point | |
5030 | comparison which needs to be reversed, 0 will be returned. | |
5031 | ||
5032 | If REVERSE is nonzero, then reverse the condition prior to canonicalizing it. | |
5033 | ||
5034 | If EARLIEST is nonzero, it is a pointer to a place where the earliest | |
5035 | insn used in locating the condition was found. If a replacement test | |
5036 | of the condition is desired, it should be placed in front of that | |
5037 | insn and we will be sure that the inputs are still valid. | |
5038 | ||
5039 | If WANT_REG is nonzero, we wish the condition to be relative to that | |
5040 | register, if possible. Therefore, do not canonicalize the condition | |
b8698a0f | 5041 | further. If ALLOW_CC_MODE is nonzero, allow the condition returned |
75473b02 SB |
5042 | to be a compare to a CC mode register. |
5043 | ||
5044 | If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST | |
5045 | and at INSN. */ | |
5046 | ||
5047 | rtx | |
61aa0978 DM |
5048 | canonicalize_condition (rtx_insn *insn, rtx cond, int reverse, |
5049 | rtx_insn **earliest, | |
75473b02 SB |
5050 | rtx want_reg, int allow_cc_mode, int valid_at_insn_p) |
5051 | { | |
5052 | enum rtx_code code; | |
61aa0978 | 5053 | rtx_insn *prev = insn; |
f7d504c2 | 5054 | const_rtx set; |
75473b02 SB |
5055 | rtx tem; |
5056 | rtx op0, op1; | |
5057 | int reverse_code = 0; | |
5058 | enum machine_mode mode; | |
569f8d98 | 5059 | basic_block bb = BLOCK_FOR_INSN (insn); |
75473b02 SB |
5060 | |
5061 | code = GET_CODE (cond); | |
5062 | mode = GET_MODE (cond); | |
5063 | op0 = XEXP (cond, 0); | |
5064 | op1 = XEXP (cond, 1); | |
5065 | ||
5066 | if (reverse) | |
5067 | code = reversed_comparison_code (cond, insn); | |
5068 | if (code == UNKNOWN) | |
5069 | return 0; | |
5070 | ||
5071 | if (earliest) | |
5072 | *earliest = insn; | |
5073 | ||
5074 | /* If we are comparing a register with zero, see if the register is set | |
5075 | in the previous insn to a COMPARE or a comparison operation. Perform | |
5076 | the same tests as a function of STORE_FLAG_VALUE as find_comparison_args | |
5077 | in cse.c */ | |
5078 | ||
5079 | while ((GET_RTX_CLASS (code) == RTX_COMPARE | |
5080 | || GET_RTX_CLASS (code) == RTX_COMM_COMPARE) | |
5081 | && op1 == CONST0_RTX (GET_MODE (op0)) | |
5082 | && op0 != want_reg) | |
5083 | { | |
5084 | /* Set nonzero when we find something of interest. */ | |
5085 | rtx x = 0; | |
5086 | ||
5087 | #ifdef HAVE_cc0 | |
5088 | /* If comparison with cc0, import actual comparison from compare | |
5089 | insn. */ | |
5090 | if (op0 == cc0_rtx) | |
5091 | { | |
5092 | if ((prev = prev_nonnote_insn (prev)) == 0 | |
5093 | || !NONJUMP_INSN_P (prev) | |
5094 | || (set = single_set (prev)) == 0 | |
5095 | || SET_DEST (set) != cc0_rtx) | |
5096 | return 0; | |
5097 | ||
5098 | op0 = SET_SRC (set); | |
5099 | op1 = CONST0_RTX (GET_MODE (op0)); | |
5100 | if (earliest) | |
5101 | *earliest = prev; | |
5102 | } | |
5103 | #endif | |
5104 | ||
5105 | /* If this is a COMPARE, pick up the two things being compared. */ | |
5106 | if (GET_CODE (op0) == COMPARE) | |
5107 | { | |
5108 | op1 = XEXP (op0, 1); | |
5109 | op0 = XEXP (op0, 0); | |
5110 | continue; | |
5111 | } | |
5112 | else if (!REG_P (op0)) | |
5113 | break; | |
5114 | ||
5115 | /* Go back to the previous insn. Stop if it is not an INSN. We also | |
5116 | stop if it isn't a single set or if it has a REG_INC note because | |
5117 | we don't want to bother dealing with it. */ | |
5118 | ||
f0fc0803 | 5119 | prev = prev_nonnote_nondebug_insn (prev); |
b5b8b0ac AO |
5120 | |
5121 | if (prev == 0 | |
75473b02 | 5122 | || !NONJUMP_INSN_P (prev) |
569f8d98 ZD |
5123 | || FIND_REG_INC_NOTE (prev, NULL_RTX) |
5124 | /* In cfglayout mode, there do not have to be labels at the | |
5125 | beginning of a block, or jumps at the end, so the previous | |
5126 | conditions would not stop us when we reach bb boundary. */ | |
5127 | || BLOCK_FOR_INSN (prev) != bb) | |
75473b02 SB |
5128 | break; |
5129 | ||
5130 | set = set_of (op0, prev); | |
5131 | ||
5132 | if (set | |
5133 | && (GET_CODE (set) != SET | |
5134 | || !rtx_equal_p (SET_DEST (set), op0))) | |
5135 | break; | |
5136 | ||
5137 | /* If this is setting OP0, get what it sets it to if it looks | |
5138 | relevant. */ | |
5139 | if (set) | |
5140 | { | |
5141 | enum machine_mode inner_mode = GET_MODE (SET_DEST (set)); | |
5142 | #ifdef FLOAT_STORE_FLAG_VALUE | |
5143 | REAL_VALUE_TYPE fsfv; | |
5144 | #endif | |
5145 | ||
5146 | /* ??? We may not combine comparisons done in a CCmode with | |
5147 | comparisons not done in a CCmode. This is to aid targets | |
5148 | like Alpha that have an IEEE compliant EQ instruction, and | |
5149 | a non-IEEE compliant BEQ instruction. The use of CCmode is | |
5150 | actually artificial, simply to prevent the combination, but | |
5151 | should not affect other platforms. | |
5152 | ||
5153 | However, we must allow VOIDmode comparisons to match either | |
5154 | CCmode or non-CCmode comparison, because some ports have | |
5155 | modeless comparisons inside branch patterns. | |
5156 | ||
5157 | ??? This mode check should perhaps look more like the mode check | |
5158 | in simplify_comparison in combine. */ | |
2c8798a2 RS |
5159 | if (((GET_MODE_CLASS (mode) == MODE_CC) |
5160 | != (GET_MODE_CLASS (inner_mode) == MODE_CC)) | |
5161 | && mode != VOIDmode | |
5162 | && inner_mode != VOIDmode) | |
5163 | break; | |
5164 | if (GET_CODE (SET_SRC (set)) == COMPARE | |
5165 | || (((code == NE | |
5166 | || (code == LT | |
5167 | && val_signbit_known_set_p (inner_mode, | |
5168 | STORE_FLAG_VALUE)) | |
75473b02 | 5169 | #ifdef FLOAT_STORE_FLAG_VALUE |
2c8798a2 RS |
5170 | || (code == LT |
5171 | && SCALAR_FLOAT_MODE_P (inner_mode) | |
5172 | && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode), | |
5173 | REAL_VALUE_NEGATIVE (fsfv))) | |
75473b02 | 5174 | #endif |
2c8798a2 RS |
5175 | )) |
5176 | && COMPARISON_P (SET_SRC (set)))) | |
75473b02 SB |
5177 | x = SET_SRC (set); |
5178 | else if (((code == EQ | |
5179 | || (code == GE | |
2d0c270f BS |
5180 | && val_signbit_known_set_p (inner_mode, |
5181 | STORE_FLAG_VALUE)) | |
75473b02 SB |
5182 | #ifdef FLOAT_STORE_FLAG_VALUE |
5183 | || (code == GE | |
3d8bf70f | 5184 | && SCALAR_FLOAT_MODE_P (inner_mode) |
75473b02 SB |
5185 | && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode), |
5186 | REAL_VALUE_NEGATIVE (fsfv))) | |
5187 | #endif | |
5188 | )) | |
2c8798a2 | 5189 | && COMPARISON_P (SET_SRC (set))) |
75473b02 SB |
5190 | { |
5191 | reverse_code = 1; | |
5192 | x = SET_SRC (set); | |
5193 | } | |
2c8798a2 RS |
5194 | else if ((code == EQ || code == NE) |
5195 | && GET_CODE (SET_SRC (set)) == XOR) | |
5196 | /* Handle sequences like: | |
5197 | ||
5198 | (set op0 (xor X Y)) | |
5199 | ...(eq|ne op0 (const_int 0))... | |
5200 | ||
5201 | in which case: | |
5202 | ||
5203 | (eq op0 (const_int 0)) reduces to (eq X Y) | |
5204 | (ne op0 (const_int 0)) reduces to (ne X Y) | |
5205 | ||
5206 | This is the form used by MIPS16, for example. */ | |
5207 | x = SET_SRC (set); | |
75473b02 SB |
5208 | else |
5209 | break; | |
5210 | } | |
5211 | ||
5212 | else if (reg_set_p (op0, prev)) | |
5213 | /* If this sets OP0, but not directly, we have to give up. */ | |
5214 | break; | |
5215 | ||
5216 | if (x) | |
5217 | { | |
5218 | /* If the caller is expecting the condition to be valid at INSN, | |
5219 | make sure X doesn't change before INSN. */ | |
5220 | if (valid_at_insn_p) | |
5221 | if (modified_in_p (x, prev) || modified_between_p (x, prev, insn)) | |
5222 | break; | |
5223 | if (COMPARISON_P (x)) | |
5224 | code = GET_CODE (x); | |
5225 | if (reverse_code) | |
5226 | { | |
5227 | code = reversed_comparison_code (x, prev); | |
5228 | if (code == UNKNOWN) | |
5229 | return 0; | |
5230 | reverse_code = 0; | |
5231 | } | |
5232 | ||
5233 | op0 = XEXP (x, 0), op1 = XEXP (x, 1); | |
5234 | if (earliest) | |
5235 | *earliest = prev; | |
5236 | } | |
5237 | } | |
5238 | ||
5239 | /* If constant is first, put it last. */ | |
5240 | if (CONSTANT_P (op0)) | |
5241 | code = swap_condition (code), tem = op0, op0 = op1, op1 = tem; | |
5242 | ||
5243 | /* If OP0 is the result of a comparison, we weren't able to find what | |
5244 | was really being compared, so fail. */ | |
5245 | if (!allow_cc_mode | |
5246 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC) | |
5247 | return 0; | |
5248 | ||
5249 | /* Canonicalize any ordered comparison with integers involving equality | |
5250 | if we can do computations in the relevant mode and we do not | |
5251 | overflow. */ | |
5252 | ||
5253 | if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC | |
481683e1 | 5254 | && CONST_INT_P (op1) |
75473b02 | 5255 | && GET_MODE (op0) != VOIDmode |
5511bc5a | 5256 | && GET_MODE_PRECISION (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) |
75473b02 SB |
5257 | { |
5258 | HOST_WIDE_INT const_val = INTVAL (op1); | |
5259 | unsigned HOST_WIDE_INT uconst_val = const_val; | |
5260 | unsigned HOST_WIDE_INT max_val | |
5261 | = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0)); | |
5262 | ||
5263 | switch (code) | |
5264 | { | |
5265 | case LE: | |
5266 | if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1) | |
5267 | code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0)); | |
5268 | break; | |
5269 | ||
5270 | /* When cross-compiling, const_val might be sign-extended from | |
5271 | BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */ | |
5272 | case GE: | |
c04fc4f0 EB |
5273 | if ((const_val & max_val) |
5274 | != ((unsigned HOST_WIDE_INT) 1 | |
5511bc5a | 5275 | << (GET_MODE_PRECISION (GET_MODE (op0)) - 1))) |
75473b02 SB |
5276 | code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0)); |
5277 | break; | |
5278 | ||
5279 | case LEU: | |
5280 | if (uconst_val < max_val) | |
5281 | code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0)); | |
5282 | break; | |
5283 | ||
5284 | case GEU: | |
5285 | if (uconst_val != 0) | |
5286 | code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0)); | |
5287 | break; | |
5288 | ||
5289 | default: | |
5290 | break; | |
5291 | } | |
5292 | } | |
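  /* So, for example, (le (reg X) (const_int 4)) becomes
     (lt (reg X) (const_int 5)) and (leu (reg X) (const_int 4)) becomes
     (ltu (reg X) (const_int 5)), provided the adjusted constant cannot
     wrap around the mode's extreme value.  */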
5293 | ||
5294 | /* Never return CC0; return zero instead. */ | |
5295 | if (CC0_P (op0)) | |
5296 | return 0; | |
5297 | ||
5298 | return gen_rtx_fmt_ee (code, VOIDmode, op0, op1); | |
5299 | } | |
5300 | ||
5301 | /* Given a jump insn JUMP, return the condition that will cause it to branch | |
5302 | to its JUMP_LABEL. If the condition cannot be understood, or is an | |
5303 | inequality floating-point comparison which needs to be reversed, 0 will | |
5304 | be returned. | |
5305 | ||
5306 | If EARLIEST is nonzero, it is a pointer to a place where the earliest | |
5307 | insn used in locating the condition was found. If a replacement test | |
5308 | of the condition is desired, it should be placed in front of that | |
5309 | insn and we will be sure that the inputs are still valid. If EARLIEST | |
5310 | is null, the returned condition will be valid at INSN. | |
5311 | ||
5312 | If ALLOW_CC_MODE is nonzero, allow the condition returned to be a | |
5313 | compare CC mode register. | |
5314 | ||
5315 | VALID_AT_INSN_P is the same as for canonicalize_condition. */ | |
5316 | ||
5317 | rtx | |
61aa0978 DM |
5318 | get_condition (rtx_insn *jump, rtx_insn **earliest, int allow_cc_mode, |
5319 | int valid_at_insn_p) | |
75473b02 SB |
5320 | { |
5321 | rtx cond; | |
5322 | int reverse; | |
5323 | rtx set; | |
5324 | ||
5325 | /* If this is not a standard conditional jump, we can't parse it. */ | |
5326 | if (!JUMP_P (jump) | |
5327 | || ! any_condjump_p (jump)) | |
5328 | return 0; | |
5329 | set = pc_set (jump); | |
5330 | ||
5331 | cond = XEXP (SET_SRC (set), 0); | |
5332 | ||
5333 | /* If this branches to JUMP_LABEL when the condition is false, reverse | |
5334 | the condition. */ | |
5335 | reverse | |
5336 | = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF | |
a827d9b1 | 5337 | && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump); |
75473b02 SB |
5338 | |
5339 | return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX, | |
5340 | allow_cc_mode, valid_at_insn_p); | |
5341 | } | |
5342 | ||
b12cbf2c AN |
5343 | /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on |
5344 | TARGET_MODE_REP_EXTENDED. | |
5345 | ||
5346 | Note that we assume that the property of | |
5347 | TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes | |
5348 | narrower than mode B. I.e., if A is a mode narrower than B then in | |
5349 | order to be able to operate on it in mode B, mode A needs to | |
5350 | satisfy the requirements set by the representation of mode B. */ | |
5351 | ||
5352 | static void | |
5353 | init_num_sign_bit_copies_in_rep (void) | |
5354 | { | |
5355 | enum machine_mode mode, in_mode; | |
5356 | ||
5357 | for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode; | |
5358 | in_mode = GET_MODE_WIDER_MODE (mode)) | |
5359 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode; | |
5360 | mode = GET_MODE_WIDER_MODE (mode)) | |
5361 | { | |
5362 | enum machine_mode i; | |
5363 | ||
5364 | /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED | |
5365 | extends to the next widest mode. */ | |
5366 | gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN | |
5367 | || GET_MODE_WIDER_MODE (mode) == in_mode); | |
5368 | ||
5369 | /* We are in in_mode. Count how many bits outside of mode | |
5370 | have to be copies of the sign-bit. */ | |
5371 | for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i)) | |
5372 | { | |
5373 | enum machine_mode wider = GET_MODE_WIDER_MODE (i); | |
5374 | ||
5375 | if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND | |
5376 | /* We can only check sign-bit copies starting from the | |
5377 | top-bit. In order to be able to check the bits we | |
5378 | have already seen we pretend that subsequent bits | |
5379 | have to be sign-bit copies too. */ | |
5380 | || num_sign_bit_copies_in_rep [in_mode][mode]) | |
5381 | num_sign_bit_copies_in_rep [in_mode][mode] | |
5511bc5a | 5382 | += GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i); |
b12cbf2c AN |
5383 | } |
5384 | } | |
5385 | } | |
5386 | ||
d3b72690 PB |
5387 | /* Suppose that truncation from the machine mode of X to MODE is not a |
5388 | no-op. See if there is anything special about X so that we can | |
5389 | assume it already contains a truncated value of MODE. */ | |
5390 | ||
5391 | bool | |
fa233e34 | 5392 | truncated_to_mode (enum machine_mode mode, const_rtx x) |
d3b72690 | 5393 | { |
b12cbf2c AN |
5394 | /* This register has already been used in MODE without explicit |
5395 | truncation. */ | |
5396 | if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x)) | |
5397 | return true; | |
5398 | ||
5399 | /* See if we already satisfy the requirements of MODE. If yes we | |
5400 | can just switch to MODE. */ | |
5401 | if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode] | |
5402 | && (num_sign_bit_copies (x, GET_MODE (x)) | |
5403 | >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1)) | |
5404 | return true; | |
d3b72690 | 5405 | |
b12cbf2c AN |
5406 | return false; |
5407 | } | |
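/* E.g. if the target's representation demands 16 sign-bit copies for an
   SImode value to be operated on as HImode
   (num_sign_bit_copies_in_rep[SImode][HImode] == 16) and X is known to
   have at least 17, X already holds a truncated HImode value and the
   explicit truncation can be skipped.  */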
cf94b0fc | 5408 | \f |
476dd0ce RS |
5409 | /* Return true if RTX code CODE has a single sequence of zero or more |
5410 | "e" operands and no rtvec operands. Initialize its rtx_all_subrtx_bounds | |
5411 | entry in that case. */ | |
5412 | ||
5413 | static bool | |
5414 | setup_reg_subrtx_bounds (unsigned int code) | |
5415 | { | |
5416 | const char *format = GET_RTX_FORMAT ((enum rtx_code) code); | |
5417 | unsigned int i = 0; | |
5418 | for (; format[i] != 'e'; ++i) | |
5419 | { | |
5420 | if (!format[i]) | |
5421 | /* No subrtxes. Leave start and count as 0. */ | |
5422 | return true; | |
5423 | if (format[i] == 'E' || format[i] == 'V') | |
5424 | return false; | |
5425 | } | |
5426 | ||
5427 | /* Record the sequence of 'e's. */ | |
5428 | rtx_all_subrtx_bounds[code].start = i; | |
5429 | do | |
5430 | ++i; | |
5431 | while (format[i] == 'e'); | |
5432 | rtx_all_subrtx_bounds[code].count = i - rtx_all_subrtx_bounds[code].start; | |
5433 | /* rtl-iter.h relies on this. */ | |
5434 | gcc_checking_assert (rtx_all_subrtx_bounds[code].count <= 3); | |
5435 | ||
5436 | for (; format[i]; ++i) | |
5437 | if (format[i] == 'E' || format[i] == 'V' || format[i] == 'e') | |
5438 | return false; | |
5439 | ||
5440 | return true; | |
5441 | } | |
5442 | ||
cf94b0fc | 5443 | /* Initialize non_rtx_starting_operands, which is used to speed up |
476dd0ce | 5444 | for_each_rtx, and rtx_all_subrtx_bounds. */ |
cf94b0fc PB |
5445 | void |
5446 | init_rtlanal (void) | |
5447 | { | |
5448 | int i; | |
5449 | for (i = 0; i < NUM_RTX_CODE; i++) | |
5450 | { | |
5451 | const char *format = GET_RTX_FORMAT (i); | |
5452 | const char *first = strpbrk (format, "eEV"); | |
5453 | non_rtx_starting_operands[i] = first ? first - format : -1; | |
476dd0ce RS |
5454 | if (!setup_reg_subrtx_bounds (i)) |
5455 | rtx_all_subrtx_bounds[i].count = UCHAR_MAX; | |
5456 | if (GET_RTX_CLASS (i) != RTX_CONST_OBJ) | |
5457 | rtx_nonconst_subrtx_bounds[i] = rtx_all_subrtx_bounds[i]; | |
cf94b0fc | 5458 | } |
b12cbf2c AN |
5459 | |
5460 | init_num_sign_bit_copies_in_rep (); | |
cf94b0fc | 5461 | } |
3d8504ac RS |
5462 | \f |
5463 | /* Check whether this is a constant pool constant. */ | |
5464 | bool | |
5465 | constant_pool_constant_p (rtx x) | |
5466 | { | |
5467 | x = avoid_constant_pool_reference (x); | |
48175537 | 5468 | return CONST_DOUBLE_P (x); |
3d8504ac | 5469 | } |
842e098c AN |
5470 | \f |
5471 | /* If M is a bitmask that selects a field of low-order bits within an item but | |
5472 | not the entire word, return the length of the field. Return -1 otherwise. | |
5473 | M is used in machine mode MODE. */ | |
5474 | ||
5475 | int | |
5476 | low_bitmask_len (enum machine_mode mode, unsigned HOST_WIDE_INT m) | |
5477 | { | |
5478 | if (mode != VOIDmode) | |
5479 | { | |
5511bc5a | 5480 | if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT) |
842e098c AN |
5481 | return -1; |
5482 | m &= GET_MODE_MASK (mode); | |
5483 | } | |
5484 | ||
5485 | return exact_log2 (m + 1); | |
5486 | } | |
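/* E.g. low_bitmask_len (SImode, 0x3f) == 6, since 0x3f + 1 == 0x40 is an
   exact power of two, whereas low_bitmask_len (SImode, 0x36) == -1
   because that mask has a hole in it.  */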
372d6395 RS |
5487 | |
5488 | /* Return the mode of MEM's address. */ | |
5489 | ||
5490 | enum machine_mode | |
5491 | get_address_mode (rtx mem) | |
5492 | { | |
5493 | enum machine_mode mode; | |
5494 | ||
5495 | gcc_assert (MEM_P (mem)); | |
5496 | mode = GET_MODE (XEXP (mem, 0)); | |
5497 | if (mode != VOIDmode) | |
5498 | return mode; | |
5499 | return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem)); | |
5500 | } | |
ca3f2950 SB |
5501 | \f |
5502 | /* Split up a CONST_DOUBLE or integer constant rtx | |
5503 | into two rtx's for single words, | |
5504 | storing in *FIRST the word that comes first in memory in the target | |
807e902e KZ |
5505 | and in *SECOND the other. |
5506 | ||
5507 | TODO: This function needs to be rewritten to work on any size | |
5508 | integer. */ | |
ca3f2950 SB |
5509 | |
5510 | void | |
5511 | split_double (rtx value, rtx *first, rtx *second) | |
5512 | { | |
5513 | if (CONST_INT_P (value)) | |
5514 | { | |
5515 | if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD)) | |
5516 | { | |
5517 | /* In this case the CONST_INT holds both target words. | |
5518 | Extract the bits from it into two word-sized pieces. | |
5519 | Sign extend each half to HOST_WIDE_INT. */ | |
5520 | unsigned HOST_WIDE_INT low, high; | |
5521 | unsigned HOST_WIDE_INT mask, sign_bit, sign_extend; | |
5522 | unsigned bits_per_word = BITS_PER_WORD; | |
5523 | ||
5524 | /* Set sign_bit to the most significant bit of a word. */ | |
5525 | sign_bit = 1; | |
5526 | sign_bit <<= bits_per_word - 1; | |
5527 | ||
5528 | /* Set mask so that all bits of the word are set. We could | |
5529 | have used 1 << BITS_PER_WORD instead of basing the | |
5530 | calculation on sign_bit. However, on machines where | |
5531 | HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a | |
5532 | compiler warning, even though the code would never be | |
5533 | executed. */ | |
5534 | mask = sign_bit << 1; | |
5535 | mask--; | |
5536 | ||
5537 | /* Set sign_extend as any remaining bits. */ | |
5538 | sign_extend = ~mask; | |
5539 | ||
5540 | /* Pick the lower word and sign-extend it. */ | |
5541 | low = INTVAL (value); | |
5542 | low &= mask; | |
5543 | if (low & sign_bit) | |
5544 | low |= sign_extend; | |
5545 | ||
5546 | /* Pick the higher word, shifted to the least significant | |
5547 | bits, and sign-extend it. */ | |
5548 | high = INTVAL (value); | |
5549 | high >>= bits_per_word - 1; | |
5550 | high >>= 1; | |
5551 | high &= mask; | |
5552 | if (high & sign_bit) | |
5553 | high |= sign_extend; | |
5554 | ||
5555 | /* Store the words in the target machine order. */ | |
5556 | if (WORDS_BIG_ENDIAN) | |
5557 | { | |
5558 | *first = GEN_INT (high); | |
5559 | *second = GEN_INT (low); | |
5560 | } | |
5561 | else | |
5562 | { | |
5563 | *first = GEN_INT (low); | |
5564 | *second = GEN_INT (high); | |
5565 | } | |
5566 | } | |
5567 | else | |
5568 | { | |
5569 | /* The rule for using CONST_INT for a wider mode | |
5570 | is that we regard the value as signed. | |
5571 | So sign-extend it. */ | |
5572 | rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx); | |
5573 | if (WORDS_BIG_ENDIAN) | |
5574 | { | |
5575 | *first = high; | |
5576 | *second = value; | |
5577 | } | |
5578 | else | |
5579 | { | |
5580 | *first = value; | |
5581 | *second = high; | |
5582 | } | |
5583 | } | |
5584 | } | |
807e902e KZ |
5585 | else if (GET_CODE (value) == CONST_WIDE_INT) |
5586 | { | |
5587 | /* All of this is scary code and needs to be converted to | |
5588 | properly work with any size integer. */ | |
5589 | gcc_assert (CONST_WIDE_INT_NUNITS (value) == 2); | |
5590 | if (WORDS_BIG_ENDIAN) | |
5591 | { | |
5592 | *first = GEN_INT (CONST_WIDE_INT_ELT (value, 1)); | |
5593 | *second = GEN_INT (CONST_WIDE_INT_ELT (value, 0)); | |
5594 | } | |
5595 | else | |
5596 | { | |
5597 | *first = GEN_INT (CONST_WIDE_INT_ELT (value, 0)); | |
5598 | *second = GEN_INT (CONST_WIDE_INT_ELT (value, 1)); | |
5599 | } | |
5600 | } | |
48175537 | 5601 | else if (!CONST_DOUBLE_P (value)) |
ca3f2950 SB |
5602 | { |
5603 | if (WORDS_BIG_ENDIAN) | |
5604 | { | |
5605 | *first = const0_rtx; | |
5606 | *second = value; | |
5607 | } | |
5608 | else | |
5609 | { | |
5610 | *first = value; | |
5611 | *second = const0_rtx; | |
5612 | } | |
5613 | } | |
5614 | else if (GET_MODE (value) == VOIDmode | |
5615 | /* This is the old way we did CONST_DOUBLE integers. */ | |
5616 | || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT) | |
5617 | { | |
5618 | /* In an integer, the words are defined as most and least significant. | |
5619 | So order them by the target's convention. */ | |
5620 | if (WORDS_BIG_ENDIAN) | |
5621 | { | |
5622 | *first = GEN_INT (CONST_DOUBLE_HIGH (value)); | |
5623 | *second = GEN_INT (CONST_DOUBLE_LOW (value)); | |
5624 | } | |
5625 | else | |
5626 | { | |
5627 | *first = GEN_INT (CONST_DOUBLE_LOW (value)); | |
5628 | *second = GEN_INT (CONST_DOUBLE_HIGH (value)); | |
5629 | } | |
5630 | } | |
5631 | else | |
5632 | { | |
5633 | REAL_VALUE_TYPE r; | |
5634 | long l[2]; | |
5635 | REAL_VALUE_FROM_CONST_DOUBLE (r, value); | |
5636 | ||
5637 | /* Note, this converts the REAL_VALUE_TYPE to the target's | |
5638 | format, splits up the floating point double and outputs | |
5639 | exactly 32 bits of it into each of l[0] and l[1] -- | |
5640 | not necessarily BITS_PER_WORD bits. */ | |
5641 | REAL_VALUE_TO_TARGET_DOUBLE (r, l); | |
5642 | ||
5643 | /* If 32 bits is an entire word for the target, but not for the host, | |
5644 | then sign-extend on the host so that the number will look the same | |
5645 | way on the host that it would on the target. See for instance | |
5646 | simplify_unary_operation. The #if is needed to avoid compiler | |
5647 | warnings. */ | |
5648 | ||
5649 | #if HOST_BITS_PER_LONG > 32 | |
5650 | if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32) | |
5651 | { | |
5652 | if (l[0] & ((long) 1 << 31)) | |
5653 | l[0] |= ((long) (-1) << 32); | |
5654 | if (l[1] & ((long) 1 << 31)) | |
5655 | l[1] |= ((long) (-1) << 32); | |
5656 | } | |
5657 | #endif | |
5658 | ||
5659 | *first = GEN_INT (l[0]); | |
5660 | *second = GEN_INT (l[1]); | |
5661 | } | |
5662 | } | |
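/* E.g. with 32-bit words and a 64-bit HOST_WIDE_INT, splitting
   (const_int 0x123456789abcdef0) yields 0xffffffff9abcdef0 (the low
   word, sign-extended on the host) and 0x12345678, stored in *FIRST and
   *SECOND according to WORDS_BIG_ENDIAN.  */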
5663 | ||
3936bafc YR |
5664 | /* Return true if X is a sign_extract or zero_extract from the least |
5665 | significant bit. */ | |
5666 | ||
5667 | static bool | |
5668 | lsb_bitfield_op_p (rtx x) | |
5669 | { | |
5670 | if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS) | |
5671 | { | |
5672 | enum machine_mode mode = GET_MODE (XEXP (x, 0)); | |
a9195970 | 5673 | HOST_WIDE_INT len = INTVAL (XEXP (x, 1)); |
3936bafc YR |
5674 | HOST_WIDE_INT pos = INTVAL (XEXP (x, 2)); |
5675 | ||
5676 | return (pos == (BITS_BIG_ENDIAN ? GET_MODE_PRECISION (mode) - len : 0)); | |
5677 | } | |
5678 | return false; | |
5679 | } | |
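/* E.g. on a target where BITS_BIG_ENDIAN is false,
   (zero_extract:SI (reg:SI A) (const_int 8) (const_int 0)) extracts the
   low byte, so pos == 0 and the function returns true; with
   BITS_BIG_ENDIAN the least significant field would instead start at
   GET_MODE_PRECISION (mode) - len == 24.  */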
5680 | ||
277f65de RS |
5681 | /* Strip outer address "mutations" from LOC and return a pointer to the |
5682 | inner value. If OUTER_CODE is nonnull, store the code of the innermost | |
5683 | stripped expression there. | |
5684 | ||
5685 | "Mutations" either convert between modes or apply some kind of | |
3936bafc | 5686 | extension, truncation or alignment. */ |
277f65de RS |
5687 | |
5688 | rtx * | |
5689 | strip_address_mutations (rtx *loc, enum rtx_code *outer_code) | |
5690 | { | |
5691 | for (;;) | |
5692 | { | |
5693 | enum rtx_code code = GET_CODE (*loc); | |
5694 | if (GET_RTX_CLASS (code) == RTX_UNARY) | |
5695 | /* Things like SIGN_EXTEND, ZERO_EXTEND and TRUNCATE can be | |
5696 | used to convert between pointer sizes. */ | |
5697 | loc = &XEXP (*loc, 0); | |
3936bafc YR |
5698 | else if (lsb_bitfield_op_p (*loc)) |
5699 | /* A [SIGN|ZERO]_EXTRACT from the least significant bit effectively | |
5700 | acts as a combined truncation and extension. */ | |
5701 | loc = &XEXP (*loc, 0); | |
277f65de RS |
5702 | else if (code == AND && CONST_INT_P (XEXP (*loc, 1))) |
5703 | /* (and ... (const_int -X)) is used to align to X bytes. */ | |
5704 | loc = &XEXP (*loc, 0); | |
163497f1 VM |
5705 | else if (code == SUBREG |
5706 | && !OBJECT_P (SUBREG_REG (*loc)) | |
5707 | && subreg_lowpart_p (*loc)) | |
5708 | /* A lowpart (subreg (operator ...) ...) is used for mode | |
5709 | conversion too. */ | |
99a0106f | 5710 | loc = &SUBREG_REG (*loc); |
277f65de RS |
5711 | else |
5712 | return loc; | |
5713 | if (outer_code) | |
5714 | *outer_code = code; | |
5715 | } | |
5716 | } | |
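/* E.g. given (and (plus (reg B) (reg I)) (const_int -4)), the loop above
   strips the alignment AND and returns a pointer to the (plus ...)
   inside it; a (sign_extend:DI (plus:SI ...)) used for pointer-size
   conversion is unwrapped in the same way.  */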
5717 | ||
ec5a3504 RS |
5718 | /* Return true if CODE applies some kind of scale. The scaled value
5719 | is the first operand and the scale is the second. */ | |
277f65de RS |
5720 | |
5721 | static bool | |
ec5a3504 | 5722 | binary_scale_code_p (enum rtx_code code) |
277f65de | 5723 | { |
ec5a3504 RS |
5724 | return (code == MULT |
5725 | || code == ASHIFT | |
5726 | /* Needed by ARM targets. */ | |
5727 | || code == ASHIFTRT | |
5728 | || code == LSHIFTRT | |
5729 | || code == ROTATE | |
5730 | || code == ROTATERT); | |
277f65de RS |
5731 | } |
5732 | ||
ec5a3504 RS |
5733 | /* If *INNER can be interpreted as a base, return a pointer to the inner term |
5734 | (see address_info). Return null otherwise. */ | |
277f65de | 5735 | |
ec5a3504 RS |
5736 | static rtx * |
5737 | get_base_term (rtx *inner) | |
277f65de | 5738 | { |
ec5a3504 RS |
5739 | if (GET_CODE (*inner) == LO_SUM) |
5740 | inner = strip_address_mutations (&XEXP (*inner, 0)); | |
5741 | if (REG_P (*inner) | |
5742 | || MEM_P (*inner) | |
5743 | || GET_CODE (*inner) == SUBREG) | |
5744 | return inner; | |
5745 | return 0; | |
5746 | } | |
5747 | ||
5748 | /* If *INNER can be interpreted as an index, return a pointer to the inner term | |
5749 | (see address_info). Return null otherwise. */ | |
5750 | ||
5751 | static rtx * | |
5752 | get_index_term (rtx *inner) | |
5753 | { | |
5754 | /* At present, only constant scales are allowed. */ | |
5755 | if (binary_scale_code_p (GET_CODE (*inner)) && CONSTANT_P (XEXP (*inner, 1))) | |
5756 | inner = strip_address_mutations (&XEXP (*inner, 0)); | |
5757 | if (REG_P (*inner) | |
5758 | || MEM_P (*inner) | |
5759 | || GET_CODE (*inner) == SUBREG) | |
5760 | return inner; | |
5761 | return 0; | |
277f65de RS |
5762 | } |
5763 | ||
5764 | /* Set the segment part of address INFO to LOC, given that INNER is the | |
5765 | unmutated value. */ | |
5766 | ||
5767 | static void | |
5768 | set_address_segment (struct address_info *info, rtx *loc, rtx *inner) | |
5769 | { | |
277f65de RS |
5770 | gcc_assert (!info->segment); |
5771 | info->segment = loc; | |
5772 | info->segment_term = inner; | |
5773 | } | |
5774 | ||
5775 | /* Set the base part of address INFO to LOC, given that INNER is the | |
5776 | unmutated value. */ | |
5777 | ||
5778 | static void | |
5779 | set_address_base (struct address_info *info, rtx *loc, rtx *inner) | |
5780 | { | |
277f65de RS |
5781 | gcc_assert (!info->base); |
5782 | info->base = loc; | |
5783 | info->base_term = inner; | |
5784 | } | |
5785 | ||
5786 | /* Set the index part of address INFO to LOC, given that INNER is the | |
5787 | unmutated value. */ | |
5788 | ||
5789 | static void | |
5790 | set_address_index (struct address_info *info, rtx *loc, rtx *inner) | |
5791 | { | |
277f65de RS |
5792 | gcc_assert (!info->index); |
5793 | info->index = loc; | |
5794 | info->index_term = inner; | |
5795 | } | |
5796 | ||
5797 | /* Set the displacement part of address INFO to LOC, given that INNER | |
5798 | is the constant term. */ | |
5799 | ||
5800 | static void | |
5801 | set_address_disp (struct address_info *info, rtx *loc, rtx *inner) | |
5802 | { | |
277f65de RS |
5803 | gcc_assert (!info->disp); |
5804 | info->disp = loc; | |
5805 | info->disp_term = inner; | |
5806 | } | |
5807 | ||
5808 | /* INFO->INNER describes a {PRE,POST}_{INC,DEC} address. Set up the | |
5809 | rest of INFO accordingly. */ | |
5810 | ||
5811 | static void | |
5812 | decompose_incdec_address (struct address_info *info) | |
5813 | { | |
5814 | info->autoinc_p = true; | |
5815 | ||
5816 | rtx *base = &XEXP (*info->inner, 0); | |
5817 | set_address_base (info, base, base); | |
5818 | gcc_checking_assert (info->base == info->base_term); | |
5819 | ||
5820 | /* These addresses are only valid when the size of the addressed | |
5821 | value is known. */ | |
5822 | gcc_checking_assert (info->mode != VOIDmode); | |
5823 | } | |
5824 | ||
5825 | /* INFO->INNER describes a {PRE,POST}_MODIFY address. Set up the rest | |
5826 | of INFO accordingly. */ | |
5827 | ||
5828 | static void | |
5829 | decompose_automod_address (struct address_info *info) | |
5830 | { | |
5831 | info->autoinc_p = true; | |
5832 | ||
5833 | rtx *base = &XEXP (*info->inner, 0); | |
5834 | set_address_base (info, base, base); | |
5835 | gcc_checking_assert (info->base == info->base_term); | |
5836 | ||
5837 | rtx plus = XEXP (*info->inner, 1); | |
5838 | gcc_assert (GET_CODE (plus) == PLUS); | |
5839 | ||
5840 | info->base_term2 = &XEXP (plus, 0); | |
5841 | gcc_checking_assert (rtx_equal_p (*info->base_term, *info->base_term2)); | |
5842 | ||
5843 | rtx *step = &XEXP (plus, 1); | |
5844 | rtx *inner_step = strip_address_mutations (step); | |
5845 | if (CONSTANT_P (*inner_step)) | |
5846 | set_address_disp (info, step, inner_step); | |
5847 | else | |
5848 | set_address_index (info, step, inner_step); | |
5849 | } | |
5850 | ||
5851 | /* Treat *LOC as a tree of PLUS operands and store pointers to the summed | |
5852 | values in [PTR, END). Return a pointer to the end of the used array. */ | |
5853 | ||
5854 | static rtx ** | |
5855 | extract_plus_operands (rtx *loc, rtx **ptr, rtx **end) | |
5856 | { | |
5857 | rtx x = *loc; | |
5858 | if (GET_CODE (x) == PLUS) | |
5859 | { | |
5860 | ptr = extract_plus_operands (&XEXP (x, 0), ptr, end); | |
5861 | ptr = extract_plus_operands (&XEXP (x, 1), ptr, end); | |
5862 | } | |
5863 | else | |
5864 | { | |
5865 | gcc_assert (ptr != end); | |
5866 | *ptr++ = loc; | |
5867 | } | |
5868 | return ptr; | |
5869 | } | |
5870 | ||
5871 | /* Evaluate the likelihood of X being a base or index value, returning | |
5872 | positive if it is likely to be a base, negative if it is likely to be | |
5873 | an index, and 0 if we can't tell. Make the magnitude of the return | |
5874 | value reflect the amount of confidence we have in the answer. | |
5875 | ||
5876 | MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1. */ | |
5877 | ||
5878 | static int | |
5879 | baseness (rtx x, enum machine_mode mode, addr_space_t as, | |
5880 | enum rtx_code outer_code, enum rtx_code index_code) | |
5881 | { | |
277f65de RS |
5882 | /* Believe *_POINTER unless the address shape requires otherwise. */ |
5883 | if (REG_P (x) && REG_POINTER (x)) | |
5884 | return 2; | |
5885 | if (MEM_P (x) && MEM_POINTER (x)) | |
5886 | return 2; | |
5887 | ||
5888 | if (REG_P (x) && HARD_REGISTER_P (x)) | |
5889 | { | |
5890 | /* X is a hard register. If it only fits one of the base | |
5891 | or index classes, choose that interpretation. */ | |
5892 | int regno = REGNO (x); | |
5893 | bool base_p = ok_for_base_p_1 (regno, mode, as, outer_code, index_code); | |
5894 | bool index_p = REGNO_OK_FOR_INDEX_P (regno); | |
5895 | if (base_p != index_p) | |
5896 | return base_p ? 1 : -1; | |
5897 | } | |
5898 | return 0; | |
5899 | } | |
5900 | ||
5901 | /* INFO->INNER describes a normal, non-automodified address. | |
5902 | Fill in the rest of INFO accordingly. */ | |
5903 | ||
5904 | static void | |
5905 | decompose_normal_address (struct address_info *info) | |
5906 | { | |
5907 | /* Treat the address as the sum of up to four values. */ | |
5908 | rtx *ops[4]; | |
5909 | size_t n_ops = extract_plus_operands (info->inner, ops, | |
5910 | ops + ARRAY_SIZE (ops)) - ops; | |
5911 | ||
5912 | /* If there is more than one component, any base component is in a PLUS. */ | |
5913 | if (n_ops > 1) | |
5914 | info->base_outer_code = PLUS; | |
5915 | ||
ec5a3504 RS |
5916 | /* Try to classify each sum operand now. Leave those that could be |
5917 | either a base or an index in OPS. */ | |
277f65de RS |
5918 | rtx *inner_ops[4]; |
5919 | size_t out = 0; | |
5920 | for (size_t in = 0; in < n_ops; ++in) | |
5921 | { | |
5922 | rtx *loc = ops[in]; | |
5923 | rtx *inner = strip_address_mutations (loc); | |
5924 | if (CONSTANT_P (*inner)) | |
5925 | set_address_disp (info, loc, inner); | |
5926 | else if (GET_CODE (*inner) == UNSPEC) | |
5927 | set_address_segment (info, loc, inner); | |
5928 | else | |
5929 | { | |
ec5a3504 RS |
5930 | /* The only other possibilities are a base or an index. */ |
5931 | rtx *base_term = get_base_term (inner); | |
5932 | rtx *index_term = get_index_term (inner); | |
5933 | gcc_assert (base_term || index_term); | |
5934 | if (!base_term) | |
5935 | set_address_index (info, loc, index_term); | |
5936 | else if (!index_term) | |
5937 | set_address_base (info, loc, base_term); | |
5938 | else | |
5939 | { | |
5940 | gcc_assert (base_term == index_term); | |
5941 | ops[out] = loc; | |
5942 | inner_ops[out] = base_term; | |
5943 | ++out; | |
5944 | } | |
277f65de RS |
5945 | } |
5946 | } | |
5947 | ||
5948 | /* Classify the remaining OPS members as bases and indexes. */ | |
5949 | if (out == 1) | |
5950 | { | |
ec5a3504 RS |
5951 | /* If no base has been found yet, treat this operand as the | |
5952 | base, even if an index has already been seen; otherwise the | |
5953 | base slot is taken and this operand must be the index. */ | |
5954 | if (!info->base) | |
277f65de RS |
5955 | set_address_base (info, ops[0], inner_ops[0]); |
5956 | else | |
5957 | set_address_index (info, ops[0], inner_ops[0]); | |
5958 | } | |
5959 | else if (out == 2) | |
5960 | { | |
5961 | /* In the event of a tie, assume the base comes first. */ | |
5962 | if (baseness (*inner_ops[0], info->mode, info->as, PLUS, | |
5963 | GET_CODE (*ops[1])) | |
5964 | >= baseness (*inner_ops[1], info->mode, info->as, PLUS, | |
5965 | GET_CODE (*ops[0]))) | |
5966 | { | |
5967 | set_address_base (info, ops[0], inner_ops[0]); | |
5968 | set_address_index (info, ops[1], inner_ops[1]); | |
5969 | } | |
5970 | else | |
5971 | { | |
5972 | set_address_base (info, ops[1], inner_ops[1]); | |
5973 | set_address_index (info, ops[0], inner_ops[0]); | |
5974 | } | |
5975 | } | |
5976 | else | |
5977 | gcc_assert (out == 0); | |
5978 | } | |
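A worked trace, hedged on the behaviour of get_base_term and get_index_term (defined earlier in this file), for an inner sum of three hypothetical terms:

  /* Inner address: (plus (reg r1)
                          (plus (mult (reg r2) (const_int 4))
                                (const_int 16)))
     (const_int 16)                 -> CONSTANT_P, so set_address_disp;
     (mult (reg r2) (const_int 4))  -> only an index term, so
                                       set_address_index;
     (reg r1)                       -> base or index, deferred; finally
                                       out == 1 and !info->base, so
                                       set_address_base.  */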
5979 | ||
5980 | /* Describe address *LOC in *INFO. MODE is the mode of the addressed value, | |
5981 | or VOIDmode if not known. AS is the address space associated with LOC. | |
5982 | OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise. */ | |
5983 | ||
5984 | void | |
5985 | decompose_address (struct address_info *info, rtx *loc, enum machine_mode mode, | |
5986 | addr_space_t as, enum rtx_code outer_code) | |
5987 | { | |
5988 | memset (info, 0, sizeof (*info)); | |
5989 | info->mode = mode; | |
5990 | info->as = as; | |
5991 | info->addr_outer_code = outer_code; | |
5992 | info->outer = loc; | |
5993 | info->inner = strip_address_mutations (loc, &outer_code); | |
5994 | info->base_outer_code = outer_code; | |
5995 | switch (GET_CODE (*info->inner)) | |
5996 | { | |
5997 | case PRE_DEC: | |
5998 | case PRE_INC: | |
5999 | case POST_DEC: | |
6000 | case POST_INC: | |
6001 | decompose_incdec_address (info); | |
6002 | break; | |
6003 | ||
6004 | case PRE_MODIFY: | |
6005 | case POST_MODIFY: | |
6006 | decompose_automod_address (info); | |
6007 | break; | |
6008 | ||
6009 | default: | |
6010 | decompose_normal_address (info); | |
6011 | break; | |
6012 | } | |
6013 | } | |
6014 | ||
6015 | /* Describe bare address operand LOC (one not wrapped in a MEM) in INFO. */ | |
6016 | ||
6017 | void | |
6018 | decompose_lea_address (struct address_info *info, rtx *loc) | |
6019 | { | |
6020 | decompose_address (info, loc, VOIDmode, ADDR_SPACE_GENERIC, ADDRESS); | |
6021 | } | |
6022 | ||
6023 | /* Describe the address of MEM X in INFO. */ | |
6024 | ||
6025 | void | |
6026 | decompose_mem_address (struct address_info *info, rtx x) | |
6027 | { | |
6028 | gcc_assert (MEM_P (x)); | |
6029 | decompose_address (info, &XEXP (x, 0), GET_MODE (x), | |
6030 | MEM_ADDR_SPACE (x), MEM); | |
6031 | } | |
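A minimal usage sketch, assuming Pmode addressing and hypothetical register numbers R1 and R2; it builds the scaled-index address traced above and decomposes the resulting MEM:

  rtx base = gen_rtx_REG (Pmode, R1);
  rtx index = gen_rtx_MULT (Pmode, gen_rtx_REG (Pmode, R2), GEN_INT (4));
  rtx addr = gen_rtx_PLUS (Pmode, base,
                           gen_rtx_PLUS (Pmode, index, GEN_INT (16)));
  rtx mem = gen_rtx_MEM (SImode, addr);

  struct address_info info;
  decompose_mem_address (&info, mem);
  /* On a typical target: *info.base == base, *info.index == index,
     *info.index_term == XEXP (index, 0) and *info.disp == (const_int 16).  */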
6032 | ||
6033 | /* Update INFO after a change to the address it describes. */ | |
6034 | ||
6035 | void | |
6036 | update_address (struct address_info *info) | |
6037 | { | |
6038 | decompose_address (info, info->outer, info->mode, info->as, | |
6039 | info->addr_outer_code); | |
6040 | } | |
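Continuing the sketch above, the intended pattern is to rewrite one component through the pointers stored in INFO and then refresh the decomposition; the in-place edit is illustrative only (a real pass would go through validate_change):

  if (info.disp && CONST_INT_P (*info.disp))
    {
      *info.disp = GEN_INT (INTVAL (*info.disp) + 8);
      update_address (&info);  /* Re-decompose so INFO stays consistent.  */
    }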
6041 | ||
6042 | /* Return the scale applied to *INFO->INDEX_TERM, 1 if the index term | |
6043 | is used unscaled, or 0 if the index is more complicated than that. */ | |
6044 | ||
6045 | HOST_WIDE_INT | |
6046 | get_index_scale (const struct address_info *info) | |
6047 | { | |
6048 | rtx index = *info->index; | |
6049 | if (GET_CODE (index) == MULT | |
6050 | && CONST_INT_P (XEXP (index, 1)) | |
6051 | && info->index_term == &XEXP (index, 0)) | |
6052 | return INTVAL (XEXP (index, 1)); | |
6053 | ||
6054 | if (GET_CODE (index) == ASHIFT | |
6055 | && CONST_INT_P (XEXP (index, 1)) | |
6056 | && info->index_term == &XEXP (index, 0)) | |
6057 | return (HOST_WIDE_INT) 1 << INTVAL (XEXP (index, 1)); | |
6058 | ||
6059 | if (info->index == info->index_term) | |
6060 | return 1; | |
6061 | ||
6062 | return 0; | |
6063 | } | |
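The recognised shapes, made concrete with the INFO from the sketch above (register names hypothetical):

  /* (mult (reg r2) (const_int 4))    -> 4
     (ashift (reg r2) (const_int 2))  -> (HOST_WIDE_INT) 1 << 2 == 4
     (reg r2) used directly           -> 1
     anything else                    -> 0  */
  HOST_WIDE_INT scale = get_index_scale (&info);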
6064 | ||
6065 | /* Return the "index code" of INFO, in the form required by | |
6066 | ok_for_base_p_1. */ | |
6067 | ||
6068 | enum rtx_code | |
6069 | get_index_code (const struct address_info *info) | |
6070 | { | |
6071 | if (info->index) | |
6072 | return GET_CODE (*info->index); | |
6073 | ||
6074 | if (info->disp) | |
6075 | return GET_CODE (*info->disp); | |
6076 | ||
6077 | return SCRATCH; | |
6078 | } | |
093a6c99 | 6079 | |
093a6c99 RS |
6080 | /* Return true if X contains a thread-local symbol. */ |
6081 | ||
6082 | bool | |
6180e3d8 | 6083 | tls_referenced_p (const_rtx x) |
093a6c99 RS |
6084 | { |
6085 | if (!targetm.have_tls) | |
6086 | return false; | |
6087 | ||
6180e3d8 | 6088 | subrtx_iterator::array_type array; |
ebd3cb12 | 6089 | FOR_EACH_SUBRTX (iter, array, x, ALL) |
6180e3d8 RS |
6090 | if (GET_CODE (*iter) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (*iter) != 0) |
6091 | return true; | |
6092 | return false; | |
093a6c99 | 6093 | } |
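A minimal caller sketch; the enclosing legitimate-address check is hypothetical, but rejecting TLS references this way mirrors how backends typically use the predicate:

  /* TLS symbols need a specialised access sequence, so refuse to
     treat anything that mentions one as an ordinary address.  */
  if (tls_referenced_p (x))
    return false;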