/* Analyze RTL for GNU compiler.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "predict.h"
#include "basic-block.h"
#include "df.h"
#include "tree.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
#include "addresses.h"
#include "rtl-iter.h"

/* Forward declarations */
static void set_of_1 (rtx, const_rtx, void *);
static bool covers_regno_p (const_rtx, unsigned int);
static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
static int computed_jump_p_1 (const_rtx);
static void parms_set (rtx, const_rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
                                                   const_rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
                                             const_rtx, enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode,
                                                const_rtx, enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode,
                                          const_rtx, enum machine_mode,
                                          unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

rtx_subrtx_bound_info rtx_all_subrtx_bounds[NUM_RTX_CODE];
rtx_subrtx_bound_info rtx_nonconst_subrtx_bounds[NUM_RTX_CODE];

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
\f
/* Store X into index I of ARRAY.  ARRAY is known to have at least I
   elements.  Return the new base of ARRAY.  */

template <typename T>
typename T::value_type *
generic_subrtx_iterator <T>::add_single_to_queue (array_type &array,
                                                  value_type *base,
                                                  size_t i, value_type x)
{
  if (base == array.stack)
    {
      if (i < LOCAL_ELEMS)
        {
          base[i] = x;
          return base;
        }
      gcc_checking_assert (i == LOCAL_ELEMS);
      vec_safe_grow (array.heap, i + 1);
      base = array.heap->address ();
      memcpy (base, array.stack, sizeof (array.stack));
      base[LOCAL_ELEMS] = x;
      return base;
    }
  unsigned int length = array.heap->length ();
  if (length > i)
    {
      gcc_checking_assert (base == array.heap->address ());
      base[i] = x;
      return base;
    }
  else
    {
      gcc_checking_assert (i == length);
      vec_safe_push (array.heap, x);
      return array.heap->address ();
    }
}

/* Add the subrtxes of X to worklist ARRAY, starting at END.  Return the
   number of elements added to the worklist.  */

template <typename T>
size_t
generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array,
                                                    value_type *base,
                                                    size_t end, rtx_type x)
{
  enum rtx_code code = GET_CODE (x);
  const char *format = GET_RTX_FORMAT (code);
  size_t orig_end = end;
  if (__builtin_expect (INSN_P (x), false))
    {
      /* Put the pattern at the top of the queue, since that's what
         we're likely to want most.  It also allows for the SEQUENCE
         code below.  */
      for (int i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; --i)
        if (format[i] == 'e')
          {
            value_type subx = T::get_value (x->u.fld[i].rt_rtx);
            if (__builtin_expect (end < LOCAL_ELEMS, true))
              base[end++] = subx;
            else
              base = add_single_to_queue (array, base, end++, subx);
          }
    }
  else
    for (int i = 0; format[i]; ++i)
      if (format[i] == 'e')
        {
          value_type subx = T::get_value (x->u.fld[i].rt_rtx);
          if (__builtin_expect (end < LOCAL_ELEMS, true))
            base[end++] = subx;
          else
            base = add_single_to_queue (array, base, end++, subx);
        }
      else if (format[i] == 'E')
        {
          unsigned int length = GET_NUM_ELEM (x->u.fld[i].rt_rtvec);
          rtx *vec = x->u.fld[i].rt_rtvec->elem;
          if (__builtin_expect (end + length <= LOCAL_ELEMS, true))
            for (unsigned int j = 0; j < length; j++)
              base[end++] = T::get_value (vec[j]);
          else
            for (unsigned int j = 0; j < length; j++)
              base = add_single_to_queue (array, base, end++,
                                          T::get_value (vec[j]));
          if (code == SEQUENCE && end == length)
            /* If the subrtxes of the sequence fill the entire array then
               we know that no other parts of a containing insn are queued.
               The caller is therefore iterating over the sequence as a
               PATTERN (...), so we also want the patterns of the
               subinstructions.  */
            for (unsigned int j = 0; j < length; j++)
              {
                typename T::rtx_type x = T::get_rtx (base[j]);
                if (INSN_P (x))
                  base[j] = T::get_value (PATTERN (x));
              }
        }
  return end - orig_end;
}

template <typename T>
void
generic_subrtx_iterator <T>::free_array (array_type &array)
{
  vec_free (array.heap);
}

template <typename T>
const size_t generic_subrtx_iterator <T>::LOCAL_ELEMS;

template class generic_subrtx_iterator <const_rtx_accessor>;
template class generic_subrtx_iterator <rtx_var_accessor>;
template class generic_subrtx_iterator <rtx_ptr_accessor>;

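/* A minimal usage sketch for the iterators instantiated above
   (illustrative only; count_regs is a hypothetical helper, not part
   of rtlanal.c).  find_all_hard_regs later in this file uses the
   same FOR_EACH_SUBRTX idiom.  */

static int ATTRIBUTE_UNUSED
count_regs (const_rtx x)
{
  int n = 0;
  subrtx_iterator::array_type array;
  /* Visit X and each of its subrtxes, skipping the insides of
     constants (the NONCONST bound).  */
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    if (REG_P (*iter))
      n++;
  return n;
}
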
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (const_rtx x)
{
  const RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
        return 0;
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

bool
rtx_varies_p (const_rtx x, bool for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}

/* Return nonzero if the use of X+OFFSET as an address in a MEM with SIZE
   bytes can cause a trap.  MODE is the mode of the MEM (not that of X) and
   UNALIGNED_MEMS controls whether nonzero is returned for unaligned memory
   references on strict alignment machines.  */

static int
rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
                       enum machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  /* The offset must be a multiple of the mode size if we are considering
     unaligned memory references on strict alignment machines.  */
  if (STRICT_ALIGNMENT && unaligned_mems && GET_MODE_SIZE (mode) != 0)
    {
      HOST_WIDE_INT actual_offset = offset;

#ifdef SPARC_STACK_BOUNDARY_HACK
      /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
         the real alignment of %sp.  However, when it does this, the
         alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
      if (SPARC_STACK_BOUNDARY_HACK
          && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
        actual_offset -= STACK_POINTER_OFFSET;
#endif

      if (actual_offset % GET_MODE_SIZE (mode) != 0)
        return 1;
    }

  switch (code)
    {
    case SYMBOL_REF:
      if (SYMBOL_REF_WEAK (x))
        return 1;
      if (!CONSTANT_POOL_ADDRESS_P (x))
        {
          tree decl;
          HOST_WIDE_INT decl_size;

          if (offset < 0)
            return 1;
          if (size == 0)
            size = GET_MODE_SIZE (mode);
          if (size == 0)
            return offset != 0;

          /* If the size of the access or of the symbol is unknown,
             assume the worst.  */
          decl = SYMBOL_REF_DECL (x);

          /* Else check that the access is in bounds.  TODO: restructure
             expr_size/tree_expr_size/int_expr_size and just use the latter.  */
          if (!decl)
            decl_size = -1;
          else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
            decl_size = (tree_fits_shwi_p (DECL_SIZE_UNIT (decl))
                         ? tree_to_shwi (DECL_SIZE_UNIT (decl))
                         : -1);
          else if (TREE_CODE (decl) == STRING_CST)
            decl_size = TREE_STRING_LENGTH (decl);
          else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
            decl_size = int_size_in_bytes (TREE_TYPE (decl));
          else
            decl_size = -1;

          return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
        }

      return 0;

    case LABEL_REF:
      return 0;

    case REG:
      /* Stack references are assumed not to trap, but we need to deal with
         nonsensical offsets.  */
      if (x == frame_pointer_rtx)
        {
          HOST_WIDE_INT adj_offset = offset - STARTING_FRAME_OFFSET;
          if (size == 0)
            size = GET_MODE_SIZE (mode);
          if (FRAME_GROWS_DOWNWARD)
            {
              if (adj_offset < frame_offset || adj_offset + size - 1 >= 0)
                return 1;
            }
          else
            {
              if (adj_offset < 0 || adj_offset + size - 1 >= frame_offset)
                return 1;
            }
          return 0;
        }
      /* ??? Need to add a similar guard for nonsensical offsets.  */
      if (x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
        return 0;

      /* - or it is an address that can't trap plus a constant integer.  */
      if (CONST_INT_P (XEXP (x, 1))
          && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
                                     size, mode, unaligned_mems))
        return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
                                    mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (const_rtx x)
{
  return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
}

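/* Example (illustrative): for the address
     (plus:Pmode (reg:Pmode sp) (const_int 8))
   the REG case treats the stack pointer as non-trapping and the PLUS
   case accepts a non-trapping base plus a constant integer, so the
   result is 0.  A bare pseudo register yields 1, since nothing is
   known about its value.  */
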
/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (const_rtx x)
{
  const enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      /* Handle PIC references.  */
      if (XEXP (x, 0) == pic_offset_table_rtx
          && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}

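/* Example (illustrative): (symbol_ref:Pmode "x") is known to be
   nonzero unless the symbol is weak, since a weak symbol may resolve
   to address 0.  */
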
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

bool
rtx_addr_varies_p (const_rtx x, bool for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}
\f
/* Return the CALL in X if there is one.  */

rtx
get_call_rtx_from (rtx x)
{
  if (INSN_P (x))
    x = PATTERN (x);
  if (GET_CODE (x) == PARALLEL)
    x = XVECEXP (x, 0, 0);
  if (GET_CODE (x) == SET)
    x = SET_SRC (x);
  if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
    return x;
  return NULL_RTX;
}
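
/* Example (illustrative): for a call insn whose pattern is
     (set (reg:SI 0) (call (mem:QI (symbol_ref "foo")) (const_int 0)))
   the SET is peeled off and the CALL rtx is returned; for an insn
   containing no CALL the result is NULL_RTX.  */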
\f
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (const_rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && CONST_INT_P (XEXP (x, 1)))
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (const_rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  return 0;
}
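
/* Examples (illustrative): for
     (const (plus (symbol_ref "x") (const_int 8)))
   get_integer_term returns 8 and get_related_value returns
   (symbol_ref "x"); for a bare REG both return 0.  */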
\f
/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
   to somewhere in the same object or object_block as SYMBOL.  */

bool
offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
{
  tree decl;

  if (GET_CODE (symbol) != SYMBOL_REF)
    return false;

  if (offset == 0)
    return true;

  if (offset > 0)
    {
      if (CONSTANT_POOL_ADDRESS_P (symbol)
          && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
        return true;

      decl = SYMBOL_REF_DECL (symbol);
      if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
        return true;
    }

  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
      && SYMBOL_REF_BLOCK (symbol)
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
    return true;

  return false;
}

/* Split X into a base and a constant offset, storing them in *BASE_OUT
   and *OFFSET_OUT respectively.  */

void
split_const (rtx x, rtx *base_out, rtx *offset_out)
{
  if (GET_CODE (x) == CONST)
    {
      x = XEXP (x, 0);
      if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
        {
          *base_out = XEXP (x, 0);
          *offset_out = XEXP (x, 1);
          return;
        }
    }
  *base_out = x;
  *offset_out = const0_rtx;
}
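
/* Example (illustrative): split_const turns
     (const (plus (symbol_ref "x") (const_int 8)))
   into *BASE_OUT = (symbol_ref "x") and *OFFSET_OUT = (const_int 8);
   any other X comes back unchanged with *OFFSET_OUT = (const_int 0).  */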
\f
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (const_rtx x, const_rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case EXPR_LIST:
      count = count_occurrences (XEXP (x, 0), find, count_dest);
      if (XEXP (x, 1))
        count += count_occurrences (XEXP (x, 1), find, count_dest);
      return count;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}
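
/* Example (illustrative, assuming the usual sharing of REG rtxes):
   counting (reg:SI 100) in
     (set (reg:SI 100) (plus:SI (reg:SI 100) (reg:SI 100)))
   gives 2 with COUNT_DEST == 0 (only the uses in the source) and 3
   with COUNT_DEST != 0.  */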

\f
/* Return TRUE if OP is a register or subreg of a register that
   holds an unsigned quantity.  Otherwise, return FALSE.  */

bool
unsigned_reg_p (rtx op)
{
  if (REG_P (op)
      && REG_EXPR (op)
      && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op))))
    return true;

  if (GET_CODE (op) == SUBREG
      && SUBREG_PROMOTED_SIGN (op))
    return true;

  return false;
}

\f
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (const_rtx reg, const_rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == LABEL_REF_LABEL (in);

  code = GET_CODE (in);

  switch (code)
    {
    /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

    /* These codes have no constituent expressions
       and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    CASE_CONST_ANY:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }
  return 0;
}
\f
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (const rtx_insn *beg, const rtx_insn *end)
{
  rtx_insn *p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (const_rtx reg, const rtx_insn *from_insn,
                    const rtx_insn *to_insn)
{
  rtx_insn *insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}
\f
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (const_rtx x, const_rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}
\f
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (const_rtx reg, const rtx_insn *from_insn,
                   const rtx_insn *to_insn)
{
  const rtx_insn *insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (const_rtx reg, const_rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && overlaps_hard_reg_set_p (regs_invalidated_by_call,
                                               GET_MODE (reg), REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (const_rtx x, const rtx_insn *start, const rtx_insn *end)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx_insn *insn;

  if (start == end)
    return 0;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (const_rtx x, const_rtx insn)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}
\f
/* Helper function for set_of.  */
struct set_of_data
  {
    const_rtx found;
    const_rtx pat;
  };

static void
set_of_1 (rtx x, const_rtx pat, void *data1)
{
  struct set_of_data *const data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
const_rtx
set_of (const_rtx pat, const_rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}

/* Add all hard registers in X to *PSET.  */
void
find_all_hard_regs (const_rtx x, HARD_REG_SET *pset)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
        add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
    }
}

/* This function, called through note_stores, collects sets and
   clobbers of hard registers in a HARD_REG_SET, which is pointed to
   by DATA.  */
void
record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  HARD_REG_SET *pset = (HARD_REG_SET *) data;
  if (REG_P (x) && HARD_REGISTER_P (x))
    add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
}

/* Examine INSN, and compute the set of hard registers written by it.
   Store it in *PSET.  Should only be called after reload.  */
void
find_all_hard_reg_sets (const_rtx insn, HARD_REG_SET *pset, bool implicit)
{
  rtx link;

  CLEAR_HARD_REG_SET (*pset);
  note_stores (PATTERN (insn), record_hard_reg_sets, pset);
  if (CALL_P (insn))
    {
      if (implicit)
        IOR_HARD_REG_SET (*pset, call_used_reg_set);

      for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
        record_hard_reg_sets (XEXP (link, 0), NULL, pset);
    }
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      record_hard_reg_sets (XEXP (link, 0), NULL, pset);
}

/* Like record_hard_reg_sets, but called through note_uses.  */
void
record_hard_reg_uses (rtx *px, void *data)
{
  find_all_hard_regs (*px, (HARD_REG_SET *) data);
}
\f
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (const rtx_insn *insn, const_rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common case
                 only a single set is present in the pattern so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach a set the first time, we just expect it to
                 be the single set we are looking for, and only when more
                 sets are found in the insn do we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
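
/* Usage sketch (illustrative): callers normally go through single_set
   from rtl.h, which handles a plain SET pattern inline and falls back
   to single_set_2 otherwise.  Assuming a pattern such as
     (parallel [(set (reg:SI 100) (reg:SI 101))
                (clobber (reg:CC 17))])
   the inner SET is returned, since the CLOBBER is ignored.  */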

/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (const_rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}
\f
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (const_rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  /* It is a NOOP if destination overlaps with selected src vector
     elements.  */
  if (GET_CODE (src) == VEC_SELECT
      && REG_P (XEXP (src, 0)) && REG_P (dst)
      && HARD_REGISTER_P (XEXP (src, 0))
      && HARD_REGISTER_P (dst))
    {
      int i;
      rtx par = XEXP (src, 1);
      rtx src0 = XEXP (src, 0);
      int c0 = INTVAL (XVECEXP (par, 0, 0));
      HOST_WIDE_INT offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0;

      for (i = 1; i < XVECLEN (par, 0); i++)
        if (INTVAL (XVECEXP (par, 0, i)) != c0 + i)
          return 0;
      return
        simplify_subreg_regno (REGNO (src0), GET_MODE (src0),
                               offset, GET_MODE (dst)) == (int) REGNO (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
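
/* Examples (illustrative): (set (reg:SI 100) (reg:SI 100)) and
   (set (pc) (pc)) are no-ops; a move between distinct registers, or
   any SET whose source or destination has side effects, is not.  */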
\f
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (const_rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* Check the code to be executed for COND_EXEC.  */
  if (GET_CODE (pat) == COND_EXEC)
    pat = COND_EXEC_CODE (pat);

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}
\f

/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return endregno > x_regno && regno < END_REGNO (x);

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? subreg_nregs (x) : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }
  return 0;
}
1511 | ||
1512 | /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG, | |
1513 | we check if any register number in X conflicts with the relevant register | |
1514 | numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN | |
1515 | contains a MEM (we don't bother checking for memory addresses that can't | |
1516 | conflict because we expect this to be a rare case. */ | |
1517 | ||
1518 | int | |
f7d504c2 | 1519 | reg_overlap_mentioned_p (const_rtx x, const_rtx in) |
2c88418c | 1520 | { |
770ae6cc | 1521 | unsigned int regno, endregno; |
2c88418c | 1522 | |
6f626d1b PB |
1523 | /* If either argument is a constant, then modifying X can not |
1524 | affect IN. Here we look at IN, we can profitably combine | |
1525 | CONSTANT_P (x) with the switch statement below. */ | |
1526 | if (CONSTANT_P (in)) | |
b98b49ac | 1527 | return 0; |
0c99ec5c | 1528 | |
6f626d1b | 1529 | recurse: |
0c99ec5c | 1530 | switch (GET_CODE (x)) |
2c88418c | 1531 | { |
6f626d1b PB |
1532 | case STRICT_LOW_PART: |
1533 | case ZERO_EXTRACT: | |
1534 | case SIGN_EXTRACT: | |
1535 | /* Overly conservative. */ | |
1536 | x = XEXP (x, 0); | |
1537 | goto recurse; | |
1538 | ||
0c99ec5c | 1539 | case SUBREG: |
2c88418c RS |
1540 | regno = REGNO (SUBREG_REG (x)); |
1541 | if (regno < FIRST_PSEUDO_REGISTER) | |
ddef6bc7 | 1542 | regno = subreg_regno (x); |
f1f4e530 JM |
1543 | endregno = regno + (regno < FIRST_PSEUDO_REGISTER |
1544 | ? subreg_nregs (x) : 1); | |
0c99ec5c | 1545 | goto do_reg; |
2c88418c | 1546 | |
0c99ec5c RH |
1547 | case REG: |
1548 | regno = REGNO (x); | |
09e18274 | 1549 | endregno = END_REGNO (x); |
f1f4e530 | 1550 | do_reg: |
8e2e89f7 | 1551 | return refers_to_regno_p (regno, endregno, in, (rtx*) 0); |
2c88418c | 1552 | |
0c99ec5c RH |
1553 | case MEM: |
1554 | { | |
1555 | const char *fmt; | |
1556 | int i; | |
2c88418c | 1557 | |
3c0cb5de | 1558 | if (MEM_P (in)) |
2c88418c RS |
1559 | return 1; |
1560 | ||
0c99ec5c RH |
1561 | fmt = GET_RTX_FORMAT (GET_CODE (in)); |
1562 | for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--) | |
3b009185 RH |
1563 | if (fmt[i] == 'e') |
1564 | { | |
1565 | if (reg_overlap_mentioned_p (x, XEXP (in, i))) | |
1566 | return 1; | |
1567 | } | |
1568 | else if (fmt[i] == 'E') | |
1569 | { | |
1570 | int j; | |
1571 | for (j = XVECLEN (in, i) - 1; j >= 0; --j) | |
1572 | if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j))) | |
1573 | return 1; | |
1574 | } | |
c0222c21 | 1575 | |
0c99ec5c RH |
1576 | return 0; |
1577 | } | |
1578 | ||
1579 | case SCRATCH: | |
1580 | case PC: | |
1581 | case CC0: | |
1582 | return reg_mentioned_p (x, in); | |
1583 | ||
1584 | case PARALLEL: | |
37ceff9d | 1585 | { |
90d036a0 | 1586 | int i; |
37ceff9d RH |
1587 | |
1588 | /* If any register in X overlaps IN, we return true. */ | |
7193d1dc RK |
1589 | for (i = XVECLEN (x, 0) - 1; i >= 0; i--) |
1590 | if (XEXP (XVECEXP (x, 0, i), 0) != 0 | |
1591 | && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in)) | |
6f626d1b | 1592 | return 1; |
7193d1dc | 1593 | return 0; |
37ceff9d | 1594 | } |
2c88418c | 1595 | |
0c99ec5c | 1596 | default: |
41374e13 | 1597 | gcc_assert (CONSTANT_P (x)); |
6f626d1b PB |
1598 | return 0; |
1599 | } | |
2c88418c RS |
1600 | } |
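Example (an illustrative sketch, not part of rtlanal.c): a scheduling or
combining pass might use reg_overlap_mentioned_p to test whether one SET's
destination feeds another SET's source; the helper name is hypothetical.

    /* Return true if EARLIER_SET writes anything that CANDIDATE_SET's
       source reads, i.e. moving CANDIDATE_SET above EARLIER_SET could
       change the value it computes.  */
    static bool
    source_depends_on_dest_p (rtx candidate_set, rtx earlier_set)
    {
      return reg_overlap_mentioned_p (SET_DEST (earlier_set),
                                      SET_SRC (candidate_set)) != 0;
    }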
1601 | \f | |
2c88418c | 1602 | /* Call FUN on each register or MEM that is stored into or clobbered by X. |
c3a1ef9d MM |
1603 | (X would be the pattern of an insn). DATA is an arbitrary pointer, |
1604 | ignored by note_stores, but passed to FUN. | |
1605 | ||
1606 | FUN receives three arguments: | |
1607 | 1. the REG, MEM, CC0 or PC being stored in or clobbered, | |
1608 | 2. the SET or CLOBBER rtx that does the store, | |
1609 | 3. the pointer DATA provided to note_stores. | |
2c88418c RS |
1610 | |
1611 | If the item being stored in or clobbered is a SUBREG of a hard register, | |
1612 | the SUBREG will be passed. */ | |
a6a2274a | 1613 | |
2c88418c | 1614 | void |
7bc980e1 | 1615 | note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data) |
2c88418c | 1616 | { |
aa317c97 | 1617 | int i; |
90d036a0 | 1618 | |
aa317c97 KG |
1619 | if (GET_CODE (x) == COND_EXEC) |
1620 | x = COND_EXEC_CODE (x); | |
90d036a0 | 1621 | |
aa317c97 KG |
1622 | if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER) |
1623 | { | |
1624 | rtx dest = SET_DEST (x); | |
1625 | ||
1626 | while ((GET_CODE (dest) == SUBREG | |
1627 | && (!REG_P (SUBREG_REG (dest)) | |
1628 | || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER)) | |
1629 | || GET_CODE (dest) == ZERO_EXTRACT | |
1630 | || GET_CODE (dest) == STRICT_LOW_PART) | |
1631 | dest = XEXP (dest, 0); | |
1632 | ||
1633 | /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions, | |
1634 | each of whose first operand is a register. */ | |
1635 | if (GET_CODE (dest) == PARALLEL) | |
1636 | { | |
1637 | for (i = XVECLEN (dest, 0) - 1; i >= 0; i--) | |
1638 | if (XEXP (XVECEXP (dest, 0, i), 0) != 0) | |
1639 | (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data); | |
1640 | } | |
1641 | else | |
1642 | (*fun) (dest, x, data); | |
1643 | } | |
770ae6cc | 1644 | |
aa317c97 KG |
1645 | else if (GET_CODE (x) == PARALLEL) |
1646 | for (i = XVECLEN (x, 0) - 1; i >= 0; i--) | |
1647 | note_stores (XVECEXP (x, 0, i), fun, data); | |
1648 | } | |
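Example (an illustrative sketch, not part of rtlanal.c): a minimal
note_stores callback; it must match the (rtx, const_rtx, void *) signature
documented above.  The names are hypothetical.

    /* Bump *DATA for every hard register stored into or clobbered.  */
    static void
    count_hard_reg_stores (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
                           void *data)
    {
      if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
        ++*(int *) data;
    }

    /* Hypothetical caller:  */
    int n_stores = 0;
    note_stores (PATTERN (insn), count_hard_reg_stores, &n_stores);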
2c88418c | 1649 | \f |
e2373f95 RK |
1650 | /* Like note_stores, but call FUN for each expression that is being | |
1651 | referenced in PBODY, a pointer to the PATTERN of an insn. We only call | |
1652 | FUN for each expression, not any interior subexpressions. FUN receives a | |
1653 | pointer to the expression and the DATA passed to this function. | |
1654 | ||
1655 | Note that this is not quite the same test as that done in reg_referenced_p | |
1656 | since that considers something as being referenced if it is being | |
1657 | partially set, while we do not. */ | |
1658 | ||
1659 | void | |
0c20a65f | 1660 | note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data) |
e2373f95 RK |
1661 | { |
1662 | rtx body = *pbody; | |
1663 | int i; | |
1664 | ||
1665 | switch (GET_CODE (body)) | |
1666 | { | |
1667 | case COND_EXEC: | |
1668 | (*fun) (&COND_EXEC_TEST (body), data); | |
1669 | note_uses (&COND_EXEC_CODE (body), fun, data); | |
1670 | return; | |
1671 | ||
1672 | case PARALLEL: | |
1673 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
1674 | note_uses (&XVECEXP (body, 0, i), fun, data); | |
1675 | return; | |
1676 | ||
bbbc206e BS |
1677 | case SEQUENCE: |
1678 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
1679 | note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data); | |
1680 | return; | |
1681 | ||
e2373f95 RK |
1682 | case USE: |
1683 | (*fun) (&XEXP (body, 0), data); | |
1684 | return; | |
1685 | ||
1686 | case ASM_OPERANDS: | |
1687 | for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--) | |
1688 | (*fun) (&ASM_OPERANDS_INPUT (body, i), data); | |
1689 | return; | |
1690 | ||
1691 | case TRAP_IF: | |
1692 | (*fun) (&TRAP_CONDITION (body), data); | |
1693 | return; | |
1694 | ||
21b8482a JJ |
1695 | case PREFETCH: |
1696 | (*fun) (&XEXP (body, 0), data); | |
1697 | return; | |
1698 | ||
e2373f95 RK |
1699 | case UNSPEC: |
1700 | case UNSPEC_VOLATILE: | |
1701 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
1702 | (*fun) (&XVECEXP (body, 0, i), data); | |
1703 | return; | |
1704 | ||
1705 | case CLOBBER: | |
3c0cb5de | 1706 | if (MEM_P (XEXP (body, 0))) |
e2373f95 RK |
1707 | (*fun) (&XEXP (XEXP (body, 0), 0), data); |
1708 | return; | |
1709 | ||
1710 | case SET: | |
1711 | { | |
1712 | rtx dest = SET_DEST (body); | |
1713 | ||
1714 | /* For sets we visit everything in the source, plus registers in any | |
1715 | memory expression in the store, plus the operands of a ZERO_EXTRACT. */ | |
1716 | (*fun) (&SET_SRC (body), data); | |
1717 | ||
1718 | if (GET_CODE (dest) == ZERO_EXTRACT) | |
1719 | { | |
1720 | (*fun) (&XEXP (dest, 1), data); | |
1721 | (*fun) (&XEXP (dest, 2), data); | |
1722 | } | |
1723 | ||
1724 | while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART) | |
1725 | dest = XEXP (dest, 0); | |
1726 | ||
3c0cb5de | 1727 | if (MEM_P (dest)) |
e2373f95 RK |
1728 | (*fun) (&XEXP (dest, 0), data); |
1729 | } | |
1730 | return; | |
1731 | ||
1732 | default: | |
1733 | /* All the other possibilities never store. */ | |
1734 | (*fun) (pbody, data); | |
1735 | return; | |
1736 | } | |
1737 | } | |
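Example (an illustrative sketch, not part of rtlanal.c): because note_uses
hands FUN a pointer to each complete use expression, a callback may rewrite
the use in place; replace_rtx (defined later in this file) handles the
subexpression walk.  The names are hypothetical.

    struct replace_info { rtx from, to; };

    static void
    replace_in_use (rtx *loc, void *data)
    {
      struct replace_info *ri = (struct replace_info *) data;
      *loc = replace_rtx (*loc, ri->from, ri->to);
    }

    /* Hypothetical caller:  */
    struct replace_info ri = { old_reg, new_reg };
    note_uses (&PATTERN (insn), replace_in_use, &ri);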
1738 | \f | |
2c88418c RS |
1739 | /* Return nonzero if X's old contents don't survive after INSN. |
1740 | This will be true if X is (cc0) or if X is a register and | |
1741 | X dies in INSN or because INSN entirely sets X. | |
1742 | ||
46d096a3 SB |
1743 | "Entirely set" means set directly and not through a SUBREG, or |
1744 | ZERO_EXTRACT, so no trace of the old contents remains. | |
2c88418c RS |
1745 | Likewise, REG_INC does not count. |
1746 | ||
1747 | REG may be a hard or pseudo reg. Renumbering is not taken into account, | |
1748 | but for this use that makes no difference, since regs don't overlap | |
1749 | during their lifetimes. Therefore, this function may be used | |
6fb5fa3c | 1750 | at any time after deaths have been computed. |
2c88418c RS |
1751 | |
1752 | If REG is a hard reg that occupies multiple machine registers, this | |
1753 | function will only return 1 if each of those registers will be replaced | |
1754 | by INSN. */ | |
1755 | ||
1756 | int | |
f7d504c2 | 1757 | dead_or_set_p (const_rtx insn, const_rtx x) |
2c88418c | 1758 | { |
09e18274 | 1759 | unsigned int regno, end_regno; |
770ae6cc | 1760 | unsigned int i; |
2c88418c RS |
1761 | |
1762 | /* Can't use cc0_rtx below since this file is used by genattrtab.c. */ | |
1763 | if (GET_CODE (x) == CC0) | |
1764 | return 1; | |
1765 | ||
41374e13 | 1766 | gcc_assert (REG_P (x)); |
2c88418c RS |
1767 | |
1768 | regno = REGNO (x); | |
09e18274 RS |
1769 | end_regno = END_REGNO (x); |
1770 | for (i = regno; i < end_regno; i++) | |
2c88418c RS |
1771 | if (! dead_or_set_regno_p (insn, i)) |
1772 | return 0; | |
1773 | ||
1774 | return 1; | |
1775 | } | |
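Example (an illustrative sketch, not part of rtlanal.c): a dead-code test
built on dead_or_set_p; per the assertion above, REG must be a REG (or CC0).

    /* True if REG's old contents cannot be observed after INSN, either
       because INSN entirely sets REG or because REG dies there.  */
    static bool
    reg_old_value_gone_p (const_rtx insn, const_rtx reg)
    {
      return dead_or_set_p (insn, reg) != 0;
    }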
1776 | ||
194acded HPN |
1777 | /* Return TRUE iff DEST is a register, or a subreg of a register that | |
1778 | doesn't change the number of words of the inner register, and any | |
1779 | part of the register is TEST_REGNO. */ | |
1780 | ||
1781 | static bool | |
f7d504c2 | 1782 | covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno) |
194acded HPN |
1783 | { |
1784 | unsigned int regno, endregno; | |
1785 | ||
1786 | if (GET_CODE (dest) == SUBREG | |
1787 | && (((GET_MODE_SIZE (GET_MODE (dest)) | |
1788 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD) | |
1789 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) | |
1790 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD))) | |
1791 | dest = SUBREG_REG (dest); | |
1792 | ||
1793 | if (!REG_P (dest)) | |
1794 | return false; | |
1795 | ||
1796 | regno = REGNO (dest); | |
09e18274 | 1797 | endregno = END_REGNO (dest); |
194acded HPN |
1798 | return (test_regno >= regno && test_regno < endregno); |
1799 | } | |
1800 | ||
1801 | /* Like covers_regno_no_parallel_p, but also handles PARALLELs where | |
1802 | any member matches the covers_regno_no_parallel_p criteria. */ | |
1803 | ||
1804 | static bool | |
f7d504c2 | 1805 | covers_regno_p (const_rtx dest, unsigned int test_regno) |
194acded HPN |
1806 | { |
1807 | if (GET_CODE (dest) == PARALLEL) | |
1808 | { | |
1809 | /* Some targets place small structures in registers for return | |
1810 | values of functions, and those registers are wrapped in | |
1811 | PARALLELs that we may see as the destination of a SET. */ | |
1812 | int i; | |
1813 | ||
1814 | for (i = XVECLEN (dest, 0) - 1; i >= 0; i--) | |
1815 | { | |
1816 | rtx inner = XEXP (XVECEXP (dest, 0, i), 0); | |
1817 | if (inner != NULL_RTX | |
1818 | && covers_regno_no_parallel_p (inner, test_regno)) | |
1819 | return true; | |
1820 | } | |
1821 | ||
1822 | return false; | |
1823 | } | |
1824 | else | |
1825 | return covers_regno_no_parallel_p (dest, test_regno); | |
1826 | } | |
1827 | ||
6fb5fa3c | 1828 | /* Utility function for dead_or_set_p to check an individual register. */ |
2c88418c RS |
1829 | |
1830 | int | |
f7d504c2 | 1831 | dead_or_set_regno_p (const_rtx insn, unsigned int test_regno) |
2c88418c | 1832 | { |
f7d504c2 | 1833 | const_rtx pattern; |
2c88418c | 1834 | |
0a2287bf RH |
1835 | /* See if there is a death note for something that includes TEST_REGNO. */ |
1836 | if (find_regno_note (insn, REG_DEAD, test_regno)) | |
1837 | return 1; | |
2c88418c | 1838 | |
4b4bf941 | 1839 | if (CALL_P (insn) |
8f3e7a26 RK |
1840 | && find_regno_fusage (insn, CLOBBER, test_regno)) |
1841 | return 1; | |
1842 | ||
0c99ec5c RH |
1843 | pattern = PATTERN (insn); |
1844 | ||
10439b59 | 1845 | /* If a COND_EXEC is not executed, the value survives. */ |
0c99ec5c | 1846 | if (GET_CODE (pattern) == COND_EXEC) |
10439b59 | 1847 | return 0; |
0c99ec5c RH |
1848 | |
1849 | if (GET_CODE (pattern) == SET) | |
194acded | 1850 | return covers_regno_p (SET_DEST (pattern), test_regno); |
0c99ec5c | 1851 | else if (GET_CODE (pattern) == PARALLEL) |
2c88418c | 1852 | { |
b3694847 | 1853 | int i; |
2c88418c | 1854 | |
0c99ec5c | 1855 | for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--) |
2c88418c | 1856 | { |
0c99ec5c RH |
1857 | rtx body = XVECEXP (pattern, 0, i); |
1858 | ||
1859 | if (GET_CODE (body) == COND_EXEC) | |
1860 | body = COND_EXEC_CODE (body); | |
2c88418c | 1861 | |
194acded HPN |
1862 | if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER) |
1863 | && covers_regno_p (SET_DEST (body), test_regno)) | |
1864 | return 1; | |
2c88418c RS |
1865 | } |
1866 | } | |
1867 | ||
1868 | return 0; | |
1869 | } | |
1870 | ||
1871 | /* Return the reg-note of kind KIND in insn INSN, if there is one. | |
1872 | If DATUM is nonzero, look for one whose datum is DATUM. */ | |
1873 | ||
1874 | rtx | |
f7d504c2 | 1875 | find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum) |
2c88418c | 1876 | { |
b3694847 | 1877 | rtx link; |
2c88418c | 1878 | |
7a40b8b1 | 1879 | gcc_checking_assert (insn); |
af082de3 | 1880 | |
ae78d276 | 1881 | /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */ |
2c3c49de | 1882 | if (! INSN_P (insn)) |
ae78d276 | 1883 | return 0; |
cd798543 AP |
1884 | if (datum == 0) |
1885 | { | |
1886 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
1887 | if (REG_NOTE_KIND (link) == kind) | |
1888 | return link; | |
1889 | return 0; | |
1890 | } | |
ae78d276 | 1891 | |
2c88418c | 1892 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
cd798543 | 1893 | if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0)) |
2c88418c RS |
1894 | return link; |
1895 | return 0; | |
1896 | } | |
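Example (an illustrative sketch, not part of rtlanal.c): querying notes with
find_reg_note; passing NULL_RTX as DATUM matches a note of the given kind
regardless of its datum.  The helper name is hypothetical.

    /* Return the expression recorded in INSN's REG_EQUAL note, or
       NULL_RTX if there is none.  */
    static rtx
    insn_reg_equal_expr (const_rtx insn)
    {
      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
      return note ? XEXP (note, 0) : NULL_RTX;
    }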
1897 | ||
1898 | /* Return the reg-note of kind KIND in insn INSN which applies to register | |
99309f3b RK |
1899 | number REGNO, if any. Return 0 if there is no such reg-note. Note that |
1900 | the REGNO of this NOTE need not be REGNO if REGNO is a hard register; | |
1901 | it might be the case that the note overlaps REGNO. */ | |
2c88418c RS |
1902 | |
1903 | rtx | |
f7d504c2 | 1904 | find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno) |
2c88418c | 1905 | { |
b3694847 | 1906 | rtx link; |
2c88418c | 1907 | |
ae78d276 | 1908 | /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */ |
2c3c49de | 1909 | if (! INSN_P (insn)) |
ae78d276 MM |
1910 | return 0; |
1911 | ||
2c88418c RS |
1912 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
1913 | if (REG_NOTE_KIND (link) == kind | |
1914 | /* Verify that it is a register, so that scratch and MEM won't cause a | |
1915 | problem here. */ | |
f8cfc6aa | 1916 | && REG_P (XEXP (link, 0)) |
99309f3b | 1917 | && REGNO (XEXP (link, 0)) <= regno |
09e18274 | 1918 | && END_REGNO (XEXP (link, 0)) > regno) |
2c88418c RS |
1919 | return link; |
1920 | return 0; | |
1921 | } | |
8f3e7a26 | 1922 | |
d9c695ff RK |
1923 | /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and |
1924 | has such a note. */ | |
1925 | ||
1926 | rtx | |
f7d504c2 | 1927 | find_reg_equal_equiv_note (const_rtx insn) |
d9c695ff | 1928 | { |
cd648cec | 1929 | rtx link; |
d9c695ff | 1930 | |
cd648cec | 1931 | if (!INSN_P (insn)) |
d9c695ff | 1932 | return 0; |
ea8f106d | 1933 | |
cd648cec JH |
1934 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
1935 | if (REG_NOTE_KIND (link) == REG_EQUAL | |
1936 | || REG_NOTE_KIND (link) == REG_EQUIV) | |
1937 | { | |
ea8f106d SB |
1938 | /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on |
1939 | insns that have multiple sets. Checking single_set to | |
1940 | make sure of this is not the proper check, as explained | |
1941 | in the comment in set_unique_reg_note. | |
1942 | ||
1943 | This should be changed into an assert. */ | |
1944 | if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn)) | |
cd648cec JH |
1945 | return 0; |
1946 | return link; | |
1947 | } | |
1948 | return NULL; | |
d9c695ff RK |
1949 | } |
1950 | ||
2a450639 RS |
1951 | /* Check whether INSN is a single_set whose source is known to be |
1952 | equivalent to a constant. Return that constant if so, otherwise | |
1953 | return null. */ | |
1954 | ||
1955 | rtx | |
68a1a6c0 | 1956 | find_constant_src (const rtx_insn *insn) |
2a450639 RS |
1957 | { |
1958 | rtx note, set, x; | |
1959 | ||
1960 | set = single_set (insn); | |
1961 | if (set) | |
1962 | { | |
1963 | x = avoid_constant_pool_reference (SET_SRC (set)); | |
1964 | if (CONSTANT_P (x)) | |
1965 | return x; | |
1966 | } | |
1967 | ||
1968 | note = find_reg_equal_equiv_note (insn); | |
1969 | if (note && CONSTANT_P (XEXP (note, 0))) | |
1970 | return XEXP (note, 0); | |
1971 | ||
1972 | return NULL_RTX; | |
1973 | } | |
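Example (an illustrative sketch, not part of rtlanal.c): a constant-propagation
step built on find_constant_src; record_constant is a hypothetical consumer.

    static void
    maybe_record_constant (const rtx_insn *insn)
    {
      rtx set = single_set (insn);
      rtx cst = find_constant_src (insn);
      /* Guard on SET: the REG_EQUAL/REG_EQUIV path of
         find_constant_src does not itself require a single set.  */
      if (set != NULL_RTX && cst != NULL_RTX && REG_P (SET_DEST (set)))
        record_constant (REGNO (SET_DEST (set)), cst);
    }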
1974 | ||
8f3e7a26 RK |
1975 | /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found |
1976 | in the CALL_INSN_FUNCTION_USAGE information of INSN. */ | |
1977 | ||
1978 | int | |
f7d504c2 | 1979 | find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum) |
8f3e7a26 RK |
1980 | { |
1981 | /* If it's not a CALL_INSN, it can't possibly have a | |
1982 | CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */ | |
4b4bf941 | 1983 | if (!CALL_P (insn)) |
8f3e7a26 RK |
1984 | return 0; |
1985 | ||
41374e13 | 1986 | gcc_assert (datum); |
8f3e7a26 | 1987 | |
f8cfc6aa | 1988 | if (!REG_P (datum)) |
8f3e7a26 | 1989 | { |
b3694847 | 1990 | rtx link; |
8f3e7a26 RK |
1991 | |
1992 | for (link = CALL_INSN_FUNCTION_USAGE (insn); | |
a6a2274a | 1993 | link; |
8f3e7a26 | 1994 | link = XEXP (link, 1)) |
a6a2274a | 1995 | if (GET_CODE (XEXP (link, 0)) == code |
cc863bea | 1996 | && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0))) |
a6a2274a | 1997 | return 1; |
8f3e7a26 RK |
1998 | } |
1999 | else | |
2000 | { | |
770ae6cc | 2001 | unsigned int regno = REGNO (datum); |
8f3e7a26 RK |
2002 | |
2003 | /* CALL_INSN_FUNCTION_USAGE information cannot contain references | |
2004 | to pseudo registers, so don't bother checking. */ | |
2005 | ||
2006 | if (regno < FIRST_PSEUDO_REGISTER) | |
a6a2274a | 2007 | { |
09e18274 | 2008 | unsigned int end_regno = END_HARD_REGNO (datum); |
770ae6cc | 2009 | unsigned int i; |
8f3e7a26 RK |
2010 | |
2011 | for (i = regno; i < end_regno; i++) | |
2012 | if (find_regno_fusage (insn, code, i)) | |
2013 | return 1; | |
a6a2274a | 2014 | } |
8f3e7a26 RK |
2015 | } |
2016 | ||
2017 | return 0; | |
2018 | } | |
2019 | ||
2020 | /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found | |
2021 | in the CALL_INSN_FUNCTION_USAGE information of INSN. */ | |
2022 | ||
2023 | int | |
f7d504c2 | 2024 | find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno) |
8f3e7a26 | 2025 | { |
b3694847 | 2026 | rtx link; |
8f3e7a26 RK |
2027 | |
2028 | /* CALL_INSN_FUNCTION_USAGE information cannot contain references | |
2029 | to pseudo registers, so don't bother checking. */ | |
2030 | ||
2031 | if (regno >= FIRST_PSEUDO_REGISTER | |
4b4bf941 | 2032 | || !CALL_P (insn) ) |
8f3e7a26 RK |
2033 | return 0; |
2034 | ||
2035 | for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1)) | |
83ab3839 | 2036 | { |
770ae6cc | 2037 | rtx op, reg; |
83ab3839 RH |
2038 | |
2039 | if (GET_CODE (op = XEXP (link, 0)) == code | |
f8cfc6aa | 2040 | && REG_P (reg = XEXP (op, 0)) |
09e18274 RS |
2041 | && REGNO (reg) <= regno |
2042 | && END_HARD_REGNO (reg) > regno) | |
83ab3839 RH |
2043 | return 1; |
2044 | } | |
8f3e7a26 RK |
2045 | |
2046 | return 0; | |
2047 | } | |
a6a063b8 | 2048 | |
2c88418c | 2049 | \f |
e5af9ddd RS |
2050 | /* Return true if KIND is an integer REG_NOTE. */ |
2051 | ||
2052 | static bool | |
2053 | int_reg_note_p (enum reg_note kind) | |
2054 | { | |
2055 | return kind == REG_BR_PROB; | |
2056 | } | |
2057 | ||
efc0b2bd ILT |
2058 | /* Allocate a register note with kind KIND and datum DATUM. LIST is |
2059 | stored as the pointer to the next register note. */ | |
65c5f2a6 | 2060 | |
efc0b2bd ILT |
2061 | rtx |
2062 | alloc_reg_note (enum reg_note kind, rtx datum, rtx list) | |
65c5f2a6 ILT |
2063 | { |
2064 | rtx note; | |
2065 | ||
e5af9ddd | 2066 | gcc_checking_assert (!int_reg_note_p (kind)); |
65c5f2a6 ILT |
2067 | switch (kind) |
2068 | { | |
2069 | case REG_CC_SETTER: | |
2070 | case REG_CC_USER: | |
2071 | case REG_LABEL_TARGET: | |
2072 | case REG_LABEL_OPERAND: | |
0a35513e | 2073 | case REG_TM: |
65c5f2a6 ILT |
2074 | /* These types of register notes use an INSN_LIST rather than an |
2075 | EXPR_LIST, so that copying is done right and dumps look | |
2076 | better. */ | |
efc0b2bd | 2077 | note = alloc_INSN_LIST (datum, list); |
65c5f2a6 ILT |
2078 | PUT_REG_NOTE_KIND (note, kind); |
2079 | break; | |
2080 | ||
2081 | default: | |
efc0b2bd | 2082 | note = alloc_EXPR_LIST (kind, datum, list); |
65c5f2a6 ILT |
2083 | break; |
2084 | } | |
2085 | ||
efc0b2bd ILT |
2086 | return note; |
2087 | } | |
2088 | ||
2089 | /* Add register note with kind KIND and datum DATUM to INSN. */ | |
2090 | ||
2091 | void | |
2092 | add_reg_note (rtx insn, enum reg_note kind, rtx datum) | |
2093 | { | |
2094 | REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn)); | |
65c5f2a6 ILT |
2095 | } |
2096 | ||
e5af9ddd RS |
2097 | /* Add an integer register note with kind KIND and datum DATUM to INSN. */ |
2098 | ||
2099 | void | |
2100 | add_int_reg_note (rtx insn, enum reg_note kind, int datum) | |
2101 | { | |
2102 | gcc_checking_assert (int_reg_note_p (kind)); | |
2103 | REG_NOTES (insn) = gen_rtx_INT_LIST ((enum machine_mode) kind, | |
2104 | datum, REG_NOTES (insn)); | |
2105 | } | |
2106 | ||
2107 | /* Add a register note like NOTE to INSN. */ | |
2108 | ||
2109 | void | |
2110 | add_shallow_copy_of_reg_note (rtx insn, rtx note) | |
2111 | { | |
2112 | if (GET_CODE (note) == INT_LIST) | |
2113 | add_int_reg_note (insn, REG_NOTE_KIND (note), XINT (note, 0)); | |
2114 | else | |
2115 | add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0)); | |
2116 | } | |
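Example (an illustrative sketch, not part of rtlanal.c): attaching both
flavors of note.  The names are hypothetical; per int_reg_note_p above,
REG_BR_PROB is the one integer note kind.

    static void
    annotate (rtx set_insn, rtx jump_insn, int probability)
    {
      /* EXPR-style note: the datum is an rtx.  */
      add_reg_note (set_insn, REG_EQUAL, GEN_INT (42));
      /* INT-style note: the datum is a bare integer.  */
      add_int_reg_note (jump_insn, REG_BR_PROB, probability);
    }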
2117 | ||
2c88418c RS |
2118 | /* Remove register note NOTE from the REG_NOTES of INSN. */ |
2119 | ||
2120 | void | |
f7d504c2 | 2121 | remove_note (rtx insn, const_rtx note) |
2c88418c | 2122 | { |
b3694847 | 2123 | rtx link; |
2c88418c | 2124 | |
49c3bb12 RH |
2125 | if (note == NULL_RTX) |
2126 | return; | |
2127 | ||
2c88418c | 2128 | if (REG_NOTES (insn) == note) |
6fb5fa3c DB |
2129 | REG_NOTES (insn) = XEXP (note, 1); |
2130 | else | |
2131 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
2132 | if (XEXP (link, 1) == note) | |
2133 | { | |
2134 | XEXP (link, 1) = XEXP (note, 1); | |
2135 | break; | |
2136 | } | |
2137 | ||
2138 | switch (REG_NOTE_KIND (note)) | |
2c88418c | 2139 | { |
6fb5fa3c DB |
2140 | case REG_EQUAL: |
2141 | case REG_EQUIV: | |
b2908ba6 | 2142 | df_notes_rescan (as_a <rtx_insn *> (insn)); |
6fb5fa3c DB |
2143 | break; |
2144 | default: | |
2145 | break; | |
2c88418c | 2146 | } |
2c88418c | 2147 | } |
55a98783 | 2148 | |
7cd689bc SB |
2149 | /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. */ |
2150 | ||
2151 | void | |
2152 | remove_reg_equal_equiv_notes (rtx insn) | |
2153 | { | |
2154 | rtx *loc; | |
2155 | ||
2156 | loc = ®_NOTES (insn); | |
2157 | while (*loc) | |
2158 | { | |
2159 | enum reg_note kind = REG_NOTE_KIND (*loc); | |
2160 | if (kind == REG_EQUAL || kind == REG_EQUIV) | |
2161 | *loc = XEXP (*loc, 1); | |
2162 | else | |
2163 | loc = &XEXP (*loc, 1); | |
2164 | } | |
2165 | } | |
885c9b5d EB |
2166 | |
2167 | /* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO. */ | |
2168 | ||
2169 | void | |
2170 | remove_reg_equal_equiv_notes_for_regno (unsigned int regno) | |
2171 | { | |
2172 | df_ref eq_use; | |
2173 | ||
2174 | if (!df) | |
2175 | return; | |
2176 | ||
2177 | /* This loop is a little tricky. We cannot just go down the chain because | |
2178 | it is being modified by some actions in the loop. So we just iterate | |
2179 | over the head. We plan to drain the list anyway. */ | |
2180 | while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL) | |
2181 | { | |
1bbbc4a3 | 2182 | rtx_insn *insn = DF_REF_INSN (eq_use); |
885c9b5d EB |
2183 | rtx note = find_reg_equal_equiv_note (insn); |
2184 | ||
2185 | /* This assert is generally triggered when someone deletes a REG_EQUAL | |
2186 | or REG_EQUIV note by hacking the list manually rather than calling | |
2187 | remove_note. */ | |
2188 | gcc_assert (note); | |
2189 | ||
2190 | remove_note (insn, note); | |
2191 | } | |
2192 | } | |
7cd689bc | 2193 | |
5f0d2358 RK |
2194 | /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and |
2195 | return 1 if it is found. A simple equality test is used to determine if | |
2196 | NODE matches. */ | |
2197 | ||
2198 | int | |
f7d504c2 | 2199 | in_expr_list_p (const_rtx listp, const_rtx node) |
5f0d2358 | 2200 | { |
f7d504c2 | 2201 | const_rtx x; |
5f0d2358 RK |
2202 | |
2203 | for (x = listp; x; x = XEXP (x, 1)) | |
2204 | if (node == XEXP (x, 0)) | |
2205 | return 1; | |
2206 | ||
2207 | return 0; | |
2208 | } | |
2209 | ||
dd248abd RK |
2210 | /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and |
2211 | remove that entry from the list if it is found. | |
55a98783 | 2212 | |
dd248abd | 2213 | A simple equality test is used to determine if NODE matches. */ |
55a98783 JL |
2214 | |
2215 | void | |
2382940b | 2216 | remove_node_from_expr_list (const_rtx node, rtx_expr_list **listp) |
55a98783 | 2217 | { |
2382940b | 2218 | rtx_expr_list *temp = *listp; |
55a98783 JL |
2219 | rtx prev = NULL_RTX; |
2220 | ||
2221 | while (temp) | |
2222 | { | |
2382940b | 2223 | if (node == temp->element ()) |
55a98783 JL |
2224 | { |
2225 | /* Splice the node out of the list. */ | |
2226 | if (prev) | |
2382940b | 2227 | XEXP (prev, 1) = temp->next (); |
55a98783 | 2228 | else |
2382940b | 2229 | *listp = temp->next (); |
55a98783 JL |
2230 | |
2231 | return; | |
2232 | } | |
dd248abd RK |
2233 | |
2234 | prev = temp; | |
2382940b | 2235 | temp = temp->next (); |
55a98783 JL |
2236 | } |
2237 | } | |
b5241a5a DM |
2238 | |
2239 | /* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and | |
2240 | remove that entry from the list if it is found. | |
2241 | ||
2242 | A simple equality test is used to determine if NODE matches. */ | |
2243 | ||
2244 | void | |
2245 | remove_node_from_insn_list (const rtx_insn *node, rtx_insn_list **listp) | |
2246 | { | |
2247 | rtx_insn_list *temp = *listp; | |
2248 | rtx prev = NULL; | |
2249 | ||
2250 | while (temp) | |
2251 | { | |
2252 | if (node == temp->insn ()) | |
2253 | { | |
2254 | /* Splice the node out of the list. */ | |
2255 | if (prev) | |
2256 | XEXP (prev, 1) = temp->next (); | |
2257 | else | |
2258 | *listp = temp->next (); | |
2259 | ||
2260 | return; | |
2261 | } | |
2262 | ||
2263 | prev = temp; | |
2264 | temp = temp->next (); | |
2265 | } | |
2266 | } | |
2c88418c | 2267 | \f |
2b067faf RS |
2268 | /* Nonzero if X contains any volatile instructions. These are instructions |
2269 | which may cause unpredictable machine state, and thus no | |
adddc347 HPN |
2270 | instructions or register uses should be moved or combined across them. |
2271 | This includes only volatile asms and UNSPEC_VOLATILE instructions. */ | |
2b067faf RS |
2272 | |
2273 | int | |
f7d504c2 | 2274 | volatile_insn_p (const_rtx x) |
2b067faf | 2275 | { |
f7d504c2 | 2276 | const RTX_CODE code = GET_CODE (x); |
2b067faf RS |
2277 | switch (code) |
2278 | { | |
2279 | case LABEL_REF: | |
2280 | case SYMBOL_REF: | |
2b067faf | 2281 | case CONST: |
d8116890 | 2282 | CASE_CONST_ANY: |
2b067faf RS |
2283 | case CC0: |
2284 | case PC: | |
2285 | case REG: | |
2286 | case SCRATCH: | |
2287 | case CLOBBER: | |
2b067faf RS |
2288 | case ADDR_VEC: |
2289 | case ADDR_DIFF_VEC: | |
2290 | case CALL: | |
2291 | case MEM: | |
2292 | return 0; | |
2293 | ||
2294 | case UNSPEC_VOLATILE: | |
2b067faf RS |
2295 | return 1; |
2296 | ||
4c46ea23 | 2297 | case ASM_INPUT: |
2b067faf RS |
2298 | case ASM_OPERANDS: |
2299 | if (MEM_VOLATILE_P (x)) | |
2300 | return 1; | |
e9a25f70 JL |
2301 | |
2302 | default: | |
2303 | break; | |
2b067faf RS |
2304 | } |
2305 | ||
2306 | /* Recursively scan the operands of this expression. */ | |
2307 | ||
2308 | { | |
f7d504c2 | 2309 | const char *const fmt = GET_RTX_FORMAT (code); |
b3694847 | 2310 | int i; |
a6a2274a | 2311 | |
2b067faf RS |
2312 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
2313 | { | |
2314 | if (fmt[i] == 'e') | |
2315 | { | |
31001f72 | 2316 | if (volatile_insn_p (XEXP (x, i))) |
2b067faf RS |
2317 | return 1; |
2318 | } | |
d4757e6a | 2319 | else if (fmt[i] == 'E') |
2b067faf | 2320 | { |
b3694847 | 2321 | int j; |
2b067faf | 2322 | for (j = 0; j < XVECLEN (x, i); j++) |
31001f72 | 2323 | if (volatile_insn_p (XVECEXP (x, i, j))) |
2b067faf RS |
2324 | return 1; |
2325 | } | |
2326 | } | |
2327 | } | |
2328 | return 0; | |
2329 | } | |
2330 | ||
2c88418c | 2331 | /* Nonzero if X contains any volatile memory references |
2ac4fed0 | 2332 | UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */ |
2c88418c RS |
2333 | |
2334 | int | |
f7d504c2 | 2335 | volatile_refs_p (const_rtx x) |
2c88418c | 2336 | { |
f7d504c2 | 2337 | const RTX_CODE code = GET_CODE (x); |
2c88418c RS |
2338 | switch (code) |
2339 | { | |
2340 | case LABEL_REF: | |
2341 | case SYMBOL_REF: | |
2c88418c | 2342 | case CONST: |
d8116890 | 2343 | CASE_CONST_ANY: |
2c88418c RS |
2344 | case CC0: |
2345 | case PC: | |
2346 | case REG: | |
2347 | case SCRATCH: | |
2348 | case CLOBBER: | |
2c88418c RS |
2349 | case ADDR_VEC: |
2350 | case ADDR_DIFF_VEC: | |
2351 | return 0; | |
2352 | ||
2ac4fed0 | 2353 | case UNSPEC_VOLATILE: |
2c88418c RS |
2354 | return 1; |
2355 | ||
2356 | case MEM: | |
4c46ea23 | 2357 | case ASM_INPUT: |
2c88418c RS |
2358 | case ASM_OPERANDS: |
2359 | if (MEM_VOLATILE_P (x)) | |
2360 | return 1; | |
e9a25f70 JL |
2361 | |
2362 | default: | |
2363 | break; | |
2c88418c RS |
2364 | } |
2365 | ||
2366 | /* Recursively scan the operands of this expression. */ | |
2367 | ||
2368 | { | |
f7d504c2 | 2369 | const char *const fmt = GET_RTX_FORMAT (code); |
b3694847 | 2370 | int i; |
a6a2274a | 2371 | |
2c88418c RS |
2372 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
2373 | { | |
2374 | if (fmt[i] == 'e') | |
2375 | { | |
2376 | if (volatile_refs_p (XEXP (x, i))) | |
2377 | return 1; | |
2378 | } | |
d4757e6a | 2379 | else if (fmt[i] == 'E') |
2c88418c | 2380 | { |
b3694847 | 2381 | int j; |
2c88418c RS |
2382 | for (j = 0; j < XVECLEN (x, i); j++) |
2383 | if (volatile_refs_p (XVECEXP (x, i, j))) | |
2384 | return 1; | |
2385 | } | |
2386 | } | |
2387 | } | |
2388 | return 0; | |
2389 | } | |
2390 | ||
2391 | /* Similar to above, except that it also rejects register pre- and post- | |
2392 | incrementing. */ | |
2393 | ||
2394 | int | |
f7d504c2 | 2395 | side_effects_p (const_rtx x) |
2c88418c | 2396 | { |
f7d504c2 | 2397 | const RTX_CODE code = GET_CODE (x); |
2c88418c RS |
2398 | switch (code) |
2399 | { | |
2400 | case LABEL_REF: | |
2401 | case SYMBOL_REF: | |
2c88418c | 2402 | case CONST: |
d8116890 | 2403 | CASE_CONST_ANY: |
2c88418c RS |
2404 | case CC0: |
2405 | case PC: | |
2406 | case REG: | |
2407 | case SCRATCH: | |
2c88418c RS |
2408 | case ADDR_VEC: |
2409 | case ADDR_DIFF_VEC: | |
b5b8b0ac | 2410 | case VAR_LOCATION: |
2c88418c RS |
2411 | return 0; |
2412 | ||
2413 | case CLOBBER: | |
2414 | /* Reject CLOBBER with a non-VOID mode. These are made by combine.c | |
2415 | when some combination can't be done. If we see one, don't think | |
2416 | that we can simplify the expression. */ | |
2417 | return (GET_MODE (x) != VOIDmode); | |
2418 | ||
2419 | case PRE_INC: | |
2420 | case PRE_DEC: | |
2421 | case POST_INC: | |
2422 | case POST_DEC: | |
1fb9c5cd MH |
2423 | case PRE_MODIFY: |
2424 | case POST_MODIFY: | |
2c88418c | 2425 | case CALL: |
2ac4fed0 | 2426 | case UNSPEC_VOLATILE: |
2c88418c RS |
2427 | return 1; |
2428 | ||
2429 | case MEM: | |
4c46ea23 | 2430 | case ASM_INPUT: |
2c88418c RS |
2431 | case ASM_OPERANDS: |
2432 | if (MEM_VOLATILE_P (x)) | |
2433 | return 1; | |
e9a25f70 JL |
2434 | |
2435 | default: | |
2436 | break; | |
2c88418c RS |
2437 | } |
2438 | ||
2439 | /* Recursively scan the operands of this expression. */ | |
2440 | ||
2441 | { | |
b3694847 SS |
2442 | const char *fmt = GET_RTX_FORMAT (code); |
2443 | int i; | |
a6a2274a | 2444 | |
2c88418c RS |
2445 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
2446 | { | |
2447 | if (fmt[i] == 'e') | |
2448 | { | |
2449 | if (side_effects_p (XEXP (x, i))) | |
2450 | return 1; | |
2451 | } | |
d4757e6a | 2452 | else if (fmt[i] == 'E') |
2c88418c | 2453 | { |
b3694847 | 2454 | int j; |
2c88418c RS |
2455 | for (j = 0; j < XVECLEN (x, i); j++) |
2456 | if (side_effects_p (XVECEXP (x, i, j))) | |
2457 | return 1; | |
2458 | } | |
2459 | } | |
2460 | } | |
2461 | return 0; | |
2462 | } | |
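The three predicates above form a ladder: volatile_insn_p flags only volatile
asms and UNSPEC_VOLATILE, volatile_refs_p additionally flags volatile MEMs,
and side_effects_p further flags autoincrement, CALL, and mode-carrying
CLOBBER.  An illustrative sketch (not part of rtlanal.c) of choosing the
strongest test:

    /* An expression may be deleted outright only if evaluating it
       has no effect at all, which is side_effects_p's domain.  */
    static bool
    deletable_src_p (rtx src)
    {
      return !side_effects_p (src);
    }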
2463 | \f | |
e755fcf5 | 2464 | /* Return nonzero if evaluating rtx X might cause a trap. |
48e8382e PB |
2465 | FLAGS controls how to consider MEMs. A nonzero value means the context | |
2466 | of the access may have changed from the original, such that the | |
2467 | address may have become invalid. */ | |
2c88418c | 2468 | |
215b063c | 2469 | int |
f7d504c2 | 2470 | may_trap_p_1 (const_rtx x, unsigned flags) |
2c88418c RS |
2471 | { |
2472 | int i; | |
2473 | enum rtx_code code; | |
6f7d635c | 2474 | const char *fmt; |
48e8382e PB |
2475 | |
2476 | /* We make no distinction currently, but this function is part of | |
2477 | the internal target-hooks ABI so we keep the parameter as | |
2478 | "unsigned flags". */ | |
2479 | bool code_changed = flags != 0; | |
2c88418c RS |
2480 | |
2481 | if (x == 0) | |
2482 | return 0; | |
2483 | code = GET_CODE (x); | |
2484 | switch (code) | |
2485 | { | |
2486 | /* Handle these cases quickly. */ | |
d8116890 | 2487 | CASE_CONST_ANY: |
2c88418c RS |
2488 | case SYMBOL_REF: |
2489 | case LABEL_REF: | |
2490 | case CONST: | |
2491 | case PC: | |
2492 | case CC0: | |
2493 | case REG: | |
2494 | case SCRATCH: | |
2495 | return 0; | |
2496 | ||
215b063c | 2497 | case UNSPEC: |
215b063c PB |
2498 | return targetm.unspec_may_trap_p (x, flags); |
2499 | ||
c84a808e | 2500 | case UNSPEC_VOLATILE: |
215b063c | 2501 | case ASM_INPUT: |
2c88418c RS |
2502 | case TRAP_IF: |
2503 | return 1; | |
2504 | ||
22aa60a1 RH |
2505 | case ASM_OPERANDS: |
2506 | return MEM_VOLATILE_P (x); | |
2507 | ||
2c88418c RS |
2508 | /* Memory ref can trap unless it's a static var or a stack slot. */ |
2509 | case MEM: | |
d809253a EB |
2510 | /* Recognize specific pattern of stack checking probes. */ |
2511 | if (flag_stack_check | |
2512 | && MEM_VOLATILE_P (x) | |
2513 | && XEXP (x, 0) == stack_pointer_rtx) | |
2514 | return 1; | |
e755fcf5 | 2515 | if (/* MEM_NOTRAP_P only relates to the actual position of the memory |
48e8382e PB |
2516 | reference; moving it out of context such as when moving code |
2517 | when optimizing, might cause its address to become invalid. */ | |
2518 | code_changed | |
2519 | || !MEM_NOTRAP_P (x)) | |
2520 | { | |
f5541398 | 2521 | HOST_WIDE_INT size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : 0; |
48e8382e PB |
2522 | return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size, |
2523 | GET_MODE (x), code_changed); | |
2524 | } | |
2525 | ||
2526 | return 0; | |
2c88418c RS |
2527 | |
2528 | /* Division by a non-constant might trap. */ | |
2529 | case DIV: | |
2530 | case MOD: | |
2531 | case UDIV: | |
2532 | case UMOD: | |
52bfebf0 RS |
2533 | if (HONOR_SNANS (GET_MODE (x))) |
2534 | return 1; | |
3d8bf70f | 2535 | if (SCALAR_FLOAT_MODE_P (GET_MODE (x))) |
f9013075 DE |
2536 | return flag_trapping_math; |
2537 | if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx)) | |
2c88418c | 2538 | return 1; |
e9a25f70 JL |
2539 | break; |
2540 | ||
b278301b RK |
2541 | case EXPR_LIST: |
2542 | /* An EXPR_LIST is used to represent a function call. This | |
2543 | certainly may trap. */ | |
2544 | return 1; | |
e9a25f70 | 2545 | |
734508ea JW |
2546 | case GE: |
2547 | case GT: | |
2548 | case LE: | |
2549 | case LT: | |
19aec195 | 2550 | case LTGT: |
55143861 | 2551 | case COMPARE: |
734508ea | 2552 | /* Some floating point comparisons may trap. */ |
f5eb5fd0 JH |
2553 | if (!flag_trapping_math) |
2554 | break; | |
734508ea JW |
2555 | /* ??? There is no machine independent way to check for tests that trap |
2556 | when COMPARE is used, though many targets do make this distinction. | |
2557 | For instance, sparc uses CCFPE for compares which generate exceptions | |
2558 | and CCFP for compares which do not generate exceptions. */ | |
52bfebf0 | 2559 | if (HONOR_NANS (GET_MODE (x))) |
55143861 JJ |
2560 | return 1; |
2561 | /* But often the compare has some CC mode, so check operand | |
2562 | modes as well. */ | |
52bfebf0 RS |
2563 | if (HONOR_NANS (GET_MODE (XEXP (x, 0))) |
2564 | || HONOR_NANS (GET_MODE (XEXP (x, 1)))) | |
2565 | return 1; | |
2566 | break; | |
2567 | ||
2568 | case EQ: | |
2569 | case NE: | |
2570 | if (HONOR_SNANS (GET_MODE (x))) | |
2571 | return 1; | |
2572 | /* Often comparison is CC mode, so check operand modes. */ | |
2573 | if (HONOR_SNANS (GET_MODE (XEXP (x, 0))) | |
2574 | || HONOR_SNANS (GET_MODE (XEXP (x, 1)))) | |
55143861 JJ |
2575 | return 1; |
2576 | break; | |
2577 | ||
22fd5743 FH |
2578 | case FIX: |
2579 | /* Conversion of floating point might trap. */ | |
2580 | if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0)))) | |
2581 | return 1; | |
2582 | break; | |
2583 | ||
05cc23e8 RH |
2584 | case NEG: |
2585 | case ABS: | |
e3947b34 | 2586 | case SUBREG: |
05cc23e8 RH |
2587 | /* These operations don't trap even with floating point. */ |
2588 | break; | |
2589 | ||
2c88418c RS |
2590 | default: |
2591 | /* Any floating arithmetic may trap. */ | |
c84a808e | 2592 | if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math) |
2c88418c RS |
2593 | return 1; |
2594 | } | |
2595 | ||
2596 | fmt = GET_RTX_FORMAT (code); | |
2597 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2598 | { | |
2599 | if (fmt[i] == 'e') | |
2600 | { | |
e755fcf5 | 2601 | if (may_trap_p_1 (XEXP (x, i), flags)) |
2c88418c RS |
2602 | return 1; |
2603 | } | |
2604 | else if (fmt[i] == 'E') | |
2605 | { | |
b3694847 | 2606 | int j; |
2c88418c | 2607 | for (j = 0; j < XVECLEN (x, i); j++) |
e755fcf5 | 2608 | if (may_trap_p_1 (XVECEXP (x, i, j), flags)) |
2c88418c RS |
2609 | return 1; |
2610 | } | |
2611 | } | |
2612 | return 0; | |
2613 | } | |
2358ff91 EB |
2614 | |
2615 | /* Return nonzero if evaluating rtx X might cause a trap. */ | |
2616 | ||
2617 | int | |
f7d504c2 | 2618 | may_trap_p (const_rtx x) |
2358ff91 | 2619 | { |
e755fcf5 ZD |
2620 | return may_trap_p_1 (x, 0); |
2621 | } | |
2622 | ||
c0220ea4 | 2623 | /* Same as above, but additionally return nonzero if evaluating rtx X might |
2358ff91 EB |
2624 | cause a fault. We define a fault for the purpose of this function as an | |
2625 | erroneous execution condition that cannot be encountered during the normal | |
2626 | execution of a valid program; the typical example is an unaligned memory | |
2627 | access on a strict alignment machine. The compiler guarantees that it | |
2628 | doesn't generate code that will fault from a valid program, but this | |
2629 | guarantee doesn't mean anything for individual instructions. Consider | |
2630 | the following example: | |
2631 | ||
2632 | struct S { int d; union { char *cp; int *ip; }; }; | |
2633 | ||
2634 | int foo(struct S *s) | |
2635 | { | |
2636 | if (s->d == 1) | |
2637 | return *s->ip; | |
2638 | else | |
2639 | return *s->cp; | |
2640 | } | |
2641 | ||
2642 | on a strict alignment machine. In a valid program, foo will never be | |
2643 | invoked on a structure for which d is equal to 1 and the underlying | |
2644 | unique field of the union not aligned on a 4-byte boundary, but the | |
2645 | expression *s->ip might cause a fault if considered individually. | |
2646 | ||
2647 | At the RTL level, potentially problematic expressions will almost always | |
2648 | satisfy may_trap_p; for example, the above dereference can be emitted as | |
2649 | (mem:SI (reg:P)) and this expression is may_trap_p for a generic register. | |
2650 | However, suppose that foo is inlined in a caller that causes s->cp to | |
2651 | point to a local character variable and guarantees that s->d is not set | |
2652 | to 1; foo may have been effectively translated into pseudo-RTL as: | |
2653 | ||
2654 | if ((reg:SI) == 1) | |
2655 | (set (reg:SI) (mem:SI (%fp - 7))) | |
2656 | else | |
2657 | (set (reg:QI) (mem:QI (%fp - 7))) | |
2658 | ||
2659 | Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a | |
2660 | memory reference to a stack slot, but it will certainly cause a fault | |
2661 | on a strict alignment machine. */ | |
2662 | ||
2663 | int | |
f7d504c2 | 2664 | may_trap_or_fault_p (const_rtx x) |
2358ff91 | 2665 | { |
48e8382e | 2666 | return may_trap_p_1 (x, 1); |
2358ff91 | 2667 | } |
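Example (an illustrative sketch, not part of rtlanal.c): code motion wants
the stronger of the two tests, since moving a MEM changes its context, which
is exactly what the FLAGS argument of may_trap_p_1 models.

    /* True if SRC can be evaluated unconditionally in a new context,
       e.g. hoisted out of a loop, without trapping or faulting.  */
    static bool
    safe_to_hoist_p (rtx src)
    {
      return !may_trap_or_fault_p (src);
    }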
2c88418c RS |
2668 | \f |
2669 | /* Return nonzero if X contains a comparison that is not either EQ or NE, | |
2670 | i.e., an inequality. */ | |
2671 | ||
2672 | int | |
f7d504c2 | 2673 | inequality_comparisons_p (const_rtx x) |
2c88418c | 2674 | { |
b3694847 SS |
2675 | const char *fmt; |
2676 | int len, i; | |
f7d504c2 | 2677 | const enum rtx_code code = GET_CODE (x); |
2c88418c RS |
2678 | |
2679 | switch (code) | |
2680 | { | |
2681 | case REG: | |
2682 | case SCRATCH: | |
2683 | case PC: | |
2684 | case CC0: | |
d8116890 | 2685 | CASE_CONST_ANY: |
2c88418c RS |
2686 | case CONST: |
2687 | case LABEL_REF: | |
2688 | case SYMBOL_REF: | |
2689 | return 0; | |
2690 | ||
2691 | case LT: | |
2692 | case LTU: | |
2693 | case GT: | |
2694 | case GTU: | |
2695 | case LE: | |
2696 | case LEU: | |
2697 | case GE: | |
2698 | case GEU: | |
2699 | return 1; | |
a6a2274a | 2700 | |
e9a25f70 JL |
2701 | default: |
2702 | break; | |
2c88418c RS |
2703 | } |
2704 | ||
2705 | len = GET_RTX_LENGTH (code); | |
2706 | fmt = GET_RTX_FORMAT (code); | |
2707 | ||
2708 | for (i = 0; i < len; i++) | |
2709 | { | |
2710 | if (fmt[i] == 'e') | |
2711 | { | |
2712 | if (inequality_comparisons_p (XEXP (x, i))) | |
2713 | return 1; | |
2714 | } | |
2715 | else if (fmt[i] == 'E') | |
2716 | { | |
b3694847 | 2717 | int j; |
2c88418c RS |
2718 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
2719 | if (inequality_comparisons_p (XVECEXP (x, i, j))) | |
2720 | return 1; | |
2721 | } | |
2722 | } | |
a6a2274a | 2723 | |
2c88418c RS |
2724 | return 0; |
2725 | } | |
2726 | \f | |
1ed0205e VM |
2727 | /* Replace any occurrence of FROM in X with TO. The function does |
2728 | not enter into CONST_DOUBLE for the replace. | |
2c88418c RS |
2729 | |
2730 | Note that copying is not done so X must not be shared unless all copies | |
2731 | are to be modified. */ | |
2732 | ||
2733 | rtx | |
0c20a65f | 2734 | replace_rtx (rtx x, rtx from, rtx to) |
2c88418c | 2735 | { |
b3694847 SS |
2736 | int i, j; |
2737 | const char *fmt; | |
2c88418c RS |
2738 | |
2739 | if (x == from) | |
2740 | return to; | |
2741 | ||
2742 | /* Allow this function to make replacements in EXPR_LISTs. */ | |
2743 | if (x == 0) | |
2744 | return 0; | |
2745 | ||
9dd791c8 AO |
2746 | if (GET_CODE (x) == SUBREG) |
2747 | { | |
55d796da | 2748 | rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to); |
9dd791c8 | 2749 | |
481683e1 | 2750 | if (CONST_INT_P (new_rtx)) |
9dd791c8 | 2751 | { |
55d796da | 2752 | x = simplify_subreg (GET_MODE (x), new_rtx, |
9dd791c8 AO |
2753 | GET_MODE (SUBREG_REG (x)), |
2754 | SUBREG_BYTE (x)); | |
41374e13 | 2755 | gcc_assert (x); |
9dd791c8 AO |
2756 | } |
2757 | else | |
55d796da | 2758 | SUBREG_REG (x) = new_rtx; |
9dd791c8 AO |
2759 | |
2760 | return x; | |
2761 | } | |
2762 | else if (GET_CODE (x) == ZERO_EXTEND) | |
2763 | { | |
55d796da | 2764 | rtx new_rtx = replace_rtx (XEXP (x, 0), from, to); |
9dd791c8 | 2765 | |
481683e1 | 2766 | if (CONST_INT_P (new_rtx)) |
9dd791c8 AO |
2767 | { |
2768 | x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x), | |
55d796da | 2769 | new_rtx, GET_MODE (XEXP (x, 0))); |
41374e13 | 2770 | gcc_assert (x); |
9dd791c8 AO |
2771 | } |
2772 | else | |
55d796da | 2773 | XEXP (x, 0) = new_rtx; |
9dd791c8 AO |
2774 | |
2775 | return x; | |
2776 | } | |
2777 | ||
2c88418c RS |
2778 | fmt = GET_RTX_FORMAT (GET_CODE (x)); |
2779 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--) | |
2780 | { | |
2781 | if (fmt[i] == 'e') | |
2782 | XEXP (x, i) = replace_rtx (XEXP (x, i), from, to); | |
2783 | else if (fmt[i] == 'E') | |
2784 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
2785 | XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to); | |
2786 | } | |
2787 | ||
2788 | return x; | |
a6a2274a | 2789 | } |
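Example (an illustrative sketch, not part of rtlanal.c): since replace_rtx
rewrites X in place, shared rtl should be copied first.  The helper name is
hypothetical.

    /* Return a copy of PAT in which every occurrence of REG has been
       replaced by the constant VAL.  */
    static rtx
    substitute_reg_with_constant (rtx pat, rtx reg, rtx val)
    {
      return replace_rtx (copy_rtx (pat), reg, val);
    }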
2c88418c | 2790 | \f |
a2b7026c RS |
2791 | /* Replace occurrences of the OLD_LABEL in *LOC with NEW_LABEL. Also track |
2792 | the change in LABEL_NUSES if UPDATE_LABEL_NUSES. */ | |
39811184 | 2793 | |
a2b7026c RS |
2794 | void |
2795 | replace_label (rtx *loc, rtx old_label, rtx new_label, bool update_label_nuses) | |
39811184 | 2796 | { |
a2b7026c RS |
2797 | /* Handle jump tables specially, since ADDR_{DIFF_,}VECs can be long. */ |
2798 | rtx x = *loc; | |
2799 | if (JUMP_TABLE_DATA_P (x)) | |
4af16369 | 2800 | { |
a2b7026c RS |
2801 | x = PATTERN (x); |
2802 | rtvec vec = XVEC (x, GET_CODE (x) == ADDR_DIFF_VEC); | |
2803 | int len = GET_NUM_ELEM (vec); | |
2804 | for (int i = 0; i < len; ++i) | |
4af16369 | 2805 | { |
a2b7026c RS |
2806 | rtx ref = RTVEC_ELT (vec, i); |
2807 | if (XEXP (ref, 0) == old_label) | |
2808 | { | |
2809 | XEXP (ref, 0) = new_label; | |
2810 | if (update_label_nuses) | |
2811 | { | |
2812 | ++LABEL_NUSES (new_label); | |
2813 | --LABEL_NUSES (old_label); | |
2814 | } | |
2815 | } | |
4af16369 | 2816 | } |
a2b7026c | 2817 | return; |
4af16369 JZ |
2818 | } |
2819 | ||
39811184 | 2820 | /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL |
a2b7026c | 2821 | field. This is not handled by the iterator because it doesn't |
39811184 | 2822 | handle unprinted ('0') fields. */ |
a2b7026c RS |
2823 | if (JUMP_P (x) && JUMP_LABEL (x) == old_label) |
2824 | JUMP_LABEL (x) = new_label; | |
39811184 | 2825 | |
a2b7026c RS |
2826 | subrtx_ptr_iterator::array_type array; |
2827 | FOR_EACH_SUBRTX_PTR (iter, array, loc, ALL) | |
4af16369 | 2828 | { |
a2b7026c RS |
2829 | rtx *loc = *iter; |
2830 | if (rtx x = *loc) | |
4af16369 | 2831 | { |
a2b7026c RS |
2832 | if (GET_CODE (x) == SYMBOL_REF |
2833 | && CONSTANT_POOL_ADDRESS_P (x)) | |
2834 | { | |
2835 | rtx c = get_pool_constant (x); | |
2836 | if (rtx_referenced_p (old_label, c)) | |
2837 | { | |
2838 | /* Create a copy of constant C; replace the label inside | |
2839 | but do not update LABEL_NUSES because uses in constant pool | |
2840 | are not counted. */ | |
2841 | rtx new_c = copy_rtx (c); | |
2842 | replace_label (&new_c, old_label, new_label, false); | |
2843 | ||
2844 | /* Add the new constant NEW_C to constant pool and replace | |
2845 | the old reference to constant by new reference. */ | |
2846 | rtx new_mem = force_const_mem (get_pool_mode (x), new_c); | |
2847 | *loc = replace_rtx (x, x, XEXP (new_mem, 0)); | |
2848 | } | |
2849 | } | |
2850 | ||
2851 | if ((GET_CODE (x) == LABEL_REF | |
2852 | || GET_CODE (x) == INSN_LIST) | |
2853 | && XEXP (x, 0) == old_label) | |
2854 | { | |
2855 | XEXP (x, 0) = new_label; | |
2856 | if (update_label_nuses) | |
2857 | { | |
2858 | ++LABEL_NUSES (new_label); | |
2859 | --LABEL_NUSES (old_label); | |
2860 | } | |
2861 | } | |
4af16369 | 2862 | } |
4af16369 | 2863 | } |
a2b7026c | 2864 | } |
39811184 | 2865 | |
a2b7026c RS |
2866 | void |
2867 | replace_label_in_insn (rtx_insn *insn, rtx old_label, rtx new_label, | |
2868 | bool update_label_nuses) | |
2869 | { | |
2870 | rtx insn_as_rtx = insn; | |
2871 | replace_label (&insn_as_rtx, old_label, new_label, update_label_nuses); | |
2872 | gcc_checking_assert (insn_as_rtx == insn); | |
39811184 JZ |
2873 | } |
2874 | ||
e08cf836 | 2875 | /* Return true if X is referenced in BODY. */ |
39811184 | 2876 | |
e08cf836 RS |
2877 | bool |
2878 | rtx_referenced_p (const_rtx x, const_rtx body) | |
39811184 | 2879 | { |
e08cf836 RS |
2880 | subrtx_iterator::array_type array; |
2881 | FOR_EACH_SUBRTX (iter, array, body, ALL) | |
2882 | if (const_rtx y = *iter) | |
2883 | { | |
2884 | /* Check if a label_ref Y refers to label X. */ | |
a827d9b1 DM |
2885 | if (GET_CODE (y) == LABEL_REF |
2886 | && LABEL_P (x) | |
2887 | && LABEL_REF_LABEL (y) == x) | |
e08cf836 | 2888 | return true; |
39811184 | 2889 | |
e08cf836 RS |
2890 | if (rtx_equal_p (x, y)) |
2891 | return true; | |
39811184 | 2892 | |
e08cf836 RS |
2893 | /* If Y is a reference to pool constant traverse the constant. */ |
2894 | if (GET_CODE (y) == SYMBOL_REF | |
2895 | && CONSTANT_POOL_ADDRESS_P (y)) | |
2896 | iter.substitute (get_pool_constant (y)); | |
2897 | } | |
2898 | return false; | |
39811184 JZ |
2899 | } |
2900 | ||
ee735eef JZ |
2901 | /* If INSN is a tablejump, return true, store the label that precedes the | |
2902 | jump table in *LABELP and the jump table in *TABLEP. LABELP and TABLEP may be NULL. */ | |
39811184 JZ |
2903 | |
2904 | bool | |
c5241a21 | 2905 | tablejump_p (const rtx_insn *insn, rtx *labelp, rtx_jump_table_data **tablep) |
39811184 | 2906 | { |
ee735eef JZ |
2907 | rtx label, table; |
2908 | ||
dc0ff1c8 BS |
2909 | if (!JUMP_P (insn)) |
2910 | return false; | |
2911 | ||
2912 | label = JUMP_LABEL (insn); | |
2913 | if (label != NULL_RTX && !ANY_RETURN_P (label) | |
b32d5189 | 2914 | && (table = NEXT_INSN (as_a <rtx_insn *> (label))) != NULL_RTX |
481683e1 | 2915 | && JUMP_TABLE_DATA_P (table)) |
39811184 | 2916 | { |
ee735eef JZ |
2917 | if (labelp) |
2918 | *labelp = label; | |
2919 | if (tablep) | |
8942ee0f | 2920 | *tablep = as_a <rtx_jump_table_data *> (table); |
39811184 JZ |
2921 | return true; |
2922 | } | |
2923 | return false; | |
2924 | } | |
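Example (an illustrative sketch, not part of rtlanal.c): once tablejump_p has
identified the jump table, the dispatch vector lives in operand 0 of an
ADDR_VEC and operand 1 of an ADDR_DIFF_VEC, as in replace_label above.

    /* Return the number of entries in INSN's jump table, or 0 if INSN
       is not a tablejump.  */
    static int
    count_tablejump_targets (const rtx_insn *insn)
    {
      rtx label;
      rtx_jump_table_data *table;
      if (!tablejump_p (insn, &label, &table))
        return 0;
      rtx vec = PATTERN (table);
      return XVECLEN (vec, GET_CODE (vec) == ADDR_DIFF_VEC);
    }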
2925 | ||
fce7e199 RH |
2926 | /* A subroutine of computed_jump_p: return 1 if X contains a REG or MEM or | |
2927 | constant that is not in the constant pool and not in the condition | |
2928 | of an IF_THEN_ELSE. */ | |
2a1777af JL |
2929 | |
2930 | static int | |
f7d504c2 | 2931 | computed_jump_p_1 (const_rtx x) |
2a1777af | 2932 | { |
f7d504c2 | 2933 | const enum rtx_code code = GET_CODE (x); |
2a1777af | 2934 | int i, j; |
6f7d635c | 2935 | const char *fmt; |
2a1777af JL |
2936 | |
2937 | switch (code) | |
2938 | { | |
2a1777af JL |
2939 | case LABEL_REF: |
2940 | case PC: | |
2941 | return 0; | |
2942 | ||
fce7e199 | 2943 | case CONST: |
d8116890 | 2944 | CASE_CONST_ANY: |
fce7e199 | 2945 | case SYMBOL_REF: |
2a1777af JL |
2946 | case REG: |
2947 | return 1; | |
2948 | ||
2949 | case MEM: | |
2950 | return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF | |
2951 | && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0))); | |
2952 | ||
2953 | case IF_THEN_ELSE: | |
fce7e199 RH |
2954 | return (computed_jump_p_1 (XEXP (x, 1)) |
2955 | || computed_jump_p_1 (XEXP (x, 2))); | |
1d300e19 KG |
2956 | |
2957 | default: | |
2958 | break; | |
2a1777af JL |
2959 | } |
2960 | ||
2961 | fmt = GET_RTX_FORMAT (code); | |
2962 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
2963 | { | |
2964 | if (fmt[i] == 'e' | |
fce7e199 | 2965 | && computed_jump_p_1 (XEXP (x, i))) |
2a1777af JL |
2966 | return 1; |
2967 | ||
d4757e6a | 2968 | else if (fmt[i] == 'E') |
2a1777af | 2969 | for (j = 0; j < XVECLEN (x, i); j++) |
fce7e199 | 2970 | if (computed_jump_p_1 (XVECEXP (x, i, j))) |
2a1777af JL |
2971 | return 1; |
2972 | } | |
2973 | ||
2974 | return 0; | |
2975 | } | |
2976 | ||
2977 | /* Return nonzero if INSN is an indirect jump (aka computed jump). | |
2978 | ||
2979 | Tablejumps and casesi insns are not considered indirect jumps; | |
4eb00163 | 2980 | we can recognize them by a (use (label_ref)). */ |
2a1777af JL |
2981 | |
2982 | int | |
f7d504c2 | 2983 | computed_jump_p (const_rtx insn) |
2a1777af JL |
2984 | { |
2985 | int i; | |
4b4bf941 | 2986 | if (JUMP_P (insn)) |
2a1777af JL |
2987 | { |
2988 | rtx pat = PATTERN (insn); | |
2a1777af | 2989 | |
cf7c4aa6 HPN |
2990 | /* If we have a JUMP_LABEL set, we're not a computed jump. */ |
2991 | if (JUMP_LABEL (insn) != NULL) | |
f759eb8b | 2992 | return 0; |
cf7c4aa6 HPN |
2993 | |
2994 | if (GET_CODE (pat) == PARALLEL) | |
2a1777af JL |
2995 | { |
2996 | int len = XVECLEN (pat, 0); | |
2997 | int has_use_labelref = 0; | |
2998 | ||
2999 | for (i = len - 1; i >= 0; i--) | |
3000 | if (GET_CODE (XVECEXP (pat, 0, i)) == USE | |
3001 | && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) | |
3002 | == LABEL_REF)) | |
c7b3b99f PCC |
3003 | { |
3004 | has_use_labelref = 1; | |
3005 | break; | |
3006 | } | |
2a1777af JL |
3007 | |
3008 | if (! has_use_labelref) | |
3009 | for (i = len - 1; i >= 0; i--) | |
3010 | if (GET_CODE (XVECEXP (pat, 0, i)) == SET | |
3011 | && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx | |
fce7e199 | 3012 | && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i)))) |
2a1777af JL |
3013 | return 1; |
3014 | } | |
3015 | else if (GET_CODE (pat) == SET | |
3016 | && SET_DEST (pat) == pc_rtx | |
fce7e199 | 3017 | && computed_jump_p_1 (SET_SRC (pat))) |
2a1777af JL |
3018 | return 1; |
3019 | } | |
3020 | return 0; | |
3021 | } | |
ccc2d6d0 | 3022 | |
cf94b0fc PB |
3023 | /* Optimized loop for for_each_rtx, trying to avoid useless recursive | |
3024 | calls. Processes the subexpressions of EXP and passes them to F. */ | |
3025 | static int | |
3026 | for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data) | |
3027 | { | |
3028 | int result, i, j; | |
3029 | const char *format = GET_RTX_FORMAT (GET_CODE (exp)); | |
3030 | rtx *x; | |
3031 | ||
3032 | for (; format[n] != '\0'; n++) | |
3033 | { | |
3034 | switch (format[n]) | |
3035 | { | |
3036 | case 'e': | |
3037 | /* Call F on X. */ | |
3038 | x = &XEXP (exp, n); | |
3039 | result = (*f) (x, data); | |
3040 | if (result == -1) | |
3041 | /* Do not traverse sub-expressions. */ | |
3042 | continue; | |
3043 | else if (result != 0) | |
3044 | /* Stop the traversal. */ | |
3045 | return result; | |
b8698a0f | 3046 | |
cf94b0fc PB |
3047 | if (*x == NULL_RTX) |
3048 | /* There are no sub-expressions. */ | |
3049 | continue; | |
b8698a0f | 3050 | |
cf94b0fc PB |
3051 | i = non_rtx_starting_operands[GET_CODE (*x)]; |
3052 | if (i >= 0) | |
3053 | { | |
3054 | result = for_each_rtx_1 (*x, i, f, data); | |
3055 | if (result != 0) | |
3056 | return result; | |
3057 | } | |
3058 | break; | |
3059 | ||
3060 | case 'V': | |
3061 | case 'E': | |
3062 | if (XVEC (exp, n) == 0) | |
3063 | continue; | |
3064 | for (j = 0; j < XVECLEN (exp, n); ++j) | |
3065 | { | |
3066 | /* Call F on X. */ | |
3067 | x = &XVECEXP (exp, n, j); | |
3068 | result = (*f) (x, data); | |
3069 | if (result == -1) | |
3070 | /* Do not traverse sub-expressions. */ | |
3071 | continue; | |
3072 | else if (result != 0) | |
3073 | /* Stop the traversal. */ | |
3074 | return result; | |
b8698a0f | 3075 | |
cf94b0fc PB |
3076 | if (*x == NULL_RTX) |
3077 | /* There are no sub-expressions. */ | |
3078 | continue; | |
b8698a0f | 3079 | |
cf94b0fc PB |
3080 | i = non_rtx_starting_operands[GET_CODE (*x)]; |
3081 | if (i >= 0) | |
3082 | { | |
3083 | result = for_each_rtx_1 (*x, i, f, data); | |
3084 | if (result != 0) | |
3085 | return result; | |
3086 | } | |
3087 | } | |
3088 | break; | |
3089 | ||
3090 | default: | |
3091 | /* Nothing to do. */ | |
3092 | break; | |
3093 | } | |
3094 | } | |
3095 | ||
3096 | return 0; | |
3097 | } | |
3098 | ||
ccc2d6d0 MM |
3099 | /* Traverse X via depth-first search, calling F for each |
3100 | sub-expression (including X itself). F is also passed the DATA. | |
3101 | If F returns -1, do not traverse sub-expressions, but continue | |
3102 | traversing the rest of the tree. If F ever returns any other | |
40f03658 | 3103 | nonzero value, stop the traversal, and return the value returned |
ccc2d6d0 MM |
3104 | by F. Otherwise, return 0. This function does not traverse inside |
3105 | tree structure that contains RTX_EXPRs, or into sub-expressions | |
3106 | whose format code is `0' since it is not known whether or not those | |
3107 | codes are actually RTL. | |
3108 | ||
3109 | This routine is very general, and could (should?) be used to | |
3110 | implement many of the other routines in this file. */ | |
3111 | ||
ae0b51ef | 3112 | int |
0c20a65f | 3113 | for_each_rtx (rtx *x, rtx_function f, void *data) |
ccc2d6d0 MM |
3114 | { |
3115 | int result; | |
ccc2d6d0 MM |
3116 | int i; |
3117 | ||
3118 | /* Call F on X. */ | |
b987f237 | 3119 | result = (*f) (x, data); |
ccc2d6d0 MM |
3120 | if (result == -1) |
3121 | /* Do not traverse sub-expressions. */ | |
3122 | return 0; | |
3123 | else if (result != 0) | |
3124 | /* Stop the traversal. */ | |
3125 | return result; | |
3126 | ||
3127 | if (*x == NULL_RTX) | |
3128 | /* There are no sub-expressions. */ | |
3129 | return 0; | |
3130 | ||
cf94b0fc PB |
3131 | i = non_rtx_starting_operands[GET_CODE (*x)]; |
3132 | if (i < 0) | |
3133 | return 0; | |
ccc2d6d0 | 3134 | |
cf94b0fc | 3135 | return for_each_rtx_1 (*x, i, f, data); |
ccc2d6d0 | 3136 | } |
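Example (an illustrative sketch, not part of rtlanal.c): per the contract
above, a for_each_rtx callback returns 0 to keep walking, -1 to skip the
subexpressions of the current rtx, or any other value to abort the walk and
have that value returned.

    /* Stop as soon as the stack pointer is seen anywhere in *LOC.  */
    static int
    find_sp_ref (rtx *loc, void *data ATTRIBUTE_UNUSED)
    {
      return *loc == stack_pointer_rtx ? 1 : 0;
    }

    /* Hypothetical caller:  */
    int found = for_each_rtx (&PATTERN (insn), find_sp_ref, NULL);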
3ec2b590 | 3137 | |
70e7f57d DM |
3138 | /* Like "for_each_rtx", but for calling on an rtx_insn **. */ |
3139 | ||
3140 | int | |
3141 | for_each_rtx_in_insn (rtx_insn **insn, rtx_function f, void *data) | |
3142 | { | |
3143 | rtx insn_as_rtx = *insn; | |
3144 | int result; | |
3145 | ||
3146 | result = for_each_rtx (&insn_as_rtx, f, data); | |
3147 | ||
3148 | if (insn_as_rtx != *insn) | |
3149 | *insn = safe_as_a <rtx_insn *> (insn_as_rtx); | |
3150 | ||
3151 | return result; | |
3152 | } | |
3153 | ||
4deef538 AO |
3154 | \f |
3155 | ||
8d8e205b RS |
3156 | /* MEM has a PRE/POST-INC/DEC/MODIFY address X. Extract the operands of |
3157 | the equivalent add insn and pass the result to FN, using DATA as the | |
3158 | final argument. */ | |
4deef538 AO |
3159 | |
3160 | static int | |
8d8e205b | 3161 | for_each_inc_dec_find_inc_dec (rtx mem, for_each_inc_dec_fn fn, void *data) |
4deef538 | 3162 | { |
8d8e205b | 3163 | rtx x = XEXP (mem, 0); |
4deef538 AO |
3164 | switch (GET_CODE (x)) |
3165 | { | |
3166 | case PRE_INC: | |
3167 | case POST_INC: | |
3168 | { | |
8d8e205b | 3169 | int size = GET_MODE_SIZE (GET_MODE (mem)); |
4deef538 AO |
3170 | rtx r1 = XEXP (x, 0); |
3171 | rtx c = gen_int_mode (size, GET_MODE (r1)); | |
8d8e205b | 3172 | return fn (mem, x, r1, r1, c, data); |
4deef538 AO |
3173 | } |
3174 | ||
3175 | case PRE_DEC: | |
3176 | case POST_DEC: | |
3177 | { | |
8d8e205b | 3178 | int size = GET_MODE_SIZE (GET_MODE (mem)); |
4deef538 AO |
3179 | rtx r1 = XEXP (x, 0); |
3180 | rtx c = gen_int_mode (-size, GET_MODE (r1)); | |
8d8e205b | 3181 | return fn (mem, x, r1, r1, c, data); |
4deef538 AO |
3182 | } |
3183 | ||
3184 | case PRE_MODIFY: | |
3185 | case POST_MODIFY: | |
3186 | { | |
3187 | rtx r1 = XEXP (x, 0); | |
3188 | rtx add = XEXP (x, 1); | |
8d8e205b | 3189 | return fn (mem, x, r1, add, NULL, data); |
4deef538 AO |
3190 | } |
3191 | ||
3192 | default: | |
8d8e205b | 3193 | gcc_unreachable (); |
4deef538 AO |
3194 | } |
3195 | } | |
3196 | ||
8d8e205b RS |
3197 | /* Traverse X looking for MEMs that have autoinc addresses. |
3198 | For each such autoinc operation found, call FN, passing it | |
4deef538 AO |
3199 | the innermost enclosing MEM, the operation itself, the RTX modified |
3200 | by the operation, two RTXs (the second may be NULL) that, once | |
3201 | added, represent the value to be held by the modified RTX | |
8d8e205b RS |
3202 | afterwards, and DATA. FN is to return 0 to continue the |
3203 | traversal or any other value to have it returned to the caller of | |
4deef538 AO |
3204 | for_each_inc_dec. */ |
3205 | ||
3206 | int | |
8d8e205b | 3207 | for_each_inc_dec (rtx x, |
4deef538 | 3208 | for_each_inc_dec_fn fn, |
8d8e205b | 3209 | void *data) |
4deef538 | 3210 | { |
8d8e205b RS |
3211 | subrtx_var_iterator::array_type array; |
3212 | FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST) | |
3213 | { | |
3214 | rtx mem = *iter; | |
3215 | if (mem | |
3216 | && MEM_P (mem) | |
3217 | && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC) | |
3218 | { | |
3219 | int res = for_each_inc_dec_find_inc_dec (mem, fn, data); | |
3220 | if (res != 0) | |
3221 | return res; | |
3222 | iter.skip_subrtxes (); | |
3223 | } | |
3224 | } | |
3225 | return 0; | |
4deef538 AO |
3226 | } |
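As a usage sketch (hypothetical helpers): a for_each_inc_dec_fn receives the enclosing MEM, the autoinc rtx, the modified register, the two addends (the last possibly NULL), and the client data; returning 0 keeps the traversal going.

static int
count_autoinc_1 (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
                 rtx dest ATTRIBUTE_UNUSED, rtx src ATTRIBUTE_UNUSED,
                 rtx srcoff ATTRIBUTE_UNUSED, void *arg)
{
  int *count = (int *) arg;
  (*count)++;
  return 0;  /* Keep scanning for further autoinc addresses.  */
}

static int
count_autoinc (rtx pat)
{
  int count = 0;
  for_each_inc_dec (pat, count_autoinc_1, &count);
  return count;
}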
3227 | ||
3228 | \f | |
777b1b71 RH |
3229 | /* Searches X for any reference to REGNO, returning the rtx of the |
3230 | reference found if any. Otherwise, returns NULL_RTX. */ | |
3231 | ||
3232 | rtx | |
0c20a65f | 3233 | regno_use_in (unsigned int regno, rtx x) |
777b1b71 | 3234 | { |
b3694847 | 3235 | const char *fmt; |
777b1b71 RH |
3236 | int i, j; |
3237 | rtx tem; | |
3238 | ||
f8cfc6aa | 3239 | if (REG_P (x) && REGNO (x) == regno) |
777b1b71 RH |
3240 | return x; |
3241 | ||
3242 | fmt = GET_RTX_FORMAT (GET_CODE (x)); | |
3243 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--) | |
3244 | { | |
3245 | if (fmt[i] == 'e') | |
3246 | { | |
3247 | if ((tem = regno_use_in (regno, XEXP (x, i)))) | |
3248 | return tem; | |
3249 | } | |
3250 | else if (fmt[i] == 'E') | |
3251 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
3252 | if ((tem = regno_use_in (regno , XVECEXP (x, i, j)))) | |
3253 | return tem; | |
3254 | } | |
3255 | ||
3256 | return NULL_RTX; | |
3257 | } | |
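Since regno_use_in returns the (reg ...) rtx it finds, a non-null result doubles as a boolean test; a minimal sketch (the wrapper is hypothetical):

static bool
insn_uses_regno_p (rtx_insn *insn, unsigned int regno)
{
  return regno_use_in (regno, PATTERN (insn)) != NULL_RTX;
}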
2dfa9a87 | 3258 | |
e5c56fd9 JH |
3259 | /* Return a value indicating whether OP, an operand of a commutative |
3260 | operation, is preferred as the first or second operand. The higher | |
3261 | the value, the stronger the preference for being the first operand. | |
3262 | We use negative values to indicate a preference for the first operand | |
3263 | and positive values for the second operand. */ | |
3264 | ||
9b3bd424 | 3265 | int |
0c20a65f | 3266 | commutative_operand_precedence (rtx op) |
e5c56fd9 | 3267 | { |
e3d6e740 | 3268 | enum rtx_code code = GET_CODE (op); |
b8698a0f | 3269 | |
e5c56fd9 | 3270 | /* Constants always become the second operand. Prefer "nice" constants. */ |
e3d6e740 | 3271 | if (code == CONST_INT) |
7e0b4eae | 3272 | return -8; |
807e902e KZ |
3273 | if (code == CONST_WIDE_INT) |
3274 | return -8; | |
e3d6e740 | 3275 | if (code == CONST_DOUBLE) |
7e0b4eae | 3276 | return -7; |
091a3ac7 CF |
3277 | if (code == CONST_FIXED) |
3278 | return -7; | |
9ce79a7a | 3279 | op = avoid_constant_pool_reference (op); |
79b82df3 | 3280 | code = GET_CODE (op); |
ec8e098d PB |
3281 | |
3282 | switch (GET_RTX_CLASS (code)) | |
3283 | { | |
3284 | case RTX_CONST_OBJ: | |
3285 | if (code == CONST_INT) | |
7e0b4eae | 3286 | return -6; |
807e902e KZ |
3287 | if (code == CONST_WIDE_INT) |
3288 | return -6; | |
ec8e098d | 3289 | if (code == CONST_DOUBLE) |
7e0b4eae | 3290 | return -5; |
091a3ac7 CF |
3291 | if (code == CONST_FIXED) |
3292 | return -5; | |
7e0b4eae | 3293 | return -4; |
ec8e098d PB |
3294 | |
3295 | case RTX_EXTRA: | |
3296 | /* SUBREGs of objects should come second. */ | |
3297 | if (code == SUBREG && OBJECT_P (SUBREG_REG (op))) | |
7e0b4eae | 3298 | return -3; |
6fb5fa3c | 3299 | return 0; |
ec8e098d PB |
3300 | |
3301 | case RTX_OBJ: | |
3302 | /* Complex expressions should come first, so decrease the priority | |
7e0b4eae PB |
3303 | of objects. Prefer pointer objects over non-pointer objects. */ |
3304 | if ((REG_P (op) && REG_POINTER (op)) | |
3305 | || (MEM_P (op) && MEM_POINTER (op))) | |
3306 | return -1; | |
3307 | return -2; | |
ec8e098d PB |
3308 | |
3309 | case RTX_COMM_ARITH: | |
3310 | /* Prefer operands that are themselves commutative to be first. | |
3311 | This helps to make things linear. In particular, | |
3312 | (and (and (reg) (reg)) (not (reg))) is canonical. */ | |
3313 | return 4; | |
3314 | ||
3315 | case RTX_BIN_ARITH: | |
3316 | /* If only one operand is a binary expression, it will be the first | |
3317 | operand. In particular, (plus (minus (reg) (reg)) (neg (reg))) | |
3318 | is canonical, although it will usually be further simplified. */ | |
3319 | return 2; | |
b8698a0f | 3320 | |
ec8e098d PB |
3321 | case RTX_UNARY: |
3322 | /* Then prefer NEG and NOT. */ | |
3323 | if (code == NEG || code == NOT) | |
3324 | return 1; | |
e5c56fd9 | 3325 | |
ec8e098d PB |
3326 | default: |
3327 | return 0; | |
3328 | } | |
e5c56fd9 JH |
3329 | } |
3330 | ||
f63d1bf7 | 3331 | /* Return 1 iff it is necessary to swap the operands of a commutative |
e5c56fd9 JH |
3332 | operation in order to canonicalize the expression. */ |
3333 | ||
7e0b4eae | 3334 | bool |
0c20a65f | 3335 | swap_commutative_operands_p (rtx x, rtx y) |
e5c56fd9 | 3336 | { |
9b3bd424 RH |
3337 | return (commutative_operand_precedence (x) |
3338 | < commutative_operand_precedence (y)); | |
e5c56fd9 | 3339 | } |
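For example, code that builds a commutative rtx by hand can use this predicate to establish canonical operand order up front; the helper below is a sketch, not part of this file:

static rtx
gen_canonical_plus (enum machine_mode mode, rtx a, rtx b)
{
  if (swap_commutative_operands_p (a, b))
    {
      rtx tmp = a;
      a = b;
      b = tmp;
    }
  /* A constant operand, if any, now sits in B, as the canonical form
     requires.  */
  return gen_rtx_PLUS (mode, a, b);
}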
2dfa9a87 MH |
3340 | |
3341 | /* Return 1 if X is an autoincrement side effect and the register is | |
3342 | not the stack pointer. */ | |
3343 | int | |
f7d504c2 | 3344 | auto_inc_p (const_rtx x) |
2dfa9a87 MH |
3345 | { |
3346 | switch (GET_CODE (x)) | |
3347 | { | |
3348 | case PRE_INC: | |
3349 | case POST_INC: | |
3350 | case PRE_DEC: | |
3351 | case POST_DEC: | |
3352 | case PRE_MODIFY: | |
3353 | case POST_MODIFY: | |
3354 | /* There are no REG_INC notes for SP. */ | |
3355 | if (XEXP (x, 0) != stack_pointer_rtx) | |
3356 | return 1; | |
3357 | default: | |
3358 | break; | |
3359 | } | |
3360 | return 0; | |
3361 | } | |
3b10cf4b | 3362 | |
f9da5064 | 3363 | /* Return nonzero if IN contains a piece of rtl that has the address LOC. */ |
db7ba742 | 3364 | int |
f7d504c2 | 3365 | loc_mentioned_in_p (rtx *loc, const_rtx in) |
db7ba742 | 3366 | { |
a52b023a PB |
3367 | enum rtx_code code; |
3368 | const char *fmt; | |
db7ba742 R |
3369 | int i, j; |
3370 | ||
a52b023a PB |
3371 | if (!in) |
3372 | return 0; | |
3373 | ||
3374 | code = GET_CODE (in); | |
3375 | fmt = GET_RTX_FORMAT (code); | |
db7ba742 R |
3376 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
3377 | { | |
db7ba742 R |
3378 | if (fmt[i] == 'e') |
3379 | { | |
e0651058 | 3380 | if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i))) |
db7ba742 R |
3381 | return 1; |
3382 | } | |
3383 | else if (fmt[i] == 'E') | |
3384 | for (j = XVECLEN (in, i) - 1; j >= 0; j--) | |
e0651058 AO |
3385 | if (loc == &XVECEXP (in, i, j) |
3386 | || loc_mentioned_in_p (loc, XVECEXP (in, i, j))) | |
db7ba742 R |
3387 | return 1; |
3388 | } | |
3389 | return 0; | |
3390 | } | |
ddef6bc7 | 3391 | |
bb51e270 RS |
3392 | /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE, |
3393 | and SUBREG_BYTE, return the bit offset where the subreg begins | |
3394 | (counting from the least significant bit of the operand). */ | |
33aceff2 JW |
3395 | |
3396 | unsigned int | |
bb51e270 RS |
3397 | subreg_lsb_1 (enum machine_mode outer_mode, |
3398 | enum machine_mode inner_mode, | |
3399 | unsigned int subreg_byte) | |
33aceff2 | 3400 | { |
33aceff2 JW |
3401 | unsigned int bitpos; |
3402 | unsigned int byte; | |
3403 | unsigned int word; | |
3404 | ||
3405 | /* A paradoxical subreg begins at bit position 0. */ | |
5511bc5a | 3406 | if (GET_MODE_PRECISION (outer_mode) > GET_MODE_PRECISION (inner_mode)) |
33aceff2 JW |
3407 | return 0; |
3408 | ||
3409 | if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN) | |
3410 | /* If the subreg crosses a word boundary ensure that | |
3411 | it also begins and ends on a word boundary. */ | |
41374e13 NS |
3412 | gcc_assert (!((subreg_byte % UNITS_PER_WORD |
3413 | + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD | |
3414 | && (subreg_byte % UNITS_PER_WORD | |
3415 | || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD))); | |
33aceff2 JW |
3416 | |
3417 | if (WORDS_BIG_ENDIAN) | |
3418 | word = (GET_MODE_SIZE (inner_mode) | |
bb51e270 | 3419 | - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD; |
33aceff2 | 3420 | else |
bb51e270 | 3421 | word = subreg_byte / UNITS_PER_WORD; |
33aceff2 JW |
3422 | bitpos = word * BITS_PER_WORD; |
3423 | ||
3424 | if (BYTES_BIG_ENDIAN) | |
3425 | byte = (GET_MODE_SIZE (inner_mode) | |
bb51e270 | 3426 | - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD; |
33aceff2 | 3427 | else |
bb51e270 | 3428 | byte = subreg_byte % UNITS_PER_WORD; |
33aceff2 JW |
3429 | bitpos += byte * BITS_PER_UNIT; |
3430 | ||
3431 | return bitpos; | |
3432 | } | |
3433 | ||
bb51e270 RS |
3434 | /* Given a subreg X, return the bit offset where the subreg begins |
3435 | (counting from the least significant bit of the reg). */ | |
3436 | ||
3437 | unsigned int | |
f7d504c2 | 3438 | subreg_lsb (const_rtx x) |
bb51e270 RS |
3439 | { |
3440 | return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)), | |
3441 | SUBREG_BYTE (x)); | |
3442 | } | |
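A worked example (illustrative, assuming UNITS_PER_WORD == 4 and no paradoxical subreg):

/* (subreg:SI (reg:DI R) 4) on a fully little-endian target:
     word = 4 / 4 = 1, byte = 4 % 4 = 0   =>  subreg_lsb == 32,
   i.e. the subreg names the high word.  The same subreg on a fully
   big-endian target gives
     word = (8 - (4 + 4)) / 4 = 0, byte = 0  =>  subreg_lsb == 0,
   since byte 4 of a big-endian DImode value holds the low word.  */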
3443 | ||
f1f4e530 | 3444 | /* Fill in information about a subreg of a hard register. |
ddef6bc7 JJ |
3445 | xregno - A regno of an inner hard subreg_reg (or what will become one). |
3446 | xmode - The mode of xregno. | |
3447 | offset - The byte offset. | |
3448 | ymode - The mode of a top level SUBREG (or what may become one). | |
0cb07998 RS |
3449 | info - Pointer to structure to fill in. |
3450 | ||
3451 | Rather than considering one particular inner register (and thus one | |
3452 | particular "outer" register) in isolation, this function really uses | |
3453 | XREGNO as a model for a sequence of isomorphic hard registers. Thus the | |
3454 | function does not check whether adding INFO->offset to XREGNO gives | |
3455 | a valid hard register; even if INFO->offset + XREGNO is out of range, | |
3456 | there might be another register of the same type that is in range. | |
3457 | Likewise it doesn't check whether HARD_REGNO_MODE_OK accepts the new | |
3458 | register, since that can depend on things like whether the final | |
3459 | register number is even or odd. Callers that want to check whether | |
3460 | this particular subreg can be replaced by a simple (reg ...) should | |
3461 | use simplify_subreg_regno. */ | |
3462 | ||
c619e982 | 3463 | void |
f1f4e530 JM |
3464 | subreg_get_info (unsigned int xregno, enum machine_mode xmode, |
3465 | unsigned int offset, enum machine_mode ymode, | |
3466 | struct subreg_info *info) | |
04c5580f | 3467 | { |
8521c414 | 3468 | int nregs_xmode, nregs_ymode; |
04c5580f | 3469 | int mode_multiple, nregs_multiple; |
f1f4e530 | 3470 | int offset_adj, y_offset, y_offset_adj; |
8521c414 | 3471 | int regsize_xmode, regsize_ymode; |
f1f4e530 | 3472 | bool rknown; |
04c5580f | 3473 | |
41374e13 | 3474 | gcc_assert (xregno < FIRST_PSEUDO_REGISTER); |
04c5580f | 3475 | |
f1f4e530 JM |
3476 | rknown = false; |
3477 | ||
dd79bb7e GK |
3478 | /* If there are holes in a non-scalar mode in registers, we expect |
3479 | that it is made up of its units concatenated together. */ | |
8521c414 | 3480 | if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)) |
dd79bb7e | 3481 | { |
8521c414 JM |
3482 | enum machine_mode xmode_unit; |
3483 | ||
3484 | nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode); | |
3485 | if (GET_MODE_INNER (xmode) == VOIDmode) | |
3486 | xmode_unit = xmode; | |
3487 | else | |
3488 | xmode_unit = GET_MODE_INNER (xmode); | |
3489 | gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit)); | |
3490 | gcc_assert (nregs_xmode | |
3491 | == (GET_MODE_NUNITS (xmode) | |
3492 | * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit))); | |
3493 | gcc_assert (hard_regno_nregs[xregno][xmode] | |
3494 | == (hard_regno_nregs[xregno][xmode_unit] | |
3495 | * GET_MODE_NUNITS (xmode))); | |
dd79bb7e GK |
3496 | |
3497 | /* You can only ask for a SUBREG of a value with holes in the middle | |
3498 | if you don't cross the holes. (Such a SUBREG should be done by | |
3499 | picking a different register class, or doing it in memory if | |
3500 | necessary.) An example of a value with holes is XCmode on 32-bit | |
3501 | x86 with -m128bit-long-double; it's represented in 6 32-bit registers, | |
b8698a0f | 3502 | 3 for each part, but in memory it's two 128-bit parts. |
dd79bb7e GK |
3503 | Padding is assumed to be at the end (not necessarily the 'high part') |
3504 | of each unit. */ | |
b8698a0f | 3505 | if ((offset / GET_MODE_SIZE (xmode_unit) + 1 |
8521c414 JM |
3506 | < GET_MODE_NUNITS (xmode)) |
3507 | && (offset / GET_MODE_SIZE (xmode_unit) | |
dd79bb7e | 3508 | != ((offset + GET_MODE_SIZE (ymode) - 1) |
8521c414 | 3509 | / GET_MODE_SIZE (xmode_unit)))) |
f1f4e530 JM |
3510 | { |
3511 | info->representable_p = false; | |
3512 | rknown = true; | |
3513 | } | |
dd79bb7e GK |
3514 | } |
3515 | else | |
3516 | nregs_xmode = hard_regno_nregs[xregno][xmode]; | |
b8698a0f | 3517 | |
66fd46b6 | 3518 | nregs_ymode = hard_regno_nregs[xregno][ymode]; |
04c5580f | 3519 | |
dd79bb7e | 3520 | /* Paradoxical subregs are otherwise valid. */ |
f1f4e530 JM |
3521 | if (!rknown |
3522 | && offset == 0 | |
5511bc5a | 3523 | && GET_MODE_PRECISION (ymode) > GET_MODE_PRECISION (xmode)) |
f1f4e530 JM |
3524 | { |
3525 | info->representable_p = true; | |
3526 | /* If this is a big endian paradoxical subreg, which uses more | |
3527 | actual hard registers than the original register, we must | |
3528 | return a negative offset so that we find the proper highpart | |
3529 | of the register. */ | |
3530 | if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD | |
c0a6a1ef | 3531 | ? REG_WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN) |
f1f4e530 JM |
3532 | info->offset = nregs_xmode - nregs_ymode; |
3533 | else | |
3534 | info->offset = 0; | |
3535 | info->nregs = nregs_ymode; | |
3536 | return; | |
3537 | } | |
04c5580f | 3538 | |
8521c414 JM |
3539 | /* If registers store different numbers of bits in the different |
3540 | modes, we cannot generally form this subreg. */ | |
f1f4e530 | 3541 | if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode) |
5f7fc2b8 JM |
3542 | && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode) |
3543 | && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0 | |
3544 | && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0) | |
f1f4e530 JM |
3545 | { |
3546 | regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode; | |
f1f4e530 | 3547 | regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode; |
f1f4e530 JM |
3548 | if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1) |
3549 | { | |
3550 | info->representable_p = false; | |
3551 | info->nregs | |
3552 | = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode; | |
3553 | info->offset = offset / regsize_xmode; | |
3554 | return; | |
3555 | } | |
3556 | if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1) | |
3557 | { | |
3558 | info->representable_p = false; | |
3559 | info->nregs | |
3560 | = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode; | |
3561 | info->offset = offset / regsize_xmode; | |
3562 | return; | |
3563 | } | |
3564 | } | |
8521c414 | 3565 | |
dd79bb7e | 3566 | /* Lowpart subregs are otherwise valid. */ |
f1f4e530 JM |
3567 | if (!rknown && offset == subreg_lowpart_offset (ymode, xmode)) |
3568 | { | |
3569 | info->representable_p = true; | |
3570 | rknown = true; | |
a446b4e8 JM |
3571 | |
3572 | if (offset == 0 || nregs_xmode == nregs_ymode) | |
3573 | { | |
3574 | info->offset = 0; | |
3575 | info->nregs = nregs_ymode; | |
3576 | return; | |
3577 | } | |
f1f4e530 | 3578 | } |
04c5580f | 3579 | |
dd79bb7e GK |
3580 | /* This should always pass, otherwise we don't know how to verify |
3581 | the constraint. These conditions may be relaxed but | |
3582 | subreg_regno_offset would need to be redesigned. */ | |
41374e13 | 3583 | gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0); |
41374e13 | 3584 | gcc_assert ((nregs_xmode % nregs_ymode) == 0); |
04c5580f | 3585 | |
c0a6a1ef BS |
3586 | if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN |
3587 | && GET_MODE_SIZE (xmode) > UNITS_PER_WORD) | |
3588 | { | |
3589 | HOST_WIDE_INT xsize = GET_MODE_SIZE (xmode); | |
3590 | HOST_WIDE_INT ysize = GET_MODE_SIZE (ymode); | |
3591 | HOST_WIDE_INT off_low = offset & (ysize - 1); | |
3592 | HOST_WIDE_INT off_high = offset & ~(ysize - 1); | |
3593 | offset = (xsize - ysize - off_high) | off_low; | |
3594 | } | |
b20b352b | 3595 | /* The XMODE value can be seen as a vector of NREGS_XMODE |
dcc24678 | 3596 | values. The subreg must represent a lowpart of the given field. | |
04c5580f | 3597 | Compute what field it is. */ |
f1f4e530 JM |
3598 | offset_adj = offset; |
3599 | offset_adj -= subreg_lowpart_offset (ymode, | |
3600 | mode_for_size (GET_MODE_BITSIZE (xmode) | |
3601 | / nregs_xmode, | |
3602 | MODE_INT, 0)); | |
04c5580f | 3603 | |
dd79bb7e | 3604 | /* Size of ymode must not be greater than the size of xmode. */ |
04c5580f | 3605 | mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode); |
41374e13 | 3606 | gcc_assert (mode_multiple != 0); |
04c5580f JH |
3607 | |
3608 | y_offset = offset / GET_MODE_SIZE (ymode); | |
f1f4e530 JM |
3609 | y_offset_adj = offset_adj / GET_MODE_SIZE (ymode); |
3610 | nregs_multiple = nregs_xmode / nregs_ymode; | |
41374e13 | 3611 | |
f1f4e530 | 3612 | gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0); |
41374e13 NS |
3613 | gcc_assert ((mode_multiple % nregs_multiple) == 0); |
3614 | ||
f1f4e530 JM |
3615 | if (!rknown) |
3616 | { | |
3617 | info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple))); | |
3618 | rknown = true; | |
3619 | } | |
3620 | info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode; | |
3621 | info->nregs = nregs_ymode; | |
3622 | } | |
3623 | ||
3624 | /* This function returns the regno offset of a subreg expression. | |
3625 | xregno - A regno of an inner hard subreg_reg (or what will become one). | |
3626 | xmode - The mode of xregno. | |
3627 | offset - The byte offset. | |
3628 | ymode - The mode of a top level SUBREG (or what may become one). | |
3629 | RETURN - The regno offset which would be used. */ | |
3630 | unsigned int | |
3631 | subreg_regno_offset (unsigned int xregno, enum machine_mode xmode, | |
3632 | unsigned int offset, enum machine_mode ymode) | |
3633 | { | |
3634 | struct subreg_info info; | |
3635 | subreg_get_info (xregno, xmode, offset, ymode, &info); | |
3636 | return info.offset; | |
3637 | } | |
3638 | ||
3639 | /* This function returns true when the offset is representable via | |
3640 | subreg_offset for the given regno. | |
3641 | xregno - A regno of an inner hard subreg_reg (or what will become one). | |
3642 | xmode - The mode of xregno. | |
3643 | offset - The byte offset. | |
3644 | ymode - The mode of a top level SUBREG (or what may become one). | |
3645 | RETURN - Whether the offset is representable. */ | |
3646 | bool | |
3647 | subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode, | |
3648 | unsigned int offset, enum machine_mode ymode) | |
3649 | { | |
3650 | struct subreg_info info; | |
3651 | subreg_get_info (xregno, xmode, offset, ymode, &info); | |
05cee290 | 3652 | return info.representable_p; |
04c5580f JH |
3653 | } |
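A worked example (hypothetical register numbering): on a 32-bit little-endian target where (reg:DI 0) occupies hard registers 0 and 1,

/* Illustrative only:
     subreg_regno_offset (0, DImode, 4, SImode) == 1
     subreg_offset_representable_p (0, DImode, 4, SImode) == true
   The SImode subreg at byte 4 selects the high word, which lives in the
   second hard register of the pair.  */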
3654 | ||
eef302d2 RS |
3655 | /* Return the number of a YMODE register to which |
3656 | ||
3657 | (subreg:YMODE (reg:XMODE XREGNO) OFFSET) | |
3658 | ||
3659 | can be simplified. Return -1 if the subreg can't be simplified. | |
3660 | ||
3661 | XREGNO is a hard register number. */ | |
3662 | ||
3663 | int | |
3664 | simplify_subreg_regno (unsigned int xregno, enum machine_mode xmode, | |
3665 | unsigned int offset, enum machine_mode ymode) | |
3666 | { | |
3667 | struct subreg_info info; | |
3668 | unsigned int yregno; | |
3669 | ||
3670 | #ifdef CANNOT_CHANGE_MODE_CLASS | |
3671 | /* Give the backend a chance to disallow the mode change. */ | |
3672 | if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT | |
3673 | && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT | |
55a2c322 VM |
3674 | && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode) |
3675 | /* We can use mode change in LRA for some transformations. */ | |
3676 | && ! lra_in_progress) | |
eef302d2 RS |
3677 | return -1; |
3678 | #endif | |
3679 | ||
3680 | /* We shouldn't simplify stack-related registers. */ | |
3681 | if ((!reload_completed || frame_pointer_needed) | |
d4e0d036 | 3682 | && xregno == FRAME_POINTER_REGNUM) |
eef302d2 RS |
3683 | return -1; |
3684 | ||
3685 | if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
98072ee5 | 3686 | && xregno == ARG_POINTER_REGNUM) |
eef302d2 RS |
3687 | return -1; |
3688 | ||
55a2c322 VM |
3689 | if (xregno == STACK_POINTER_REGNUM |
3690 | /* We should convert hard stack register in LRA if it is | |
3691 | possible. */ | |
3692 | && ! lra_in_progress) | |
eef302d2 RS |
3693 | return -1; |
3694 | ||
3695 | /* Try to get the register offset. */ | |
3696 | subreg_get_info (xregno, xmode, offset, ymode, &info); | |
3697 | if (!info.representable_p) | |
3698 | return -1; | |
3699 | ||
3700 | /* Make sure that the offsetted register value is in range. */ | |
3701 | yregno = xregno + info.offset; | |
3702 | if (!HARD_REGISTER_NUM_P (yregno)) | |
3703 | return -1; | |
3704 | ||
3705 | /* See whether (reg:YMODE YREGNO) is valid. | |
3706 | ||
3707 | ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid. | |
eb93b31f EB |
3708 | This is a kludge to work around how complex FP arguments are passed |
3709 | on IA-64 and should be fixed. See PR target/49226. */ | |
eef302d2 RS |
3710 | if (!HARD_REGNO_MODE_OK (yregno, ymode) |
3711 | && HARD_REGNO_MODE_OK (xregno, xmode)) | |
3712 | return -1; | |
3713 | ||
3714 | return (int) yregno; | |
3715 | } | |
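A usage sketch (the wrapper is hypothetical) that replaces a hard-register SUBREG with the equivalent plain REG whenever simplify_subreg_regno permits:

static rtx
try_simplify_subreg_to_reg (rtx subreg)
{
  unsigned int regno = REGNO (SUBREG_REG (subreg));
  int new_regno = simplify_subreg_regno (regno,
                                         GET_MODE (SUBREG_REG (subreg)),
                                         SUBREG_BYTE (subreg),
                                         GET_MODE (subreg));
  if (new_regno < 0)
    return NULL_RTX;  /* The subreg cannot be simplified.  */
  return gen_rtx_REG (GET_MODE (subreg), new_regno);
}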
3716 | ||
dc297297 | 3717 | /* Return the final regno that a subreg expression refers to. */ |
a6a2274a | 3718 | unsigned int |
f7d504c2 | 3719 | subreg_regno (const_rtx x) |
ddef6bc7 JJ |
3720 | { |
3721 | unsigned int ret; | |
3722 | rtx subreg = SUBREG_REG (x); | |
3723 | int regno = REGNO (subreg); | |
3724 | ||
a6a2274a KH |
3725 | ret = regno + subreg_regno_offset (regno, |
3726 | GET_MODE (subreg), | |
ddef6bc7 JJ |
3727 | SUBREG_BYTE (x), |
3728 | GET_MODE (x)); | |
3729 | return ret; | |
3730 | ||
3731 | } | |
f1f4e530 JM |
3732 | |
3733 | /* Return the number of registers that a subreg expression refers | |
3734 | to. */ | |
3735 | unsigned int | |
f7d504c2 | 3736 | subreg_nregs (const_rtx x) |
ba49cb7b KZ |
3737 | { |
3738 | return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x); | |
3739 | } | |
3740 | ||
3741 | /* Return the number of registers that a subreg REG with REGNO | |
3742 | expression refers to. This is a copy of rtlanal.c:subreg_nregs, | |
3743 | changed so that the regno can be passed in. */ | |
3744 | ||
3745 | unsigned int | |
3746 | subreg_nregs_with_regno (unsigned int regno, const_rtx x) | |
f1f4e530 JM |
3747 | { |
3748 | struct subreg_info info; | |
3749 | rtx subreg = SUBREG_REG (x); | |
f1f4e530 JM |
3750 | |
3751 | subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x), | |
3752 | &info); | |
3753 | return info.nregs; | |
3754 | } | |
3755 | ||
ba49cb7b | 3756 | |
833366d6 JH |
3757 | struct parms_set_data |
3758 | { | |
3759 | int nregs; | |
3760 | HARD_REG_SET regs; | |
3761 | }; | |
3762 | ||
3763 | /* Helper function for noticing stores to parameter registers. */ | |
3764 | static void | |
7bc980e1 | 3765 | parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data) |
833366d6 | 3766 | { |
1634b18f | 3767 | struct parms_set_data *const d = (struct parms_set_data *) data; |
833366d6 JH |
3768 | if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER |
3769 | && TEST_HARD_REG_BIT (d->regs, REGNO (x))) | |
3770 | { | |
3771 | CLEAR_HARD_REG_BIT (d->regs, REGNO (x)); | |
3772 | d->nregs--; | |
3773 | } | |
3774 | } | |
3775 | ||
a6a2274a | 3776 | /* Look backward for the first parameter to be loaded. |
b2df20b4 DJ |
3777 | Note that loads of all parameters will not necessarily be |
3778 | found if CSE has eliminated some of them (e.g., an argument | |
3779 | to the outer function is passed down as a parameter). | |
833366d6 | 3780 | Do not skip BOUNDARY. */ |
62fc98cc | 3781 | rtx_insn * |
9321cf00 | 3782 | find_first_parameter_load (rtx_insn *call_insn, rtx_insn *boundary) |
833366d6 JH |
3783 | { |
3784 | struct parms_set_data parm; | |
9321cf00 DM |
3785 | rtx p; |
3786 | rtx_insn *before, *first_set; | |
833366d6 JH |
3787 | |
3788 | /* Since different machines initialize their parameter registers | |
3789 | in different orders, assume nothing. Collect the set of all | |
3790 | parameter registers. */ | |
3791 | CLEAR_HARD_REG_SET (parm.regs); | |
3792 | parm.nregs = 0; | |
3793 | for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1)) | |
3794 | if (GET_CODE (XEXP (p, 0)) == USE | |
f8cfc6aa | 3795 | && REG_P (XEXP (XEXP (p, 0), 0))) |
833366d6 | 3796 | { |
41374e13 | 3797 | gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER); |
833366d6 JH |
3798 | |
3799 | /* We only care about registers which can hold function | |
3800 | arguments. */ | |
3801 | if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0)))) | |
3802 | continue; | |
3803 | ||
3804 | SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0))); | |
3805 | parm.nregs++; | |
3806 | } | |
3807 | before = call_insn; | |
b2df20b4 | 3808 | first_set = call_insn; |
833366d6 JH |
3809 | |
3810 | /* Search backward for the first set of a register in this set. */ | |
3811 | while (parm.nregs && before != boundary) | |
3812 | { | |
3813 | before = PREV_INSN (before); | |
3814 | ||
3815 | /* It is possible that some loads got CSEed from one call to | |
3816 | another. Stop in that case. */ | |
4b4bf941 | 3817 | if (CALL_P (before)) |
833366d6 JH |
3818 | break; |
3819 | ||
dbc1a163 | 3820 | /* Our caller must either ensure that we will find all sets |
833366d6 | 3821 | (in case the code has not been optimized yet), or take care |
eaec9b3d | 3822 | of possible labels by setting BOUNDARY to the preceding |
833366d6 | 3823 | CODE_LABEL. */ |
4b4bf941 | 3824 | if (LABEL_P (before)) |
dbc1a163 | 3825 | { |
41374e13 | 3826 | gcc_assert (before == boundary); |
dbc1a163 RH |
3827 | break; |
3828 | } | |
833366d6 | 3829 | |
0d025d43 | 3830 | if (INSN_P (before)) |
b2df20b4 DJ |
3831 | { |
3832 | int nregs_old = parm.nregs; | |
3833 | note_stores (PATTERN (before), parms_set, &parm); | |
3834 | /* If we found something that did not set a parameter reg, | |
3835 | we're done. Do not keep going, as that might result | |
3836 | in hoisting an insn before the setting of a pseudo | |
3837 | that is used by the hoisted insn. */ | |
3838 | if (nregs_old != parm.nregs) | |
3839 | first_set = before; | |
3840 | else | |
3841 | break; | |
3842 | } | |
833366d6 | 3843 | } |
9321cf00 | 3844 | return first_set; |
833366d6 | 3845 | } |
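A typical call (sketch only; CALL_INSN and BB are assumed to be in scope) bounds the backward search with the head of the call's basic block, so that nothing is later hoisted between the parameter loads and the call:

rtx_insn *first = find_first_parameter_load (call_insn, BB_HEAD (bb));
/* New insns may be placed before FIRST, but not between FIRST and
   CALL_INSN.  */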
3dec4024 | 3846 | |
14b493d6 | 3847 | /* Return true if we should avoid inserting code between INSN and the |
3dec4024 JH |
3848 | preceding call instruction. */ |
3849 | ||
3850 | bool | |
e4685bc8 | 3851 | keep_with_call_p (const rtx_insn *insn) |
3dec4024 JH |
3852 | { |
3853 | rtx set; | |
3854 | ||
3855 | if (INSN_P (insn) && (set = single_set (insn)) != NULL) | |
3856 | { | |
f8cfc6aa | 3857 | if (REG_P (SET_DEST (set)) |
5df533b3 | 3858 | && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER |
3dec4024 JH |
3859 | && fixed_regs[REGNO (SET_DEST (set))] |
3860 | && general_operand (SET_SRC (set), VOIDmode)) | |
3861 | return true; | |
f8cfc6aa | 3862 | if (REG_P (SET_SRC (set)) |
82f81f18 | 3863 | && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set))) |
f8cfc6aa | 3864 | && REG_P (SET_DEST (set)) |
3dec4024 JH |
3865 | && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER) |
3866 | return true; | |
bc204393 RH |
3867 | /* There may be a stack pop just after the call and before the store |
3868 | of the return register. Search for the actual store when deciding | |
3869 | if we can break or not. */ | |
3dec4024 JH |
3870 | if (SET_DEST (set) == stack_pointer_rtx) |
3871 | { | |
75547801 | 3872 | /* This CONST_CAST is okay because next_nonnote_insn just |
4e9b57fa | 3873 | returns its argument and we assign it to a const_rtx |
75547801 | 3874 | variable. */ |
e4685bc8 TS |
3875 | const rtx_insn *i2 |
3876 | = next_nonnote_insn (const_cast<rtx_insn *> (insn)); | |
bc204393 | 3877 | if (i2 && keep_with_call_p (i2)) |
3dec4024 JH |
3878 | return true; |
3879 | } | |
3880 | } | |
3881 | return false; | |
3882 | } | |
71d2c5bd | 3883 | |
432f982f JH |
3884 | /* Return true if LABEL is a target of JUMP_INSN. This applies only |
3885 | to non-complex jumps. That is, direct unconditional, conditional, | |
3886 | and tablejumps, but not computed jumps or returns. It also does | |
3887 | not apply to the fallthru case of a conditional jump. */ | |
3888 | ||
3889 | bool | |
c5241a21 | 3890 | label_is_jump_target_p (const_rtx label, const rtx_insn *jump_insn) |
432f982f JH |
3891 | { |
3892 | rtx tmp = JUMP_LABEL (jump_insn); | |
8942ee0f | 3893 | rtx_jump_table_data *table; |
432f982f JH |
3894 | |
3895 | if (label == tmp) | |
3896 | return true; | |
3897 | ||
8942ee0f | 3898 | if (tablejump_p (jump_insn, NULL, &table)) |
432f982f | 3899 | { |
95c43227 | 3900 | rtvec vec = table->get_labels (); |
432f982f JH |
3901 | int i, veclen = GET_NUM_ELEM (vec); |
3902 | ||
3903 | for (i = 0; i < veclen; ++i) | |
3904 | if (XEXP (RTVEC_ELT (vec, i), 0) == label) | |
3905 | return true; | |
3906 | } | |
3907 | ||
cb2f563b HPN |
3908 | if (find_reg_note (jump_insn, REG_LABEL_TARGET, label)) |
3909 | return true; | |
3910 | ||
432f982f JH |
3911 | return false; |
3912 | } | |
3913 | ||
f894b69b PB |
3914 | \f |
3915 | /* Return an estimate of the cost of computing rtx X. | |
3916 | One use is in cse, to decide which expression to keep in the hash table. | |
3917 | Another is in rtl generation, to pick the cheapest way to multiply. | |
b8698a0f | 3918 | Other uses like the latter are expected in the future. |
f40751dd | 3919 | |
68f932c4 RS |
3920 | X appears as operand OPNO in an expression with code OUTER_CODE. |
3921 | SPEED specifies whether costs optimized for speed or size should | |
f40751dd | 3922 | be returned. */ |
f894b69b PB |
3923 | |
3924 | int | |
68f932c4 | 3925 | rtx_cost (rtx x, enum rtx_code outer_code, int opno, bool speed) |
f894b69b PB |
3926 | { |
3927 | int i, j; | |
3928 | enum rtx_code code; | |
3929 | const char *fmt; | |
3930 | int total; | |
e098c169 | 3931 | int factor; |
f894b69b PB |
3932 | |
3933 | if (x == 0) | |
3934 | return 0; | |
3935 | ||
e098c169 HPN |
3936 | /* A size N times larger than UNITS_PER_WORD likely needs N times as |
3937 | many insns, taking N times as long. */ | |
3938 | factor = GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD; | |
3939 | if (factor == 0) | |
3940 | factor = 1; | |
3941 | ||
f894b69b PB |
3942 | /* Compute the default costs of certain things. |
3943 | Note that targetm.rtx_costs can override the defaults. */ | |
3944 | ||
3945 | code = GET_CODE (x); | |
3946 | switch (code) | |
3947 | { | |
3948 | case MULT: | |
e098c169 HPN |
3949 | /* Multiplication has time-complexity O(N*N), where N is the |
3950 | number of units (translated from digits) when using | |
3951 | schoolbook long multiplication. */ | |
3952 | total = factor * factor * COSTS_N_INSNS (5); | |
f894b69b PB |
3953 | break; |
3954 | case DIV: | |
3955 | case UDIV: | |
3956 | case MOD: | |
3957 | case UMOD: | |
e098c169 HPN |
3958 | /* Similarly, complexity for schoolbook long division. */ |
3959 | total = factor * factor * COSTS_N_INSNS (7); | |
f894b69b PB |
3960 | break; |
3961 | case USE: | |
db3edc20 | 3962 | /* Used in combine.c as a marker. */ |
f894b69b PB |
3963 | total = 0; |
3964 | break; | |
e098c169 HPN |
3965 | case SET: |
3966 | /* A SET doesn't have a mode, so let's look at the SET_DEST to get | |
3967 | the mode for the factor. */ | |
3968 | factor = GET_MODE_SIZE (GET_MODE (SET_DEST (x))) / UNITS_PER_WORD; | |
3969 | if (factor == 0) | |
3970 | factor = 1; | |
3971 | /* Fall through. */ | |
f894b69b | 3972 | default: |
e098c169 | 3973 | total = factor * COSTS_N_INSNS (1); |
f894b69b PB |
3974 | } |
3975 | ||
3976 | switch (code) | |
3977 | { | |
3978 | case REG: | |
3979 | return 0; | |
3980 | ||
3981 | case SUBREG: | |
edb81165 | 3982 | total = 0; |
f894b69b PB |
3983 | /* If we can't tie these modes, make this expensive. The larger |
3984 | the mode, the more expensive it is. */ | |
3985 | if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x)))) | |
e098c169 | 3986 | return COSTS_N_INSNS (2 + factor); |
f894b69b PB |
3987 | break; |
3988 | ||
3989 | default: | |
68f932c4 | 3990 | if (targetm.rtx_costs (x, code, outer_code, opno, &total, speed)) |
f894b69b PB |
3991 | return total; |
3992 | break; | |
3993 | } | |
3994 | ||
3995 | /* Sum the costs of the sub-rtx's, plus cost of this operation, | |
3996 | which is already in total. */ | |
3997 | ||
3998 | fmt = GET_RTX_FORMAT (code); | |
3999 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
4000 | if (fmt[i] == 'e') | |
68f932c4 | 4001 | total += rtx_cost (XEXP (x, i), code, i, speed); |
f894b69b PB |
4002 | else if (fmt[i] == 'E') |
4003 | for (j = 0; j < XVECLEN (x, i); j++) | |
68f932c4 | 4004 | total += rtx_cost (XVECEXP (x, i, j), code, i, speed); |
f894b69b PB |
4005 | |
4006 | return total; | |
4007 | } | |
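To make the size scaling concrete (illustrative arithmetic only):

/* With UNITS_PER_WORD == 4:
     GET_MODE_SIZE (TImode) == 16   =>  factor == 4
     default MULT cost == 4 * 4 * COSTS_N_INSNS (5) == COSTS_N_INSNS (80),
   i.e. schoolbook multiplication of four-word values, before the
   targetm.rtx_costs hook gets a chance to override.  */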
22939744 BS |
4008 | |
4009 | /* Fill in the structure C with information about both speed and size rtx | |
68f932c4 | 4010 | costs for X, which is operand OPNO in an expression with code OUTER. */ |
22939744 BS |
4011 | |
4012 | void | |
68f932c4 RS |
4013 | get_full_rtx_cost (rtx x, enum rtx_code outer, int opno, |
4014 | struct full_rtx_costs *c) | |
22939744 | 4015 | { |
68f932c4 RS |
4016 | c->speed = rtx_cost (x, outer, opno, true); |
4017 | c->size = rtx_cost (x, outer, opno, false); | |
22939744 BS |
4018 | } |
4019 | ||
f894b69b PB |
4020 | \f |
4021 | /* Return the cost of address expression X. |
b8698a0f | 4022 | Expect that X is a properly formed address reference. |
f40751dd JH |
4023 | |
4024 | The SPEED parameter specifies whether costs optimized for speed or size |
4025 | should be returned. */ |
f894b69b PB |
4026 | |
4027 | int | |
09e881c9 | 4028 | address_cost (rtx x, enum machine_mode mode, addr_space_t as, bool speed) |
f894b69b | 4029 | { |
f894b69b PB |
4030 | /* We may be asked for cost of various unusual addresses, such as operands |
4031 | of push instruction. It is not worthwhile to complicate writing | |
4032 | of the target hook by such cases. */ | |
4033 | ||
09e881c9 | 4034 | if (!memory_address_addr_space_p (mode, x, as)) |
f894b69b PB |
4035 | return 1000; |
4036 | ||
b413068c | 4037 | return targetm.address_cost (x, mode, as, speed); |
f894b69b PB |
4038 | } |
4039 | ||
4040 | /* If the target doesn't override, compute the cost as with arithmetic. */ | |
4041 | ||
4042 | int | |
b413068c | 4043 | default_address_cost (rtx x, enum machine_mode, addr_space_t, bool speed) |
f894b69b | 4044 | { |
68f932c4 | 4045 | return rtx_cost (x, MEM, 0, speed); |
f894b69b | 4046 | } |
2f93eea8 PB |
4047 | \f |
4048 | ||
4049 | unsigned HOST_WIDE_INT | |
fa233e34 | 4050 | nonzero_bits (const_rtx x, enum machine_mode mode) |
2f93eea8 PB |
4051 | { |
4052 | return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0); | |
4053 | } | |
4054 | ||
4055 | unsigned int | |
fa233e34 | 4056 | num_sign_bit_copies (const_rtx x, enum machine_mode mode) |
2f93eea8 PB |
4057 | { |
4058 | return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0); | |
4059 | } | |
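As a usage sketch (the helper is hypothetical): nonzero_bits returns a conservative superset of the bits that may be set, so it can prove masking facts, e.g. that a later zero-extension from QImode is redundant.

static bool
fits_in_byte_p (const_rtx x)
{
  return (nonzero_bits (x, SImode)
          & ~(unsigned HOST_WIDE_INT) 0xff) == 0;
}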
4060 | ||
4061 | /* The function cached_nonzero_bits is a wrapper around nonzero_bits1. | |
4062 | It avoids exponential behavior in nonzero_bits1 when X has | |
4063 | identical subexpressions on the first or the second level. */ | |
4064 | ||
4065 | static unsigned HOST_WIDE_INT | |
fa233e34 | 4066 | cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x, |
2f93eea8 PB |
4067 | enum machine_mode known_mode, |
4068 | unsigned HOST_WIDE_INT known_ret) | |
4069 | { | |
4070 | if (x == known_x && mode == known_mode) | |
4071 | return known_ret; | |
4072 | ||
4073 | /* Try to find identical subexpressions. If found call | |
4074 | nonzero_bits1 on X with the subexpressions as KNOWN_X and the | |
4075 | precomputed value for the subexpression as KNOWN_RET. */ | |
4076 | ||
4077 | if (ARITHMETIC_P (x)) | |
4078 | { | |
4079 | rtx x0 = XEXP (x, 0); | |
4080 | rtx x1 = XEXP (x, 1); | |
4081 | ||
4082 | /* Check the first level. */ | |
4083 | if (x0 == x1) | |
4084 | return nonzero_bits1 (x, mode, x0, mode, | |
4085 | cached_nonzero_bits (x0, mode, known_x, | |
4086 | known_mode, known_ret)); | |
4087 | ||
4088 | /* Check the second level. */ | |
4089 | if (ARITHMETIC_P (x0) | |
4090 | && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1))) | |
4091 | return nonzero_bits1 (x, mode, x1, mode, | |
4092 | cached_nonzero_bits (x1, mode, known_x, | |
4093 | known_mode, known_ret)); | |
4094 | ||
4095 | if (ARITHMETIC_P (x1) | |
4096 | && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1))) | |
4097 | return nonzero_bits1 (x, mode, x0, mode, | |
4098 | cached_nonzero_bits (x0, mode, known_x, | |
4099 | known_mode, known_ret)); | |
4100 | } | |
4101 | ||
4102 | return nonzero_bits1 (x, mode, known_x, known_mode, known_ret); | |
4103 | } | |
4104 | ||
4105 | /* We let num_sign_bit_copies recur into nonzero_bits as that is useful. | |
4106 | We don't let nonzero_bits recur into num_sign_bit_copies, because that | |
4107 | is less useful. We can't allow both, because that results in exponential | |
4108 | run time recursion. There is a nullstone testcase that triggered | |
4109 | this. This macro avoids accidental uses of num_sign_bit_copies. */ | |
4110 | #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior | |
4111 | ||
4112 | /* Given an expression, X, compute which bits in X can be nonzero. | |
4113 | We don't care about bits outside of those defined in MODE. | |
4114 | ||
4115 | For most X this is simply GET_MODE_MASK (MODE), but if X is | |
4116 | an arithmetic operation, we can do better. */ | |
4117 | ||
4118 | static unsigned HOST_WIDE_INT | |
fa233e34 | 4119 | nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x, |
2f93eea8 PB |
4120 | enum machine_mode known_mode, |
4121 | unsigned HOST_WIDE_INT known_ret) | |
4122 | { | |
4123 | unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode); | |
4124 | unsigned HOST_WIDE_INT inner_nz; | |
4125 | enum rtx_code code; | |
2d0c270f | 4126 | enum machine_mode inner_mode; |
5511bc5a | 4127 | unsigned int mode_width = GET_MODE_PRECISION (mode); |
2f93eea8 | 4128 | |
ff596cd2 RL |
4129 | /* For floating-point and vector values, assume all bits are needed. */ |
4130 | if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode) | |
4131 | || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode)) | |
2f93eea8 PB |
4132 | return nonzero; |
4133 | ||
4134 | /* If X is wider than MODE, use its mode instead. */ | |
5511bc5a | 4135 | if (GET_MODE_PRECISION (GET_MODE (x)) > mode_width) |
2f93eea8 PB |
4136 | { |
4137 | mode = GET_MODE (x); | |
4138 | nonzero = GET_MODE_MASK (mode); | |
5511bc5a | 4139 | mode_width = GET_MODE_PRECISION (mode); |
2f93eea8 PB |
4140 | } |
4141 | ||
4142 | if (mode_width > HOST_BITS_PER_WIDE_INT) | |
4143 | /* Our only callers in this case look for single bit values. So | |
4144 | just return the mode mask. Those tests will then be false. */ | |
4145 | return nonzero; | |
4146 | ||
4147 | #ifndef WORD_REGISTER_OPERATIONS | |
4148 | /* If MODE is wider than X, but both are a single word for both the host | |
4149 | and target machines, we can compute this from which bits of the | |
4150 | object might be nonzero in its own mode, taking into account the fact | |
4151 | that on many CISC machines, accessing an object in a wider mode | |
4152 | causes the high-order bits to become undefined. So they are | |
4153 | not known to be zero. */ | |
4154 | ||
4155 | if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode | |
5511bc5a BS |
4156 | && GET_MODE_PRECISION (GET_MODE (x)) <= BITS_PER_WORD |
4157 | && GET_MODE_PRECISION (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
4158 | && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (GET_MODE (x))) | |
2f93eea8 PB |
4159 | { |
4160 | nonzero &= cached_nonzero_bits (x, GET_MODE (x), | |
4161 | known_x, known_mode, known_ret); | |
4162 | nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)); | |
4163 | return nonzero; | |
4164 | } | |
4165 | #endif | |
4166 | ||
4167 | code = GET_CODE (x); | |
4168 | switch (code) | |
4169 | { | |
4170 | case REG: | |
4171 | #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) | |
4172 | /* If pointers extend unsigned and this is a pointer in Pmode, say that | |
4173 | all the bits above ptr_mode are known to be zero. */ | |
5932a4d4 | 4174 | /* As we do not know which address space the pointer is referring to, |
d4ebfa65 BE |
4175 | we can do this only if the target does not support different pointer |
4176 | or address modes depending on the address space. */ | |
4177 | if (target_default_pointer_address_modes_p () | |
4178 | && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode | |
2f93eea8 PB |
4179 | && REG_POINTER (x)) |
4180 | nonzero &= GET_MODE_MASK (ptr_mode); | |
4181 | #endif | |
4182 | ||
4183 | /* Include declared information about alignment of pointers. */ | |
4184 | /* ??? We don't properly preserve REG_POINTER changes across | |
4185 | pointer-to-integer casts, so we can't trust it except for | |
4186 | things that we know must be pointers. See execute/960116-1.c. */ | |
4187 | if ((x == stack_pointer_rtx | |
4188 | || x == frame_pointer_rtx | |
4189 | || x == arg_pointer_rtx) | |
4190 | && REGNO_POINTER_ALIGN (REGNO (x))) | |
4191 | { | |
4192 | unsigned HOST_WIDE_INT alignment | |
4193 | = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT; | |
4194 | ||
4195 | #ifdef PUSH_ROUNDING | |
4196 | /* If PUSH_ROUNDING is defined, it is possible for the | |
4197 | stack to be momentarily aligned only to that amount, | |
4198 | so we pick the least alignment. */ | |
4199 | if (x == stack_pointer_rtx && PUSH_ARGS) | |
4200 | alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1), | |
4201 | alignment); | |
4202 | #endif | |
4203 | ||
4204 | nonzero &= ~(alignment - 1); | |
4205 | } | |
4206 | ||
4207 | { | |
4208 | unsigned HOST_WIDE_INT nonzero_for_hook = nonzero; | |
55d796da | 4209 | rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x, |
2f93eea8 PB |
4210 | known_mode, known_ret, |
4211 | &nonzero_for_hook); | |
4212 | ||
55d796da KG |
4213 | if (new_rtx) |
4214 | nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x, | |
2f93eea8 PB |
4215 | known_mode, known_ret); |
4216 | ||
4217 | return nonzero_for_hook; | |
4218 | } | |
4219 | ||
4220 | case CONST_INT: | |
4221 | #ifdef SHORT_IMMEDIATES_SIGN_EXTEND | |
4222 | /* If X is negative in MODE, sign-extend the value. */ | |
c04fc4f0 EB |
4223 | if (INTVAL (x) > 0 |
4224 | && mode_width < BITS_PER_WORD | |
4225 | && (UINTVAL (x) & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1))) | |
4226 | != 0) | |
0cadbfaa | 4227 | return UINTVAL (x) | (HOST_WIDE_INT_M1U << mode_width); |
2f93eea8 PB |
4228 | #endif |
4229 | ||
c04fc4f0 | 4230 | return UINTVAL (x); |
2f93eea8 PB |
4231 | |
4232 | case MEM: | |
4233 | #ifdef LOAD_EXTEND_OP | |
4234 | /* On many, if not most, RISC machines, reading a byte from memory | |
4235 | zeros the rest of the register. Noticing that fact saves a lot | |
4236 | of extra zero-extends. */ | |
4237 | if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND) | |
4238 | nonzero &= GET_MODE_MASK (GET_MODE (x)); | |
4239 | #endif | |
4240 | break; | |
4241 | ||
4242 | case EQ: case NE: | |
4243 | case UNEQ: case LTGT: | |
4244 | case GT: case GTU: case UNGT: | |
4245 | case LT: case LTU: case UNLT: | |
4246 | case GE: case GEU: case UNGE: | |
4247 | case LE: case LEU: case UNLE: | |
4248 | case UNORDERED: case ORDERED: | |
2f93eea8 PB |
4249 | /* If this produces an integer result, we know which bits are set. |
4250 | Code here used to clear bits outside the mode of X, but that is | |
4251 | now done above. */ | |
b8698a0f L |
4252 | /* Mind that MODE is the mode the caller wants to look at this |
4253 | operation in, and not the actual operation mode. We can wind | |
505ac507 RH |
4254 | up with (subreg:DI (gt:V4HI x y)), and we don't have anything |
4255 | that describes the results of a vector compare. */ | |
4256 | if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT | |
2f93eea8 PB |
4257 | && mode_width <= HOST_BITS_PER_WIDE_INT) |
4258 | nonzero = STORE_FLAG_VALUE; | |
4259 | break; | |
4260 | ||
4261 | case NEG: | |
4262 | #if 0 | |
4263 | /* Disabled to avoid exponential mutual recursion between nonzero_bits | |
4264 | and num_sign_bit_copies. */ | |
4265 | if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) | |
5511bc5a | 4266 | == GET_MODE_PRECISION (GET_MODE (x))) |
2f93eea8 PB |
4267 | nonzero = 1; |
4268 | #endif | |
4269 | ||
86cdf393 | 4270 | if (GET_MODE_PRECISION (GET_MODE (x)) < mode_width) |
2f93eea8 PB |
4271 | nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x))); |
4272 | break; | |
4273 | ||
4274 | case ABS: | |
4275 | #if 0 | |
4276 | /* Disabled to avoid exponential mutual recursion between nonzero_bits | |
4277 | and num_sign_bit_copies. */ | |
4278 | if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) | |
5511bc5a | 4279 | == GET_MODE_PRECISION (GET_MODE (x))) |
2f93eea8 PB |
4280 | nonzero = 1; |
4281 | #endif | |
4282 | break; | |
4283 | ||
4284 | case TRUNCATE: | |
4285 | nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode, | |
4286 | known_x, known_mode, known_ret) | |
4287 | & GET_MODE_MASK (mode)); | |
4288 | break; | |
4289 | ||
4290 | case ZERO_EXTEND: | |
4291 | nonzero &= cached_nonzero_bits (XEXP (x, 0), mode, | |
4292 | known_x, known_mode, known_ret); | |
4293 | if (GET_MODE (XEXP (x, 0)) != VOIDmode) | |
4294 | nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); | |
4295 | break; | |
4296 | ||
4297 | case SIGN_EXTEND: | |
4298 | /* If the sign bit is known clear, this is the same as ZERO_EXTEND. | |
4299 | Otherwise, show all the bits in the outer mode but not the inner | |
4300 | may be nonzero. */ | |
4301 | inner_nz = cached_nonzero_bits (XEXP (x, 0), mode, | |
4302 | known_x, known_mode, known_ret); | |
4303 | if (GET_MODE (XEXP (x, 0)) != VOIDmode) | |
4304 | { | |
4305 | inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); | |
2d0c270f | 4306 | if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz)) |
2f93eea8 PB |
4307 | inner_nz |= (GET_MODE_MASK (mode) |
4308 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))); | |
4309 | } | |
4310 | ||
4311 | nonzero &= inner_nz; | |
4312 | break; | |
4313 | ||
4314 | case AND: | |
4315 | nonzero &= cached_nonzero_bits (XEXP (x, 0), mode, | |
4316 | known_x, known_mode, known_ret) | |
4317 | & cached_nonzero_bits (XEXP (x, 1), mode, | |
4318 | known_x, known_mode, known_ret); | |
4319 | break; | |
4320 | ||
4321 | case XOR: case IOR: | |
4322 | case UMIN: case UMAX: case SMIN: case SMAX: | |
4323 | { | |
c04fc4f0 EB |
4324 | unsigned HOST_WIDE_INT nonzero0 |
4325 | = cached_nonzero_bits (XEXP (x, 0), mode, | |
4326 | known_x, known_mode, known_ret); | |
2f93eea8 PB |
4327 | |
4328 | /* Don't call nonzero_bits for the second time if it cannot change | |
4329 | anything. */ | |
4330 | if ((nonzero & nonzero0) != nonzero) | |
4331 | nonzero &= nonzero0 | |
4332 | | cached_nonzero_bits (XEXP (x, 1), mode, | |
4333 | known_x, known_mode, known_ret); | |
4334 | } | |
4335 | break; | |
4336 | ||
4337 | case PLUS: case MINUS: | |
4338 | case MULT: | |
4339 | case DIV: case UDIV: | |
4340 | case MOD: case UMOD: | |
4341 | /* We can apply the rules of arithmetic to compute the number of | |
4342 | high- and low-order zero bits of these operations. We start by | |
4343 | computing the width (position of the highest-order nonzero bit) | |
4344 | and the number of low-order zero bits for each value. */ | |
4345 | { | |
c04fc4f0 EB |
4346 | unsigned HOST_WIDE_INT nz0 |
4347 | = cached_nonzero_bits (XEXP (x, 0), mode, | |
4348 | known_x, known_mode, known_ret); | |
4349 | unsigned HOST_WIDE_INT nz1 | |
4350 | = cached_nonzero_bits (XEXP (x, 1), mode, | |
4351 | known_x, known_mode, known_ret); | |
5511bc5a | 4352 | int sign_index = GET_MODE_PRECISION (GET_MODE (x)) - 1; |
2f93eea8 PB |
4353 | int width0 = floor_log2 (nz0) + 1; |
4354 | int width1 = floor_log2 (nz1) + 1; | |
4355 | int low0 = floor_log2 (nz0 & -nz0); | |
4356 | int low1 = floor_log2 (nz1 & -nz1); | |
c04fc4f0 EB |
4357 | unsigned HOST_WIDE_INT op0_maybe_minusp |
4358 | = nz0 & ((unsigned HOST_WIDE_INT) 1 << sign_index); | |
4359 | unsigned HOST_WIDE_INT op1_maybe_minusp | |
4360 | = nz1 & ((unsigned HOST_WIDE_INT) 1 << sign_index); | |
2f93eea8 PB |
4361 | unsigned int result_width = mode_width; |
4362 | int result_low = 0; | |
4363 | ||
4364 | switch (code) | |
4365 | { | |
4366 | case PLUS: | |
4367 | result_width = MAX (width0, width1) + 1; | |
4368 | result_low = MIN (low0, low1); | |
4369 | break; | |
4370 | case MINUS: | |
4371 | result_low = MIN (low0, low1); | |
4372 | break; | |
4373 | case MULT: | |
4374 | result_width = width0 + width1; | |
4375 | result_low = low0 + low1; | |
4376 | break; | |
4377 | case DIV: | |
4378 | if (width1 == 0) | |
4379 | break; | |
c04fc4f0 | 4380 | if (!op0_maybe_minusp && !op1_maybe_minusp) |
2f93eea8 PB |
4381 | result_width = width0; |
4382 | break; | |
4383 | case UDIV: | |
4384 | if (width1 == 0) | |
4385 | break; | |
4386 | result_width = width0; | |
4387 | break; | |
4388 | case MOD: | |
4389 | if (width1 == 0) | |
4390 | break; | |
c04fc4f0 | 4391 | if (!op0_maybe_minusp && !op1_maybe_minusp) |
2f93eea8 PB |
4392 | result_width = MIN (width0, width1); |
4393 | result_low = MIN (low0, low1); | |
4394 | break; | |
4395 | case UMOD: | |
4396 | if (width1 == 0) | |
4397 | break; | |
4398 | result_width = MIN (width0, width1); | |
4399 | result_low = MIN (low0, low1); | |
4400 | break; | |
4401 | default: | |
41374e13 | 4402 | gcc_unreachable (); |
2f93eea8 PB |
4403 | } |
4404 | ||
4405 | if (result_width < mode_width) | |
c04fc4f0 | 4406 | nonzero &= ((unsigned HOST_WIDE_INT) 1 << result_width) - 1; |
2f93eea8 PB |
4407 | |
4408 | if (result_low > 0) | |
c04fc4f0 | 4409 | nonzero &= ~(((unsigned HOST_WIDE_INT) 1 << result_low) - 1); |
2f93eea8 PB |
4410 | } |
4411 | break; | |
4412 | ||
4413 | case ZERO_EXTRACT: | |
481683e1 | 4414 | if (CONST_INT_P (XEXP (x, 1)) |
2f93eea8 | 4415 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) |
c04fc4f0 | 4416 | nonzero &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1; |
2f93eea8 PB |
4417 | break; |
4418 | ||
4419 | case SUBREG: | |
4420 | /* If this is a SUBREG formed for a promoted variable that has | |
4421 | been zero-extended, we know that at least the high-order bits | |
4422 | are zero, though others might be too. */ | |
4423 | ||
362d42dc | 4424 | if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x)) |
2f93eea8 PB |
4425 | nonzero = GET_MODE_MASK (GET_MODE (x)) |
4426 | & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x), | |
4427 | known_x, known_mode, known_ret); | |
4428 | ||
2d0c270f | 4429 | inner_mode = GET_MODE (SUBREG_REG (x)); |
2f93eea8 PB |
4430 | /* If the inner mode is a single word for both the host and target |
4431 | machines, we can compute this from which bits of the inner | |
4432 | object might be nonzero. */ | |
5511bc5a BS |
4433 | if (GET_MODE_PRECISION (inner_mode) <= BITS_PER_WORD |
4434 | && (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT)) | |
2f93eea8 PB |
4435 | { |
4436 | nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode, | |
4437 | known_x, known_mode, known_ret); | |
4438 | ||
4439 | #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP) | |
4440 | /* If this is a typical RISC machine, we only have to worry | |
4441 | about the way loads are extended. */ | |
2d0c270f BS |
4442 | if ((LOAD_EXTEND_OP (inner_mode) == SIGN_EXTEND |
4443 | ? val_signbit_known_set_p (inner_mode, nonzero) | |
4444 | : LOAD_EXTEND_OP (inner_mode) != ZERO_EXTEND) | |
3c0cb5de | 4445 | || !MEM_P (SUBREG_REG (x))) |
2f93eea8 PB |
4446 | #endif |
4447 | { | |
4448 | /* On many CISC machines, accessing an object in a wider mode | |
4449 | causes the high-order bits to become undefined. So they are | |
4450 | not known to be zero. */ | |
5511bc5a BS |
4451 | if (GET_MODE_PRECISION (GET_MODE (x)) |
4452 | > GET_MODE_PRECISION (inner_mode)) | |
2f93eea8 | 4453 | nonzero |= (GET_MODE_MASK (GET_MODE (x)) |
2d0c270f | 4454 | & ~GET_MODE_MASK (inner_mode)); |
2f93eea8 PB |
4455 | } |
4456 | } | |
4457 | break; | |
4458 | ||
4459 | case ASHIFTRT: | |
4460 | case LSHIFTRT: | |
4461 | case ASHIFT: | |
4462 | case ROTATE: | |
4463 | /* The nonzero bits are in two classes: any bits within MODE | |
4464 | that aren't in GET_MODE (x) are always significant. The rest of the | |
4465 | nonzero bits are those that are significant in the operand of | |
4466 | the shift when shifted the appropriate number of bits. This | |
4467 | shows that high-order bits are cleared by the right shift and | |
4468 | low-order bits by left shifts. */ | |
481683e1 | 4469 | if (CONST_INT_P (XEXP (x, 1)) |
2f93eea8 | 4470 | && INTVAL (XEXP (x, 1)) >= 0 |
39b2ac74 | 4471 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT |
5511bc5a | 4472 | && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x))) |
2f93eea8 PB |
4473 | { |
4474 | enum machine_mode inner_mode = GET_MODE (x); | |
5511bc5a | 4475 | unsigned int width = GET_MODE_PRECISION (inner_mode); |
2f93eea8 PB |
4476 | int count = INTVAL (XEXP (x, 1)); |
4477 | unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode); | |
c04fc4f0 EB |
4478 | unsigned HOST_WIDE_INT op_nonzero |
4479 | = cached_nonzero_bits (XEXP (x, 0), mode, | |
4480 | known_x, known_mode, known_ret); | |
2f93eea8 PB |
4481 | unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask; |
4482 | unsigned HOST_WIDE_INT outer = 0; | |
4483 | ||
4484 | if (mode_width > width) | |
4485 | outer = (op_nonzero & nonzero & ~mode_mask); | |
4486 | ||
4487 | if (code == LSHIFTRT) | |
4488 | inner >>= count; | |
4489 | else if (code == ASHIFTRT) | |
4490 | { | |
4491 | inner >>= count; | |
4492 | ||
4493 | /* If the sign bit may have been nonzero before the shift, we | |
4494 | need to mark all the places it could have been copied to | |
4495 | by the shift as possibly nonzero. */ | |
c04fc4f0 EB |
4496 | if (inner & ((unsigned HOST_WIDE_INT) 1 << (width - 1 - count))) |
4497 | inner |= (((unsigned HOST_WIDE_INT) 1 << count) - 1) | |
4498 | << (width - count); | |
2f93eea8 PB |
4499 | } |
4500 | else if (code == ASHIFT) | |
4501 | inner <<= count; | |
4502 | else | |
4503 | inner = ((inner << (count % width) | |
4504 | | (inner >> (width - (count % width)))) & mode_mask); | |
4505 | ||
4506 | nonzero &= (outer | inner); | |
4507 | } | |
4508 | break; | |
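	  /* Worked example (editor's illustration, assuming 32-bit SImode):
	     for (lshiftrt:SI X (const_int 8)) with nothing known about X,
	     INNER starts as 0xffffffff; "inner >>= count" leaves 0x00ffffff,
	     so the top eight bits of the result are known to be zero.  */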
4509 | ||
4510 | case FFS: | |
4511 | case POPCOUNT: | |
4512 | /* This is at most the number of bits in the mode. */ | |
c04fc4f0 | 4513 | nonzero = ((unsigned HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1; |
2f93eea8 PB |
4514 | break; |
4515 | ||
4516 | case CLZ: | |
4517 | /* If CLZ has a known value at zero, then the nonzero bits are | |
4518 | that value, plus the number of bits in the mode minus one. */ | |
4519 | if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero)) | |
c04fc4f0 EB |
4520 | nonzero |
4521 | |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1; | |
2f93eea8 PB |
4522 | else |
4523 | nonzero = -1; | |
4524 | break; | |
4525 | ||
4526 | case CTZ: | |
4527 | /* If CTZ has a known value at zero, then the nonzero bits are | |
4528 | that value, plus the number of bits in the mode minus one. */ | |
4529 | if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero)) | |
c04fc4f0 EB |
4530 | nonzero |
4531 | |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1; | |
2f93eea8 PB |
4532 | else |
4533 | nonzero = -1; | |
4534 | break; | |
4535 | ||
8840ae2b JJ |
4536 | case CLRSB: |
4537 | /* This is at most the number of bits in the mode minus 1. */ | |
4538 | nonzero = ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1; | |
4539 | break; | |
4540 | ||
2f93eea8 PB |
4541 | case PARITY: |
4542 | nonzero = 1; | |
4543 | break; | |
4544 | ||
4545 | case IF_THEN_ELSE: | |
4546 | { | |
c04fc4f0 EB |
4547 | unsigned HOST_WIDE_INT nonzero_true |
4548 | = cached_nonzero_bits (XEXP (x, 1), mode, | |
4549 | known_x, known_mode, known_ret); | |
2f93eea8 PB |
4550 | |
4551 | /* Don't call nonzero_bits for the second time if it cannot change | |
4552 | anything. */ | |
4553 | if ((nonzero & nonzero_true) != nonzero) | |
4554 | nonzero &= nonzero_true | |
4555 | | cached_nonzero_bits (XEXP (x, 2), mode, | |
4556 | known_x, known_mode, known_ret); | |
4557 | } | |
4558 | break; | |
4559 | ||
4560 | default: | |
4561 | break; | |
4562 | } | |
4563 | ||
4564 | return nonzero; | |
4565 | } | |
4566 | ||
4567 | /* See the macro definition above. */ | |
4568 | #undef cached_num_sign_bit_copies | |
4569 | ||
4570 | \f | |
4571 | /* The function cached_num_sign_bit_copies is a wrapper around | |
4572 | num_sign_bit_copies1. It avoids exponential behavior in | |
4573 | num_sign_bit_copies1 when X has identical subexpressions on the | |
4574 | first or the second level. */ | |
4575 | ||
4576 | static unsigned int | |
fa233e34 | 4577 | cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x, |
2f93eea8 PB |
4578 | enum machine_mode known_mode, |
4579 | unsigned int known_ret) | |
4580 | { | |
4581 | if (x == known_x && mode == known_mode) | |
4582 | return known_ret; | |
4583 | ||
4584 | /* Try to find identical subexpressions. If found call | |
4585 | num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and | |
4586 | the precomputed value for the subexpression as KNOWN_RET. */ | |
4587 | ||
4588 | if (ARITHMETIC_P (x)) | |
4589 | { | |
4590 | rtx x0 = XEXP (x, 0); | |
4591 | rtx x1 = XEXP (x, 1); | |
4592 | ||
4593 | /* Check the first level. */ | |
4594 | if (x0 == x1) | |
4595 | return | |
4596 | num_sign_bit_copies1 (x, mode, x0, mode, | |
4597 | cached_num_sign_bit_copies (x0, mode, known_x, | |
4598 | known_mode, | |
4599 | known_ret)); | |
4600 | ||
4601 | /* Check the second level. */ | |
4602 | if (ARITHMETIC_P (x0) | |
4603 | && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1))) | |
4604 | return | |
4605 | num_sign_bit_copies1 (x, mode, x1, mode, | |
4606 | cached_num_sign_bit_copies (x1, mode, known_x, | |
4607 | known_mode, | |
4608 | known_ret)); | |
4609 | ||
4610 | if (ARITHMETIC_P (x1) | |
4611 | && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1))) | |
4612 | return | |
4613 | num_sign_bit_copies1 (x, mode, x0, mode, | |
4614 | cached_num_sign_bit_copies (x0, mode, known_x, | |
4615 | known_mode, | |
4616 | known_ret)); | |
4617 | } | |
4618 | ||
4619 | return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret); | |
4620 | } | |
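/* Example (editor's note): for X = (plus:SI R R) the two operands are the
   same rtx, so the first-level check above computes the value for R once
   and hands it to num_sign_bit_copies1 as KNOWN_X/KNOWN_RET instead of
   recursing into R twice.  */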
4621 | ||
4622 | /* Return the number of bits at the high-order end of X that are known to | |
4623 | be equal to the sign bit. X will be used in mode MODE; if MODE is | |
4624 | VOIDmode, X will be used in its own mode. The returned value will always | |
4625 | be between 1 and the number of bits in MODE. */ | |
4626 | ||
4627 | static unsigned int | |
fa233e34 | 4628 | num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x, |
2f93eea8 PB |
4629 | enum machine_mode known_mode, |
4630 | unsigned int known_ret) | |
4631 | { | |
4632 | enum rtx_code code = GET_CODE (x); | |
5511bc5a | 4633 | unsigned int bitwidth = GET_MODE_PRECISION (mode); |
2f93eea8 PB |
4634 | int num0, num1, result; |
4635 | unsigned HOST_WIDE_INT nonzero; | |
4636 | ||
4637 | /* If we weren't given a mode, use the mode of X. If the mode is still | |
4638 | VOIDmode, we don't know anything. Likewise if one of the modes is | |
4639 | floating-point. */ | |
4640 | ||
4641 | if (mode == VOIDmode) | |
4642 | mode = GET_MODE (x); | |
4643 | ||
ff596cd2 RL |
4644 | if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)) |
4645 | || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode)) | |
2f93eea8 PB |
4646 | return 1; |
4647 | ||
4648 | /* For a smaller object, just ignore the high bits. */ | |
5511bc5a | 4649 | if (bitwidth < GET_MODE_PRECISION (GET_MODE (x))) |
2f93eea8 PB |
4650 | { |
4651 | num0 = cached_num_sign_bit_copies (x, GET_MODE (x), | |
4652 | known_x, known_mode, known_ret); | |
4653 | return MAX (1, | |
5511bc5a | 4654 | num0 - (int) (GET_MODE_PRECISION (GET_MODE (x)) - bitwidth)); |
2f93eea8 PB |
4655 | } |
4656 | ||
5511bc5a | 4657 | if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_PRECISION (GET_MODE (x))) |
2f93eea8 PB |
4658 | { |
4659 | #ifndef WORD_REGISTER_OPERATIONS | |
5511bc5a BS |
4660 | /* If this machine does not do all register operations on the entire |
4661 | register and MODE is wider than the mode of X, we can say nothing | |
4662 | at all about the high-order bits. */ | |
2f93eea8 PB |
4663 | return 1; |
4664 | #else | |
4665 | /* Likewise on machines that do, if the mode of the object is smaller | |
4666 | than a word and loads of that size don't sign extend, we can say | |
4667 | nothing about the high order bits. */ | |
5511bc5a | 4668 | if (GET_MODE_PRECISION (GET_MODE (x)) < BITS_PER_WORD |
2f93eea8 PB |
4669 | #ifdef LOAD_EXTEND_OP |
4670 | && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND | |
4671 | #endif | |
4672 | ) | |
4673 | return 1; | |
4674 | #endif | |
4675 | } | |
4676 | ||
4677 | switch (code) | |
4678 | { | |
4679 | case REG: | |
4680 | ||
4681 | #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) | |
4682 | /* If pointers extend signed and this is a pointer in Pmode, say that | |
4683 | all the bits above ptr_mode are known to be sign bit copies. */ | |
5932a4d4 | 4684 | /* As we do not know which address space the pointer is referring to, |
d4ebfa65 BE |
4685 | we can do this only if the target does not support different pointer |
4686 | or address modes depending on the address space. */ | |
4687 | if (target_default_pointer_address_modes_p () | |
4688 | && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode | |
4689 | && mode == Pmode && REG_POINTER (x)) | |
5511bc5a | 4690 | return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1; |
2f93eea8 PB |
4691 | #endif |
4692 | ||
4693 | { | |
4694 | unsigned int copies_for_hook = 1, copies = 1; | |
55d796da | 4695 | rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x, |
2f93eea8 PB |
4696 | known_mode, known_ret, |
4697 | &copies_for_hook); | |
4698 | ||
55d796da KG |
4699 | if (new_rtx) |
4700 | copies = cached_num_sign_bit_copies (new_rtx, mode, known_x, | |
2f93eea8 PB |
4701 | known_mode, known_ret); |
4702 | ||
4703 | if (copies > 1 || copies_for_hook > 1) | |
4704 | return MAX (copies, copies_for_hook); | |
4705 | ||
4706 | /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */ | |
4707 | } | |
4708 | break; | |
4709 | ||
4710 | case MEM: | |
4711 | #ifdef LOAD_EXTEND_OP | |
4712 | /* Some RISC machines sign-extend all loads of smaller than a word. */ | |
4713 | if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND) | |
4714 | return MAX (1, ((int) bitwidth | |
5511bc5a | 4715 | - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1)); |
2f93eea8 PB |
4716 | #endif |
4717 | break; | |
4718 | ||
4719 | case CONST_INT: | |
4720 | /* If the constant is negative, take its 1's complement and remask. | |
4721 | Then see how many zero bits we have. */ | |
c04fc4f0 | 4722 | nonzero = UINTVAL (x) & GET_MODE_MASK (mode); |
2f93eea8 | 4723 | if (bitwidth <= HOST_BITS_PER_WIDE_INT |
c04fc4f0 | 4724 | && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
2f93eea8 PB |
4725 | nonzero = (~nonzero) & GET_MODE_MASK (mode); |
4726 | ||
4727 | return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1); | |
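      /* Worked example (editor's illustration, 32-bit SImode): for
	 (const_int -4), NONZERO starts as 0xfffffffc; the sign bit is set,
	 so it is complemented to 0x3 and the result is
	 32 - floor_log2 (3) - 1 = 30 sign-bit copies (bits 31..2 are all
	 ones).  */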
4728 | ||
4729 | case SUBREG: | |
4730 | /* If this is a SUBREG for a promoted object that is sign-extended | |
4731 | and we are looking at it in a wider mode, we know that at least the | |
4732 | high-order bits are known to be sign bit copies. */ | |
4733 | ||
362d42dc | 4734 | if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_SIGNED_P (x)) |
2f93eea8 PB |
4735 | { |
4736 | num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode, | |
4737 | known_x, known_mode, known_ret); | |
4738 | return MAX ((int) bitwidth | |
5511bc5a | 4739 | - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1, |
2f93eea8 PB |
4740 | num0); |
4741 | } | |
4742 | ||
4743 | /* For a smaller object, just ignore the high bits. */ | |
5511bc5a | 4744 | if (bitwidth <= GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x)))) |
2f93eea8 PB |
4745 | { |
4746 | num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode, | |
4747 | known_x, known_mode, known_ret); | |
4748 | return MAX (1, (num0 | |
5511bc5a | 4749 | - (int) (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))) |
2f93eea8 PB |
4750 | - bitwidth))); |
4751 | } | |
4752 | ||
4753 | #ifdef WORD_REGISTER_OPERATIONS | |
4754 | #ifdef LOAD_EXTEND_OP | |
4755 | /* For paradoxical SUBREGs on machines where all register operations | |
4756 | affect the entire register, just look inside. Note that we are | |
4757 | passing MODE to the recursive call, so the number of sign bit copies | |
4758 | will remain relative to that mode, not the inner mode. */ | |
4759 | ||
4760 | /* This works only if loads sign extend. Otherwise, if we get a | |
4761 | reload for the inner part, it may be loaded from the stack, and | |
4762 | then we lose all sign bit copies that existed before the store | |
4763 | to the stack. */ | |
4764 | ||
6a4bdc79 | 4765 | if (paradoxical_subreg_p (x) |
2f93eea8 | 4766 | && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND |
3c0cb5de | 4767 | && MEM_P (SUBREG_REG (x))) |
2f93eea8 PB |
4768 | return cached_num_sign_bit_copies (SUBREG_REG (x), mode, |
4769 | known_x, known_mode, known_ret); | |
4770 | #endif | |
4771 | #endif | |
4772 | break; | |
4773 | ||
4774 | case SIGN_EXTRACT: | |
481683e1 | 4775 | if (CONST_INT_P (XEXP (x, 1))) |
2f93eea8 PB |
4776 | return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1))); |
4777 | break; | |
4778 | ||
4779 | case SIGN_EXTEND: | |
5511bc5a | 4780 | return (bitwidth - GET_MODE_PRECISION (GET_MODE (XEXP (x, 0))) |
2f93eea8 PB |
4781 | + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode, |
4782 | known_x, known_mode, known_ret)); | |
4783 | ||
4784 | case TRUNCATE: | |
4785 | /* For a smaller object, just ignore the high bits. */ | |
4786 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode, | |
4787 | known_x, known_mode, known_ret); | |
5511bc5a | 4788 | return MAX (1, (num0 - (int) (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0))) |
2f93eea8 PB |
4789 | - bitwidth))); |
4790 | ||
4791 | case NOT: | |
4792 | return cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4793 | known_x, known_mode, known_ret); | |
4794 | ||
4795 | case ROTATE: case ROTATERT: | |
4796 | /* If we are rotating left by a number of bits less than the number | |
4797 | of sign bit copies, we can just subtract that amount from the | |
4798 | number. */ | |
481683e1 | 4799 | if (CONST_INT_P (XEXP (x, 1)) |
2f93eea8 PB |
4800 | && INTVAL (XEXP (x, 1)) >= 0 |
4801 | && INTVAL (XEXP (x, 1)) < (int) bitwidth) | |
4802 | { | |
4803 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4804 | known_x, known_mode, known_ret); | |
4805 | return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1)) | |
4806 | : (int) bitwidth - INTVAL (XEXP (x, 1)))); | |
4807 | } | |
4808 | break; | |
4809 | ||
4810 | case NEG: | |
4811 | /* In general, this subtracts one sign bit copy. But if the value | |
4812 | is known to be positive, the number of sign bit copies is the | |
4813 | same as that of the input. Finally, if the input has just one bit | |
4814 | that might be nonzero, all the bits are copies of the sign bit. */ | |
4815 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4816 | known_x, known_mode, known_ret); | |
4817 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
4818 | return num0 > 1 ? num0 - 1 : 1; | |
4819 | ||
4820 | nonzero = nonzero_bits (XEXP (x, 0), mode); | |
4821 | if (nonzero == 1) | |
4822 | return bitwidth; | |
4823 | ||
4824 | if (num0 > 1 | |
c04fc4f0 | 4825 | && (((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero)) |
2f93eea8 PB |
4826 | num0--; |
4827 | ||
4828 | return num0; | |
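      /* Example (editor's illustration): if nonzero_bits shows that the
	 operand is 0 or 1 (NONZERO == 1), then (neg X) is 0 or -1 and
	 every bit is a copy of the sign bit, hence the early return of
	 BITWIDTH above.  */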
4829 | ||
4830 | case IOR: case AND: case XOR: | |
4831 | case SMIN: case SMAX: case UMIN: case UMAX: | |
4832 | /* Logical operations will preserve the number of sign-bit copies. | |
4833 | MIN and MAX operations always return one of the operands. */ | |
4834 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4835 | known_x, known_mode, known_ret); | |
4836 | num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4837 | known_x, known_mode, known_ret); | |
22761ec3 AN |
4838 | |
4839 | /* If the second operand is a constant whose top NUM1 bits are zero, | 
4840 | then regardless of the other term, the result is guaranteed to | 
4841 | have at least NUM1 high-order zero bits. */ | 
4842 | if (code == AND | |
4843 | && num1 > 1 | |
4844 | && bitwidth <= HOST_BITS_PER_WIDE_INT | |
481683e1 | 4845 | && CONST_INT_P (XEXP (x, 1)) |
c04fc4f0 EB |
4846 | && (UINTVAL (XEXP (x, 1)) |
4847 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) == 0) | |
22761ec3 AN |
4848 | return num1; |
4849 | ||
4850 | /* Similarly for IOR when setting high-order bits. */ | |
4851 | if (code == IOR | |
4852 | && num1 > 1 | |
4853 | && bitwidth <= HOST_BITS_PER_WIDE_INT | |
481683e1 | 4854 | && CONST_INT_P (XEXP (x, 1)) |
c04fc4f0 EB |
4855 | && (UINTVAL (XEXP (x, 1)) |
4856 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) | |
22761ec3 AN |
4857 | return num1; |
4858 | ||
2f93eea8 PB |
4859 | return MIN (num0, num1); |
4860 | ||
4861 | case PLUS: case MINUS: | |
4862 | /* For addition and subtraction, we can have a 1-bit carry. However, | |
4863 | if we are subtracting 1 from a positive number, there will not | |
4864 | be such a carry. Furthermore, if the positive number is known to | |
4865 | be 0 or 1, we know the result is either -1 or 0. */ | |
4866 | ||
4867 | if (code == PLUS && XEXP (x, 1) == constm1_rtx | |
4868 | && bitwidth <= HOST_BITS_PER_WIDE_INT) | |
4869 | { | |
4870 | nonzero = nonzero_bits (XEXP (x, 0), mode); | |
c04fc4f0 | 4871 | if ((((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0) |
2f93eea8 PB |
4872 | return (nonzero == 1 || nonzero == 0 ? bitwidth |
4873 | : bitwidth - floor_log2 (nonzero) - 1); | |
4874 | } | |
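      /* Example (editor's illustration, 32-bit SImode): for
	 (plus:SI X (const_int -1)) with nonzero_bits (X) == 1, X is 0 or 1,
	 so X - 1 is -1 or 0 and all 32 bits are copies of the sign bit.  */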
4875 | ||
4876 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4877 | known_x, known_mode, known_ret); | |
4878 | num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4879 | known_x, known_mode, known_ret); | |
4880 | result = MAX (1, MIN (num0, num1) - 1); | |
4881 | ||
2f93eea8 PB |
4882 | return result; |
4883 | ||
4884 | case MULT: | |
4885 | /* The number of bits of the product is the sum of the number of | |
4886 | bits of both terms. However, unless one of the terms is known | 
4887 | to be positive, we must allow for an additional bit since negating | |
4888 | a negative number can remove one sign bit copy. */ | |
4889 | ||
4890 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4891 | known_x, known_mode, known_ret); | |
4892 | num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4893 | known_x, known_mode, known_ret); | |
4894 | ||
4895 | result = bitwidth - (bitwidth - num0) - (bitwidth - num1); | |
4896 | if (result > 0 | |
4897 | && (bitwidth > HOST_BITS_PER_WIDE_INT | |
4898 | || (((nonzero_bits (XEXP (x, 0), mode) | |
c04fc4f0 | 4899 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
2f93eea8 | 4900 | && ((nonzero_bits (XEXP (x, 1), mode) |
c04fc4f0 EB |
4901 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) |
4902 | != 0)))) | |
2f93eea8 PB |
4903 | result--; |
4904 | ||
4905 | return MAX (1, result); | |
4906 | ||
4907 | case UDIV: | |
4908 | /* The result must be <= the first operand. If the first operand | |
4909 | has the high bit set, we know nothing about the number of sign | |
4910 | bit copies. */ | |
4911 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
4912 | return 1; | |
4913 | else if ((nonzero_bits (XEXP (x, 0), mode) | |
c04fc4f0 | 4914 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
2f93eea8 PB |
4915 | return 1; |
4916 | else | |
4917 | return cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4918 | known_x, known_mode, known_ret); | |
4919 | ||
4920 | case UMOD: | |
24d179b4 JJ |
4921 | /* The result must be <= the second operand. If the second operand |
4922 | has (or just might have) the high bit set, we know nothing about | |
4923 | the number of sign bit copies. */ | |
4924 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
4925 | return 1; | |
4926 | else if ((nonzero_bits (XEXP (x, 1), mode) | |
c04fc4f0 | 4927 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
24d179b4 JJ |
4928 | return 1; |
4929 | else | |
4930 | return cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
2f93eea8 PB |
4931 | known_x, known_mode, known_ret); |
4932 | ||
4933 | case DIV: | |
4934 | /* Similar to unsigned division, except that we have to worry about | |
4935 | the case where the divisor is negative, in which case we have | |
4936 | to add 1. */ | |
4937 | result = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4938 | known_x, known_mode, known_ret); | |
4939 | if (result > 1 | |
4940 | && (bitwidth > HOST_BITS_PER_WIDE_INT | |
4941 | || (nonzero_bits (XEXP (x, 1), mode) | |
c04fc4f0 | 4942 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) |
2f93eea8 PB |
4943 | result--; |
4944 | ||
4945 | return result; | |
4946 | ||
4947 | case MOD: | |
4948 | result = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4949 | known_x, known_mode, known_ret); | |
4950 | if (result > 1 | |
4951 | && (bitwidth > HOST_BITS_PER_WIDE_INT | |
4952 | || (nonzero_bits (XEXP (x, 1), mode) | |
c04fc4f0 | 4953 | & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) |
2f93eea8 PB |
4954 | result--; |
4955 | ||
4956 | return result; | |
4957 | ||
4958 | case ASHIFTRT: | |
4959 | /* Shifts by a constant add to the number of bits equal to the | |
4960 | sign bit. */ | |
4961 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4962 | known_x, known_mode, known_ret); | |
481683e1 | 4963 | if (CONST_INT_P (XEXP (x, 1)) |
39b2ac74 | 4964 | && INTVAL (XEXP (x, 1)) > 0 |
5511bc5a | 4965 | && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x))) |
2f93eea8 PB |
4966 | num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1))); |
4967 | ||
4968 | return num0; | |
4969 | ||
4970 | case ASHIFT: | |
4971 | /* Left shifts destroy copies. */ | |
481683e1 | 4972 | if (!CONST_INT_P (XEXP (x, 1)) |
2f93eea8 | 4973 | || INTVAL (XEXP (x, 1)) < 0 |
39b2ac74 | 4974 | || INTVAL (XEXP (x, 1)) >= (int) bitwidth |
5511bc5a | 4975 | || INTVAL (XEXP (x, 1)) >= GET_MODE_PRECISION (GET_MODE (x))) |
2f93eea8 PB |
4976 | return 1; |
4977 | ||
4978 | num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode, | |
4979 | known_x, known_mode, known_ret); | |
4980 | return MAX (1, num0 - INTVAL (XEXP (x, 1))); | |
4981 | ||
4982 | case IF_THEN_ELSE: | |
4983 | num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode, | |
4984 | known_x, known_mode, known_ret); | |
4985 | num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode, | |
4986 | known_x, known_mode, known_ret); | |
4987 | return MIN (num0, num1); | |
4988 | ||
4989 | case EQ: case NE: case GE: case GT: case LE: case LT: | |
4990 | case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT: | |
4991 | case GEU: case GTU: case LEU: case LTU: | |
4992 | case UNORDERED: case ORDERED: | |
4993 | /* If STORE_FLAG_VALUE is negative, take its 1's complement and remask. | 
4994 | Then see how many zero bits we have. */ | |
4995 | nonzero = STORE_FLAG_VALUE; | |
4996 | if (bitwidth <= HOST_BITS_PER_WIDE_INT | |
c04fc4f0 | 4997 | && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
2f93eea8 PB |
4998 | nonzero = (~nonzero) & GET_MODE_MASK (mode); |
4999 | ||
5000 | return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1); | |
5001 | ||
5002 | default: | |
5003 | break; | |
5004 | } | |
5005 | ||
5006 | /* If we haven't been able to figure it out by one of the above rules, | |
5007 | see if some of the high-order bits are known to be zero. If so, | |
5008 | count those bits and return one less than that amount. If we can't | |
5009 | safely compute the mask for this mode, always return BITWIDTH. */ | |
5010 | ||
5511bc5a | 5011 | bitwidth = GET_MODE_PRECISION (mode); |
2f93eea8 PB |
5012 | if (bitwidth > HOST_BITS_PER_WIDE_INT) |
5013 | return 1; | |
5014 | ||
5015 | nonzero = nonzero_bits (x, mode); | |
c04fc4f0 | 5016 | return nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) |
2f93eea8 PB |
5017 | ? 1 : bitwidth - floor_log2 (nonzero) - 1; |
5018 | } | |
6fd21094 RS |
5019 | |
5020 | /* Calculate the rtx_cost of a single instruction. A return value of | |
5021 | zero indicates an instruction pattern without a known cost. */ | |
5022 | ||
5023 | int | |
f40751dd | 5024 | insn_rtx_cost (rtx pat, bool speed) |
6fd21094 RS |
5025 | { |
5026 | int i, cost; | |
5027 | rtx set; | |
5028 | ||
5029 | /* Extract the single set rtx from the instruction pattern. | |
5030 | We can't use single_set since we only have the pattern. */ | |
5031 | if (GET_CODE (pat) == SET) | |
5032 | set = pat; | |
5033 | else if (GET_CODE (pat) == PARALLEL) | |
5034 | { | |
5035 | set = NULL_RTX; | |
5036 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
5037 | { | |
5038 | rtx x = XVECEXP (pat, 0, i); | |
5039 | if (GET_CODE (x) == SET) | |
5040 | { | |
5041 | if (set) | |
5042 | return 0; | |
5043 | set = x; | |
5044 | } | |
5045 | } | |
5046 | if (!set) | |
5047 | return 0; | |
5048 | } | |
5049 | else | |
5050 | return 0; | |
5051 | ||
5e8f01f4 | 5052 | cost = set_src_cost (SET_SRC (set), speed); |
6fd21094 RS |
5053 | return cost > 0 ? cost : COSTS_N_INSNS (1); |
5054 | } | |
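/* Editor's sketch of a typical use (hypothetical caller, not part of this
   file): a speculation heuristic might bound the cost of an insn like so:

     int cost = insn_rtx_cost (PATTERN (insn), speed_p);
     if (cost == 0 || cost > max_cost)
       return false;

   A zero return must be treated as "unknown", not as "free".  */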
75473b02 | 5055 | |
11204b2d ZC |
5056 | /* Return an estimate of the cost of computing SEQ. */ | 
5057 | ||
5058 | unsigned | |
5059 | seq_cost (const rtx_insn *seq, bool speed) | |
5060 | { | |
5061 | unsigned cost = 0; | |
5062 | rtx set; | |
5063 | ||
5064 | for (; seq; seq = NEXT_INSN (seq)) | |
5065 | { | |
5066 | set = single_set (seq); | |
5067 | if (set) | |
5068 | cost += set_rtx_cost (set, speed); | |
5069 | else | |
5070 | cost++; | |
5071 | } | |
5072 | ||
5073 | return cost; | |
5074 | } | |
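/* Editor's sketch of a typical use (hypothetical helper, not part of this
   file): generate a candidate sequence and keep it only if it is cheap:

     start_sequence ();
     emit_move_insn (target, src);
     rtx_insn *seq = get_insns ();
     end_sequence ();
     if (seq_cost (seq, speed) <= (unsigned) COSTS_N_INSNS (2))
       emit_insn (seq);
*/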
5075 | ||
75473b02 SB |
5076 | /* Given an insn INSN and condition COND, return the condition in a |
5077 | canonical form to simplify testing by callers. Specifically: | |
5078 | ||
5079 | (1) The code will always be a comparison operation (EQ, NE, GT, etc.). | |
5080 | (2) Both operands will be machine operands; (cc0) will have been replaced. | |
5081 | (3) If an operand is a constant, it will be the second operand. | |
5082 | (4) (LE x const) will be replaced with (LT x <const+1>) and similarly | |
5083 | for GE, GEU, and LEU. | |
5084 | ||
5085 | If the condition cannot be understood, or is an inequality floating-point | |
5086 | comparison which needs to be reversed, 0 will be returned. | |
5087 | ||
5088 | If REVERSE is nonzero, then reverse the condition prior to canonicalizing it. | 
5089 | ||
5090 | If EARLIEST is nonzero, it is a pointer to a place where the earliest | |
5091 | insn used in locating the condition was found. If a replacement test | |
5092 | of the condition is desired, it should be placed in front of that | |
5093 | insn and we will be sure that the inputs are still valid. | |
5094 | ||
5095 | If WANT_REG is nonzero, we wish the condition to be relative to that | |
5096 | register, if possible. Therefore, do not canonicalize the condition | |
b8698a0f | 5097 | further. If ALLOW_CC_MODE is nonzero, allow the condition returned |
75473b02 SB |
5098 | to be a compare to a CC mode register. |
5099 | ||
5100 | If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST | |
5101 | and at INSN. */ | |
5102 | ||
5103 | rtx | |
61aa0978 DM |
5104 | canonicalize_condition (rtx_insn *insn, rtx cond, int reverse, |
5105 | rtx_insn **earliest, | |
75473b02 SB |
5106 | rtx want_reg, int allow_cc_mode, int valid_at_insn_p) |
5107 | { | |
5108 | enum rtx_code code; | |
61aa0978 | 5109 | rtx_insn *prev = insn; |
f7d504c2 | 5110 | const_rtx set; |
75473b02 SB |
5111 | rtx tem; |
5112 | rtx op0, op1; | |
5113 | int reverse_code = 0; | |
5114 | enum machine_mode mode; | |
569f8d98 | 5115 | basic_block bb = BLOCK_FOR_INSN (insn); |
75473b02 SB |
5116 | |
5117 | code = GET_CODE (cond); | |
5118 | mode = GET_MODE (cond); | |
5119 | op0 = XEXP (cond, 0); | |
5120 | op1 = XEXP (cond, 1); | |
5121 | ||
5122 | if (reverse) | |
5123 | code = reversed_comparison_code (cond, insn); | |
5124 | if (code == UNKNOWN) | |
5125 | return 0; | |
5126 | ||
5127 | if (earliest) | |
5128 | *earliest = insn; | |
5129 | ||
5130 | /* If we are comparing a register with zero, see if the register is set | |
5131 | in the previous insn to a COMPARE or a comparison operation. Perform | |
5132 | the same tests as a function of STORE_FLAG_VALUE as find_comparison_args | |
5133 | in cse.c */ | |
5134 | ||
5135 | while ((GET_RTX_CLASS (code) == RTX_COMPARE | |
5136 | || GET_RTX_CLASS (code) == RTX_COMM_COMPARE) | |
5137 | && op1 == CONST0_RTX (GET_MODE (op0)) | |
5138 | && op0 != want_reg) | |
5139 | { | |
5140 | /* Set nonzero when we find something of interest. */ | |
5141 | rtx x = 0; | |
5142 | ||
5143 | #ifdef HAVE_cc0 | |
5144 | /* If comparison with cc0, import actual comparison from compare | |
5145 | insn. */ | |
5146 | if (op0 == cc0_rtx) | |
5147 | { | |
5148 | if ((prev = prev_nonnote_insn (prev)) == 0 | |
5149 | || !NONJUMP_INSN_P (prev) | |
5150 | || (set = single_set (prev)) == 0 | |
5151 | || SET_DEST (set) != cc0_rtx) | |
5152 | return 0; | |
5153 | ||
5154 | op0 = SET_SRC (set); | |
5155 | op1 = CONST0_RTX (GET_MODE (op0)); | |
5156 | if (earliest) | |
5157 | *earliest = prev; | |
5158 | } | |
5159 | #endif | |
5160 | ||
5161 | /* If this is a COMPARE, pick up the two things being compared. */ | |
5162 | if (GET_CODE (op0) == COMPARE) | |
5163 | { | |
5164 | op1 = XEXP (op0, 1); | |
5165 | op0 = XEXP (op0, 0); | |
5166 | continue; | |
5167 | } | |
5168 | else if (!REG_P (op0)) | |
5169 | break; | |
5170 | ||
5171 | /* Go back to the previous insn. Stop if it is not an INSN. We also | |
5172 | stop if it isn't a single set or if it has a REG_INC note because | |
5173 | we don't want to bother dealing with it. */ | |
5174 | ||
f0fc0803 | 5175 | prev = prev_nonnote_nondebug_insn (prev); |
b5b8b0ac AO |
5176 | |
5177 | if (prev == 0 | |
75473b02 | 5178 | || !NONJUMP_INSN_P (prev) |
569f8d98 ZD |
5179 | || FIND_REG_INC_NOTE (prev, NULL_RTX) |
5180 | /* In cfglayout mode, there do not have to be labels at the | |
5181 | beginning of a block, or jumps at the end, so the previous | |
5182 | conditions would not stop us when we reach bb boundary. */ | |
5183 | || BLOCK_FOR_INSN (prev) != bb) | |
75473b02 SB |
5184 | break; |
5185 | ||
5186 | set = set_of (op0, prev); | |
5187 | ||
5188 | if (set | |
5189 | && (GET_CODE (set) != SET | |
5190 | || !rtx_equal_p (SET_DEST (set), op0))) | |
5191 | break; | |
5192 | ||
5193 | /* If this is setting OP0, get what it sets it to if it looks | |
5194 | relevant. */ | |
5195 | if (set) | |
5196 | { | |
5197 | enum machine_mode inner_mode = GET_MODE (SET_DEST (set)); | |
5198 | #ifdef FLOAT_STORE_FLAG_VALUE | |
5199 | REAL_VALUE_TYPE fsfv; | |
5200 | #endif | |
5201 | ||
5202 | /* ??? We may not combine comparisons done in a CCmode with | |
5203 | comparisons not done in a CCmode. This is to aid targets | |
5204 | like Alpha that have an IEEE compliant EQ instruction, and | |
5205 | a non-IEEE compliant BEQ instruction. The use of CCmode is | |
5206 | actually artificial, simply to prevent the combination, but | |
5207 | should not affect other platforms. | |
5208 | ||
5209 | However, we must allow VOIDmode comparisons to match either | |
5210 | CCmode or non-CCmode comparison, because some ports have | |
5211 | modeless comparisons inside branch patterns. | |
5212 | ||
5213 | ??? This mode check should perhaps look more like the mode check | |
5214 | in simplify_comparison in combine. */ | |
2c8798a2 RS |
5215 | if (((GET_MODE_CLASS (mode) == MODE_CC) |
5216 | != (GET_MODE_CLASS (inner_mode) == MODE_CC)) | |
5217 | && mode != VOIDmode | |
5218 | && inner_mode != VOIDmode) | |
5219 | break; | |
5220 | if (GET_CODE (SET_SRC (set)) == COMPARE | |
5221 | || (((code == NE | |
5222 | || (code == LT | |
5223 | && val_signbit_known_set_p (inner_mode, | |
5224 | STORE_FLAG_VALUE)) | |
75473b02 | 5225 | #ifdef FLOAT_STORE_FLAG_VALUE |
2c8798a2 RS |
5226 | || (code == LT |
5227 | && SCALAR_FLOAT_MODE_P (inner_mode) | |
5228 | && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode), | |
5229 | REAL_VALUE_NEGATIVE (fsfv))) | |
75473b02 | 5230 | #endif |
2c8798a2 RS |
5231 | )) |
5232 | && COMPARISON_P (SET_SRC (set)))) | |
75473b02 SB |
5233 | x = SET_SRC (set); |
5234 | else if (((code == EQ | |
5235 | || (code == GE | |
2d0c270f BS |
5236 | && val_signbit_known_set_p (inner_mode, |
5237 | STORE_FLAG_VALUE)) | |
75473b02 SB |
5238 | #ifdef FLOAT_STORE_FLAG_VALUE |
5239 | || (code == GE | |
3d8bf70f | 5240 | && SCALAR_FLOAT_MODE_P (inner_mode) |
75473b02 SB |
5241 | && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode), |
5242 | REAL_VALUE_NEGATIVE (fsfv))) | |
5243 | #endif | |
5244 | )) | |
2c8798a2 | 5245 | && COMPARISON_P (SET_SRC (set))) |
75473b02 SB |
5246 | { |
5247 | reverse_code = 1; | |
5248 | x = SET_SRC (set); | |
5249 | } | |
2c8798a2 RS |
5250 | else if ((code == EQ || code == NE) |
5251 | && GET_CODE (SET_SRC (set)) == XOR) | |
5252 | /* Handle sequences like: | |
5253 | ||
5254 | (set op0 (xor X Y)) | |
5255 | ...(eq|ne op0 (const_int 0))... | |
5256 | ||
5257 | in which case: | |
5258 | ||
5259 | (eq op0 (const_int 0)) reduces to (eq X Y) | |
5260 | (ne op0 (const_int 0)) reduces to (ne X Y) | |
5261 | ||
5262 | This is the form used by MIPS16, for example. */ | |
5263 | x = SET_SRC (set); | |
75473b02 SB |
5264 | else |
5265 | break; | |
5266 | } | |
5267 | ||
5268 | else if (reg_set_p (op0, prev)) | |
5269 | /* If this sets OP0, but not directly, we have to give up. */ | |
5270 | break; | |
5271 | ||
5272 | if (x) | |
5273 | { | |
5274 | /* If the caller is expecting the condition to be valid at INSN, | |
5275 | make sure X doesn't change before INSN. */ | |
5276 | if (valid_at_insn_p) | |
5277 | if (modified_in_p (x, prev) || modified_between_p (x, prev, insn)) | |
5278 | break; | |
5279 | if (COMPARISON_P (x)) | |
5280 | code = GET_CODE (x); | |
5281 | if (reverse_code) | |
5282 | { | |
5283 | code = reversed_comparison_code (x, prev); | |
5284 | if (code == UNKNOWN) | |
5285 | return 0; | |
5286 | reverse_code = 0; | |
5287 | } | |
5288 | ||
5289 | op0 = XEXP (x, 0), op1 = XEXP (x, 1); | |
5290 | if (earliest) | |
5291 | *earliest = prev; | |
5292 | } | |
5293 | } | |
5294 | ||
5295 | /* If constant is first, put it last. */ | |
5296 | if (CONSTANT_P (op0)) | |
5297 | code = swap_condition (code), tem = op0, op0 = op1, op1 = tem; | |
5298 | ||
5299 | /* If OP0 is the result of a comparison, we weren't able to find what | |
5300 | was really being compared, so fail. */ | |
5301 | if (!allow_cc_mode | |
5302 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC) | |
5303 | return 0; | |
5304 | ||
5305 | /* Canonicalize any ordered comparison with integers involving equality | |
5306 | if we can do computations in the relevant mode and we do not | |
5307 | overflow. */ | |
5308 | ||
5309 | if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC | |
481683e1 | 5310 | && CONST_INT_P (op1) |
75473b02 | 5311 | && GET_MODE (op0) != VOIDmode |
5511bc5a | 5312 | && GET_MODE_PRECISION (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT) |
75473b02 SB |
5313 | { |
5314 | HOST_WIDE_INT const_val = INTVAL (op1); | |
5315 | unsigned HOST_WIDE_INT uconst_val = const_val; | |
5316 | unsigned HOST_WIDE_INT max_val | |
5317 | = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0)); | |
5318 | ||
5319 | switch (code) | |
5320 | { | |
5321 | case LE: | |
5322 | if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1) | |
5323 | code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0)); | |
5324 | break; | |
5325 | ||
5326 | /* When cross-compiling, const_val might be sign-extended from | |
5327 | BITS_PER_WORD to HOST_BITS_PER_WIDE_INT. */ | 
5328 | case GE: | |
c04fc4f0 EB |
5329 | if ((const_val & max_val) |
5330 | != ((unsigned HOST_WIDE_INT) 1 | |
5511bc5a | 5331 | << (GET_MODE_PRECISION (GET_MODE (op0)) - 1))) |
75473b02 SB |
5332 | code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0)); |
5333 | break; | |
5334 | ||
5335 | case LEU: | |
5336 | if (uconst_val < max_val) | |
5337 | code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0)); | |
5338 | break; | |
5339 | ||
5340 | case GEU: | |
5341 | if (uconst_val != 0) | |
5342 | code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0)); | |
5343 | break; | |
5344 | ||
5345 | default: | |
5346 | break; | |
5347 | } | |
5348 | } | |
5349 | ||
5350 | /* Never return CC0; return zero instead. */ | |
5351 | if (CC0_P (op0)) | |
5352 | return 0; | |
5353 | ||
5354 | return gen_rtx_fmt_ee (code, VOIDmode, op0, op1); | |
5355 | } | |
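/* Example (editor's illustration): a condition (le (reg:SI R) (const_int 4))
   is canonicalized to (lt (reg:SI R) (const_int 5)) by rule (4) above, and
   (gt (const_int 0) (reg:SI R)) is first swapped to (lt (reg:SI R)
   (const_int 0)) so that the constant comes second, per rule (3).  */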
5356 | ||
5357 | /* Given a jump insn JUMP, return the condition that will cause it to branch | |
5358 | to its JUMP_LABEL. If the condition cannot be understood, or is an | |
5359 | inequality floating-point comparison which needs to be reversed, 0 will | |
5360 | be returned. | |
5361 | ||
5362 | If EARLIEST is nonzero, it is a pointer to a place where the earliest | |
5363 | insn used in locating the condition was found. If a replacement test | |
5364 | of the condition is desired, it should be placed in front of that | |
5365 | insn and we will be sure that the inputs are still valid. If EARLIEST | |
5366 | is null, the returned condition will be valid at INSN. | |
5367 | ||
5368 | If ALLOW_CC_MODE is nonzero, allow the condition returned to be a | |
5369 | compare to a CC mode register. | 
5370 | ||
5371 | VALID_AT_INSN_P is the same as for canonicalize_condition. */ | |
5372 | ||
5373 | rtx | |
61aa0978 DM |
5374 | get_condition (rtx_insn *jump, rtx_insn **earliest, int allow_cc_mode, |
5375 | int valid_at_insn_p) | |
75473b02 SB |
5376 | { |
5377 | rtx cond; | |
5378 | int reverse; | |
5379 | rtx set; | |
5380 | ||
5381 | /* If this is not a standard conditional jump, we can't parse it. */ | |
5382 | if (!JUMP_P (jump) | |
5383 | || ! any_condjump_p (jump)) | |
5384 | return 0; | |
5385 | set = pc_set (jump); | |
5386 | ||
5387 | cond = XEXP (SET_SRC (set), 0); | |
5388 | ||
5389 | /* If this branches to JUMP_LABEL when the condition is false, reverse | |
5390 | the condition. */ | |
5391 | reverse | |
5392 | = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF | |
a827d9b1 | 5393 | && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump); |
75473b02 SB |
5394 | |
5395 | return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX, | |
5396 | allow_cc_mode, valid_at_insn_p); | |
5397 | } | |
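/* Editor's sketch of a typical use (hypothetical, not part of this file):

     rtx_insn *earliest;
     rtx cond = get_condition (jump, &earliest, 0, 1);

   If COND is nonnull, the branch is taken exactly when COND holds, and
   COND is valid at both EARLIEST and JUMP.  */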
5398 | ||
b12cbf2c AN |
5399 | /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on |
5400 | TARGET_MODE_REP_EXTENDED. | |
5401 | ||
5402 | Note that we assume that the property of | |
5403 | TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes | |
5404 | narrower than mode B. I.e., if A is a mode narrower than B then in | |
5405 | order to be able to operate on it in mode B, mode A needs to | |
5406 | satisfy the requirements set by the representation of mode B. */ | |
5407 | ||
5408 | static void | |
5409 | init_num_sign_bit_copies_in_rep (void) | |
5410 | { | |
5411 | enum machine_mode mode, in_mode; | |
5412 | ||
5413 | for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode; | |
5414 | in_mode = GET_MODE_WIDER_MODE (mode)) | |
5415 | for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode; | |
5416 | mode = GET_MODE_WIDER_MODE (mode)) | |
5417 | { | |
5418 | enum machine_mode i; | |
5419 | ||
5420 | /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED | |
5421 | extends to the next widest mode. */ | |
5422 | gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN | |
5423 | || GET_MODE_WIDER_MODE (mode) == in_mode); | |
5424 | ||
5425 | /* We are in in_mode. Count how many bits outside of mode | |
5426 | have to be copies of the sign-bit. */ | |
5427 | for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i)) | |
5428 | { | |
5429 | enum machine_mode wider = GET_MODE_WIDER_MODE (i); | |
5430 | ||
5431 | if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND | |
5432 | /* We can only check sign-bit copies starting from the | |
5433 | top-bit. In order to be able to check the bits we | |
5434 | have already seen we pretend that subsequent bits | |
5435 | have to be sign-bit copies too. */ | |
5436 | || num_sign_bit_copies_in_rep [in_mode][mode]) | |
5437 | num_sign_bit_copies_in_rep [in_mode][mode] | |
5511bc5a | 5438 | += GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i); |
b12cbf2c AN |
5439 | } |
5440 | } | |
5441 | } | |
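/* Example (editor's illustration): on a target whose
   TARGET_MODE_REP_EXTENDED (QImode, HImode) is SIGN_EXTEND, the loops
   above record num_sign_bit_copies_in_rep[HImode][QImode] == 8: a QImode
   value held in an HImode register is known to have its top eight bits
   equal to the sign bit.  */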
5442 | ||
d3b72690 PB |
5443 | /* Suppose that truncation from the machine mode of X to MODE is not a |
5444 | no-op. See if there is anything special about X so that we can | |
5445 | assume it already contains a truncated value of MODE. */ | |
5446 | ||
5447 | bool | |
fa233e34 | 5448 | truncated_to_mode (enum machine_mode mode, const_rtx x) |
d3b72690 | 5449 | { |
b12cbf2c AN |
5450 | /* This register has already been used in MODE without explicit |
5451 | truncation. */ | |
5452 | if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x)) | |
5453 | return true; | |
5454 | ||
5455 | /* See if we already satisfy the requirements of MODE. If yes we | |
5456 | can just switch to MODE. */ | |
5457 | if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode] | |
5458 | && (num_sign_bit_copies (x, GET_MODE (x)) | |
5459 | >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1)) | |
5460 | return true; | |
d3b72690 | 5461 | |
b12cbf2c AN |
5462 | return false; |
5463 | } | |
cf94b0fc | 5464 | \f |
476dd0ce RS |
5465 | /* Return true if RTX code CODE has a single sequence of zero or more |
5466 | "e" operands and no rtvec operands. Initialize its rtx_all_subrtx_bounds | |
5467 | entry in that case. */ | |
5468 | ||
5469 | static bool | |
5470 | setup_reg_subrtx_bounds (unsigned int code) | |
5471 | { | |
5472 | const char *format = GET_RTX_FORMAT ((enum rtx_code) code); | |
5473 | unsigned int i = 0; | |
5474 | for (; format[i] != 'e'; ++i) | |
5475 | { | |
5476 | if (!format[i]) | |
5477 | /* No subrtxes. Leave start and count as 0. */ | |
5478 | return true; | |
5479 | if (format[i] == 'E' || format[i] == 'V') | |
5480 | return false; | |
5481 | } | |
5482 | ||
5483 | /* Record the sequence of 'e's. */ | |
5484 | rtx_all_subrtx_bounds[code].start = i; | |
5485 | do | |
5486 | ++i; | |
5487 | while (format[i] == 'e'); | |
5488 | rtx_all_subrtx_bounds[code].count = i - rtx_all_subrtx_bounds[code].start; | |
5489 | /* rtl-iter.h relies on this. */ | |
5490 | gcc_checking_assert (rtx_all_subrtx_bounds[code].count <= 3); | |
5491 | ||
5492 | for (; format[i]; ++i) | |
5493 | if (format[i] == 'E' || format[i] == 'V' || format[i] == 'e') | |
5494 | return false; | |
5495 | ||
5496 | return true; | |
5497 | } | |
5498 | ||
cf94b0fc | 5499 | /* Initialize non_rtx_starting_operands, which is used to speed up |
476dd0ce | 5500 | for_each_rtx, and rtx_all_subrtx_bounds. */ |
cf94b0fc PB |
5501 | void |
5502 | init_rtlanal (void) | |
5503 | { | |
5504 | int i; | |
5505 | for (i = 0; i < NUM_RTX_CODE; i++) | |
5506 | { | |
5507 | const char *format = GET_RTX_FORMAT (i); | |
5508 | const char *first = strpbrk (format, "eEV"); | |
5509 | non_rtx_starting_operands[i] = first ? first - format : -1; | |
476dd0ce RS |
5510 | if (!setup_reg_subrtx_bounds (i)) |
5511 | rtx_all_subrtx_bounds[i].count = UCHAR_MAX; | |
5512 | if (GET_RTX_CLASS (i) != RTX_CONST_OBJ) | |
5513 | rtx_nonconst_subrtx_bounds[i] = rtx_all_subrtx_bounds[i]; | |
cf94b0fc | 5514 | } |
b12cbf2c AN |
5515 | |
5516 | init_num_sign_bit_copies_in_rep (); | |
cf94b0fc | 5517 | } |
3d8504ac RS |
5518 | \f |
5519 | /* Check whether this is a constant pool constant. */ | |
5520 | bool | |
5521 | constant_pool_constant_p (rtx x) | |
5522 | { | |
5523 | x = avoid_constant_pool_reference (x); | |
48175537 | 5524 | return CONST_DOUBLE_P (x); |
3d8504ac | 5525 | } |
842e098c AN |
5526 | \f |
5527 | /* If M is a bitmask that selects a field of low-order bits within an item but | |
5528 | not the entire word, return the length of the field. Return -1 otherwise. | |
5529 | M is used in machine mode MODE. */ | |
5530 | ||
5531 | int | |
5532 | low_bitmask_len (enum machine_mode mode, unsigned HOST_WIDE_INT m) | |
5533 | { | |
5534 | if (mode != VOIDmode) | |
5535 | { | |
5511bc5a | 5536 | if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT) |
842e098c AN |
5537 | return -1; |
5538 | m &= GET_MODE_MASK (mode); | |
5539 | } | |
5540 | ||
5541 | return exact_log2 (m + 1); | |
5542 | } | |
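/* Examples (editor's illustration): low_bitmask_len (SImode, 0x7f) returns
   7, since 0x7f + 1 is a power of two.  0x70 selects bits 4..6 rather than
   a low-order field, so 0x70 + 1 is not a power of two and the result is
   -1.  */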
372d6395 RS |
5543 | |
5544 | /* Return the mode of MEM's address. */ | |
5545 | ||
5546 | enum machine_mode | |
5547 | get_address_mode (rtx mem) | |
5548 | { | |
5549 | enum machine_mode mode; | |
5550 | ||
5551 | gcc_assert (MEM_P (mem)); | |
5552 | mode = GET_MODE (XEXP (mem, 0)); | |
5553 | if (mode != VOIDmode) | |
5554 | return mode; | |
5555 | return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem)); | |
5556 | } | |
ca3f2950 SB |
5557 | \f |
5558 | /* Split up a CONST_DOUBLE or integer constant rtx | |
5559 | into two rtx's for single words, | |
5560 | storing in *FIRST the word that comes first in memory in the target | |
807e902e KZ |
5561 | and in *SECOND the other. |
5562 | ||
5563 | TODO: This function needs to be rewritten to work on any size | |
5564 | integer. */ | |
ca3f2950 SB |
5565 | |
5566 | void | |
5567 | split_double (rtx value, rtx *first, rtx *second) | |
5568 | { | |
5569 | if (CONST_INT_P (value)) | |
5570 | { | |
5571 | if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD)) | |
5572 | { | |
5573 | /* In this case the CONST_INT holds both target words. | |
5574 | Extract the bits from it into two word-sized pieces. | |
5575 | Sign extend each half to HOST_WIDE_INT. */ | |
5576 | unsigned HOST_WIDE_INT low, high; | |
5577 | unsigned HOST_WIDE_INT mask, sign_bit, sign_extend; | |
5578 | unsigned bits_per_word = BITS_PER_WORD; | |
5579 | ||
5580 | /* Set sign_bit to the most significant bit of a word. */ | |
5581 | sign_bit = 1; | |
5582 | sign_bit <<= bits_per_word - 1; | |
5583 | ||
5584 | /* Set mask so that all bits of the word are set. We could | |
5585 | have used 1 << BITS_PER_WORD instead of basing the | |
5586 | calculation on sign_bit. However, on machines where | |
5587 | HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a | |
5588 | compiler warning, even though the code would never be | |
5589 | executed. */ | |
5590 | mask = sign_bit << 1; | |
5591 | mask--; | |
5592 | ||
5593 | /* Set sign_extend as any remaining bits. */ | |
5594 | sign_extend = ~mask; | |
5595 | ||
5596 | /* Pick the lower word and sign-extend it. */ | |
5597 | low = INTVAL (value); | |
5598 | low &= mask; | |
5599 | if (low & sign_bit) | |
5600 | low |= sign_extend; | |
5601 | ||
5602 | /* Pick the higher word, shifted to the least significant | |
5603 | bits, and sign-extend it. */ | |
5604 | high = INTVAL (value); | |
5605 | high >>= bits_per_word - 1; | |
5606 | high >>= 1; | |
5607 | high &= mask; | |
5608 | if (high & sign_bit) | |
5609 | high |= sign_extend; | |
5610 | ||
5611 | /* Store the words in the target machine order. */ | |
5612 | if (WORDS_BIG_ENDIAN) | |
5613 | { | |
5614 | *first = GEN_INT (high); | |
5615 | *second = GEN_INT (low); | |
5616 | } | |
5617 | else | |
5618 | { | |
5619 | *first = GEN_INT (low); | |
5620 | *second = GEN_INT (high); | |
5621 | } | |
5622 | } | |
5623 | else | |
5624 | { | |
5625 | /* The rule for using CONST_INT for a wider mode | |
5626 | is that we regard the value as signed. | |
5627 | So sign-extend it. */ | |
5628 | rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx); | |
5629 | if (WORDS_BIG_ENDIAN) | |
5630 | { | |
5631 | *first = high; | |
5632 | *second = value; | |
5633 | } | |
5634 | else | |
5635 | { | |
5636 | *first = value; | |
5637 | *second = high; | |
5638 | } | |
5639 | } | |
5640 | } | |
807e902e KZ |
5641 | else if (GET_CODE (value) == CONST_WIDE_INT) |
5642 | { | |
5643 | /* All of this is scary code and needs to be converted to | |
5644 | properly work with any size integer. */ | |
5645 | gcc_assert (CONST_WIDE_INT_NUNITS (value) == 2); | |
5646 | if (WORDS_BIG_ENDIAN) | |
5647 | { | |
5648 | *first = GEN_INT (CONST_WIDE_INT_ELT (value, 1)); | |
5649 | *second = GEN_INT (CONST_WIDE_INT_ELT (value, 0)); | |
5650 | } | |
5651 | else | |
5652 | { | |
5653 | *first = GEN_INT (CONST_WIDE_INT_ELT (value, 0)); | |
5654 | *second = GEN_INT (CONST_WIDE_INT_ELT (value, 1)); | |
5655 | } | |
5656 | } | |
48175537 | 5657 | else if (!CONST_DOUBLE_P (value)) |
ca3f2950 SB |
5658 | { |
5659 | if (WORDS_BIG_ENDIAN) | |
5660 | { | |
5661 | *first = const0_rtx; | |
5662 | *second = value; | |
5663 | } | |
5664 | else | |
5665 | { | |
5666 | *first = value; | |
5667 | *second = const0_rtx; | |
5668 | } | |
5669 | } | |
5670 | else if (GET_MODE (value) == VOIDmode | |
5671 | /* This is the old way we did CONST_DOUBLE integers. */ | |
5672 | || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT) | |
5673 | { | |
5674 | /* In an integer, the words are defined as most and least significant. | |
5675 | So order them by the target's convention. */ | |
5676 | if (WORDS_BIG_ENDIAN) | |
5677 | { | |
5678 | *first = GEN_INT (CONST_DOUBLE_HIGH (value)); | |
5679 | *second = GEN_INT (CONST_DOUBLE_LOW (value)); | |
5680 | } | |
5681 | else | |
5682 | { | |
5683 | *first = GEN_INT (CONST_DOUBLE_LOW (value)); | |
5684 | *second = GEN_INT (CONST_DOUBLE_HIGH (value)); | |
5685 | } | |
5686 | } | |
5687 | else | |
5688 | { | |
5689 | REAL_VALUE_TYPE r; | |
5690 | long l[2]; | |
5691 | REAL_VALUE_FROM_CONST_DOUBLE (r, value); | |
5692 | ||
5693 | /* Note, this converts the REAL_VALUE_TYPE to the target's | |
5694 | format, splits up the floating point double and outputs | |
5695 | exactly 32 bits of it into each of l[0] and l[1] -- | |
5696 | not necessarily BITS_PER_WORD bits. */ | |
5697 | REAL_VALUE_TO_TARGET_DOUBLE (r, l); | |
5698 | ||
5699 | /* If 32 bits is an entire word for the target, but not for the host, | |
5700 | then sign-extend on the host so that the number will look the same | |
5701 | way on the host that it would on the target. See for instance | |
5702 | simplify_unary_operation. The #if is needed to avoid compiler | |
5703 | warnings. */ | |
5704 | ||
5705 | #if HOST_BITS_PER_LONG > 32 | |
5706 | if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32) | |
5707 | { | |
5708 | if (l[0] & ((long) 1 << 31)) | |
5709 | l[0] |= ((long) (-1) << 32); | |
5710 | if (l[1] & ((long) 1 << 31)) | |
5711 | l[1] |= ((long) (-1) << 32); | |
5712 | } | |
5713 | #endif | |
5714 | ||
5715 | *first = GEN_INT (l[0]); | |
5716 | *second = GEN_INT (l[1]); | |
5717 | } | |
5718 | } | |
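/* Example (editor's illustration): on a little-endian target with 32-bit
   words and a 64-bit HOST_WIDE_INT, splitting (const_int 0x100000002)
   yields *FIRST = (const_int 2) and *SECOND = (const_int 1); the word that
   comes first in memory is the low one.  */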
5719 | ||
3936bafc YR |
5720 | /* Return true if X is a sign_extract or zero_extract from the least |
5721 | significant bit. */ | |
5722 | ||
5723 | static bool | |
5724 | lsb_bitfield_op_p (rtx x) | |
5725 | { | |
5726 | if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS) | |
5727 | { | |
5728 | enum machine_mode mode = GET_MODE (XEXP (x, 0)); | |
a9195970 | 5729 | HOST_WIDE_INT len = INTVAL (XEXP (x, 1)); |
3936bafc YR |
5730 | HOST_WIDE_INT pos = INTVAL (XEXP (x, 2)); |
5731 | ||
5732 | return (pos == (BITS_BIG_ENDIAN ? GET_MODE_PRECISION (mode) - len : 0)); | |
5733 | } | |
5734 | return false; | |
5735 | } | |
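/* Example (editor's illustration): with !BITS_BIG_ENDIAN,
   (zero_extract:SI (reg:SI R) (const_int 8) (const_int 0)) extracts bits
   0..7 and satisfies this predicate; the same extract at position 8 does
   not.  */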
5736 | ||
277f65de RS |
5737 | /* Strip outer address "mutations" from LOC and return a pointer to the |
5738 | inner value. If OUTER_CODE is nonnull, store the code of the innermost | |
5739 | stripped expression there. | |
5740 | ||
5741 | "Mutations" either convert between modes or apply some kind of | |
3936bafc | 5742 | extension, truncation or alignment. */ |
277f65de RS |
5743 | |
5744 | rtx * | |
5745 | strip_address_mutations (rtx *loc, enum rtx_code *outer_code) | |
5746 | { | |
5747 | for (;;) | |
5748 | { | |
5749 | enum rtx_code code = GET_CODE (*loc); | |
5750 | if (GET_RTX_CLASS (code) == RTX_UNARY) | |
5751 | /* Things like SIGN_EXTEND, ZERO_EXTEND and TRUNCATE can be | |
5752 | used to convert between pointer sizes. */ | |
5753 | loc = &XEXP (*loc, 0); | |
3936bafc YR |
5754 | else if (lsb_bitfield_op_p (*loc)) |
5755 | /* A [SIGN|ZERO]_EXTRACT from the least significant bit effectively | |
5756 | acts as a combined truncation and extension. */ | |
5757 | loc = &XEXP (*loc, 0); | |
277f65de RS |
5758 | else if (code == AND && CONST_INT_P (XEXP (*loc, 1))) |
5759 | /* (and ... (const_int -X)) is used to align to X bytes. */ | |
5760 | loc = &XEXP (*loc, 0); | |
163497f1 VM |
5761 | else if (code == SUBREG |
5762 | && !OBJECT_P (SUBREG_REG (*loc)) | |
5763 | && subreg_lowpart_p (*loc)) | |
5764 | /* (subreg (operator ...) ...) is used for mode | 
5765 | conversion too. */ | |
99a0106f | 5766 | loc = &SUBREG_REG (*loc); |
277f65de RS |
5767 | else |
5768 | return loc; | |
5769 | if (outer_code) | |
5770 | *outer_code = code; | |
5771 | } | |
5772 | } | |
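/* Example (editor's illustration): for the aligning address
   (and:SI (plus:SI (reg:SI R) (reg:SI I)) (const_int -4)), this returns a
   pointer to the inner PLUS and, if OUTER_CODE is nonnull, stores AND
   there.  */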
5773 | ||
ec5a3504 RS |
5774 | /* Return true if CODE applies some kind of scale. The scaled value is |
5775 | is the first operand and the scale is the second. */ | |
277f65de RS |
5776 | |
5777 | static bool | |
ec5a3504 | 5778 | binary_scale_code_p (enum rtx_code code) |
277f65de | 5779 | { |
ec5a3504 RS |
5780 | return (code == MULT |
5781 | || code == ASHIFT | |
5782 | /* Needed by ARM targets. */ | |
5783 | || code == ASHIFTRT | |
5784 | || code == LSHIFTRT | |
5785 | || code == ROTATE | |
5786 | || code == ROTATERT); | |
277f65de RS |
5787 | } |
5788 | ||
ec5a3504 RS |
5789 | /* If *INNER can be interpreted as a base, return a pointer to the inner term |
5790 | (see address_info). Return null otherwise. */ | |
277f65de | 5791 | |
ec5a3504 RS |
5792 | static rtx * |
5793 | get_base_term (rtx *inner) | |
277f65de | 5794 | { |
ec5a3504 RS |
5795 | if (GET_CODE (*inner) == LO_SUM) |
5796 | inner = strip_address_mutations (&XEXP (*inner, 0)); | |
5797 | if (REG_P (*inner) | |
5798 | || MEM_P (*inner) | |
948cd9a5 MK |
5799 | || GET_CODE (*inner) == SUBREG |
5800 | || GET_CODE (*inner) == SCRATCH) | |
ec5a3504 RS |
5801 | return inner; |
5802 | return 0; | |
5803 | } | |
5804 | ||
5805 | /* If *INNER can be interpreted as an index, return a pointer to the inner term | |
5806 | (see address_info). Return null otherwise. */ | |
5807 | ||
5808 | static rtx * | |
5809 | get_index_term (rtx *inner) | |
5810 | { | |
5811 | /* At present, only constant scales are allowed. */ | |
5812 | if (binary_scale_code_p (GET_CODE (*inner)) && CONSTANT_P (XEXP (*inner, 1))) | |
5813 | inner = strip_address_mutations (&XEXP (*inner, 0)); | |
5814 | if (REG_P (*inner) | |
5815 | || MEM_P (*inner) | |
5816 | || GET_CODE (*inner) == SUBREG) | |
5817 | return inner; | |
5818 | return 0; | |
277f65de RS |
5819 | } |
5820 | ||
5821 | /* Set the segment part of address INFO to LOC, given that INNER is the | |
5822 | unmutated value. */ | |
5823 | ||
5824 | static void | |
5825 | set_address_segment (struct address_info *info, rtx *loc, rtx *inner) | |
5826 | { | |
277f65de RS |
5827 | gcc_assert (!info->segment); |
5828 | info->segment = loc; | |
5829 | info->segment_term = inner; | |
5830 | } | |
5831 | ||
5832 | /* Set the base part of address INFO to LOC, given that INNER is the | |
5833 | unmutated value. */ | |
5834 | ||
5835 | static void | |
5836 | set_address_base (struct address_info *info, rtx *loc, rtx *inner) | |
5837 | { | |
277f65de RS |
5838 | gcc_assert (!info->base); |
5839 | info->base = loc; | |
5840 | info->base_term = inner; | |
5841 | } | |
5842 | ||
5843 | /* Set the index part of address INFO to LOC, given that INNER is the | |
5844 | unmutated value. */ | |
5845 | ||
5846 | static void | |
5847 | set_address_index (struct address_info *info, rtx *loc, rtx *inner) | |
5848 | { | |
277f65de RS |
5849 | gcc_assert (!info->index); |
5850 | info->index = loc; | |
5851 | info->index_term = inner; | |
5852 | } | |
5853 | ||
5854 | /* Set the displacement part of address INFO to LOC, given that INNER | |
5855 | is the constant term. */ | |
5856 | ||
5857 | static void | |
5858 | set_address_disp (struct address_info *info, rtx *loc, rtx *inner) | |
5859 | { | |
277f65de RS |
5860 | gcc_assert (!info->disp); |
5861 | info->disp = loc; | |
5862 | info->disp_term = inner; | |
5863 | } | |
5864 | ||
5865 | /* INFO->INNER describes a {PRE,POST}_{INC,DEC} address. Set up the | |
5866 | rest of INFO accordingly. */ | |
5867 | ||
5868 | static void | |
5869 | decompose_incdec_address (struct address_info *info) | |
5870 | { | |
5871 | info->autoinc_p = true; | |
5872 | ||
5873 | rtx *base = &XEXP (*info->inner, 0); | |
5874 | set_address_base (info, base, base); | |
5875 | gcc_checking_assert (info->base == info->base_term); | |
5876 | ||
5877 | /* These addresses are only valid when the size of the addressed | |
5878 | value is known. */ | |
5879 | gcc_checking_assert (info->mode != VOIDmode); | |
5880 | } | |
5881 | ||
5882 | /* INFO->INNER describes a {PRE,POST}_MODIFY address. Set up the rest | |
5883 | of INFO accordingly. */ | |
5884 | ||
5885 | static void | |
5886 | decompose_automod_address (struct address_info *info) | |
5887 | { | |
5888 | info->autoinc_p = true; | |
5889 | ||
5890 | rtx *base = &XEXP (*info->inner, 0); | |
5891 | set_address_base (info, base, base); | |
5892 | gcc_checking_assert (info->base == info->base_term); | |
5893 | ||
5894 | rtx plus = XEXP (*info->inner, 1); | |
5895 | gcc_assert (GET_CODE (plus) == PLUS); | |
5896 | ||
5897 | info->base_term2 = &XEXP (plus, 0); | |
5898 | gcc_checking_assert (rtx_equal_p (*info->base_term, *info->base_term2)); | |
5899 | ||
5900 | rtx *step = &XEXP (plus, 1); | |
5901 | rtx *inner_step = strip_address_mutations (step); | |
5902 | if (CONSTANT_P (*inner_step)) | |
5903 | set_address_disp (info, step, inner_step); | |
5904 | else | |
5905 | set_address_index (info, step, inner_step); | |
5906 | } | |
5907 | ||
5908 | /* Treat *LOC as a tree of PLUS operands and store pointers to the summed | |
5909 | values in [PTR, END). Return a pointer to the end of the used array. */ | |
5910 | ||
5911 | static rtx ** | |
5912 | extract_plus_operands (rtx *loc, rtx **ptr, rtx **end) | |
5913 | { | |
5914 | rtx x = *loc; | |
5915 | if (GET_CODE (x) == PLUS) | |
5916 | { | |
5917 | ptr = extract_plus_operands (&XEXP (x, 0), ptr, end); | |
5918 | ptr = extract_plus_operands (&XEXP (x, 1), ptr, end); | |
5919 | } | |
5920 | else | |
5921 | { | |
5922 | gcc_assert (ptr != end); | |
5923 | *ptr++ = loc; | |
5924 | } | |
5925 | return ptr; | |
5926 | } | |
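/* Example (editor's illustration): for
   (plus (plus (reg A) (reg B)) (const_int 4)), the recursion above
   flattens the tree into three entries pointing at A, B and the constant,
   in left-to-right order.  */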
5927 | ||
/* Evaluate the likelihood of X being a base or index value, returning
   positive if it is likely to be a base, negative if it is likely to be
   an index, and 0 if we can't tell.  Make the magnitude of the return
   value reflect the amount of confidence we have in the answer.

   MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1.  */

static int
baseness (rtx x, enum machine_mode mode, addr_space_t as,
          enum rtx_code outer_code, enum rtx_code index_code)
{
  /* Believe *_POINTER unless the address shape requires otherwise.  */
  if (REG_P (x) && REG_POINTER (x))
    return 2;
  if (MEM_P (x) && MEM_POINTER (x))
    return 2;

  if (REG_P (x) && HARD_REGISTER_P (x))
    {
      /* X is a hard register.  If it only fits one of the base
         or index classes, choose that interpretation.  */
      int regno = REGNO (x);
      bool base_p = ok_for_base_p_1 (regno, mode, as, outer_code, index_code);
      bool index_p = REGNO_OK_FOR_INDEX_P (regno);
      if (base_p != index_p)
        return base_p ? 1 : -1;
    }
  return 0;
}

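/* Illustrative sketch (register names hypothetical): on a target where
   hard register BP is valid only as a base and DI only as an index,
   baseness returns 1 for BP and -1 for DI.  A pseudo register without
   REG_POINTER set scores 0, leaving the final decision to the caller's
   "base comes first" tie-breaker below.  */
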
/* INFO->INNER describes a normal, non-automodified address.
   Fill in the rest of INFO accordingly.  */

static void
decompose_normal_address (struct address_info *info)
{
  /* Treat the address as the sum of up to four values.  */
  rtx *ops[4];
  size_t n_ops = extract_plus_operands (info->inner, ops,
                                        ops + ARRAY_SIZE (ops)) - ops;

  /* If there is more than one component, any base component is in a PLUS.  */
  if (n_ops > 1)
    info->base_outer_code = PLUS;

  /* Try to classify each sum operand now.  Leave those that could be
     either a base or an index in OPS.  */
  rtx *inner_ops[4];
  size_t out = 0;
  for (size_t in = 0; in < n_ops; ++in)
    {
      rtx *loc = ops[in];
      rtx *inner = strip_address_mutations (loc);
      if (CONSTANT_P (*inner))
        set_address_disp (info, loc, inner);
      else if (GET_CODE (*inner) == UNSPEC)
        set_address_segment (info, loc, inner);
      else
        {
          /* The only other possibilities are a base or an index.  */
          rtx *base_term = get_base_term (inner);
          rtx *index_term = get_index_term (inner);
          gcc_assert (base_term || index_term);
          if (!base_term)
            set_address_index (info, loc, index_term);
          else if (!index_term)
            set_address_base (info, loc, base_term);
          else
            {
              gcc_assert (base_term == index_term);
              ops[out] = loc;
              inner_ops[out] = base_term;
              ++out;
            }
        }
    }

  /* Classify the remaining OPS members as bases and indexes.  */
  if (out == 1)
    {
      /* If we haven't seen a base or an index yet, assume that this is
         the base.  If we were confident that another term was the base
         or index, treat the remaining operand as the other kind.  */
      if (!info->base)
        set_address_base (info, ops[0], inner_ops[0]);
      else
        set_address_index (info, ops[0], inner_ops[0]);
    }
  else if (out == 2)
    {
      /* In the event of a tie, assume the base comes first.  */
      if (baseness (*inner_ops[0], info->mode, info->as, PLUS,
                    GET_CODE (*ops[1]))
          >= baseness (*inner_ops[1], info->mode, info->as, PLUS,
                       GET_CODE (*ops[0])))
        {
          set_address_base (info, ops[0], inner_ops[0]);
          set_address_index (info, ops[1], inner_ops[1]);
        }
      else
        {
          set_address_base (info, ops[1], inner_ops[1]);
          set_address_index (info, ops[0], inner_ops[0]);
        }
    }
  else
    gcc_assert (out == 0);
}

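/* Worked example (illustrative): for the x86-style address

     (plus:SI (plus:SI (mult:SI (reg:SI 1) (const_int 4)) (reg:SI 0))
              (const_int 12))

   the MULT can only be an index (a scaled term is never a base), the
   CONST_INT can only be the displacement, and (reg:SI 0) could be
   either; since no base has been seen by the end of the loop, the
   OUT == 1 case makes it the base.  */
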
/* Describe address *LOC in *INFO.  MODE is the mode of the addressed value,
   or VOIDmode if not known.  AS is the address space associated with LOC.
   OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise.  */

void
decompose_address (struct address_info *info, rtx *loc, enum machine_mode mode,
                   addr_space_t as, enum rtx_code outer_code)
{
  memset (info, 0, sizeof (*info));
  info->mode = mode;
  info->as = as;
  info->addr_outer_code = outer_code;
  info->outer = loc;
  info->inner = strip_address_mutations (loc, &outer_code);
  info->base_outer_code = outer_code;
  switch (GET_CODE (*info->inner))
    {
    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
      decompose_incdec_address (info);
      break;

    case PRE_MODIFY:
    case POST_MODIFY:
      decompose_automod_address (info);
      break;

    default:
      decompose_normal_address (info);
      break;
    }
}

/* Describe address operand LOC in INFO.  LOC is not known to be part of
   a MEM, so its mode is taken to be unknown and its address space to be
   the generic one.  */

void
decompose_lea_address (struct address_info *info, rtx *loc)
{
  decompose_address (info, loc, VOIDmode, ADDR_SPACE_GENERIC, ADDRESS);
}

/* Describe the address of MEM X in INFO.  */

void
decompose_mem_address (struct address_info *info, rtx x)
{
  gcc_assert (MEM_P (x));
  decompose_address (info, &XEXP (x, 0), GET_MODE (x),
                     MEM_ADDR_SPACE (x), MEM);
}

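/* Usage sketch (illustrative): to inspect a memory reference X:

     struct address_info info;
     decompose_mem_address (&info, x);
     if (info.base)
       ... *info.base_term is the underlying base value ...
     if (info.index)
       ... get_index_scale (&info) gives the scale applied to it ...

   The fields of INFO point into X itself, so the caller can also use
   them to modify the address in place.  */
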
/* Update INFO after a change to the address it describes.  */

void
update_address (struct address_info *info)
{
  decompose_address (info, info->outer, info->mode, info->as,
                     info->addr_outer_code);
}

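/* Usage sketch (illustrative; NEW_OFFSET is a made-up variable): after
   editing part of a decomposed address in place, e.g.

     *info.disp = GEN_INT (new_offset);
     update_address (&info);

   the pointers in INFO are recomputed so that later queries reflect
   the address's new shape.  */
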
/* Return the scale applied to *INFO->INDEX_TERM, or 0 if the index is
   more complicated than that.  */

HOST_WIDE_INT
get_index_scale (const struct address_info *info)
{
  rtx index = *info->index;
  if (GET_CODE (index) == MULT
      && CONST_INT_P (XEXP (index, 1))
      && info->index_term == &XEXP (index, 0))
    return INTVAL (XEXP (index, 1));

  if (GET_CODE (index) == ASHIFT
      && CONST_INT_P (XEXP (index, 1))
      && info->index_term == &XEXP (index, 0))
    return (HOST_WIDE_INT) 1 << INTVAL (XEXP (index, 1));

  if (info->index == info->index_term)
    return 1;

  return 0;
}

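/* Illustrative examples of the cases above:

     (mult:SI (reg:SI 1) (const_int 4))     scale 4
     (ashift:SI (reg:SI 1) (const_int 2))   scale 1 << 2 == 4
     (reg:SI 1)                             scale 1

   Anything else (for instance a MULT whose constant operand appears
   on the left) yields 0.  */
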
/* Return the "index code" of INFO, in the form required by
   ok_for_base_p_1.  */

enum rtx_code
get_index_code (const struct address_info *info)
{
  if (info->index)
    return GET_CODE (*info->index);

  if (info->disp)
    return GET_CODE (*info->disp);

  return SCRATCH;
}

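/* Illustrative note: a scaled index gives MULT or ASHIFT here and an
   unscaled one gives REG; when only a displacement is present its code
   (typically CONST_INT) is used, and SCRATCH serves as a neutral
   "no index" placeholder for ok_for_base_p_1.  */
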
/* Return true if X contains a thread-local symbol.  */

bool
tls_referenced_p (const_rtx x)
{
  if (!targetm.have_tls)
    return false;

  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    if (GET_CODE (*iter) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (*iter) != 0)
      return true;
  return false;
}
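
/* Usage sketch (illustrative): a backend predicate might reject TLS
   symbols when validating a constant address, e.g.

     if (tls_referenced_p (x))
       return false;

   since thread-local references generally require a specialised
   access sequence rather than a plain symbolic address.  */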